conflict_resolution (string, lengths 27 to 16k)
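Each row below is one flattened example: the text between the <<<<<<< and ======= markers is one side of the merge conflict, the text between ======= and >>>>>>> is the other side, and the text following >>>>>>> is the resolution that was ultimately committed. As a minimal constructed illustration of that layout (a hypothetical pair of Java imports, not a row taken from the data), a conflict where each side adds a different import and the resolution keeps both would look like:
<<<<<<< import java.util.List; ======= import java.util.ArrayList; >>>>>>> import java.util.List; import java.util.ArrayList;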
<<<<<<< import com.google.common.collect.Iterators; ======= >>>>>>>
<<<<<<< Object value = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; ======= String value = null; float boost = 1.0f; >>>>>>> String value = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST;
<<<<<<< import org.elasticsearch.env.Environment; ======= import org.elasticsearch.common.unit.TimeValue; >>>>>>> import org.elasticsearch.env.Environment; import org.elasticsearch.common.unit.TimeValue; <<<<<<< private final Environment env; ======= private final TimeValue defaultConnectionTimeout; private final TimeValue defaultReadTimeout; >>>>>>> private final Environment env; private final TimeValue defaultConnectionTimeout; private final TimeValue defaultReadTimeout; <<<<<<< this.env = env; ======= defaultConnectionTimeout = settings.getAsTime("watcher.http.default_connection_timeout", TimeValue.timeValueSeconds(10)); defaultReadTimeout = settings.getAsTime("watcher.http.default_read_timeout", TimeValue.timeValueSeconds(10)); >>>>>>> this.env = env; defaultConnectionTimeout = settings.getAsTime("watcher.http.default_connection_timeout", TimeValue.timeValueSeconds(10)); defaultReadTimeout = settings.getAsTime("watcher.http.default_read_timeout", TimeValue.timeValueSeconds(10));
<<<<<<< public static final String NAME = "mlt"; ======= // document inputs private List<String> likeTexts = new ArrayList<>(); private List<String> unlikeTexts = new ArrayList<>(); private List<Item> likeItems = new ArrayList<>(); private List<Item> unlikeItems = new ArrayList<>(); >>>>>>> public static final String NAME = "mlt"; // document inputs private List<String> likeTexts = new ArrayList<>(); private List<String> unlikeTexts = new ArrayList<>(); private List<Item> likeItems = new ArrayList<>(); private List<Item> unlikeItems = new ArrayList<>(); <<<<<<< private float boostTerms = -1; ======= private String[] stopWords = null; >>>>>>> private String[] stopWords = null; <<<<<<< static final MoreLikeThisQueryBuilder PROTOTYPE = new MoreLikeThisQueryBuilder(); ======= private float boost = -1; private String queryName; >>>>>>> static final MoreLikeThisQueryBuilder PROTOTYPE = new MoreLikeThisQueryBuilder(); <<<<<<< ======= /** * Number of terms that must match the generated query expressed in the * common syntax for minimum should match. Defaults to <tt>30%</tt>. * * @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String) */ public MoreLikeThisQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } /** * Sets the boost factor to use when boosting terms. Defaults to <tt>1</tt>. */ public MoreLikeThisQueryBuilder boostTerms(float boostTerms) { this.boostTerms = boostTerms; return this; } /** * Whether to include the input documents. Defaults to <tt>false</tt> */ public MoreLikeThisQueryBuilder include(boolean include) { this.include = include; return this; } >>>>>>> /** * Number of terms that must match the generated query expressed in the * common syntax for minimum should match. Defaults to <tt>30%</tt>. * * @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String) */ public MoreLikeThisQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } /** * Sets the boost factor to use when boosting terms. Defaults to <tt>1</tt>. */ public MoreLikeThisQueryBuilder boostTerms(float boostTerms) { this.boostTerms = boostTerms; return this; } /** * Whether to include the input documents. Defaults to <tt>false</tt> */ public MoreLikeThisQueryBuilder include(boolean include) { this.include = include; return this; } <<<<<<< ======= @Override public MoreLikeThisQueryBuilder boost(float boost) { this.boost = boost; return this; } /** * Sets the query name for the filter that can be used when searching for matched_filters per hit. */ public MoreLikeThisQueryBuilder queryName(String queryName) { this.queryName = queryName; return this; } /** * The text to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder likeText(String likeText) { return like(likeText); } @Deprecated public MoreLikeThisQueryBuilder ids(String... ids) { Item[] items = new Item[ids.length]; for (int i = 0; i < items.length; i++) { items[i] = new Item(null, null, ids[i]); } return like(items); } @Deprecated public MoreLikeThisQueryBuilder docs(Item... docs) { return like(docs); } /** * Sets the documents from which the terms should not be selected from. * * @Deprecated Use {@link #unlike(Item...)} instead */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(Item... docs) { return unlike(docs); } /** * Sets the text from which the terms should not be selected from. 
* * @Deprecated Use {@link #unlike(String...)} instead. */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(String... likeText) { return unlike(likeText); } /** * Adds a document to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder addItem(Item... likeItems) { return addLikeItem(likeItems); } >>>>>>> /** * The text to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder likeText(String likeText) { return like(likeText); } @Deprecated public MoreLikeThisQueryBuilder ids(String... ids) { Item[] items = new Item[ids.length]; for (int i = 0; i < items.length; i++) { items[i] = new Item(null, null, ids[i]); } return like(items); } @Deprecated public MoreLikeThisQueryBuilder docs(Item... docs) { return like(docs); } /** * Sets the documents from which the terms should not be selected from. * * @Deprecated Use {@link #unlike(Item...)} instead */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(Item... docs) { return unlike(docs); } /** * Sets the text from which the terms should not be selected from. * * @Deprecated Use {@link #unlike(String...)} instead. */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(String... likeText) { return unlike(likeText); } /** * Adds a document to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder addItem(Item... likeItems) { return addLikeItem(likeItems); } <<<<<<< String likeFieldName = MoreLikeThisQueryParser.Fields.LIKE.getPreferredName(); builder.startObject(NAME); ======= builder.startObject(MoreLikeThisQueryParser.NAME); >>>>>>> builder.startObject(NAME); <<<<<<< if (boostTerms != -1) { builder.field(MoreLikeThisQueryParser.Fields.BOOST_TERMS.getPreferredName(), boostTerms); } ======= if (stopWords != null && stopWords.length > 0) { builder.field(MoreLikeThisQueryParser.Field.STOP_WORDS.getPreferredName(), stopWords); } >>>>>>> if (stopWords != null && stopWords.length > 0) { builder.field(MoreLikeThisQueryParser.Field.STOP_WORDS.getPreferredName(), stopWords); } <<<<<<< @Override public String getWriteableName() { return NAME; } ======= private static void buildLikeField(XContentBuilder builder, String fieldName, List<String> texts, List<Item> items) throws IOException { builder.startArray(fieldName); for (String text : texts) { builder.value(text); } for (Item item : items) { builder.value(item); } builder.endArray(); } >>>>>>> private static void buildLikeField(XContentBuilder builder, String fieldName, List<String> texts, List<Item> items) throws IOException { builder.startArray(fieldName); for (String text : texts) { builder.value(text); } for (Item item : items) { builder.value(item); } builder.endArray(); } @Override public String getWriteableName() { return NAME; }
<<<<<<< private void updateHistoryEntry(AlertActionEntry entry) throws IOException { ======= public long getQueueSize() { return actionsToBeProcessed.size(); } public long getLargestQueueSize() { return largestQueueSize.get(); } private void updateHistoryEntry(AlertActionEntry entry, AlertActionState actionPerformed) throws IOException { ensureStarted(); entry.setState(actionPerformed); >>>>>>> private void updateHistoryEntry(AlertActionEntry entry) throws IOException { ensureStarted(); <<<<<<< private void updateHistoryEntry(AlertActionEntry entry, AlertActionState actionPerformed) throws IOException { entry.setEntryState(actionPerformed); updateHistoryEntry(entry); } public long getQueueSize() { return actionsToBeProcessed.size(); } public long getLargestQueueSize() { return largestQueueSize.get(); ======= private void ensureStarted() { if (!started.get()) { throw new ElasticsearchIllegalStateException("not started"); } >>>>>>> private void updateHistoryEntry(AlertActionEntry entry, AlertActionState actionPerformed) throws IOException { entry.setState(actionPerformed); updateHistoryEntry(entry); } public long getQueueSize() { ensureStarted(); return actionsToBeProcessed.size(); } public long getLargestQueueSize() { ensureStarted(); return largestQueueSize.get(); } private void ensureStarted() { if (!started.get()) { throw new ElasticsearchIllegalStateException("not started"); }
<<<<<<< 1, randomBoolean(), ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); ======= randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); >>>>>>> 1, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); <<<<<<< List<ShardRouting> shardRoutings = Arrays.asList(TestShardRouting.newShardRouting("oldest", 0, null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, 0, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); ======= List<ShardRouting> shardRoutings = Arrays.asList(TestShardRouting.newShardRouting("oldest", 0, null, null, null, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); >>>>>>> List<ShardRouting> shardRoutings = Arrays.asList(TestShardRouting.newShardRouting("oldest", 0, null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")), TestShardRouting.newShardRouting("newest", 0, null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); <<<<<<< shards.add(TestShardRouting.newShardRouting(indexMeta.name, randomIntBetween(1, 5), null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, randomIntBetween(0, 100), new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); ======= shards.add(TestShardRouting.newShardRouting(indexMeta.name, randomIntBetween(1, 5), null, null, null, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar"))); >>>>>>> shards.add(TestShardRouting.newShardRouting(indexMeta.name, randomIntBetween(1, 5), null, null, null, 1, randomBoolean(), ShardRoutingState.UNASSIGNED, new UnassignedInfo(randomFrom(UnassignedInfo.Reason.values()), "foobar")));
<<<<<<< seqNo = in.readVLong(); primaryTerm = in.readVLong(); ======= routedBasedOnClusterVersion = in.readVLong(); >>>>>>> routedBasedOnClusterVersion = in.readVLong(); seqNo = in.readVLong(); primaryTerm = in.readVLong(); <<<<<<< out.writeVLong(seqNo); out.writeVLong(primaryTerm); ======= out.writeVLong(routedBasedOnClusterVersion); } @Override public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) { return new ReplicationTask(id, type, action, getDescription(), parentTaskNode, parentTaskId); >>>>>>> out.writeVLong(routedBasedOnClusterVersion); out.writeVLong(seqNo); out.writeVLong(primaryTerm); } @Override public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) { return new ReplicationTask(id, type, action, getDescription(), parentTaskNode, parentTaskId);
<<<<<<< ======= public Set<UiType> webUis; public String primaryWeblinkName; >>>>>>> public String primaryWeblinkName;
<<<<<<< assertBusy(() -> assertThat(getTranslog(replica).totalOperations(), equalTo(0))); ======= assertBusy(() -> { assertThat(replica.getLastSyncedGlobalCheckpoint(), equalTo(19L)); assertThat(replica.estimateTranslogOperationsFromMinSeq(0), equalTo(0)); }); >>>>>>> assertBusy(() -> { assertThat(replica.getLastSyncedGlobalCheckpoint(), equalTo(19L)); assertThat(getTranslog(replica).totalOperations(), equalTo(0)); });
<<<<<<< private int shortCircuitCutoff = DEFAULT_SHORT_CIRCUIT_CUTOFF; private QueryInnerHits queryInnerHits; ======= private String queryName; >>>>>>> private QueryInnerHits queryInnerHits; <<<<<<< * Configures at what cut off point only to evaluate parent documents that contain the matching parent id terms * instead of evaluating all parent docs. */ public HasChildQueryBuilder shortCircuitCutoff(int shortCircuitCutoff) { if (shortCircuitCutoff < 0) { throw new IllegalArgumentException("[" + NAME + "] requires non-negative 'short_circuit_cutoff' field"); } this.shortCircuitCutoff = shortCircuitCutoff; return this; } /** * Sets inner hit definition in the scope of this query and reusing the defined type and query. ======= * Sets the query name for the filter that can be used when searching for matched_filters per hit. >>>>>>> * Sets the query name for the filter that can be used when searching for matched_filters per hit. <<<<<<< if (queryInnerHits != null) { try (XContentParser parser = queryInnerHits.getXcontentParser()) { XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { throw new IllegalStateException("start object expected but was: [" + token + "]"); } InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser); if (innerHits != null) { ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries()); InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper); String name = innerHits.getName() != null ? innerHits.getName() : type; context.addInnerHits(name, parentChildInnerHits); } } } String parentType = parentFieldMapper.type(); DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType); if (parentDocMapper == null) { throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type [" + parentType + "]"); ======= if (queryName != null) { builder.field("_name", queryName); >>>>>>> if (queryInnerHits != null) { try (XContentParser parser = queryInnerHits.getXcontentParser()) { XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { throw new IllegalStateException("start object expected but was: [" + token + "]"); } InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser); if (innerHits != null) { ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries()); InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper); String name = innerHits.getName() != null ? innerHits.getName() : type; context.addInnerHits(name, parentChildInnerHits); } } } String parentType = parentFieldMapper.type(); DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType); if (parentDocMapper == null) { throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type [" + parentType + "]");
<<<<<<< import org.elasticsearch.index.query.QueryShardException; ======= import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.search.internal.SearchContext; >>>>>>> import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.search.internal.SearchContext; <<<<<<< public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException { ======= public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) throws IOException, QueryParsingException { >>>>>>> public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) throws IOException, QueryShardException {
<<<<<<< public QueryBuilder<?> parseInnerQueryBuilder(QueryParseContext parseContext) throws IOException { parseContext.parseFieldMatcher(parseFieldMatcher); return parseContext.parseInnerQueryBuilder(); } @Nullable //norelease ======= >>>>>>> <<<<<<< private static ParsedQuery innerParse(QueryShardContext context, QueryBuilder<?> queryBuilder) throws IOException, QueryShardException { Query query = queryBuilder.toQuery(context); if (query == null) { query = Queries.newMatchNoDocsQuery(); } return new ParsedQuery(query, context.copyNamedQueries()); } ======= >>>>>>>
<<<<<<< scopedSettings.addSettingsUpdateConsumer(INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING, this::setSoftDeleteRetentionOperations); ======= scopedSettings.addSettingsUpdateConsumer(MAX_REGEX_LENGTH_SETTING, this::setMaxRegexLength); >>>>>>> scopedSettings.addSettingsUpdateConsumer(INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING, this::setSoftDeleteRetentionOperations); scopedSettings.addSettingsUpdateConsumer(MAX_REGEX_LENGTH_SETTING, this::setMaxRegexLength);
<<<<<<< ======= import org.apache.lucene.index.Term; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.ParseField; >>>>>>> import org.elasticsearch.common.ParseField;
<<<<<<< if (sOpType != null) { try { indexRequest.opType(sOpType); } catch (IllegalArgumentException eia){ try { XContentBuilder builder = channel.newErrorBuilder(); channel.sendResponse( new BytesRestResponse(BAD_REQUEST, builder.startObject().field("error", eia.getMessage()).endObject())); } catch (IOException e1) { logger.warn("Failed to send response", e1); return; } } } ======= >>>>>>>
<<<<<<< return new QuerySource(indexes, queryBuilder.toQuery(p), opts, getSort()); ======= return new QuerySource(indexes, p, opts, getSort()); >>>>>>> return new QuerySource(indexes, p, opts, getSort()); <<<<<<< private final List<ChangeSubIndex> indexes; ======= private final List<SubIndex> indexes; private final Predicate<ChangeData> predicate; >>>>>>> private final List<ChangeSubIndex> indexes; private final Predicate<ChangeData> predicate; <<<<<<< private QuerySource(List<ChangeSubIndex> indexes, Query query, QueryOptions opts, Sort sort) { ======= private QuerySource(List<SubIndex> indexes, Predicate<ChangeData> predicate, QueryOptions opts, Sort sort) throws QueryParseException { >>>>>>> private QuerySource(List<ChangeSubIndex> indexes, Predicate<ChangeData> predicate, QueryOptions opts, Sort sort) throws QueryParseException { <<<<<<< }), fields); ======= @Override public String toString() { return predicate.toString(); } })); >>>>>>> @Override public String toString() { return predicate.toString(); } }), fields);
<<<<<<< import java.util.*; ======= import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; >>>>>>> import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; <<<<<<< public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuilder<IndexMetaData>, ToXContent { public static final IndexMetaData PROTO = IndexMetaData.builder("") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1).numberOfReplicas(0).build(); ======= public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuilder<IndexMetaData>, ToXContent { >>>>>>> public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuilder<IndexMetaData>, ToXContent { <<<<<<< private IndexMetaData(String index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings, ======= private IndexMetaData(Index index, long version, State state, int numberOfShards, int numberOfReplicas, Settings settings, >>>>>>> private IndexMetaData(Index index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings, <<<<<<< if (primaryTerms == null) { initializePrimaryTerms(); } else if (primaryTerms.length != numberOfShards) { throw new IllegalStateException("primaryTerms length is [" + primaryTerms.length + "] but should be equal to number of shards [" + numberOfShards() + "]"); } return new IndexMetaData(index, version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledActiveAllocationIds.build(), requireFilters, includeFilters, excludeFilters, indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion); ======= final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); return new IndexMetaData(new Index(index, uuid), version, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledActiveAllocationIds.build(), requireFilters, includeFilters, excludeFilters, indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion); >>>>>>> if (primaryTerms == null) { initializePrimaryTerms(); } else if (primaryTerms.length != numberOfShards) { throw new IllegalStateException("primaryTerms length is [" + primaryTerms.length + "] but should be equal to number of shards [" + numberOfShards() + "]"); } final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledActiveAllocationIds.build(), requireFilters, includeFilters, excludeFilters, indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion);
<<<<<<< public static final String INDEX_REFRESH_INTERVAL = "index.refresh_interval"; ======= /** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). */ public static final String INDEX_SHARD_INACTIVE_TIME_SETTING = "index.shard.inactive_time"; private static final String INDICES_INACTIVE_TIME_SETTING = "indices.memory.shard_inactive_time"; >>>>>>> public static final String INDEX_REFRESH_INTERVAL = "index.refresh_interval"; <<<<<<< ======= // We start up inactive active.set(false); >>>>>>> <<<<<<< private void markLastWrite(Engine.Operation op) { lastWriteNS = op.startTime(); active.set(true); ======= private void markLastWrite() { if (active.getAndSet(true) == false) { indexEventListener.onShardActive(this); } >>>>>>> private void markLastWrite() { active.set(true); <<<<<<< logger.debug("shard is now inactive"); indicesLifecycle.onShardInactive(this); ======= updateBufferSize(IndexingMemoryController.INACTIVE_SHARD_INDEXING_BUFFER, IndexingMemoryController.INACTIVE_SHARD_TRANSLOG_BUFFER); logger.debug("marking shard as inactive (inactive_time=[{}]) indexing wise", inactiveTime); indexEventListener.onShardInactive(this); >>>>>>> logger.debug("shard is now inactive"); indexEventListener.onShardInactive(this); <<<<<<< /** * Asynchronously refreshes the engine for new search operations to reflect the latest * changes. */ public void refreshAsync(final String reason) { engineConfig.getThreadPool().executor(ThreadPool.Names.REFRESH).execute(new Runnable() { @Override public void run() { try { refresh(reason); } catch (EngineClosedException ex) { // ignore } } }); } final class EngineRefresher implements Runnable { ======= public IndexEventListener getIndexEventListener() { return indexEventListener; } public TimeValue getInactiveTime() { return inactiveTime; } class EngineRefresher implements Runnable { >>>>>>> public IndexEventListener getIndexEventListener() { return indexEventListener; } /** * Asynchronously refreshes the engine for new search operations to reflect the latest * changes. */ public void refreshAsync(final String reason) { engineConfig.getThreadPool().executor(ThreadPool.Names.REFRESH).execute(new Runnable() { @Override public void run() { try { refresh(reason); } catch (EngineClosedException ex) { // ignore } } }); } final class EngineRefresher implements Runnable {
<<<<<<< import org.elasticsearch.shield.authc.esnative.ReservedRealm; import org.elasticsearch.shield.authc.support.SecuredString; import org.elasticsearch.shield.authz.store.ReservedRolesStore; ======= import org.elasticsearch.xpack.security.authc.support.SecuredString; >>>>>>> <<<<<<< import java.io.IOException; import java.util.List; import java.util.Map; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.shield.authc.support.UsernamePasswordToken.basicAuthHeaderValue; ======= import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.is; >>>>>>> import java.io.IOException; import java.util.List; import java.util.Map; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
<<<<<<< IndexResponse indexResponse = result.response(); UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getVersion(), indexResponse.isCreated()); ======= IndexResponse indexResponse = result.getResponse(); UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.isCreated()); >>>>>>> IndexResponse indexResponse = result.getResponse(); UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getVersion(), indexResponse.isCreated());
<<<<<<< return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats(), indexShard.seqNoStats()); ======= return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()); >>>>>>> return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats(), indexShard.seqNoStats());
<<<<<<< import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; ======= import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; >>>>>>> import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
<<<<<<< ======= import java.util.Arrays; import java.util.List; import org.elasticsearch.client.support.Headers; >>>>>>> import java.util.Arrays; import java.util.List;
<<<<<<< import com.idehub.GoogleAnalyticsBridge.GoogleAnalyticsBridgePackage; ======= import com.horcrux.svg.SvgPackage; >>>>>>> import com.idehub.GoogleAnalyticsBridge.GoogleAnalyticsBridgePackage; import com.horcrux.svg.SvgPackage;
<<<<<<< * Returns the ideal point for the given problem, or {@code null} if * one is not specified. * * @param problem the problem name * @return the ideal point */ public static double[] getIdealPoint(String problem) { return PROPERTIES.getDoubleArray(KEY_IDEALPT_PREFIX + problem, null); } /** * Returns the reference point for the given problem, or {@code null} if * one is not specified. * * @param problem the problem name * @return the reference point */ public static double[] getReferencePoint(String problem) { return PROPERTIES.getDoubleArray(KEY_REFPT_PREFIX + problem, null); } /** ======= * Returns {@code true} if fast non-dominated sorting should be used; * or {@code false} if the naive non-dominated sorting implementation is * preferred. The default is {@code false} since while the fast version * has better worst-case time complexity, the naive version tends to run * faster except for a small number of edge cases. * * @return {@code true} if fast non-dominated sorting should be used; * or {@code false} if the naive non-dominated sorting * implementation is preferred */ public static boolean useFastNondominatedSorting() { return PROPERTIES.getBoolean(KEY_FAST_NONDOMINATED_SORTING, false); } /** >>>>>>> * Returns the ideal point for the given problem, or {@code null} if * one is not specified. * * @param problem the problem name * @return the ideal point */ public static double[] getIdealPoint(String problem) { return PROPERTIES.getDoubleArray(KEY_IDEALPT_PREFIX + problem, null); } /** * Returns the reference point for the given problem, or {@code null} if * one is not specified. * * @param problem the problem name * @return the reference point */ public static double[] getReferencePoint(String problem) { return PROPERTIES.getDoubleArray(KEY_REFPT_PREFIX + problem, null); } /** * Returns {@code true} if fast non-dominated sorting should be used; * or {@code false} if the naive non-dominated sorting implementation is * preferred. The default is {@code false} since while the fast version * has better worst-case time complexity, the naive version tends to run * faster except for a small number of edge cases. * * @return {@code true} if fast non-dominated sorting should be used; * or {@code false} if the naive non-dominated sorting * implementation is preferred */ public static boolean useFastNondominatedSorting() { return PROPERTIES.getBoolean(KEY_FAST_NONDOMINATED_SORTING, false); } /**
<<<<<<< import com.google.gerrit.extensions.systemstatus.MessageOfTheDay; import com.google.gerrit.extensions.validators.CommentValidator; ======= >>>>>>> import com.google.gerrit.extensions.validators.CommentValidator;
<<<<<<< if (id.matches("^([0-9a-fA-F]{4," + RevId.LEN + "})$")) { return asChangeNotes(query.byCommit(id)); ======= if (id.matches("^([0-9a-fA-F]{" + RevId.ABBREV_LEN + "," + RevId.LEN + "})$")) { return asChangeControls(query.byCommit(id), user); >>>>>>> if (id.matches("^([0-9a-fA-F]{" + RevId.ABBREV_LEN + "," + RevId.LEN + "})$")) { return asChangeNotes(query.byCommit(id));
<<<<<<< import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; ======= import com.google.common.collect.Iterables; >>>>>>> import com.google.common.collect.FluentIterable; import com.google.common.collect.Iterables; import com.google.common.collect.ImmutableList; <<<<<<< List<TagInfo> result = getTags().get(); assertThat(result).hasSize(2); ======= String tag3Ref = Constants.R_TAGS + "vLatest"; PushCommand pushCmd = git.push(); pushCmd.setRefSpecs(new RefSpec(tag2.name + ":" + tag3Ref)); Iterable<PushResult> r = pushCmd.call(); assertThat(Iterables.getOnlyElement(r).getRemoteUpdate(tag3Ref).getStatus()) .isEqualTo(Status.OK); List<TagInfo> result = toTagInfoList(adminSession.get("/projects/" + project.get() + "/tags")); assertThat(result).hasSize(3); >>>>>>> String tag3Ref = Constants.R_TAGS + "vLatest"; PushCommand pushCmd = testRepo.git().push(); pushCmd.setRefSpecs(new RefSpec(tag2.name + ":" + tag3Ref)); Iterable<PushResult> r = pushCmd.call(); assertThat(Iterables.getOnlyElement(r).getRemoteUpdate(tag3Ref).getStatus()) .isEqualTo(Status.OK); List<TagInfo> result = getTags().get(); assertThat(result).hasSize(3);
<<<<<<< void deleteIdentityLinksByProcessInstanceId(String processInstanceId); ======= void deleteIdentityLinksByScopeIdAndScopeType(String scopeId, String scopeType); >>>>>>> void deleteIdentityLinksByProcessInstanceId(String processInstanceId); void deleteIdentityLinksByScopeIdAndScopeType(String scopeId, String scopeType);
<<<<<<< TimerJobEntity timerJob = jobManager.createTimerJob(timerEventDefinition, false, null, TimerStartEventJobHandler.TYPE, TimerEventHandler.createConfiguration(startEvent.getId(), timerEventDefinition.getEndDate())); ======= TimerEntity timer = TimerUtil.createTimerEntityForTimerEventDefinition(timerEventDefinition, false, null, TimerStartEventJobHandler.TYPE, TimerEventHandler.createConfiguration(startEvent.getId(), timerEventDefinition.getEndDate(), timerEventDefinition.getCalendarName())); >>>>>>> TimerJobEntity timerJob = jobManager.createTimerJob(timerEventDefinition, false, null, TimerStartEventJobHandler.TYPE, TimerEventHandler.createConfiguration(startEvent.getId(), timerEventDefinition.getEndDate(), timerEventDefinition.getCalendarName()));
<<<<<<< TimerJobEntity timer = TimerUtil.createTimerEntityForTimerEventDefinition((TimerEventDefinition) eventDefinition, false, null, TimerStartEventJobHandler.TYPE, TimerEventHandler.createConfiguration(startEvent.getId(), timerEventDefinition.getEndDate())); ======= TimerEntity timer = TimerUtil.createTimerEntityForTimerEventDefinition((TimerEventDefinition) eventDefinition, false, null, TimerStartEventJobHandler.TYPE, TimerEventHandler.createConfiguration(startEvent.getId(), timerEventDefinition.getEndDate(), timerEventDefinition.getCalendarName())); >>>>>>> TimerJobEntity timer = TimerUtil.createTimerEntityForTimerEventDefinition((TimerEventDefinition) eventDefinition, false, null, TimerStartEventJobHandler.TYPE, TimerEventHandler.createConfiguration(startEvent.getId(), timerEventDefinition.getEndDate(), timerEventDefinition.getCalendarName()));
<<<<<<< @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenant.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenant() { deployDecisionAndAssertProcessExecuted(); } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcess.bpmn20.xml" } ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantDefaultBehavior() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("no processes deployed with key 'oneDecisionTaskProcess' for tenant identifier 'flowable'"); deployDecisionAndAssertProcessExecuted(); } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenantFalse.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantFalse() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("No decision found for key: decision1 and tenant flowable"); deployDecisionAndAssertProcessExecuted(); } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenantFalse.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantFallbackFalseWithoutDeployment() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("No decision found for key: decision1 and tenant flowable"); org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment(). addClasspathResource("org/flowable/dmn/engine/test/deployment/simple.dmn"). tenantId("anotherTenant"). deploy(); try { assertDmnProcessExecuted(); } finally { this.repositoryService.deleteDeployment(deployment.getId(), true); } } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenant.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantFallbackTrueWithoutDeployment() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("No decision found for key: decision1. There was also no fall back decision table found without tenant."); org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment(). addClasspathResource("org/flowable/dmn/engine/test/deployment/simple.dmn"). tenantId("anotherTenant"). deploy(); try { assertDmnProcessExecuted(); } finally { this.repositoryService.deleteDeployment(deployment.getId(), true); } } protected void deployDecisionAndAssertProcessExecuted() { org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment(). addClasspathResource("org/flowable/dmn/engine/test/deployment/simple.dmn"). tenantId(""). 
deploy(); try { assertDmnProcessExecuted(); } finally { this.repositoryService.deleteDeployment(deployment.getId(), true); } } protected void assertDmnProcessExecuted() { ProcessInstance processInstance = runtimeService.startProcessInstanceByKeyAndTenantId( "oneDecisionTaskProcess", Collections.singletonMap("inputVariable1", (Object) 1), "flowable"); List<HistoricVariableInstance> variables = historyService.createHistoricVariableInstanceQuery() .processInstanceId(processInstance.getId()).orderByVariableName().asc().list(); assertEquals("inputVariable1", variables.get(0).getVariableName()); assertEquals(1, variables.get(0).getValue()); assertEquals("outputVariable1", variables.get(1).getVariableName()); assertEquals("result1", variables.get(1).getValue()); } ======= @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskNoHitsErrorProcess.bpmn20.xml"}) public void testDecisionNotFound() { try { runtimeService.startProcessInstanceByKey("oneDecisionTaskProcess", Collections.singletonMap("inputVariable1", (Object) 2)); fail("Expected Exception"); } catch (Exception e) { assertTrue(e.getMessage().contains("Decision table for key [decision1] was not found")); } } >>>>>>> @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskNoHitsErrorProcess.bpmn20.xml"}) public void testDecisionNotFound() { try { runtimeService.startProcessInstanceByKey("oneDecisionTaskProcess", Collections.singletonMap("inputVariable1", (Object) 2)); fail("Expected Exception"); } catch (Exception e) { assertTrue(e.getMessage().contains("Decision table for key [decision1] was not found")); } } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenant.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenant() { deployDecisionAndAssertProcessExecuted(); } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcess.bpmn20.xml" } ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantDefaultBehavior() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("no processes deployed with key 'oneDecisionTaskProcess' for tenant identifier 'flowable'"); deployDecisionAndAssertProcessExecuted(); } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenantFalse.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantFalse() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("No decision found for key: decision1 and tenant flowable"); deployDecisionAndAssertProcessExecuted(); } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenantFalse.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantFallbackFalseWithoutDeployment() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("No decision found for key: decision1 and tenant flowable"); org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment(). addClasspathResource("org/flowable/dmn/engine/test/deployment/simple.dmn"). tenantId("anotherTenant"). 
deploy(); try { assertDmnProcessExecuted(); } finally { this.repositoryService.deleteDeployment(deployment.getId(), true); } } @Test @Deployment(resources = { "org/flowable/dmn/engine/test/deployment/oneDecisionTaskProcessFallBackToDefaultTenant.bpmn20.xml" }, tenantId = "flowable" ) public void testDecisionTaskExecutionInAnotherDeploymentAndTenantFallbackTrueWithoutDeployment() { this.expectedException.expect(FlowableObjectNotFoundException.class); this.expectedException.expectMessage("No decision found for key: decision1. There was also no fall back decision table found without tenant."); org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment(). addClasspathResource("org/flowable/dmn/engine/test/deployment/simple.dmn"). tenantId("anotherTenant"). deploy(); try { assertDmnProcessExecuted(); } finally { this.repositoryService.deleteDeployment(deployment.getId(), true); } } protected void deployDecisionAndAssertProcessExecuted() { org.flowable.engine.repository.Deployment deployment = repositoryService.createDeployment(). addClasspathResource("org/flowable/dmn/engine/test/deployment/simple.dmn"). tenantId(""). deploy(); try { assertDmnProcessExecuted(); } finally { this.repositoryService.deleteDeployment(deployment.getId(), true); } } protected void assertDmnProcessExecuted() { ProcessInstance processInstance = runtimeService.startProcessInstanceByKeyAndTenantId( "oneDecisionTaskProcess", Collections.singletonMap("inputVariable1", (Object) 1), "flowable"); List<HistoricVariableInstance> variables = historyService.createHistoricVariableInstanceQuery() .processInstanceId(processInstance.getId()).orderByVariableName().asc().list(); assertEquals("inputVariable1", variables.get(0).getVariableName()); assertEquals(1, variables.get(0).getValue()); assertEquals("outputVariable1", variables.get(1).getVariableName()); assertEquals("result1", variables.get(1).getValue()); }
<<<<<<< } else { processDiagramCanvas.drawThrowingNoneEvent(graphicInfo, scaleFactor); ======= } else if (throwEvent.getEventDefinitions().get(0) instanceof CompensateEventDefinition) { processDiagramCanvas.drawThrowingCompensateEvent(graphicInfo, scaleFactor); >>>>>>> } else if (throwEvent.getEventDefinitions().get(0) instanceof CompensateEventDefinition) { processDiagramCanvas.drawThrowingCompensateEvent(graphicInfo, scaleFactor); } else { processDiagramCanvas.drawThrowingNoneEvent(graphicInfo, scaleFactor); <<<<<<< } else if (boundaryEvent.getEventDefinitions().get(0) instanceof MessageEventDefinition) { processDiagramCanvas.drawCatchingMessageEvent(flowNode.getName(), graphicInfo, boundaryEvent.isCancelActivity(), scaleFactor); ======= } else if (boundaryEvent.getEventDefinitions().get(0) instanceof CompensateEventDefinition) { processDiagramCanvas.drawCatchingCompensateEvent(graphicInfo, boundaryEvent.isCancelActivity(), scaleFactor); >>>>>>> } else if (boundaryEvent.getEventDefinitions().get(0) instanceof MessageEventDefinition) { processDiagramCanvas.drawCatchingMessageEvent(flowNode.getName(), graphicInfo, boundaryEvent.isCancelActivity(), scaleFactor); } else if (boundaryEvent.getEventDefinitions().get(0) instanceof CompensateEventDefinition) { processDiagramCanvas.drawCatchingCompensateEvent(graphicInfo, boundaryEvent.isCancelActivity(), scaleFactor);
<<<<<<< /** * Update historic activity instance according to changes done in the runtime activity * @param activityInstance */ void updateHistoricActivityInstance(ActivityInstance activityInstance); /** * Create new historic activity instance from runtime activity instance * * @param activityInstance activity instance template */ void createHistoricActivityInstance(ActivityInstance activityInstance); ======= void updateHistoricActivityInstance(ActivityInstanceEntity activityInstance); /** * Record historic user task log entry * @param taskLogEntryBuilder historic user task log entry description */ void recordHistoricUserTaskLogEntry(HistoricTaskLogEntryBuilder taskLogEntryBuilder); /** * Delete historic user task log entry * @param logNumber log identifier */ void deleteHistoryUserTaskLog(long logNumber); >>>>>>> /** * Update historic activity instance according to changes done in the runtime activity * @param activityInstance */ void updateHistoricActivityInstance(ActivityInstance activityInstance); /** * Create new historic activity instance from runtime activity instance * * @param activityInstance activity instance template */ void createHistoricActivityInstance(ActivityInstance activityInstance); /** * Record historic user task log entry * @param taskLogEntryBuilder historic user task log entry description */ void recordHistoricUserTaskLogEntry(HistoricTaskLogEntryBuilder taskLogEntryBuilder); /** * Delete historic user task log entry * @param logNumber log identifier */ void deleteHistoryUserTaskLog(long logNumber);
<<<<<<< private static Class loadClass(ClassLoader classLoader, String className) throws ClassNotFoundException { ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration(); boolean useClassForName = processEngineConfiguration == null || processEngineConfiguration.isUseClassForNameClassLoading(); return useClassForName ? Class.forName(className, true, classLoader) : classLoader.loadClass(className); } ======= public static boolean isGetter(Method method) { String name = method.getName(); Class< ? > type = method.getReturnType(); Class< ? > params[] = method.getParameterTypes(); if (!GETTER_PATTERN.matcher(name).matches()) { return false; } // special for isXXX boolean if (name.startsWith("is")) { return params.length == 0 && type.getSimpleName().equalsIgnoreCase("boolean"); } return params.length == 0 && !type.equals(Void.TYPE); } public static boolean isSetter(Method method, boolean allowBuilderPattern) { String name = method.getName(); Class< ? > type = method.getReturnType(); Class< ? > params[] = method.getParameterTypes(); if (!SETTER_PATTERN.matcher(name).matches()) { return false; } return params.length == 1 && (type.equals(Void.TYPE) || (allowBuilderPattern && method.getDeclaringClass().isAssignableFrom(type))); } public static boolean isSetter(Method method) { return isSetter(method, false); } public static String getGetterShorthandName(Method method) { if (!isGetter(method)) { return method.getName(); } String name = method.getName(); if (name.startsWith("get")) { name = name.substring(3); name = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1); } else if (name.startsWith("is")) { name = name.substring(2); name = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1); } return name; } public static String getSetterShorthandName(Method method) { if (!isSetter(method)) { return method.getName(); } String name = method.getName(); if (name.startsWith("set")) { name = name.substring(3); name = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1); } return name; } >>>>>>> private static Class loadClass(ClassLoader classLoader, String className) throws ClassNotFoundException { ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration(); boolean useClassForName = processEngineConfiguration == null || processEngineConfiguration.isUseClassForNameClassLoading(); return useClassForName ? Class.forName(className, true, classLoader) : classLoader.loadClass(className); } public static boolean isGetter(Method method) { String name = method.getName(); Class< ? > type = method.getReturnType(); Class< ? > params[] = method.getParameterTypes(); if (!GETTER_PATTERN.matcher(name).matches()) { return false; } // special for isXXX boolean if (name.startsWith("is")) { return params.length == 0 && type.getSimpleName().equalsIgnoreCase("boolean"); } return params.length == 0 && !type.equals(Void.TYPE); } public static boolean isSetter(Method method, boolean allowBuilderPattern) { String name = method.getName(); Class< ? > type = method.getReturnType(); Class< ? 
> params[] = method.getParameterTypes(); if (!SETTER_PATTERN.matcher(name).matches()) { return false; } return params.length == 1 && (type.equals(Void.TYPE) || (allowBuilderPattern && method.getDeclaringClass().isAssignableFrom(type))); } public static boolean isSetter(Method method) { return isSetter(method, false); } public static String getGetterShorthandName(Method method) { if (!isGetter(method)) { return method.getName(); } String name = method.getName(); if (name.startsWith("get")) { name = name.substring(3); name = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1); } else if (name.startsWith("is")) { name = name.substring(2); name = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1); } return name; } public static String getSetterShorthandName(Method method) { if (!isSetter(method)) { return method.getName(); } String name = method.getName(); if (name.startsWith("set")) { name = name.substring(3); name = name.substring(0, 1).toLowerCase(Locale.ENGLISH) + name.substring(1); } return name; }
<<<<<<< protected boolean isAsyncHistoryJsonGzipCompressionEnabled; protected boolean isAsyncHistoryJsonGroupingEnabled; protected int asyncHistoryJsonGroupingThreshold = 10; ======= // Job Manager //protected boolean isAsyncHistoryJsonGzipCompressionEnabled; //protected boolean isAsyncHistoryJsonGroupingEnabled; >>>>>>> protected boolean isAsyncHistoryJsonGzipCompressionEnabled; protected boolean isAsyncHistoryJsonGroupingEnabled; protected int asyncHistoryJsonGroupingThreshold = 10; <<<<<<< /** * The amount of time (in milliseconds) a job can maximum be in the 'executable' state before being deemed expired. * Note that this won't happen when using the threadpool based executor, as the acquire thread will fetch these kind of jobs earlier. * However, in the message queue based execution, it could be some job is posted to a queue but then never is locked nor executed. * * By default 24 hours, as this should be a very exceptional case. */ protected int asyncExecutorResetExpiredJobsMaxTimeout = 24 * 60 * 60 * 1000; ======= >>>>>>> /** * The amount of time (in milliseconds) a job can maximum be in the 'executable' state before being deemed expired. * Note that this won't happen when using the threadpool based executor, as the acquire thread will fetch these kind of jobs earlier. * However, in the message queue based execution, it could be some job is posted to a queue but then never is locked nor executed. * * By default 24 hours, as this should be a very exceptional case. */ protected int asyncExecutorResetExpiredJobsMaxTimeout = 24 * 60 * 60 * 1000;
<<<<<<< @Override public void createHistoricActivityInstance(ActivityInstance activityInstance) { // create (in the new job) new historic activity instance from runtime activityInstance template if (isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, activityInstance.getProcessDefinitionId())) { if (activityInstance.getExecutionId() != null) { Map<String, String> data = new HashMap<>(); putIfNotNull(data, HistoryJsonConstants.RUNTIME_ACTIVITY_INSTANCE_ID, activityInstance.getId()); putIfNotNull(data, HistoryJsonConstants.PROCESS_DEFINITION_ID, activityInstance.getProcessDefinitionId()); putIfNotNull(data, HistoryJsonConstants.PROCESS_INSTANCE_ID, activityInstance.getProcessInstanceId()); putIfNotNull(data, HistoryJsonConstants.EXECUTION_ID, activityInstance.getExecutionId()); putIfNotNull(data, HistoryJsonConstants.ACTIVITY_ID, activityInstance.getActivityId()); putIfNotNull(data, HistoryJsonConstants.ACTIVITY_NAME, activityInstance.getActivityName()); putIfNotNull(data, HistoryJsonConstants.ACTIVITY_TYPE, activityInstance.getActivityType()); putIfNotNull(data, HistoryJsonConstants.START_TIME, activityInstance.getStartTime()); putIfNotNull(data, HistoryJsonConstants.END_TIME, activityInstance.getEndTime()); putIfNotNull(data, HistoryJsonConstants.TENANT_ID, activityInstance.getTenantId()); getAsyncHistorySession().addHistoricData(getJobServiceConfiguration(), HistoryJsonConstants.TYPE_ACTIVITY_FULL, data); } } } ======= @Override public void recordHistoricUserTaskLogEntry(HistoricTaskLogEntryBuilder taskLogEntryBuilder) { if (processEngineConfiguration.isEnableHistoricTaskLogging()) { Map<String, String> data = new HashMap<>(); putIfNotNull(data, HistoryJsonConstants.LOG_ENTRY_DATA, taskLogEntryBuilder.getData()); putIfNotNull(data, HistoryJsonConstants.PROCESS_INSTANCE_ID, taskLogEntryBuilder.getProcessInstanceId()); putIfNotNull(data, HistoryJsonConstants.EXECUTION_ID, taskLogEntryBuilder.getExecutionId()); putIfNotNull(data, HistoryJsonConstants.PROCESS_DEFINITION_ID, taskLogEntryBuilder.getProcessDefinitionId()); putIfNotNull(data, HistoryJsonConstants.TASK_ID, taskLogEntryBuilder.getTaskId()); putIfNotNull(data, HistoryJsonConstants.TENANT_ID, taskLogEntryBuilder.getTenantId()); putIfNotNull(data, HistoryJsonConstants.CREATE_TIME, taskLogEntryBuilder.getTimeStamp()); putIfNotNull(data, HistoryJsonConstants.USER_ID, taskLogEntryBuilder.getUserId()); putIfNotNull(data, HistoryJsonConstants.LOG_ENTRY_TYPE, taskLogEntryBuilder.getType()); getAsyncHistorySession().addHistoricData(getJobServiceConfiguration(), HistoryJsonConstants.TYPE_HISTORIC_TASK_LOG_RECORD, data, taskLogEntryBuilder.getTenantId()); } } @Override public void deleteHistoryUserTaskLog(long logNumber) { if (processEngineConfiguration.isEnableHistoricTaskLogging()) { Map<String, String> data = new HashMap<>(); putIfNotNull(data, HistoryJsonConstants.LOG_ENTRY_LOGNUMBER, logNumber); getAsyncHistorySession().addHistoricData(getJobServiceConfiguration(), HistoryJsonConstants.TYPE_HISTORIC_TASK_LOG_DELETE, data); } } >>>>>>> @Override public void createHistoricActivityInstance(ActivityInstance activityInstance) { // create (in the new job) new historic activity instance from runtime activityInstance template if (isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, activityInstance.getProcessDefinitionId())) { if (activityInstance.getExecutionId() != null) { Map<String, String> data = new HashMap<>(); putIfNotNull(data, HistoryJsonConstants.RUNTIME_ACTIVITY_INSTANCE_ID, activityInstance.getId()); putIfNotNull(data, 
HistoryJsonConstants.PROCESS_DEFINITION_ID, activityInstance.getProcessDefinitionId()); putIfNotNull(data, HistoryJsonConstants.PROCESS_INSTANCE_ID, activityInstance.getProcessInstanceId()); putIfNotNull(data, HistoryJsonConstants.EXECUTION_ID, activityInstance.getExecutionId()); putIfNotNull(data, HistoryJsonConstants.ACTIVITY_ID, activityInstance.getActivityId()); putIfNotNull(data, HistoryJsonConstants.ACTIVITY_NAME, activityInstance.getActivityName()); putIfNotNull(data, HistoryJsonConstants.ACTIVITY_TYPE, activityInstance.getActivityType()); putIfNotNull(data, HistoryJsonConstants.START_TIME, activityInstance.getStartTime()); putIfNotNull(data, HistoryJsonConstants.END_TIME, activityInstance.getEndTime()); putIfNotNull(data, HistoryJsonConstants.TENANT_ID, activityInstance.getTenantId()); getAsyncHistorySession().addHistoricData(getJobServiceConfiguration(), HistoryJsonConstants.TYPE_ACTIVITY_FULL, data); } } } public void recordHistoricUserTaskLogEntry(HistoricTaskLogEntryBuilder taskLogEntryBuilder) { if (processEngineConfiguration.isEnableHistoricTaskLogging()) { Map<String, String> data = new HashMap<>(); putIfNotNull(data, HistoryJsonConstants.LOG_ENTRY_DATA, taskLogEntryBuilder.getData()); putIfNotNull(data, HistoryJsonConstants.PROCESS_INSTANCE_ID, taskLogEntryBuilder.getProcessInstanceId()); putIfNotNull(data, HistoryJsonConstants.EXECUTION_ID, taskLogEntryBuilder.getExecutionId()); putIfNotNull(data, HistoryJsonConstants.PROCESS_DEFINITION_ID, taskLogEntryBuilder.getProcessDefinitionId()); putIfNotNull(data, HistoryJsonConstants.TASK_ID, taskLogEntryBuilder.getTaskId()); putIfNotNull(data, HistoryJsonConstants.TENANT_ID, taskLogEntryBuilder.getTenantId()); putIfNotNull(data, HistoryJsonConstants.CREATE_TIME, taskLogEntryBuilder.getTimeStamp()); putIfNotNull(data, HistoryJsonConstants.USER_ID, taskLogEntryBuilder.getUserId()); putIfNotNull(data, HistoryJsonConstants.LOG_ENTRY_TYPE, taskLogEntryBuilder.getType()); getAsyncHistorySession().addHistoricData(getJobServiceConfiguration(), HistoryJsonConstants.TYPE_HISTORIC_TASK_LOG_RECORD, data, taskLogEntryBuilder.getTenantId()); } } @Override public void deleteHistoryUserTaskLog(long logNumber) { if (processEngineConfiguration.isEnableHistoricTaskLogging()) { Map<String, String> data = new HashMap<>(); putIfNotNull(data, HistoryJsonConstants.LOG_ENTRY_LOGNUMBER, logNumber); getAsyncHistorySession().addHistoricData(getJobServiceConfiguration(), HistoryJsonConstants.TYPE_HISTORIC_TASK_LOG_DELETE, data); } }
<<<<<<< import org.flowable.dmn.api.DmnEngineConfigurationApi; ======= import org.flowable.dmn.api.DmnHistoryService; >>>>>>> import org.flowable.dmn.api.DmnEngineConfigurationApi; import org.flowable.dmn.api.DmnHistoryService; <<<<<<< import org.flowable.dmn.engine.impl.persistence.entity.DmnResourceEntityManager; import org.flowable.dmn.engine.impl.persistence.entity.DmnResourceEntityManagerImpl; ======= import org.flowable.dmn.engine.impl.persistence.entity.HistoricDecisionExecutionEntityManager; import org.flowable.dmn.engine.impl.persistence.entity.HistoricDecisionExecutionEntityManagerImpl; import org.flowable.dmn.engine.impl.persistence.entity.ResourceEntityManager; import org.flowable.dmn.engine.impl.persistence.entity.ResourceEntityManagerImpl; >>>>>>> import org.flowable.dmn.engine.impl.persistence.entity.DmnResourceEntityManager; import org.flowable.dmn.engine.impl.persistence.entity.DmnResourceEntityManagerImpl; import org.flowable.dmn.engine.impl.persistence.entity.HistoricDecisionExecutionEntityManager; import org.flowable.dmn.engine.impl.persistence.entity.HistoricDecisionExecutionEntityManagerImpl; <<<<<<< import org.flowable.dmn.engine.impl.persistence.entity.data.DmnResourceDataManager; ======= import org.flowable.dmn.engine.impl.persistence.entity.data.HistoricDecisionExecutionDataManager; import org.flowable.dmn.engine.impl.persistence.entity.data.ResourceDataManager; >>>>>>> import org.flowable.dmn.engine.impl.persistence.entity.data.DmnResourceDataManager; import org.flowable.dmn.engine.impl.persistence.entity.data.HistoricDecisionExecutionDataManager; <<<<<<< import org.flowable.dmn.engine.impl.persistence.entity.data.impl.MybatisDmnResourceDataManager; ======= import org.flowable.dmn.engine.impl.persistence.entity.data.impl.MybatisHistoricDecisionExecutionDataManager; import org.flowable.dmn.engine.impl.persistence.entity.data.impl.MybatisResourceDataManager; >>>>>>> import org.flowable.dmn.engine.impl.persistence.entity.data.impl.MybatisDmnResourceDataManager; import org.flowable.dmn.engine.impl.persistence.entity.data.impl.MybatisHistoricDecisionExecutionDataManager; <<<<<<< protected DmnResourceDataManager resourceDataManager; ======= protected ResourceDataManager resourceDataManager; protected HistoricDecisionExecutionDataManager historicDecisionExecutionDataManager; >>>>>>> protected DmnResourceDataManager resourceDataManager; protected HistoricDecisionExecutionDataManager historicDecisionExecutionDataManager; <<<<<<< protected DmnResourceEntityManager resourceEntityManager; ======= protected ResourceEntityManager resourceEntityManager; protected HistoricDecisionExecutionEntityManager historicDecisionExecutionEntityManager; >>>>>>> protected DmnResourceEntityManager resourceEntityManager; protected HistoricDecisionExecutionEntityManager historicDecisionExecutionEntityManager;
<<<<<<< public TimerJobEntity prepareTimerEntity(ExecutionEntity executionEntity) { ======= public TimerEntity prepareTimerEntity(ExecutionEntity executionEntity) { // ACT-1415: timer-declaration on start-event may contain expressions NOT // evaluating variables but other context, evaluating should happen nevertheless VariableScope scopeForExpression = executionEntity; if (scopeForExpression == null) { scopeForExpression = NoExecutionVariableScope.getSharedInstance(); } String calendarNameValue = type.calendarName; if (this.calendarNameExpression != null) { calendarNameValue = (String) this.calendarNameExpression.getValue(scopeForExpression); } >>>>>>> public TimerJobEntity prepareTimerEntity(ExecutionEntity executionEntity) { // ACT-1415: timer-declaration on start-event may contain expressions NOT // evaluating variables but other context, evaluating should happen nevertheless VariableScope scopeForExpression = executionEntity; if (scopeForExpression == null) { scopeForExpression = NoExecutionVariableScope.getSharedInstance(); } String calendarNameValue = type.calendarName; if (this.calendarNameExpression != null) { calendarNameValue = (String) this.calendarNameExpression.getValue(scopeForExpression); }
<<<<<<< if (executorService != null) { executorService.execute(new ExecuteAsyncRunnable(job, commandExecutor)); ======= if (isActive) { threadPoolExecutor.execute(new ExecuteAsyncRunnable(job, commandExecutor)); >>>>>>> if (isActive) { executorService.execute(new ExecuteAsyncRunnable(job, commandExecutor));
<<<<<<< import org.flowable.eventregistry.impl.keydetector.JsonPathBasedInboundEventKeyDetector; ======= import org.flowable.eventregistry.impl.keydetector.JsonPointerBasedInboundEventKeyDetector; import org.flowable.eventregistry.impl.keydetector.InboundEventStaticKeyDetector; >>>>>>> import org.flowable.eventregistry.impl.keydetector.JsonPointerBasedInboundEventKeyDetector; <<<<<<< ======= import org.flowable.eventregistry.impl.tenantdetector.JsonPointerBasedInboundEventTenantDetector; >>>>>>>
<<<<<<< import org.flowable.common.engine.impl.logging.LoggingListener; import org.flowable.common.engine.impl.logging.LoggingSession; import org.flowable.common.engine.impl.logging.LoggingSessionFactory; ======= import org.flowable.common.engine.impl.lock.LockManager; import org.flowable.common.engine.impl.lock.LockManagerImpl; >>>>>>> import org.flowable.common.engine.impl.lock.LockManager; import org.flowable.common.engine.impl.lock.LockManagerImpl; import org.flowable.common.engine.impl.logging.LoggingListener; import org.flowable.common.engine.impl.logging.LoggingSession; import org.flowable.common.engine.impl.logging.LoggingSessionFactory;
<<<<<<< import java.util.Map; ======= import java.util.Map; import org.flowable.dmn.api.DecisionExecutionAuditContainer; >>>>>>> import java.util.Map; import org.flowable.dmn.api.DecisionExecutionAuditContainer; <<<<<<< import org.flowable.dmn.engine.impl.util.CommandContextUtil; ======= import org.flowable.dmn.engine.impl.ExecuteDecisionBuilderImpl; import org.flowable.dmn.engine.impl.interceptor.Command; import org.flowable.dmn.engine.impl.interceptor.CommandContext; >>>>>>> import org.flowable.dmn.engine.impl.ExecuteDecisionBuilderImpl; import org.flowable.dmn.engine.impl.util.CommandContextUtil; <<<<<<< import org.flowable.engine.common.impl.interceptor.Command; import org.flowable.engine.common.impl.interceptor.CommandContext; ======= >>>>>>> import org.flowable.engine.common.impl.interceptor.Command; import org.flowable.engine.common.impl.interceptor.CommandContext;
<<<<<<< package org.activiti.engine.test.bpmn.event.timer; import java.util.Calendar; import java.util.Date; import java.util.List; ======= >>>>>>>
<<<<<<< deleteExecutionEntities(executionEntityManager, rootExecutionEntity, deleteReason, execution.getCurrentFlowElement()); ======= deleteExecutionEntities(executionEntityManager, rootExecutionEntity, execution.getCurrentFlowElement(), deleteReason); >>>>>>> deleteExecutionEntities(executionEntityManager, rootExecutionEntity, execution.getCurrentFlowElement(), deleteReason); <<<<<<< ======= deleteExecutionEntities(executionEntityManager, scopeExecutionEntity, execution.getCurrentFlowElement(), deleteReason); >>>>>>> deleteExecutionEntities(executionEntityManager, scopeExecutionEntity, execution.getCurrentFlowElement(), deleteReason); <<<<<<< deleteExecutionEntities(executionEntityManager, miRootExecutionEntity, createDeleteReason(miRootExecutionEntity.getActivityId()), execution.getCurrentFlowElement()); ======= deleteExecutionEntities(executionEntityManager, miRootExecutionEntity, miRootExecutionEntity.getCurrentFlowElement(), createDeleteReason(miRootExecutionEntity.getActivityId())); >>>>>>> deleteExecutionEntities(executionEntityManager, miRootExecutionEntity, miRootExecutionEntity.getCurrentFlowElement(), createDeleteReason(miRootExecutionEntity.getActivityId())); <<<<<<< protected void deleteExecutionEntities(ExecutionEntityManager executionEntityManager, ExecutionEntity rootExecutionEntity, String deleteReason, FlowElement terminateEndEventElement) { ======= protected void deleteExecutionEntities(ExecutionEntityManager executionEntityManager, ExecutionEntity rootExecutionEntity, FlowElement terminateEndEvent, String deleteReason) { >>>>>>> protected void deleteExecutionEntities(ExecutionEntityManager executionEntityManager, ExecutionEntity rootExecutionEntity, FlowElement terminateEndEvent, String deleteReason) { <<<<<<< sendProcessInstanceCompletedEvent(childExecutions.get(i), terminateEndEventElement); ======= sendProcessInstanceCompletedEvent(childExecutions.get(i), terminateEndEvent); >>>>>>> sendProcessInstanceCompletedEvent(childExecutions.get(i), terminateEndEvent); <<<<<<< sendProcessInstanceCompletedEvent(rootExecutionEntity, terminateEndEventElement); ======= sendProcessInstanceCompletedEvent(rootExecutionEntity, terminateEndEvent); >>>>>>> sendProcessInstanceCompletedEvent(rootExecutionEntity, terminateEndEvent);
<<<<<<< import org.flowable.variable.service.impl.persistence.entity.VariableByteArrayRef; import org.flowable.variable.service.impl.persistence.entity.VariableInstanceEntity; ======= import org.flowable.variable.api.type.VariableScopeType; >>>>>>> import org.flowable.variable.api.type.VariableScopeType; import org.flowable.variable.service.impl.persistence.entity.VariableByteArrayRef; import org.flowable.variable.service.impl.persistence.entity.VariableInstanceEntity; <<<<<<< CountingTaskEntity countingTaskEntity = (CountingTaskEntity) task; if (countingTaskEntity.isCountEnabled() && countingTaskEntity.getIdentityLinkCount() > 0) { CommandContextUtil.getIdentityLinkService(commandContext).deleteIdentityLinksByTaskId(task.getId()); } if (countingTaskEntity.isCountEnabled() && countingTaskEntity.getVariableCount() > 0) { Map<String, VariableInstanceEntity> taskVariables = task.getVariableInstanceEntities(); ArrayList<VariableByteArrayRef> variableByteArrayRefs = new ArrayList<>(); for (VariableInstanceEntity variableInstanceEntity : taskVariables.values()) { if (variableInstanceEntity.getByteArrayRef() != null && variableInstanceEntity.getByteArrayRef().getId() != null) { variableByteArrayRefs.add(variableInstanceEntity.getByteArrayRef()); } } for (VariableByteArrayRef variableByteArrayRef : variableByteArrayRefs) { CommandContextUtil.getVariableServiceConfiguration(commandContext).getByteArrayEntityManager().deleteByteArrayById(variableByteArrayRef.getId()); } if (!taskVariables.isEmpty()) { CommandContextUtil.getVariableService(commandContext).deleteVariablesByTaskId(task.getId()); } CommandContextUtil.getVariableService(commandContext).deleteVariablesByTaskId(task.getId()); ======= CommandContextUtil.getIdentityLinkService(commandContext).deleteIdentityLinksByTaskId(task.getId()); CommandContextUtil.getVariableService(commandContext).deleteVariableInstanceMap(task.getVariableInstanceEntities()); if (cascade) { deleteHistoricTask(task.getId()); } else { CommandContextUtil.getCmmnHistoryManager(commandContext).recordTaskEnd(task, deleteReason); >>>>>>> CountingTaskEntity countingTaskEntity = (CountingTaskEntity) task; if (countingTaskEntity.isCountEnabled() && countingTaskEntity.getIdentityLinkCount() > 0) { CommandContextUtil.getIdentityLinkService(commandContext).deleteIdentityLinksByTaskId(task.getId()); } if (countingTaskEntity.isCountEnabled() && countingTaskEntity.getVariableCount() > 0) { Map<String, VariableInstanceEntity> taskVariables = task.getVariableInstanceEntities(); ArrayList<VariableByteArrayRef> variableByteArrayRefs = new ArrayList<>(); for (VariableInstanceEntity variableInstanceEntity : taskVariables.values()) { if (variableInstanceEntity.getByteArrayRef() != null && variableInstanceEntity.getByteArrayRef().getId() != null) { variableByteArrayRefs.add(variableInstanceEntity.getByteArrayRef()); } } for (VariableByteArrayRef variableByteArrayRef : variableByteArrayRefs) { CommandContextUtil.getVariableServiceConfiguration(commandContext).getByteArrayEntityManager().deleteByteArrayById(variableByteArrayRef.getId()); } if (!taskVariables.isEmpty()) { CommandContextUtil.getVariableService(commandContext).deleteVariablesByTaskId(task.getId()); } CommandContextUtil.getVariableService(commandContext).deleteVariablesByTaskId(task.getId()); } if (cascade) { deleteHistoricTask(task.getId()); } else { CommandContextUtil.getCmmnHistoryManager(commandContext).recordTaskEnd(task, deleteReason);
<<<<<<< eventRegistry.removeFlowableEventConsumer(testEventConsumer); List<EventDeployment> eventDeployments = eventRepositoryService.createDeploymentQuery().list(); for (EventDeployment eventDeployment : eventDeployments) { eventRepositoryService.deleteDeployment(eventDeployment.getId()); } ======= eventRegistry.removeFlowableEventRegistryEventConsumer(testEventConsumer); >>>>>>> List<EventDeployment> eventDeployments = eventRepositoryService.createDeploymentQuery().list(); for (EventDeployment eventDeployment : eventDeployments) { eventRepositoryService.deleteDeployment(eventDeployment.getId()); } eventRegistry.removeFlowableEventRegistryEventConsumer(testEventConsumer);
<<<<<<< import java.util.Map; ======= import java.util.Map; import org.flowable.dmn.api.DecisionExecutionAuditContainer; >>>>>>> import java.util.Map; import org.flowable.dmn.api.DecisionExecutionAuditContainer; <<<<<<< import org.flowable.dmn.engine.impl.util.CommandContextUtil; ======= import org.flowable.dmn.engine.impl.ExecuteDecisionBuilderImpl; import org.flowable.dmn.engine.impl.interceptor.Command; import org.flowable.dmn.engine.impl.interceptor.CommandContext; >>>>>>> import org.flowable.dmn.engine.impl.ExecuteDecisionBuilderImpl; import org.flowable.dmn.engine.impl.util.CommandContextUtil; <<<<<<< import org.flowable.engine.common.impl.interceptor.Command; import org.flowable.engine.common.impl.interceptor.CommandContext; ======= >>>>>>> import org.flowable.engine.common.impl.interceptor.Command; import org.flowable.engine.common.impl.interceptor.CommandContext;
<<<<<<< if (!realm.allowsEdit(AccountFieldName.USER_NAME)) { throw new MethodNotAllowedException("realm does not allow editing username"); } ======= if (input == null) { input = new UsernameInput(); } >>>>>>> <<<<<<< if (input == null || Strings.isNullOrEmpty(input.username)) { throw new BadRequestException("input required"); ======= if (realm.accountBelongsToRealm(externalIds.byAccount(accountId)) && !realm.allowsEdit(AccountFieldName.USER_NAME)) { throw new MethodNotAllowedException("realm does not allow editing username"); } if (Strings.isNullOrEmpty(input.username)) { return input.username; >>>>>>> if (realm.accountBelongsToRealm(externalIds.byAccount(accountId)) && !realm.allowsEdit(AccountFieldName.USER_NAME)) { throw new MethodNotAllowedException("realm does not allow editing username"); } if (input == null || Strings.isNullOrEmpty(input.username)) { throw new BadRequestException("input required");
<<<<<<< import org.activiti.engine.impl.variable.ValueFields; import org.activiti.engine.impl.variable.VariableType; ======= import org.activiti.engine.runtime.Execution; >>>>>>> import org.activiti.engine.impl.variable.ValueFields; import org.activiti.engine.impl.variable.VariableType; import org.activiti.engine.runtime.Execution;
<<<<<<< import javax.sql.DataSource; ======= import org.apache.ibatis.session.Configuration; import org.apache.ibatis.type.JdbcType; >>>>>>> import javax.sql.DataSource; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.type.JdbcType; <<<<<<< @Override public CmmnEngineConfiguration setDataSource(DataSource dataSource) { this.dataSource = dataSource; return this; } ======= public HistoryLevel getHistoryLevel() { return historyLevel; } public CmmnEngineConfiguration setHistoryLevel(HistoryLevel historyLevel) { this.historyLevel = historyLevel; return this; } public ExpressionManager getExpressionManager() { return expressionManager; } public CmmnEngineConfiguration setExpressionManager(ExpressionManager expressionManager) { this.expressionManager = expressionManager; return this; } public List<FlowableFunctionDelegate> getFlowableFunctionDelegates() { return flowableFunctionDelegates; } public CmmnEngineConfiguration setFlowableFunctionDelegates(List<FlowableFunctionDelegate> flowableFunctionDelegates) { this.flowableFunctionDelegates = flowableFunctionDelegates; return this; } public List<FlowableFunctionDelegate> getCustomFlowableFunctionDelegates() { return customFlowableFunctionDelegates; } public CmmnEngineConfiguration setCustomFlowableFunctionDelegates(List<FlowableFunctionDelegate> customFlowableFunctionDelegates) { this.customFlowableFunctionDelegates = customFlowableFunctionDelegates; return this; } public DbSchemaManager getVariableDbSchemaManager() { return variableDbSchemaManager; } public CmmnEngineConfiguration setVariableDbSchemaManager(DbSchemaManager variableDbSchemaManager) { this.variableDbSchemaManager = variableDbSchemaManager; return this; } public VariableTypes getVariableTypes() { return variableTypes; } public CmmnEngineConfiguration setVariableTypes(VariableTypes variableTypes) { this.variableTypes = variableTypes; return this; } public List<VariableType> getCustomPreVariableTypes() { return customPreVariableTypes; } public CmmnEngineConfiguration setCustomPreVariableTypes(List<VariableType> customPreVariableTypes) { this.customPreVariableTypes = customPreVariableTypes; return this; } public List<VariableType> getCustomPostVariableTypes() { return customPostVariableTypes; } public CmmnEngineConfiguration setCustomPostVariableTypes(List<VariableType> customPostVariableTypes) { this.customPostVariableTypes = customPostVariableTypes; return this; } public VariableServiceConfiguration getVariableServiceConfiguration() { return variableServiceConfiguration; } public CmmnEngineConfiguration setVariableServiceConfiguration(VariableServiceConfiguration variableServiceConfiguration) { this.variableServiceConfiguration = variableServiceConfiguration; return this; } public InternalHistoryVariableManager getInternalHistoryVariableManager() { return internalHistoryVariableManager; } public CmmnEngineConfiguration setInternalHistoryVariableManager(InternalHistoryVariableManager internalHistoryVariableManager) { this.internalHistoryVariableManager = internalHistoryVariableManager; return this; } public boolean isSerializableVariableTypeTrackDeserializedObjects() { return serializableVariableTypeTrackDeserializedObjects; } public CmmnEngineConfiguration setSerializableVariableTypeTrackDeserializedObjects(boolean serializableVariableTypeTrackDeserializedObjects) { this.serializableVariableTypeTrackDeserializedObjects = serializableVariableTypeTrackDeserializedObjects; return this; } public ObjectMapper getObjectMapper() { return objectMapper; } public 
CmmnEngineConfiguration setObjectMapper(ObjectMapper objectMapper) { this.objectMapper = objectMapper; return this; } >>>>>>> @Override public CmmnEngineConfiguration setDataSource(DataSource dataSource) { this.dataSource = dataSource; return this; } public HistoryLevel getHistoryLevel() { return historyLevel; } public CmmnEngineConfiguration setHistoryLevel(HistoryLevel historyLevel) { this.historyLevel = historyLevel; return this; } public ExpressionManager getExpressionManager() { return expressionManager; } public CmmnEngineConfiguration setExpressionManager(ExpressionManager expressionManager) { this.expressionManager = expressionManager; return this; } public List<FlowableFunctionDelegate> getFlowableFunctionDelegates() { return flowableFunctionDelegates; } public CmmnEngineConfiguration setFlowableFunctionDelegates(List<FlowableFunctionDelegate> flowableFunctionDelegates) { this.flowableFunctionDelegates = flowableFunctionDelegates; return this; } public List<FlowableFunctionDelegate> getCustomFlowableFunctionDelegates() { return customFlowableFunctionDelegates; } public CmmnEngineConfiguration setCustomFlowableFunctionDelegates(List<FlowableFunctionDelegate> customFlowableFunctionDelegates) { this.customFlowableFunctionDelegates = customFlowableFunctionDelegates; return this; } public DbSchemaManager getVariableDbSchemaManager() { return variableDbSchemaManager; } public CmmnEngineConfiguration setVariableDbSchemaManager(DbSchemaManager variableDbSchemaManager) { this.variableDbSchemaManager = variableDbSchemaManager; return this; } public VariableTypes getVariableTypes() { return variableTypes; } public CmmnEngineConfiguration setVariableTypes(VariableTypes variableTypes) { this.variableTypes = variableTypes; return this; } public List<VariableType> getCustomPreVariableTypes() { return customPreVariableTypes; } public CmmnEngineConfiguration setCustomPreVariableTypes(List<VariableType> customPreVariableTypes) { this.customPreVariableTypes = customPreVariableTypes; return this; } public List<VariableType> getCustomPostVariableTypes() { return customPostVariableTypes; } public CmmnEngineConfiguration setCustomPostVariableTypes(List<VariableType> customPostVariableTypes) { this.customPostVariableTypes = customPostVariableTypes; return this; } public VariableServiceConfiguration getVariableServiceConfiguration() { return variableServiceConfiguration; } public CmmnEngineConfiguration setVariableServiceConfiguration(VariableServiceConfiguration variableServiceConfiguration) { this.variableServiceConfiguration = variableServiceConfiguration; return this; } public InternalHistoryVariableManager getInternalHistoryVariableManager() { return internalHistoryVariableManager; } public CmmnEngineConfiguration setInternalHistoryVariableManager(InternalHistoryVariableManager internalHistoryVariableManager) { this.internalHistoryVariableManager = internalHistoryVariableManager; return this; } public boolean isSerializableVariableTypeTrackDeserializedObjects() { return serializableVariableTypeTrackDeserializedObjects; } public CmmnEngineConfiguration setSerializableVariableTypeTrackDeserializedObjects(boolean serializableVariableTypeTrackDeserializedObjects) { this.serializableVariableTypeTrackDeserializedObjects = serializableVariableTypeTrackDeserializedObjects; return this; } public ObjectMapper getObjectMapper() { return objectMapper; } public CmmnEngineConfiguration setObjectMapper(ObjectMapper objectMapper) { this.objectMapper = objectMapper; return this; }
<<<<<<< import org.activiti.engine.impl.interceptor.*; ======= import org.activiti.engine.impl.interceptor.Command; import org.activiti.engine.impl.interceptor.CommandConfig; import org.activiti.engine.impl.interceptor.CommandContext; import org.activiti.engine.impl.interceptor.CommandContextFactory; import org.activiti.engine.impl.interceptor.CommandContextInterceptor; import org.activiti.engine.impl.interceptor.CommandExecutor; import org.activiti.engine.impl.interceptor.CommandInterceptor; import org.activiti.engine.impl.interceptor.CommandInvoker; import org.activiti.engine.impl.interceptor.DebugCommandInvoker; import org.activiti.engine.impl.interceptor.DelegateInterceptor; import org.activiti.engine.impl.interceptor.LogInterceptor; import org.activiti.engine.impl.interceptor.SessionFactory; import org.activiti.engine.impl.interceptor.TransactionContextInterceptor; >>>>>>> import org.activiti.engine.impl.interceptor.*; <<<<<<< protected ActivitiIdmEventDispatcher idmEventDispatcher; ======= >>>>>>> <<<<<<< ======= >>>>>>> <<<<<<< * Flag that can be set to configure or nota relational database is used. * This is useful for custom implementations that do not use relational databases at all. * * If true (default), the {@link ProcessEngineConfiguration#getDatabaseSchemaUpdate()} value will be used to determine * what needs to happen wrt the database schema. * * If false, no validation or schema creation will be done. That means that the database schema must have been * created 'manually' before but the engine does not validate whether the schema is correct. * The {@link ProcessEngineConfiguration#getDatabaseSchemaUpdate()} value will not be used. */ protected boolean usingRelationalDatabase = true; /** ======= >>>>>>> <<<<<<< Connection connection = null; try { connection = dataSource.getConnection(); DatabaseMetaData databaseMetaData = connection.getMetaData(); String databaseProductName = databaseMetaData.getDatabaseProductName(); log.debug("database product name: '{}'", databaseProductName); databaseType = databaseTypeMappings.getProperty(databaseProductName); if (databaseType == null) { throw new ActivitiException("couldn't deduct database type from database product name '" + databaseProductName + "'"); } log.debug("using database type: {}", databaseType); // Special care for MSSQL, as it has a hard limit of 2000 params per statement (incl bulk statement). // Especially with executions, with 100 as default, this limit is passed. if (DATABASE_TYPE_MSSQL.equals(databaseType)) { maxNrOfStatementsInBulkInsert = DEFAULT_MAX_NR_OF_STATEMENTS_BULK_INSERT_SQL_SERVER; } } catch (SQLException e) { log.error("Exception while initializing Database connection", e); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { log.error("Exception while closing the Database connection", e); } ======= super.initDatabaseType(); // Special care for MSSQL, as it has a hard limit of 2000 params per statement (incl bulk statement). // Especially with executions, with 100 as default, this limit is passed. if (DATABASE_TYPE_MSSQL.equals(databaseType)) { maxNrOfStatementsInBulkInsert = DEFAULT_MAX_NR_OF_STATEMENTS_BULK_INSERT_SQL_SERVER; >>>>>>> super.initDatabaseType(); // Special care for MSSQL, as it has a hard limit of 2000 params per statement (incl bulk statement). // Especially with executions, with 100 as default, this limit is passed. 
if (DATABASE_TYPE_MSSQL.equals(databaseType)) { maxNrOfStatementsInBulkInsert = DEFAULT_MAX_NR_OF_STATEMENTS_BULK_INSERT_SQL_SERVER; <<<<<<< public void initSqlSessionFactory() { if (sqlSessionFactory == null) { InputStream inputStream = null; try { inputStream = getMyBatisXmlConfigurationStream(); Environment environment = new Environment("default", transactionFactory, dataSource); Reader reader = new InputStreamReader(inputStream); Properties properties = new Properties(); properties.put("prefix", databaseTablePrefix); String wildcardEscapeClause = ""; if ((databaseWildcardEscapeCharacter != null) && (databaseWildcardEscapeCharacter.length() != 0)) { wildcardEscapeClause = " escape '" + databaseWildcardEscapeCharacter + "'"; } properties.put("wildcardEscapeClause", wildcardEscapeClause); //set default properties properties.put("limitBefore" , ""); properties.put("limitAfter" , ""); properties.put("limitBetween" , ""); properties.put("limitOuterJoinBetween" , ""); properties.put("limitBeforeNativeQuery" , ""); properties.put("orderBy" , "order by ${orderByColumns}"); properties.put("blobType" , "BLOB"); properties.put("boolValue" , "TRUE"); if (databaseType != null) { properties.load(getResourceAsStream("org/activiti/db/properties/"+databaseType+".properties")); } Configuration configuration = initMybatisConfiguration(environment, reader, properties); sqlSessionFactory = new DefaultSqlSessionFactory(configuration); } catch (Exception e) { throw new ActivitiException("Error while building ibatis SqlSessionFactory: " + e.getMessage(), e); } finally { IoUtil.closeSilently(inputStream); } } } ======= @Override >>>>>>> @Override <<<<<<< if(databaseType != null) { configuration.setDatabaseId(databaseType); ======= if (databaseType != null) { configuration.setDatabaseId(databaseType); >>>>>>> if (databaseType != null) { configuration.setDatabaseId(databaseType); <<<<<<< ======= public void initAppResourceCache() { if (appResourceCache == null) { if (appResourceCacheLimit <= 0) { appResourceCache = new DefaultDeploymentCache<Object>(); } else { appResourceCache = new DefaultDeploymentCache<Object>(appResourceCacheLimit); } } } >>>>>>> public void initAppResourceCache() { if (appResourceCache == null) { if (appResourceCacheLimit <= 0) { appResourceCache = new DefaultDeploymentCache<Object>(); } else { appResourceCache = new DefaultDeploymentCache<Object>(appResourceCacheLimit); } } } <<<<<<< public TransactionFactory getTransactionFactory() { return transactionFactory; } ======= >>>>>>>
<<<<<<< ======= import org.activiti.engine.delegate.DelegateExecution; >>>>>>>
<<<<<<< @Deployment(resources = { "org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testQueryStartedBefore() throws Exception { Calendar calendar = new GregorianCalendar(); calendar.set(Calendar.YEAR, 2010); calendar.set(Calendar.MONTH, 8); calendar.set(Calendar.DAY_OF_MONTH, 30); calendar.set(Calendar.HOUR_OF_DAY, 12); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MILLISECOND, 0); Date noon = calendar.getTime(); processEngineConfiguration.getClock().setCurrentTime(noon); calendar.add(Calendar.HOUR_OF_DAY, 1); Date hourLater = calendar.getTime(); runtimeService.startProcessInstanceByKey("oneTaskProcess"); List<Execution> executions = runtimeService.createExecutionQuery().startedBefore(hourLater).list(); assertEquals(2, executions.size()); } @Deployment(resources = { "org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testQueryStartedAfter() throws Exception { Calendar calendar = new GregorianCalendar(); calendar.set(Calendar.YEAR, 2200); calendar.set(Calendar.MONTH, 8); calendar.set(Calendar.DAY_OF_MONTH, 30); calendar.set(Calendar.HOUR_OF_DAY, 12); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MILLISECOND, 0); Date noon = calendar.getTime(); processEngineConfiguration.getClock().setCurrentTime(noon); calendar.add(Calendar.HOUR_OF_DAY, -1); Date hourEarlier = calendar.getTime(); runtimeService.startProcessInstanceByKey("oneTaskProcess"); List<Execution> executions = runtimeService.createExecutionQuery().startedAfter(hourEarlier).list(); assertEquals(2, executions.size()); } @Deployment(resources = { "org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testQueryStartedBy() throws Exception { final String authenticatedUser = "user1"; identityService.setAuthenticatedUserId(authenticatedUser); runtimeService.startProcessInstanceByKey("oneTaskProcess"); List<Execution> executions = runtimeService.createExecutionQuery().startedBy(authenticatedUser).list(); assertEquals(1, executions.size()); } ======= @Deployment(resources={"org/activiti/engine/test/api/runtime/multipleSubProcess.bpmn20.xml", "org/activiti/engine/test/api/runtime/subProcess.bpmn20.xml"}) public void testOnlySubProcessExecutions() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("multipleSubProcessTest"); List<Execution> executions = runtimeService.createExecutionQuery().onlySubProcessExecutions().list(); assertEquals(2, executions.size()); for (Execution execution : executions) { if (execution.getParentId() == null) { assertTrue(processInstance.getId() != execution.getProcessInstanceId()); } else if (execution.getParentId().equals(execution.getProcessInstanceId())) { assertEquals("embeddedSubprocess" , execution.getActivityId()); } else { fail(); } } } >>>>>>> @Deployment(resources = { "org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testQueryStartedBefore() throws Exception { Calendar calendar = new GregorianCalendar(); calendar.set(Calendar.YEAR, 2010); calendar.set(Calendar.MONTH, 8); calendar.set(Calendar.DAY_OF_MONTH, 30); calendar.set(Calendar.HOUR_OF_DAY, 12); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MILLISECOND, 0); Date noon = calendar.getTime(); processEngineConfiguration.getClock().setCurrentTime(noon); calendar.add(Calendar.HOUR_OF_DAY, 1); Date hourLater = calendar.getTime(); runtimeService.startProcessInstanceByKey("oneTaskProcess"); List<Execution> 
executions = runtimeService.createExecutionQuery().startedBefore(hourLater).list(); assertEquals(2, executions.size()); } @Deployment(resources = { "org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testQueryStartedAfter() throws Exception { Calendar calendar = new GregorianCalendar(); calendar.set(Calendar.YEAR, 2200); calendar.set(Calendar.MONTH, 8); calendar.set(Calendar.DAY_OF_MONTH, 30); calendar.set(Calendar.HOUR_OF_DAY, 12); calendar.set(Calendar.MINUTE, 0); calendar.set(Calendar.SECOND, 0); calendar.set(Calendar.MILLISECOND, 0); Date noon = calendar.getTime(); processEngineConfiguration.getClock().setCurrentTime(noon); calendar.add(Calendar.HOUR_OF_DAY, -1); Date hourEarlier = calendar.getTime(); runtimeService.startProcessInstanceByKey("oneTaskProcess"); List<Execution> executions = runtimeService.createExecutionQuery().startedAfter(hourEarlier).list(); assertEquals(2, executions.size()); } @Deployment(resources = { "org/activiti/engine/test/api/oneTaskProcess.bpmn20.xml" }) public void testQueryStartedBy() throws Exception { final String authenticatedUser = "user1"; identityService.setAuthenticatedUserId(authenticatedUser); runtimeService.startProcessInstanceByKey("oneTaskProcess"); List<Execution> executions = runtimeService.createExecutionQuery().startedBy(authenticatedUser).list(); assertEquals(1, executions.size()); } @Deployment(resources={"org/activiti/engine/test/api/runtime/multipleSubProcess.bpmn20.xml", "org/activiti/engine/test/api/runtime/subProcess.bpmn20.xml"}) public void testOnlySubProcessExecutions() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("multipleSubProcessTest"); List<Execution> executions = runtimeService.createExecutionQuery().onlySubProcessExecutions().list(); assertEquals(2, executions.size()); for (Execution execution : executions) { if (execution.getParentId() == null) { assertTrue(processInstance.getId() != execution.getProcessInstanceId()); } else if (execution.getParentId().equals(execution.getProcessInstanceId())) { assertEquals("embeddedSubprocess" , execution.getActivityId()); } else { fail(); } } }
<<<<<<< public static String getPropertyValueAsString(String name, JsonNode objectNode) { String propertyValue = null; JsonNode propertyNode = getProperty(name, objectNode); if (propertyNode != null && !"null".equalsIgnoreCase(propertyNode.asText())) { propertyValue = propertyNode.asText(); } return propertyValue; } public static boolean getPropertyValueAsBoolean(String name, JsonNode objectNode) { return getPropertyValueAsBoolean(name, objectNode, false); } public static boolean getPropertyValueAsBoolean(String name, JsonNode objectNode, boolean defaultValue) { boolean result = defaultValue; String stringValue = getPropertyValueAsString(name, objectNode); if (PROPERTY_VALUE_YES.equalsIgnoreCase(stringValue) || "true".equalsIgnoreCase(stringValue)) { result = true; } else if (PROPERTY_VALUE_NO.equalsIgnoreCase(stringValue) || "false".equalsIgnoreCase(stringValue)) { result = false; } return result; } public static List<String> getPropertyValueAsList(String name, JsonNode objectNode) { List<String> resultList = new ArrayList<>(); JsonNode propertyNode = getProperty(name, objectNode); if (propertyNode != null && !"null".equalsIgnoreCase(propertyNode.asText())) { String propertyValue = propertyNode.asText(); String[] valueList = propertyValue.split(","); for (String value : valueList) { resultList.add(value.trim()); } } return resultList; } public static JsonNode getProperty(String name, JsonNode objectNode) { JsonNode propertyNode = null; if (objectNode.get(EDITOR_SHAPE_PROPERTIES) != null) { JsonNode propertiesNode = objectNode.get(EDITOR_SHAPE_PROPERTIES); propertyNode = propertiesNode.get(name); } return propertyNode; } public static String getPropertyFormKey(JsonNode elementNode, Map<String, String> formMap) { String formKey = getPropertyValueAsString(PROPERTY_FORMKEY, elementNode); if (StringUtils.isNotEmpty(formKey)) { return (formKey); } else { JsonNode formReferenceNode = CmmnModelJsonConverterUtil.getProperty(PROPERTY_FORM_REFERENCE, elementNode); if (formReferenceNode != null && formReferenceNode.get("id") != null) { if (formMap != null && formMap.containsKey(formReferenceNode.get("id").asText())) { return formMap.get(formReferenceNode.get("id").asText()); } } } return null; } /** * Usable for BPMN 2.0 editor json: traverses all child shapes (also nested), goes into the properties and sees if there is a matching property in the 'properties' of the childshape and returns * those in a list. * * Returns a map with said json nodes, with the key the name of the childshape. 
*/ ======= >>>>>>> public static String getPropertyValueAsString(String name, JsonNode objectNode) { String propertyValue = null; JsonNode propertyNode = getProperty(name, objectNode); if (propertyNode != null && !"null".equalsIgnoreCase(propertyNode.asText())) { propertyValue = propertyNode.asText(); } return propertyValue; } public static boolean getPropertyValueAsBoolean(String name, JsonNode objectNode) { return getPropertyValueAsBoolean(name, objectNode, false); } public static boolean getPropertyValueAsBoolean(String name, JsonNode objectNode, boolean defaultValue) { boolean result = defaultValue; String stringValue = getPropertyValueAsString(name, objectNode); if (PROPERTY_VALUE_YES.equalsIgnoreCase(stringValue) || "true".equalsIgnoreCase(stringValue)) { result = true; } else if (PROPERTY_VALUE_NO.equalsIgnoreCase(stringValue) || "false".equalsIgnoreCase(stringValue)) { result = false; } return result; } public static List<String> getPropertyValueAsList(String name, JsonNode objectNode) { List<String> resultList = new ArrayList<>(); JsonNode propertyNode = getProperty(name, objectNode); if (propertyNode != null && !"null".equalsIgnoreCase(propertyNode.asText())) { String propertyValue = propertyNode.asText(); String[] valueList = propertyValue.split(","); for (String value : valueList) { resultList.add(value.trim()); } } return resultList; } public static JsonNode getProperty(String name, JsonNode objectNode) { JsonNode propertyNode = null; if (objectNode.get(EDITOR_SHAPE_PROPERTIES) != null) { JsonNode propertiesNode = objectNode.get(EDITOR_SHAPE_PROPERTIES); propertyNode = propertiesNode.get(name); } return propertyNode; } public static String getPropertyFormKey(JsonNode elementNode, Map<String, String> formMap) { String formKey = getPropertyValueAsString(PROPERTY_FORMKEY, elementNode); if (StringUtils.isNotEmpty(formKey)) { return (formKey); } else { JsonNode formReferenceNode = CmmnModelJsonConverterUtil.getProperty(PROPERTY_FORM_REFERENCE, elementNode); if (formReferenceNode != null && formReferenceNode.get("id") != null) { if (formMap != null && formMap.containsKey(formReferenceNode.get("id").asText())) { return formMap.get(formReferenceNode.get("id").asText()); } } } return null; } /** * Usable for BPMN 2.0 editor json: traverses all child shapes (also nested), goes into the properties and sees if there is a matching property in the 'properties' of the childshape and returns * those in a list. * * Returns a map with said json nodes, with the key the name of the childshape. */
<<<<<<< import java.util.logging.Logger; ======= >>>>>>> <<<<<<< ======= } catch (Exception e) { LOGGER.warn("Error parsing child elements for {}", elementName, e); >>>>>>>
<<<<<<< ======= assertFalse(responseNode.get("time").isNull()); >>>>>>> assertFalse(responseNode.get("time").isNull());
<<<<<<< protected Map<String, Set<String>> customUserIdentityLinks = new HashMap<String, Set<String>>(); protected Map<String, Set<String>> customGroupIdentityLinks = new HashMap<String, Set<String>>(); ======= protected Map<String, Set<String>> customUserIdentityLinks = new HashMap<String, Set<String>>(); protected Map<String, Set<String>> customGroupIdentityLinks = new HashMap<String, Set<String>>(); protected List<CustomProperty> customProperties = new ArrayList<CustomProperty>(); >>>>>>> protected Map<String, Set<String>> customUserIdentityLinks = new HashMap<String, Set<String>>(); protected Map<String, Set<String>> customGroupIdentityLinks = new HashMap<String, Set<String>>(); protected List<CustomProperty> customProperties = new ArrayList<CustomProperty>(); <<<<<<< return category; } public void setCategory(String category) { this.category = category; } public List<String> getCandidateUsers() { ======= return category; } public void setCategory(String category) { this.category = category; } public String getExtensionId() { return extensionId; } public void setExtensionId(String extensionId) { this.extensionId = extensionId; } public boolean isExtended() { return extensionId != null && !extensionId.isEmpty(); } public List<String> getCandidateUsers() { >>>>>>> return category; } public void setCategory(String category) { this.category = category; } public String getExtensionId() { return extensionId; } public void setExtensionId(String extensionId) { this.extensionId = extensionId; } public boolean isExtended() { return extensionId != null && !extensionId.isEmpty(); } public List<String> getCandidateUsers() { <<<<<<< ======= public List<CustomProperty> getCustomProperties() { return customProperties; } public void setCustomProperties(List<CustomProperty> customProperties) { this.customProperties = customProperties; } >>>>>>> public List<CustomProperty> getCustomProperties() { return customProperties; } public void setCustomProperties(List<CustomProperty> customProperties) { this.customProperties = customProperties; } <<<<<<< ======= setExtensionId(otherElement.getExtensionId()); >>>>>>> setExtensionId(otherElement.getExtensionId());
<<<<<<< if (processEngineConfiguration.isLoggingSessionEnabled()) { BpmnLoggingSessionUtil.addLoggingData(LoggingSessionConstants.TYPE_USER_TASK_CREATE, "User task '" + task.getName() + "' created", task, execution); } handleAssignments(taskService, beforeContext.getAssignee(), beforeContext.getOwner(), beforeContext.getCandidateUsers(), beforeContext.getCandidateGroups(), task, expressionManager, execution, processEngineConfiguration); ======= handleAssignments(taskService, beforeContext.getAssignee(), beforeContext.getOwner(), beforeContext.getCandidateUsers(), beforeContext.getCandidateGroups(), task, expressionManager, execution); if (processEngineConfiguration.getCreateUserTaskInterceptor() != null) { CreateUserTaskAfterContext afterContext = new CreateUserTaskAfterContext(userTask, task, execution); processEngineConfiguration.getCreateUserTaskInterceptor().afterCreateUserTask(afterContext); } >>>>>>> if (processEngineConfiguration.isLoggingSessionEnabled()) { BpmnLoggingSessionUtil.addLoggingData(LoggingSessionConstants.TYPE_USER_TASK_CREATE, "User task '" + task.getName() + "' created", task, execution); } handleAssignments(taskService, beforeContext.getAssignee(), beforeContext.getOwner(), beforeContext.getCandidateUsers(), beforeContext.getCandidateGroups(), task, expressionManager, execution, processEngineConfiguration); if (processEngineConfiguration.getCreateUserTaskInterceptor() != null) { CreateUserTaskAfterContext afterContext = new CreateUserTaskAfterContext(userTask, task, execution); processEngineConfiguration.getCreateUserTaskInterceptor().afterCreateUserTask(afterContext); } <<<<<<< ======= >>>>>>> <<<<<<< ======= >>>>>>> <<<<<<< ======= >>>>>>> <<<<<<< ======= * * @param str * @return >>>>>>> * * @param str * @return
<<<<<<< String REPORTING_SAVE_POPUP_NAME_TOO_LONG = "reporting.save.popup.name.too.long"; ======= String REPORTING_SAVE_POPUP_NAME_TOO_LONG = "reporting.save.popup..name.too.long"; String REPORTING_CREATE_TIME = "reporting.report.created"; >>>>>>> String REPORTING_SAVE_POPUP_NAME_TOO_LONG = "reporting.save.popup.name.too.long"; String REPORTING_CREATE_TIME = "reporting.report.created";
<<<<<<< ======= /** * Some databases have a limit of how many parameters one sql insert can have (eg SQL Server, 2000 params (!= insert statements) ). Tweak this parameter in case of exceptions indicating too much * is being put into one bulk insert, or make it higher if your database can cope with it and there are inserts with a huge amount of data. * <p> * By default: 100 (75 for mssql server as it has a hard limit of 2000 parameters in a statement) */ protected int maxNrOfStatementsInBulkInsert = 100; public int DEFAULT_MAX_NR_OF_STATEMENTS_BULK_INSERT_SQL_SERVER = 70; // currently Execution has most params (28). 2000 / 28 = 71. >>>>>>> <<<<<<< ======= public void initDbSqlSessionFactory() { if (dbSqlSessionFactory == null) { dbSqlSessionFactory = createDbSqlSessionFactory(); } dbSqlSessionFactory.setDatabaseType(databaseType); dbSqlSessionFactory.setIdGenerator(idGenerator); dbSqlSessionFactory.setSqlSessionFactory(sqlSessionFactory); dbSqlSessionFactory.setDbHistoryUsed(isDbHistoryUsed); dbSqlSessionFactory.setDatabaseTablePrefix(databaseTablePrefix); dbSqlSessionFactory.setTablePrefixIsSchema(tablePrefixIsSchema); dbSqlSessionFactory.setDatabaseCatalog(databaseCatalog); dbSqlSessionFactory.setDatabaseSchema(databaseSchema); dbSqlSessionFactory.setMaxNrOfStatementsInBulkInsert(maxNrOfStatementsInBulkInsert); initDbSqlSessionFactoryEntitySettings(); addSessionFactory(dbSqlSessionFactory); } >>>>>>> <<<<<<< ======= public DbSqlSessionFactory createDbSqlSessionFactory() { return new DbSqlSessionFactory(); } >>>>>>>
<<<<<<< taskEntityManager.insert(task, (ExecutionEntity) execution); // Handling assignments need to be done after the task is inserted, to have an id handleAssignments(taskEntityManager, activeTaskAssignee, activeTaskOwner, activeTaskCandidateUsers, activeTaskCandidateGroups, task, execution); ======= handleAssignments(activeTaskAssignee, activeTaskOwner, activeTaskCandidateUsers, activeTaskCandidateGroups, task, expressionManager, execution); >>>>>>> taskEntityManager.insert(task, (ExecutionEntity) execution); // Handling assignments need to be done after the task is inserted, to have an id handleAssignments(taskEntityManager, activeTaskAssignee, activeTaskOwner, activeTaskCandidateUsers, activeTaskCandidateGroups, task, expressionManager, execution); <<<<<<< protected void handleAssignments(TaskEntityManager taskEntityManager, String assignee, String owner, List<String> candidateUsers, List<String> candidateGroups, TaskEntity task, DelegateExecution execution) { ======= protected void handleAssignments(String assignee, String owner, List<String> candidateUsers, List<String> candidateGroups, TaskEntity task, ExpressionManager expressionManager, DelegateExecution execution) { >>>>>>> protected void handleAssignments(TaskEntityManager taskEntityManager, String assignee, String owner, List<String> candidateUsers, List<String> candidateGroups, TaskEntity task, ExpressionManager expressionManager, DelegateExecution execution) {
<<<<<<< ======= String isBlockingExpressionString = xtr.getAttributeValue(CmmnXmlConstants.FLOWABLE_EXTENSIONS_NAMESPACE, CmmnXmlConstants.ATTRIBUTE_IS_BLOCKING_EXPRESSION); if (StringUtils.isNotEmpty(isBlockingExpressionString)) { task.setBlockingExpression(isBlockingExpressionString); } String className = xtr.getAttributeValue(CmmnXmlConstants.FLOWABLE_EXTENSIONS_NAMESPACE, CmmnXmlConstants.ATTRIBUTE_CLASS); if (StringUtils.isNotEmpty(className)) { task.setClassName(className); } >>>>>>> String isBlockingExpressionString = xtr.getAttributeValue(CmmnXmlConstants.FLOWABLE_EXTENSIONS_NAMESPACE, CmmnXmlConstants.ATTRIBUTE_IS_BLOCKING_EXPRESSION); if (StringUtils.isNotEmpty(isBlockingExpressionString)) { task.setBlockingExpression(isBlockingExpressionString); }
<<<<<<< CommandContextUtil.getHistoricTaskService().deleteHistoricTaskLogEntriesForScopeDefinition(ScopeTypes.CMMN, caseDefinitionId); HistoricIdentityLinkEntityManager historicIdentityLinkEntityManager = getHistoricIdentityLinkEntityManager(); historicIdentityLinkEntityManager.deleteHistoricIdentityLinksByScopeDefinitionIdAndScopeType(caseDefinitionId, ScopeTypes.CMMN); ======= >>>>>>> CommandContextUtil.getHistoricTaskService().deleteHistoricTaskLogEntriesForScopeDefinition(ScopeTypes.CMMN, caseDefinitionId); HistoricIdentityLinkEntityManager historicIdentityLinkEntityManager = getHistoricIdentityLinkEntityManager(); historicIdentityLinkEntityManager.deleteHistoricIdentityLinksByScopeDefinitionIdAndScopeType(caseDefinitionId, ScopeTypes.CMMN);
<<<<<<< import org.flowable.engine.common.impl.persistence.deploy.DeploymentCache; import org.flowable.engine.delegate.event.FlowableEngineEventType; ======= import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType; import org.flowable.engine.impl.persistence.deploy.DeploymentCache; >>>>>>> import org.flowable.engine.common.api.delegate.event.FlowableEngineEventType; import org.flowable.engine.common.impl.persistence.deploy.DeploymentCache;
<<<<<<< import org.flowable.job.service.InternalJobCompatibilityManager; ======= import org.flowable.job.service.HistoryJobProcessor; >>>>>>> import org.flowable.job.service.InternalJobCompatibilityManager; import org.flowable.job.service.HistoryJobProcessor;
<<<<<<< ======= import org.flowable.engine.ProcessInstanceMigrationService; import org.flowable.engine.ProcessInstanceQueryInterceptor; >>>>>>> import org.flowable.engine.ProcessMigrationService; <<<<<<< ScriptingEngineAwareEngineConfiguration, HasExpressionManagerEngineConfiguration, HasVariableTypes { ======= ScriptingEngineAwareEngineConfiguration, HasExpressionManagerEngineConfiguration { >>>>>>> ScriptingEngineAwareEngineConfiguration, HasExpressionManagerEngineConfiguration, HasVariableTypes { <<<<<<< return new JobServiceConfiguration(ScopeTypes.BPMN); ======= return new JobServiceConfiguration(); >>>>>>> return new JobServiceConfiguration(ScopeTypes.BPMN); <<<<<<< public void initHistoryCleaningManager() { if (historyCleaningManager == null) { historyCleaningManager = new DefaultHistoryCleaningManager(this); } } ======= >>>>>>> public void initHistoryCleaningManager() { if (historyCleaningManager == null) { historyCleaningManager = new DefaultHistoryCleaningManager(this); } } <<<<<<< flowableFunctionDelegates.addAll(shortHandExpressionFunctions); ======= for (FlowableShortHandExpressionFunction expressionFunction : shortHandExpressionFunctions) { flowableFunctionDelegates.add(expressionFunction); } >>>>>>> flowableFunctionDelegates.addAll(shortHandExpressionFunctions); <<<<<<< expressionEnhancers.addAll(shortHandExpressionFunctions); ======= for (FlowableShortHandExpressionFunction expressionFunction : shortHandExpressionFunctions) { expressionEnhancers.add(expressionFunction); } >>>>>>> expressionEnhancers.addAll(shortHandExpressionFunctions); <<<<<<< @Override ======= >>>>>>> <<<<<<< public String getAsyncExecutorTenantId() { return asyncExecutorTenantId; } public void setAsyncExecutorTenantId(String asyncExecutorTenantId) { this.asyncExecutorTenantId = asyncExecutorTenantId; } public boolean isEnableHistoryCleaning() { return enableHistoryCleaning; } public void setEnableHistoryCleaning(boolean enableHistoryCleaning) { this.enableHistoryCleaning = enableHistoryCleaning; } public String getHistoryCleaningTimeCycleConfig() { return historyCleaningTimeCycleConfig; } public void setHistoryCleaningTimeCycleConfig(String historyCleaningTimeCycleConfig) { this.historyCleaningTimeCycleConfig = historyCleaningTimeCycleConfig; } public int getCleanInstancesEndedAfterNumberOfDays() { return cleanInstancesEndedAfterNumberOfDays; } public void setCleanInstancesEndedAfterNumberOfDays(int cleanInstancesEndedAfterNumberOfDays) { this.cleanInstancesEndedAfterNumberOfDays = cleanInstancesEndedAfterNumberOfDays; } public HistoryCleaningManager getHistoryCleaningManager() { return historyCleaningManager; } public void setHistoryCleaningManager(HistoryCleaningManager historyCleaningManager) { this.historyCleaningManager = historyCleaningManager; } ======= >>>>>>> public String getAsyncExecutorTenantId() { return asyncExecutorTenantId; } public void setAsyncExecutorTenantId(String asyncExecutorTenantId) { this.asyncExecutorTenantId = asyncExecutorTenantId; } public String getBatchStatusTimeCycleConfig() { return batchStatusTimeCycleConfig; } public void setBatchStatusTimeCycleConfig(String batchStatusTimeCycleConfig) { this.batchStatusTimeCycleConfig = batchStatusTimeCycleConfig; } public boolean isEnableHistoryCleaning() { return enableHistoryCleaning; } public void setEnableHistoryCleaning(boolean enableHistoryCleaning) { this.enableHistoryCleaning = enableHistoryCleaning; } public String getHistoryCleaningTimeCycleConfig() { return historyCleaningTimeCycleConfig; } public void 
setHistoryCleaningTimeCycleConfig(String historyCleaningTimeCycleConfig) { this.historyCleaningTimeCycleConfig = historyCleaningTimeCycleConfig; } public int getCleanInstancesEndedAfterNumberOfDays() { return cleanInstancesEndedAfterNumberOfDays; } public void setCleanInstancesEndedAfterNumberOfDays(int cleanInstancesEndedAfterNumberOfDays) { this.cleanInstancesEndedAfterNumberOfDays = cleanInstancesEndedAfterNumberOfDays; } public HistoryCleaningManager getHistoryCleaningManager() { return historyCleaningManager; } public void setHistoryCleaningManager(HistoryCleaningManager historyCleaningManager) { this.historyCleaningManager = historyCleaningManager; }
<<<<<<< import org.activiti.engine.management.TableMetaData; import org.activiti.engine.management.TablePage; ======= import org.activiti.idm.api.Capability; >>>>>>> import org.activiti.engine.management.TableMetaData; import org.activiti.engine.management.TablePage; import org.activiti.idm.api.Capability;
<<<<<<< import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import org.flowable.dmn.engine.impl.mvel.MvelExecutionContext; import org.flowable.dmn.engine.impl.util.CommandContextUtil; import org.flowable.dmn.model.HitPolicy; import org.flowable.engine.common.api.FlowableException; ======= import java.util.ArrayList; import java.util.List; import java.util.Map; import org.flowable.dmn.engine.impl.context.Context; import org.flowable.dmn.engine.impl.mvel.MvelExecutionContext; import org.flowable.dmn.model.HitPolicy; import org.flowable.engine.common.api.FlowableException; >>>>>>> import java.util.ArrayList; import java.util.List; import java.util.Map; import org.flowable.dmn.engine.impl.mvel.MvelExecutionContext; import org.flowable.dmn.engine.impl.util.CommandContextUtil; import org.flowable.dmn.model.HitPolicy; import org.flowable.engine.common.api.FlowableException;
<<<<<<< database = ASBDatabase.getDatabase(); playerCache = new HashMap<UUID, Players>(); ======= database = BSBDatabase.getDatabase(); players = new HashMap<UUID, APlayer>(); >>>>>>> database = BSBDatabase.getDatabase(); playerCache = new HashMap<UUID, Players>();
<<<<<<< case IProblem.FeatureNotSupported: ======= case IProblem.SwitchExpressionsReturnWithinSwitchExpression: >>>>>>> case IProblem.FeatureNotSupported: case IProblem.SwitchExpressionsReturnWithinSwitchExpression: <<<<<<< case IProblem.FeatureNotSupported: ReorgCorrectionsSubProcessor.getNeedHigherComplianceProposals(context, problem, proposals); ======= case IProblem.SwitchExpressionsReturnWithinSwitchExpression: ReturnTypeSubProcessor.replaceReturnWithYieldStatementProposals(context, problem, proposals); break; >>>>>>> case IProblem.FeatureNotSupported: ReorgCorrectionsSubProcessor.getNeedHigherComplianceProposals(context, problem, proposals); break; case IProblem.SwitchExpressionsReturnWithinSwitchExpression: ReturnTypeSubProcessor.replaceReturnWithYieldStatementProposals(context, problem, proposals); break;
<<<<<<< ======= public void testEnablePreviewsAndOpenCompilerPropertiesProposals() throws Exception { fJProject1= JavaProjectHelper.createJavaProject("TestProject1", "bin"); fJProject1.setRawClasspath(Java14ProjectTestSetup.getDefaultClasspath(), null); JavaProjectHelper.set14CompilerOptions(fJProject1, false); fSourceFolder= JavaProjectHelper.addSourceContainer(fJProject1, "src"); IPackageFragment def= fSourceFolder.createPackageFragment("", false, null); def.createCompilationUnit("module-info.java", MODULE_INFO_FILE_CONTENT, false, null); IPackageFragment pack= fSourceFolder.createPackageFragment("test", false, null); String test= "" + "package test;\n" + "public record Rec1() {\n" + "}\n"; ICompilationUnit cu= pack.createCompilationUnit("Rec1.java", test, false, null); CompilationUnit astRoot= getASTRoot(cu); ArrayList<IJavaCompletionProposal> proposals= collectCorrections(cu, astRoot, 1, null); assertNumberOfProposals(proposals, 2); String label1= CorrectionMessages.PreviewFeaturesSubProcessor_enable_preview_features; assertProposalExists(proposals, label1); String label2= CorrectionMessages.PreviewFeaturesSubProcessor_open_compliance_properties_page_enable_preview_features; assertProposalExists(proposals, label2); } @Test public void testRecordSuppressWarningsProposals() throws Exception { fJProject1= JavaProjectHelper.createJavaProject("TestProject1", "bin"); fJProject1.setRawClasspath(Java14ProjectTestSetup.getDefaultClasspath(), null); JavaProjectHelper.set14CompilerOptions(fJProject1, true); Map<String, String> options= fJProject1.getOptions(false); options.put(JavaCore.COMPILER_PB_REPORT_PREVIEW_FEATURES, JavaCore.WARNING); fJProject1.setOptions(options); fSourceFolder= JavaProjectHelper.addSourceContainer(fJProject1, "src"); IPackageFragment def= fSourceFolder.createPackageFragment("", false, null); def.createCompilationUnit("module-info.java", MODULE_INFO_FILE_CONTENT, false, null); IPackageFragment pack= fSourceFolder.createPackageFragment("test", false, null); String test= "" + "package test;\n" + "public record Rec1() {\n" + "}\n"; ICompilationUnit cu= pack.createCompilationUnit("Rec1.java", test, false, null); CompilationUnit astRoot= getASTRoot(cu); ArrayList<IJavaCompletionProposal> proposals= collectCorrections(cu, astRoot, 1, null); assertNumberOfProposals(proposals, 2); String label= Messages.format(CorrectionMessages.SuppressWarningsSubProcessor_suppress_warnings_label, new String[] { "preview", "Rec1" }); assertProposalExists(proposals, label); CUCorrectionProposal proposal= (CUCorrectionProposal) proposals.get(0); String preview= getPreviewContent(proposal); String expected= "" + "package test;\n" + "@SuppressWarnings(\"preview\")\n" + "public record Rec1() {\n" + "}\n"; assertEqualStringsIgnoreOrder(new String[] { preview }, new String[] { expected }); } @Ignore("See bug 562103 comment 4") @Test public void testGetNeedHigherComplianceProposalsAndEnablePreviewsProposal() throws Exception { fJProject1= JavaProjectHelper.createJavaProject("TestProject1", "bin"); fJProject1.setRawClasspath(Java14ProjectTestSetup.getDefaultClasspath(), null); JavaProjectHelper.set13CompilerOptions(fJProject1, false); fSourceFolder= JavaProjectHelper.addSourceContainer(fJProject1, "src"); IPackageFragment def= fSourceFolder.createPackageFragment("", false, null); def.createCompilationUnit("module-info.java", MODULE_INFO_FILE_CONTENT, false, null); IPackageFragment pack= fSourceFolder.createPackageFragment("test", false, null); String test= "" + "package test;\n" + "public record Rec1() 
{\n" + "}\n"; ICompilationUnit cu= pack.createCompilationUnit("Rec1.java", test, false, null); CompilationUnit astRoot= getASTRoot(cu); ArrayList<IJavaCompletionProposal> proposals= collectCorrections(cu, astRoot, 1, null); assertNumberOfProposals(proposals, 1); String label1= Messages.format(CorrectionMessages.ReorgCorrectionsSubProcessor_change_project_compliance_description, "14"); String label2= CorrectionMessages.PreviewFeaturesSubProcessor_enable_preview_features; String label= Messages.format(CorrectionMessages.ReorgCorrectionsSubProcessor_combine_two_quickfixes, new String[] {label1, label2}); assertProposalExists(proposals, label); } @Test public void testNoEnablePreviewProposal() throws Exception { fJProject1= JavaProjectHelper.createJavaProject("TestProject1", "bin"); fJProject1.setRawClasspath(Java14ProjectTestSetup.getDefaultClasspath(), null); JavaProjectHelper.set14CompilerOptions(fJProject1, true); fSourceFolder= JavaProjectHelper.addSourceContainer(fJProject1, "src"); IPackageFragment def= fSourceFolder.createPackageFragment("", false, null); def.createCompilationUnit("module-info.java", MODULE_INFO_FILE_CONTENT, false, null); IPackageFragment pack= fSourceFolder.createPackageFragment("test", false, null); String test= "" + "package test;\n" + "public record Rec() {\n" + "}\n"; ICompilationUnit cu= pack.createCompilationUnit("Rec.java", test, false, null); CompilationUnit astRoot= getASTRoot(cu); ArrayList<ICompletionProposal> proposals= collectAllCorrections(cu, astRoot, 0); assertNumberOfProposals(proposals, 0); } @Test >>>>>>> public void testRecordSuppressWarningsProposals() throws Exception { fJProject1= JavaProjectHelper.createJavaProject("TestProject1", "bin"); fJProject1.setRawClasspath(Java14ProjectTestSetup.getDefaultClasspath(), null); JavaProjectHelper.set14CompilerOptions(fJProject1, true); Map<String, String> options= fJProject1.getOptions(false); options.put(JavaCore.COMPILER_PB_REPORT_PREVIEW_FEATURES, JavaCore.WARNING); fJProject1.setOptions(options); fSourceFolder= JavaProjectHelper.addSourceContainer(fJProject1, "src"); IPackageFragment def= fSourceFolder.createPackageFragment("", false, null); def.createCompilationUnit("module-info.java", MODULE_INFO_FILE_CONTENT, false, null); IPackageFragment pack= fSourceFolder.createPackageFragment("test", false, null); String test= "" + "package test;\n" + "public record Rec1() {\n" + "}\n"; ICompilationUnit cu= pack.createCompilationUnit("Rec1.java", test, false, null); CompilationUnit astRoot= getASTRoot(cu); ArrayList<IJavaCompletionProposal> proposals= collectCorrections(cu, astRoot, 1, null); assertNumberOfProposals(proposals, 2); String label= Messages.format(CorrectionMessages.SuppressWarningsSubProcessor_suppress_warnings_label, new String[] { "preview", "Rec1" }); assertProposalExists(proposals, label); CUCorrectionProposal proposal= (CUCorrectionProposal) proposals.get(0); String preview= getPreviewContent(proposal); String expected= "" + "package test;\n" + "@SuppressWarnings(\"preview\")\n" + "public record Rec1() {\n" + "}\n"; assertEqualStringsIgnoreOrder(new String[] { preview }, new String[] { expected }); } @Test
<<<<<<< ======= protected List<Transaction> convert(com.binance.dex.api.client.domain.jsonrpc.BlockInfoResult.Transaction txMessage) { try { byte[] value = txMessage.getTx(); int startIndex = getStartIndex(value); byte[] array = new byte[value.length - startIndex]; System.arraycopy(value, startIndex, array, 0, array.length); StdTx stdTx = StdTx.parseFrom(array); return stdTx.getMsgsList().stream() .map(byteString -> { byte[] bytes = byteString.toByteArray(); Transaction transaction = convert(bytes); if (null == transaction) { return null; } transaction.setHash(txMessage.getHash()); transaction.setHeight(txMessage.getHeight()); transaction.setCode(txMessage.getTx_result().getCode()); transaction.setMemo(stdTx.getMemo()); return transaction; }).filter(t -> null != t).collect(Collectors.toList()); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } } protected Transaction convert(byte[] bytes) { try { MessageType messageType = MessageType.getMessageType(bytes); if (null == messageType) { return null; } switch (messageType) { case Send: return convertTransfer(bytes); case NewOrder: return convertNewOrder(bytes); case CancelOrder: return convertCancelOrder(bytes); case TokenFreeze: return convertTokenFreeze(bytes); case TokenUnfreeze: return convertTokenUnfreeze(bytes); case Vote: return convertVote(bytes); case Issue: return convertIssue(bytes); case Burn: return convertBurn(bytes); case Mint: return convertMint(bytes); case SubmitProposal: return convertSubmitProposal(bytes); } return null; } catch (Exception e) { throw new RuntimeException(e); } } protected Transaction convertTransfer(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); Send send = Send.parseFrom(array); MultiTransfer transfer = new MultiTransfer(); transfer.setFromAddress(Crypto.encodeAddress(hrp, send.getInputsList().get(0).getAddress().toByteArray())); transfer.setOutputs(send.getOutputsList().stream().map(o -> { Output output = new Output(); output.setAddress(Crypto.encodeAddress(hrp, o.getAddress().toByteArray())); output.setTokens(o.getCoinsList().stream() .map(coin -> new OutputToken(coin.getDenom(), "" + coin.getAmount())) .collect(Collectors.toList())); return output; }).collect(Collectors.toList())); Transaction transaction = new Transaction(); transaction.setTxType(TxType.TRANSFER); transaction.setRealTx(transfer); return transaction; } protected Transaction convertNewOrder(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.NewOrder newOrderMessage = com.binance.dex.api.proto.NewOrder.parseFrom(array); NewOrder newOrder = new NewOrder(); newOrder.setSender(Crypto.encodeAddress(hrp, newOrderMessage.getSender().toByteArray())); newOrder.setSymbol(newOrderMessage.getSymbol()); newOrder.setOrderType(OrderType.fromValue(newOrderMessage.getOrdertype())); newOrder.setPrice("" + newOrderMessage.getPrice()); newOrder.setQuantity("" + newOrderMessage.getQuantity()); newOrder.setSide(OrderSide.fromValue(newOrderMessage.getSide())); newOrder.setTimeInForce(TimeInForce.fromValue(newOrderMessage.getTimeinforce())); Transaction transaction = new Transaction(); transaction.setTxType(TxType.NEW_ORDER); transaction.setRealTx(newOrder); return transaction; } protected Transaction convertCancelOrder(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 
4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.CancelOrder cancelOrderOrderMessage = com.binance.dex.api.proto.CancelOrder.parseFrom(array); CancelOrder cancelOrder = new CancelOrder(); cancelOrder.setSender(Crypto.encodeAddress(hrp, cancelOrderOrderMessage.getSender().toByteArray())); cancelOrder.setRefId(cancelOrderOrderMessage.getRefid()); cancelOrder.setSymbol(cancelOrderOrderMessage.getSymbol()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.CANCEL_ORDER); transaction.setRealTx(cancelOrder); return transaction; } protected Transaction convertTokenFreeze(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.TokenFreeze tokenFreezeMessage = com.binance.dex.api.proto.TokenFreeze.parseFrom(array); TokenFreeze tokenFreeze = new TokenFreeze(); tokenFreeze.setFrom(Crypto.encodeAddress(hrp, tokenFreezeMessage.getFrom().toByteArray())); tokenFreeze.setAmount("" + tokenFreezeMessage.getAmount()); tokenFreeze.setSymbol(tokenFreezeMessage.getSymbol()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.FREEZE_TOKEN); transaction.setRealTx(tokenFreeze); return transaction; } protected Transaction convertTokenUnfreeze(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.TokenUnfreeze tokenUnfreezeMessage = com.binance.dex.api.proto.TokenUnfreeze.parseFrom(array); TokenUnfreeze tokenUnfreeze = new TokenUnfreeze(); tokenUnfreeze.setFrom(Crypto.encodeAddress(hrp, tokenUnfreezeMessage.getFrom().toByteArray())); tokenUnfreeze.setSymbol(tokenUnfreezeMessage.getSymbol()); tokenUnfreeze.setAmount("" + tokenUnfreezeMessage.getAmount()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.UNFREEZE_TOKEN); transaction.setRealTx(tokenUnfreeze); return transaction; } protected Transaction convertVote(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Vote voteMessage = com.binance.dex.api.proto.Vote.parseFrom(array); Vote vote = new Vote(); vote.setVoter(Crypto.encodeAddress(hrp, voteMessage.getVoter().toByteArray())); vote.setOption((int) voteMessage.getOption()); vote.setProposalId(voteMessage.getProposalId()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.VOTE); transaction.setRealTx(vote); return transaction; } protected Transaction convertIssue(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Issue issueMessage = com.binance.dex.api.proto.Issue.parseFrom(array); Issue issue = new Issue(); issue.setFrom(Crypto.encodeAddress(hrp, issueMessage.getFrom().toByteArray())); issue.setName(issueMessage.getName()); issue.setSymbol(issueMessage.getSymbol()); issue.setTotalSupply(issueMessage.getTotalSupply()); issue.setMintable(issueMessage.getMintable()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.ISSUE); transaction.setRealTx(issue); return transaction; } protected Transaction convertBurn(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Burn 
burnMessage = com.binance.dex.api.proto.Burn.parseFrom(array); Burn burn = new Burn(); burn.setFrom(Crypto.encodeAddress(hrp, burnMessage.getFrom().toByteArray())); burn.setSymbol(burnMessage.getSymbol()); burn.setAmount(burnMessage.getAmount()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.BURN); transaction.setRealTx(burn); return transaction; } protected Transaction convertMint(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.Mint mintMessage = com.binance.dex.api.proto.Mint.parseFrom(array); Mint mint = new Mint(); mint.setFrom(Crypto.encodeAddress(hrp, mintMessage.getFrom().toByteArray())); mint.setSymbol(mintMessage.getSymbol()); mint.setAmount(mintMessage.getAmount()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.MINT); transaction.setRealTx(mint); return transaction; } protected Transaction convertSubmitProposal(byte[] value) throws InvalidProtocolBufferException { byte[] array = new byte[value.length - 4]; System.arraycopy(value, 4, array, 0, array.length); com.binance.dex.api.proto.SubmitProposal proposalMessage = com.binance.dex.api.proto.SubmitProposal.parseFrom(array); SubmitProposal proposal = new SubmitProposal(); proposal.setTitle(proposalMessage.getTitle()); proposal.setDescription(proposalMessage.getDescription()); proposal.setProposalType(ProposalType.fromValue(proposalMessage.getProposalType())); proposal.setProposer(Crypto.encodeAddress(hrp, proposalMessage.getProposer().toByteArray())); if (null != proposalMessage.getInitialDepositList()) { proposal.setInitDeposit(proposalMessage.getInitialDepositList().stream() .map(com.binance.dex.api.client.encoding.message.Token::of).collect(Collectors.toList())); } proposal.setVotingPeriod(proposalMessage.getVotingPeriod()); Transaction transaction = new Transaction(); transaction.setTxType(TxType.SUBMIT_PROPOSAL); transaction.setRealTx(proposal); return transaction; } >>>>>>>
<<<<<<< * Reinitialize the tokenizer with new reader. * ======= * Resets the instance and the instance's {@link ScanningSymbolMatcher}. * If necessary, users should have first called this instance's * {@link #setReader(java.io.Reader)} since the matcher will be * reset to the current reader. >>>>>>> * Resets the instance and the instance's {@link ScanningSymbolMatcher}. * If necessary, users should have first called this instance's * {@link #setReader(java.io.Reader)} since the matcher will be * reset to the current reader. <<<<<<< stack.clear(); this.yyreset(input); ======= matcher.yyreset(input); matcher.reset(); >>>>>>> matcher.yyreset(input); matcher.reset(); <<<<<<< ======= /** * Closes the instance and the instance's {@link ScanningSymbolMatcher}. * @throws IOException if any error occurs while closing */ >>>>>>> /** * Closes the instance and the instance's {@link ScanningSymbolMatcher}. * @throws IOException if any error occurs while closing */ <<<<<<< public void yypush(int newState) { this.stack.push(yystate()); this.yybegin(newState); } public void yypop() { this.yybegin(this.stack.pop()); } /** * reset current yy state, and clear stack * @param newState state id */ public void yyjump(int newState) { yybegin(newState); this.stack.clear(); } ======= >>>>>>>
<<<<<<< ======= info.webUis = EnumSet.noneOf(UiType.class); info.webUis.add(UiType.POLYGERRIT); if (gerritOptions.enableGwtUi()) { info.webUis.add(UiType.GWT); } info.primaryWeblinkName = config.getString("gerrit", null, "primaryWeblinkName"); >>>>>>> info.primaryWeblinkName = config.getString("gerrit", null, "primaryWeblinkName");
<<<<<<< import com.google.gerrit.extensions.registration.DynamicSet; import com.google.gerrit.index.Index; import com.google.gerrit.index.IndexDefinition; import com.google.gerrit.index.Schema; import com.google.gerrit.server.config.GerritServerConfig; ======= import com.google.gerrit.extensions.events.LifecycleListener; >>>>>>> import com.google.gerrit.extensions.registration.DynamicSet; import com.google.gerrit.index.Index; import com.google.gerrit.index.IndexDefinition; import com.google.gerrit.index.Schema; <<<<<<< super(sitePaths, listeners, defs, VersionManager.getOnlineUpgrade(cfg)); ======= super(cfg.getConfig(), sitePaths, defs); >>>>>>> super(sitePaths, listeners, defs, VersionManager.getOnlineUpgrade(cfg.getConfig()));
<<<<<<< /** * Bitset index: 1 */ @Nullable private List<CharSequence> title_List = (List<CharSequence>) null; ======= /** * Bitset index: 1 */ private StringAttributeData titleViaValueShortcut_StringAttributeData = new StringAttributeData(); >>>>>>> /** * Bitset index: 1 */ @Nullable private List<CharSequence> title_List = (List<CharSequence>) null; /** * Bitset index: 2 */ private StringAttributeData titleViaValueShortcut_StringAttributeData = new StringAttributeData(); <<<<<<< ======= if (!assignedAttributes_epoxyGeneratedModel.get(1)) { throw new IllegalStateException("A value is required for setTitleViaValueShortcut"); } if (!assignedAttributes_epoxyGeneratedModel.get(0)) { throw new IllegalStateException("A value is required for setTitle"); } >>>>>>> if (!assignedAttributes_epoxyGeneratedModel.get(2)) { throw new IllegalStateException("A value is required for setTitleViaValueShortcut"); } <<<<<<< if (assignedAttributes_epoxyGeneratedModel.get(0)) { object.setTitle(title_StringAttributeData.toString(object.getContext())); } else if (assignedAttributes_epoxyGeneratedModel.get(1)) { object.setTitle(title_List); } else { object.setTitle((List<CharSequence>) null); } ======= object.setTitleViaValueShortcut(titleViaValueShortcut_StringAttributeData.toString(object.getContext())); object.setTitle(title_StringAttributeData.toString(object.getContext())); >>>>>>> object.setTitleViaValueShortcut(titleViaValueShortcut_StringAttributeData.toString(object.getContext())); if (assignedAttributes_epoxyGeneratedModel.get(0)) { object.setTitle(title_StringAttributeData.toString(object.getContext())); } else if (assignedAttributes_epoxyGeneratedModel.get(1)) { object.setTitle(title_List); } else { object.setTitle((List<CharSequence>) null); } <<<<<<< if (assignedAttributes_epoxyGeneratedModel.equals(that.assignedAttributes_epoxyGeneratedModel)) { if (assignedAttributes_epoxyGeneratedModel.get(0)) { if (title_StringAttributeData != null ? !title_StringAttributeData.equals(that.title_StringAttributeData) : that.title_StringAttributeData != null) { object.setTitle(title_StringAttributeData.toString(object.getContext())); } } else if (assignedAttributes_epoxyGeneratedModel.get(1)) { if (title_List != null ? !title_List.equals(that.title_List) : that.title_List != null) { object.setTitle(title_List); } } } else { if (assignedAttributes_epoxyGeneratedModel.get(0) && !that.assignedAttributes_epoxyGeneratedModel.get(0)) { object.setTitle(title_StringAttributeData.toString(object.getContext())); } else if (assignedAttributes_epoxyGeneratedModel.get(1) && !that.assignedAttributes_epoxyGeneratedModel.get(1)) { object.setTitle(title_List); } else { object.setTitle((List<CharSequence>) null); } ======= if (!titleViaValueShortcut_StringAttributeData.equals(that.titleViaValueShortcut_StringAttributeData)) { object.setTitleViaValueShortcut(titleViaValueShortcut_StringAttributeData.toString(object.getContext())); } if (!title_StringAttributeData.equals(that.title_StringAttributeData)) { object.setTitle(title_StringAttributeData.toString(object.getContext())); >>>>>>> if (!titleViaValueShortcut_StringAttributeData.equals(that.titleViaValueShortcut_StringAttributeData)) { object.setTitleViaValueShortcut(titleViaValueShortcut_StringAttributeData.toString(object.getContext())); } if (assignedAttributes_epoxyGeneratedModel.equals(that.assignedAttributes_epoxyGeneratedModel)) { if (assignedAttributes_epoxyGeneratedModel.get(0)) { if (title_StringAttributeData != null ? 
!title_StringAttributeData.equals(that.title_StringAttributeData) : that.title_StringAttributeData != null) { object.setTitle(title_StringAttributeData.toString(object.getContext())); } } else if (assignedAttributes_epoxyGeneratedModel.get(1)) { if (title_List != null ? !title_List.equals(that.title_List) : that.title_List != null) { object.setTitle(title_List); } } } else { if (assignedAttributes_epoxyGeneratedModel.get(0) && !that.assignedAttributes_epoxyGeneratedModel.get(0)) { object.setTitle(title_StringAttributeData.toString(object.getContext())); } else if (assignedAttributes_epoxyGeneratedModel.get(1) && !that.assignedAttributes_epoxyGeneratedModel.get(1)) { object.setTitle(title_List); } else { object.setTitle((List<CharSequence>) null); } <<<<<<< /** * <i>Optional</i>: Default value is (List<CharSequence>) null * * @see TestStringOverloadsView#setTitle(List<CharSequence>) */ public TestStringOverloadsViewModel_ title(@Nullable List<CharSequence> title) { assignedAttributes_epoxyGeneratedModel.set(1); assignedAttributes_epoxyGeneratedModel.clear(0); this.title_StringAttributeData = new StringAttributeData(); onMutation(); this.title_List = title; return this; } @Nullable public List<CharSequence> titleList() { return title_List; } ======= public CharSequence getTitleViaValueShortcut(Context context) { return titleViaValueShortcut_StringAttributeData.toString(context); } /** * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcut(CharSequence titleViaValueShortcut) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(1); if (titleViaValueShortcut == null) { throw new IllegalArgumentException("titleViaValueShortcut cannot be null"); } titleViaValueShortcut_StringAttributeData.setValue(titleViaValueShortcut); return this; } /** * Throws if a value <= 0 is set. * <p> * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcut(@StringRes int stringRes) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(1); titleViaValueShortcut_StringAttributeData.setValue(stringRes); return this; } /** * Throws if a value <= 0 is set. * <p> * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcut(@StringRes int stringRes, Object... formatArgs) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(1); titleViaValueShortcut_StringAttributeData.setValue(stringRes, formatArgs); return this; } /** * Throws if a value <= 0 is set. * <p> * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcutQuantityRes(@PluralsRes int pluralRes, int quantity, Object... 
formatArgs) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(1); titleViaValueShortcut_StringAttributeData.setValue(pluralRes, quantity, formatArgs); return this; } >>>>>>> /** * <i>Optional</i>: Default value is (List<CharSequence>) null * * @see TestStringOverloadsView#setTitle(List<CharSequence>) */ public TestStringOverloadsViewModel_ title(@Nullable List<CharSequence> title) { assignedAttributes_epoxyGeneratedModel.set(1); assignedAttributes_epoxyGeneratedModel.clear(0); this.title_StringAttributeData = new StringAttributeData(); onMutation(); this.title_List = title; return this; } @Nullable public List<CharSequence> titleList() { return title_List; } public CharSequence getTitleViaValueShortcut(Context context) { return titleViaValueShortcut_StringAttributeData.toString(context); } /** * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcut(CharSequence titleViaValueShortcut) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(2); if (titleViaValueShortcut == null) { throw new IllegalArgumentException("titleViaValueShortcut cannot be null"); } titleViaValueShortcut_StringAttributeData.setValue(titleViaValueShortcut); return this; } /** * Throws if a value <= 0 is set. * <p> * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcut(@StringRes int stringRes) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(2); titleViaValueShortcut_StringAttributeData.setValue(stringRes); return this; } /** * Throws if a value <= 0 is set. * <p> * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcut(@StringRes int stringRes, Object... formatArgs) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(2); titleViaValueShortcut_StringAttributeData.setValue(stringRes, formatArgs); return this; } /** * Throws if a value <= 0 is set. * <p> * <i>Required.</i> * * @see TestStringOverloadsView#setTitleViaValueShortcut(CharSequence) */ public TestStringOverloadsViewModel_ titleViaValueShortcutQuantityRes(@PluralsRes int pluralRes, int quantity, Object... formatArgs) { onMutation(); assignedAttributes_epoxyGeneratedModel.set(2); titleViaValueShortcut_StringAttributeData.setValue(pluralRes, quantity, formatArgs); return this; } <<<<<<< this.title_List = (List<CharSequence>) null; ======= this.titleViaValueShortcut_StringAttributeData = new StringAttributeData(); >>>>>>> this.title_List = (List<CharSequence>) null; this.titleViaValueShortcut_StringAttributeData = new StringAttributeData(); <<<<<<< if (title_List != null ? !title_List.equals(that.title_List) : that.title_List != null) { return false; } ======= if (titleViaValueShortcut_StringAttributeData != null ? !titleViaValueShortcut_StringAttributeData.equals(that.titleViaValueShortcut_StringAttributeData) : that.titleViaValueShortcut_StringAttributeData != null) { return false; } >>>>>>> if (title_List != null ? !title_List.equals(that.title_List) : that.title_List != null) { return false; } if (titleViaValueShortcut_StringAttributeData != null ? !titleViaValueShortcut_StringAttributeData.equals(that.titleViaValueShortcut_StringAttributeData) : that.titleViaValueShortcut_StringAttributeData != null) { return false; } <<<<<<< result = 31 * result + (title_List != null ? 
title_List.hashCode() : 0); ======= result = 31 * result + (titleViaValueShortcut_StringAttributeData != null ? titleViaValueShortcut_StringAttributeData.hashCode() : 0); >>>>>>> result = 31 * result + (title_List != null ? title_List.hashCode() : 0); result = 31 * result + (titleViaValueShortcut_StringAttributeData != null ? titleViaValueShortcut_StringAttributeData.hashCode() : 0); <<<<<<< ", title_List=" + title_List + ======= ", titleViaValueShortcut_StringAttributeData=" + titleViaValueShortcut_StringAttributeData + >>>>>>> ", title_List=" + title_List + ", titleViaValueShortcut_StringAttributeData=" + titleViaValueShortcut_StringAttributeData +
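The essential point of this resolution is a bit-index shift: because the List&lt;CharSequence&gt; overload of setTitle now occupies bit 1, the required titleViaValueShortcut attribute moves to bit 2, and every set/validate call that one side wrote against bit 1 is rewritten to bit 2 in the merge. A minimal sketch of the implied layout; the field names come from the record, while the BitSet size and java.util.BitSet usage are assumptions about the generated model:

    // bit 0 -> title_StringAttributeData               (CharSequence overload of setTitle)
    // bit 1 -> title_List                               (List<CharSequence> overload of setTitle)
    // bit 2 -> titleViaValueShortcut_StringAttributeData (required attribute)
    private final BitSet assignedAttributes_epoxyGeneratedModel = new BitSet(3);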
<<<<<<< import static java.lang.Math.*; import static rlib.geom.util.AngleUtils.degreeToRadians; import static rlib.geom.util.AngleUtils.radiansToDegree; ======= import static java.lang.Math.max; import static java.lang.Math.min; import static java.lang.Math.sqrt; import static com.ss.rlib.geom.util.AngleUtils.degreeToRadians; import static com.ss.rlib.geom.util.AngleUtils.radiansToDegree; >>>>>>> import static com.ss.rlib.geom.util.AngleUtils.degreeToRadians; import static com.ss.rlib.geom.util.AngleUtils.radiansToDegree; import static java.lang.Math.max; import static java.lang.Math.min; import static java.lang.Math.sqrt;
<<<<<<< import org.zkoss.zkex.zul.Borderlayout; import org.zkoss.zkex.zul.Center; import org.zkoss.zkex.zul.North; import org.zkoss.zkex.zul.South; import org.zkoss.zul.Comboitem; ======= import org.zkoss.zul.Borderlayout; import org.zkoss.zul.Center; import org.zkoss.zul.North; import org.zkoss.zul.South; >>>>>>> import org.zkoss.zul.Borderlayout; import org.zkoss.zul.Center; import org.zkoss.zul.North; import org.zkoss.zul.South; import org.zkoss.zul.Comboitem;
<<<<<<< // Get Model and check the Attribute Set Instance from the context MProduct m_product = MProduct.get(Env.getCtx(), M_Product_ID); mTab.setValue("M_AttributeSetInstance_ID", m_product.getEnvAttributeSetInstance(ctx, WindowNo)); ======= // Set Attribute from context or, if null, from the Product if (Env.getContextAsInt(ctx, WindowNo, Env.TAB_INFO, "M_Product_ID") == M_Product_ID.intValue() && Env.getContextAsInt(ctx, WindowNo, Env.TAB_INFO, "M_AttributeSetInstance_ID") != 0) mTab.setValue("M_AttributeSetInstance_ID", Env.getContextAsInt(ctx, WindowNo, Env.TAB_INFO, "M_AttributeSetInstance_ID")); else { mTab.setValue("M_AttributeSetInstance_ID", asi.getM_AttributeSetInstance_ID()); } >>>>>>> // Set Attribute from context or, if null, from the Product // Get Model and check the Attribute Set Instance from the context MProduct m_product = MProduct.get(Env.getCtx(), M_Product_ID); mTab.setValue("M_AttributeSetInstance_ID", m_product.getEnvAttributeSetInstance(ctx, WindowNo)); if (Env.getContextAsInt(ctx, WindowNo, Env.TAB_INFO, "M_Product_ID") == M_Product_ID.intValue() && Env.getContextAsInt(ctx, WindowNo, Env.TAB_INFO, "M_AttributeSetInstance_ID") != 0) mTab.setValue("M_AttributeSetInstance_ID", Env.getContextAsInt(ctx, WindowNo, Env.TAB_INFO, "M_AttributeSetInstance_ID")); else { mTab.setValue("M_AttributeSetInstance_ID", asi.getM_AttributeSetInstance_ID()); }
<<<<<<< ======= * Set BPartner, update price list and locations * @param C_BPartner_ID id */ public void setC_BPartner_ID (int C_BPartner_ID) { log.fine( "CPOS.setC_BPartner_ID=" + C_BPartner_ID); if (C_BPartner_ID == 0) m_BPartner = null; else { m_BPartner = MBPartner.get(m_ctx, C_BPartner_ID); if (m_BPartner!=null) { m_CurrentOrder.setC_BPartner_ID(C_BPartner_ID); int M_PriceList_ID = m_BPartner.getM_PriceList_ID(); // Raul Muñoz if(M_PriceList_ID == 0) m_CurrentOrder.setM_PriceList_ID(m_POS.getM_PriceList_ID()); else m_CurrentOrder.setM_PriceList_ID(M_PriceList_ID); MBPartnerLocation [] bpLocations = m_BPartner.getLocations(true); if(bpLocations.length>0) { for(MBPartnerLocation loc:bpLocations) { if(loc.isBillTo()) m_CurrentOrder.setBill_Location_ID(loc.getC_BPartner_Location_ID()); if(loc.isShipTo()) m_CurrentOrder.setShip_Location_ID(loc.getC_BPartner_Location_ID()); } } } } // Sets Currency m_M_PriceList_Version_ID = 0; getM_PriceList_Version_ID(); } // setC_BPartner_ID /** >>>>>>>
<<<<<<< import com.google.gerrit.index.Index; import com.google.gerrit.index.Schema; import com.google.gerrit.index.Schema.Values; ======= import com.google.common.io.CharStreams; import com.google.gerrit.elasticsearch.builders.SearchSourceBuilder; import com.google.gerrit.elasticsearch.builders.XContentBuilder; >>>>>>> import com.google.common.io.CharStreams; import com.google.gerrit.elasticsearch.builders.SearchSourceBuilder; import com.google.gerrit.elasticsearch.builders.XContentBuilder; import com.google.gerrit.index.Index; import com.google.gerrit.index.Schema; import com.google.gerrit.index.Schema.Values; <<<<<<< private String toDoc(V v) throws IOException { try (XContentBuilder builder = jsonBuilder().startObject()) { for (Values<V> values : schema.buildFields(v)) { ======= protected String toDoc(V v) throws IOException { try (XContentBuilder closeable = new XContentBuilder()) { XContentBuilder builder = closeable.startObject(); for (Values<V> values : schema.buildFields(v, fillArgs)) { >>>>>>> protected String toDoc(V v) throws IOException { try (XContentBuilder closeable = new XContentBuilder()) { XContentBuilder builder = closeable.startObject(); for (Values<V> values : schema.buildFields(v)) {
<<<<<<< import com.google.gerrit.entities.Account; import com.google.gerrit.entities.BranchNameKey; import com.google.gerrit.entities.Change; import com.google.gerrit.entities.Patch; import com.google.gerrit.entities.PatchSet; import com.google.gerrit.entities.Project; import com.google.gerrit.entities.RefNames; ======= import com.google.gerrit.common.data.Permission; import com.google.gerrit.common.data.PermissionRule; >>>>>>> import com.google.gerrit.common.data.Permission; import com.google.gerrit.common.data.PermissionRule; import com.google.gerrit.entities.Account; import com.google.gerrit.entities.AccountGroup; import com.google.gerrit.entities.BranchNameKey; import com.google.gerrit.entities.Change; import com.google.gerrit.entities.Patch; import com.google.gerrit.entities.PatchSet; import com.google.gerrit.entities.Project; import com.google.gerrit.entities.RefNames; <<<<<<< ======= import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.AccountGroup; import com.google.gerrit.reviewdb.client.Branch; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.Patch; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.reviewdb.client.RefNames; >>>>>>>
<<<<<<< import org.adempiere.webui.AdempiereIdGenerator; import org.adempiere.webui.component.CWindowToolbar; import org.adempiere.webui.component.Bandbox; ======= import org.adempiere.exceptions.ValueChangeEvent; import org.adempiere.exceptions.ValueChangeListener; import org.adempiere.webui.component.*; >>>>>>> import org.adempiere.webui.AdempiereIdGenerator; import org.adempiere.webui.component.CWindowToolbar; import org.adempiere.webui.component.Bandbox; import org.adempiere.exceptions.ValueChangeEvent; import org.adempiere.exceptions.ValueChangeListener; <<<<<<< import org.adempiere.webui.component.StringBox; import org.adempiere.webui.event.ValueChangeEvent; import org.adempiere.webui.event.ValueChangeListener; ======= >>>>>>> import org.adempiere.webui.component.StringBox; <<<<<<< public void updateLabelStyle() { if (getLabel() != null) { String style = (isZoomable() ? STYLE_ZOOMABLE_LABEL : "") + (isMandatoryStyle() ? STYLE_EMPTY_MANDATORY_LABEL : STYLE_NORMAL_LABEL); getLabel().setStyle(style.intern()); } } public boolean isMandatoryStyle() { return mandatory && !readOnly && getGridField().isEditable(true) && isNullOrEmpty(); } public boolean isNullOrEmpty() { Object value = getValue(); return value == null || value.toString().trim().length() == 0; } public boolean isZoomable() { WEditorPopupMenu menu = getPopupMenu(); if (menu != null && menu.isZoomEnabled() && this instanceof IZoomableEditor) { return true; } else { return false; } } public int getRowIndex() { return rowIndex; } public void setRowIndex(int rowIndex) { this.rowIndex = rowIndex; } ======= public void addVetoableChangeListener(VetoableChangeListener listener) { // Not used in ZK } >>>>>>> public void updateLabelStyle() { if (getLabel() != null) { String style = (isZoomable() ? STYLE_ZOOMABLE_LABEL : "") + (isMandatoryStyle() ? STYLE_EMPTY_MANDATORY_LABEL : STYLE_NORMAL_LABEL); getLabel().setStyle(style.intern()); } } public boolean isMandatoryStyle() { return mandatory && !readOnly && getGridField().isEditable(true) && isNullOrEmpty(); } public boolean isNullOrEmpty() { Object value = getValue(); return value == null || value.toString().trim().length() == 0; } public boolean isZoomable() { WEditorPopupMenu menu = getPopupMenu(); if (menu != null && menu.isZoomEnabled() && this instanceof IZoomableEditor) { return true; } else { return false; } } public int getRowIndex() { return rowIndex; } public void setRowIndex(int rowIndex) { this.rowIndex = rowIndex; } public void addVetoableChangeListener(VetoableChangeListener listener) { // Not used in ZK }
<<<<<<< import org.compiere.model.MClient; ======= import org.compiere.model.GridFieldVO; import org.compiere.model.MLookup; >>>>>>> <<<<<<< * @author Michael Mckay [email protected] * <li>BF [ <a href="https://github.com/adempiere/adempiere/issues/495">495</a> ] Parameter Panel & SmartBrowser criteria do not set gridField value ======= * <a href="https://github.com/adempiere/adempiere/issues/566"> * @see FR [ 566 ] Process parameter don't have a parameter like only information</a> >>>>>>> * <a href="https://github.com/adempiere/adempiere/issues/566"> * @see FR [ 566 ] Process parameter don't have a parameter like only information</a> * @author Michael Mckay [email protected] * <li>BF [ <a href="https://github.com/adempiere/adempiere/issues/495">495</a> ] Parameter Panel & SmartBrowser criteria do not set gridField value <<<<<<< ======= fieldsInfoOnly = new HashMap<Integer, Boolean>(); fields = new ArrayList<GridField>(); fields_To = new ArrayList<GridField>(); >>>>>>> <<<<<<< /** Logger */ ======= /** Information Only */ private HashMap<Integer, Boolean> fieldsInfoOnly; /** Logger */ >>>>>>> /** Logger */ <<<<<<< ======= // private ArrayList<GridField> fields; private ArrayList<GridField> fields_To; >>>>>>> <<<<<<< ======= * Get Display from view To * @param index * @return */ public abstract String getDisplay_To(int index); /** * Get Value of Editor * @param index * @return */ public abstract Object getValue(int index); /** * Get Value of Editor To * @param index * @return */ public abstract Object getValue_To(int index); /** * Set value to Editor * @param index * @param value */ public abstract void setValue(int index, Object value); /** * Set to values to Editor * @param index * @param value */ public abstract void setValue_To(int index, Object value); /** * Is Information only * @param processParaID * @return */ public boolean isInfoOnly(int processParaID) { Boolean isInfoOnly = fieldsInfoOnly.get(processParaID); return isInfoOnly != null && isInfoOnly; } /** >>>>>>> <<<<<<< ======= * Dispose */ public void dispose() { fields.clear(); fields_To.clear(); } // dispose /** >>>>>>> <<<<<<< public boolean loadData() { log.config(""); // ASP MClient client = MClient.get(Env.getCtx()); String ASPFilter = ""; if (client.isUseASP()) ASPFilter = " AND ( p.AD_Process_Para_ID IN ( " // Just ASP subscribed process parameters for client " + " SELECT pp.AD_Process_Para_ID " + " FROM ASP_Process_Para pp, ASP_Process p, ASP_Level l, ASP_ClientLevel cl " + " WHERE p.ASP_Level_ID = l.ASP_Level_ID " + " AND cl.AD_Client_ID = " + client.getAD_Client_ID() + " AND cl.ASP_Level_ID = l.ASP_Level_ID " + " AND pp.ASP_Process_ID = p.ASP_Process_ID " + " AND pp.IsActive = 'Y' " + " AND p.IsActive = 'Y' " + " AND l.IsActive = 'Y' " + " AND cl.IsActive = 'Y' " + " AND pp.ASP_Status = 'S') " // Show + " OR p.AD_Process_Para_ID IN ( " // + show ASP exceptions for client + " SELECT AD_Process_Para_ID " + " FROM ASP_ClientException ce " + " WHERE ce.AD_Client_ID = " + client.getAD_Client_ID() + " AND ce.IsActive = 'Y' " + " AND ce.AD_Process_Para_ID IS NOT NULL " + " AND ce.AD_Tab_ID IS NULL " + " AND ce.AD_Field_ID IS NULL " + " AND ce.ASP_Status = 'S') " // Show + " ) " + " AND p.AD_Process_Para_ID NOT IN ( " // minus hide ASP exceptions for client + " SELECT AD_Process_Para_ID " + " FROM ASP_ClientException ce " + " WHERE ce.AD_Client_ID = " + client.getAD_Client_ID() + " AND ce.IsActive = 'Y' " + " AND ce.AD_Process_Para_ID IS NOT NULL " + " AND ce.AD_Tab_ID IS NULL " + " AND ce.AD_Field_ID IS NULL " + " AND 
ce.ASP_Status = 'H')"; // Hide // String sql = null; if (Env.isBaseLanguage(Env.getCtx(), "AD_Process_Para")) sql = "SELECT p.Name, p.Description, p.Help, " + "p.AD_Reference_ID, p.AD_Process_Para_ID, " + "p.FieldLength, p.IsMandatory, p.IsRange, p.ColumnName, " + "p.DefaultValue, p.DefaultValue2, p.VFormat, p.ValueMin, p.ValueMax, " + "p.SeqNo, p.AD_Reference_Value_ID, vr.Code AS ValidationCode, p.ReadOnlyLogic, p.DisplayLogic " + "FROM AD_Process_Para p" + " LEFT OUTER JOIN AD_Val_Rule vr ON (p.AD_Val_Rule_ID=vr.AD_Val_Rule_ID) " + "WHERE p.AD_Process_ID=?" // 1 + " AND p.IsActive='Y' " + ASPFilter + " ORDER BY SeqNo"; else sql = "SELECT t.Name, t.Description, t.Help, " + "p.AD_Reference_ID, p.AD_Process_Para_ID, " + "p.FieldLength, p.IsMandatory, p.IsRange, p.ColumnName, " + "p.DefaultValue, p.DefaultValue2, p.VFormat, p.ValueMin, p.ValueMax, " + "p.SeqNo, p.AD_Reference_Value_ID, vr.Code AS ValidationCode, p.ReadOnlyLogic, p.DisplayLogic " + "FROM AD_Process_Para p" + " INNER JOIN AD_Process_Para_Trl t ON (p.AD_Process_Para_ID=t.AD_Process_Para_ID)" + " LEFT OUTER JOIN AD_Val_Rule vr ON (p.AD_Val_Rule_ID=vr.AD_Val_Rule_ID) " + "WHERE p.AD_Process_ID=?" // 1 + " AND t.AD_Language='" + Env.getAD_Language(Env.getCtx()) + "'" + " AND p.IsActive='Y' " + ASPFilter + " ORDER BY SeqNo"; ======= private boolean loadData() { log.config(""); >>>>>>> public boolean loadData() { log.config(""); <<<<<<< PreparedStatement pstmt = null; ResultSet rs = null; try { pstmt = DB.prepareStatement(sql, null); pstmt.setInt(1, m_processInfo.getAD_Process_ID()); rs = pstmt.executeQuery(); while (rs.next()) { hasFields = true; createField (rs, m_WindowNo); } } catch(SQLException e) { throw new DBException(e, sql); } finally { DB.close(rs, pstmt); rs = null; pstmt = null; ======= MProcess process = MProcess.get(Env.getCtx(), m_processInfo.getAD_Process_ID()); // Load Parameter for(MProcessPara para : process.getASPParameters()) { hasFields = true; createField(para); >>>>>>> MProcess process = MProcess.get(Env.getCtx(), m_processInfo.getAD_Process_ID()); // Load Parameter for(MProcessPara para : process.getASPParameters()) { hasFields = true; createField(para, m_WindowNo); <<<<<<< public boolean createFieldsAndEditors() { // Create fields and editors and sync them return super.init(); ======= public boolean init() { // Instance Default view initComponents(); // clean up if (hasParameters()) { for (int i = 0; i < fields.size(); i++) { // Get Values GridField field = (GridField) fields.get(i); GridField field_To = (GridField) fields_To.get(i); // Create Fields createViewField(field, field_To); } // dynamicDisplay(); return true; } // dispose(); // Default return false; } /** * Create Field. * - creates Fields and adds it to m_mFields list * - creates Editor and adds it to m_vEditors list * Handles Ranges by adding additional mField/vEditor. 
* <p> * mFields are used for default value and mandatory checking; * vEditors are used to retrieve the value (no data binding) * * @param rs result set */ private void createField (MProcessPara processParameter) { // Create Field GridFieldVO voF = GridFieldVO.createParameter(Env.getCtx(), m_WindowNo, processParameter); GridField field = new GridField (voF); GridField field_To = null; fields.add(field); // add to Fields // if (voF.IsRange) { // GridFieldVO voF2 = GridFieldVO.createParameter(voF); // BR [ 298 ] voF2.DefaultValue = voF2.DefaultValue2; // Change Name voF2.ColumnName = voF2.ColumnName + "_To"; // field_To = new GridField (voF2); // fields_To.add (field_To); } else { fields_To.add (null); } } // createField /** * When exists a event in a field * @param field * @param newValue * @param propertyName */ public void fieldChange(GridField field, Object newValue, String propertyName) { if(field != null) { // Process dependences processDependencies (field); } // future processCallout (changedField); processNewValue(newValue, propertyName); } // vetoableChange /** * Evaluate Dependencies * @param changedField changed field */ private void processDependencies (GridField changedField) { String columnName = changedField.getColumnName(); for (GridField field : fields) { if (field == null || field == changedField) continue; verifyChangedField(field, columnName); } for (GridField field : fields_To) { if (field == null || field == changedField) continue; verifyChangedField(field, columnName); } } // processDependencies /** * Verify if a field is changed * @param field * @param columnName */ private void verifyChangedField(GridField field, String columnName) { ArrayList<String> list = field.getDependentOn(); if (list.contains(columnName)) { if (field.getLookup() instanceof MLookup) { MLookup mLookup = (MLookup)field.getLookup(); // if the lookup is dynamic (i.e. contains this columnName as variable) if (mLookup.getValidation().indexOf("@"+columnName+"@") != -1) { log.fine(columnName + " changed - " + field.getColumnName() + " set to null"); // invalidate current selection mLookup.refresh(); } } } >>>>>>> public boolean createFieldsAndEditors() { // Create fields and editors and sync them return super.init(); <<<<<<< String msg = validateFields(); // Valid if there is no message if (msg != null && msg.length() > 0) { m_processInfo.setSummary(msg); ======= /** * Mandatory fields * see - MTable.getMandatory */ StringBuffer sb = new StringBuffer(); int size = fields.size(); for (int i = 0; i < size; i++) { GridField field = (GridField) fields.get(i); // FR [ 566 ] Only Information if(field.isInfoOnly()) continue; // Validate field.validateValue(); // check context if (field.isMandatory(true)) { Object data = getValue(i); if (data == null || data.toString().length() == 0) { field.setInserting (true); // set editable (i.e. updateable) otherwise deadlock field.setError(true); if (sb.length() > 0) sb.append(", "); sb.append(field.getHeader()); } else { field.setError(false); } // Check for Range GridField field_To = (GridField) fields_To.get(i); // Validate if (field_To != null) { Object data_To = getValue_To(i); if (data_To == null || data_To.toString().length() == 0) { field.setInserting (true); // set editable (i.e. 
updateable) otherwise deadlock field_To.setError(true); if (sb.length() > 0) sb.append(", "); sb.append(field.getHeader()); } else { field_To.setError(false); } } // range field } // mandatory } // field loop // Valid if exists a error if (sb.length() != 0) { m_processInfo.setSummary(sb.toString()); >>>>>>> String msg = validateFields(); // Valid if there is no message if (msg != null && msg.length() > 0) { m_processInfo.setSummary(msg); <<<<<<< if (fieldTo != null) { ======= if (field_To != null) { >>>>>>> if (fieldTo != null) { <<<<<<< if (fieldTo != null && result2 != null) ======= if (field_To != null && result2 != null) >>>>>>> if (fieldTo != null && result2 != null) <<<<<<< } if (fieldTo != null && result2 != null) { ======= } if (field_To != null && result2 != null) { >>>>>>> } if (fieldTo != null && result2 != null) { <<<<<<< if (fieldTo != null && result2 != null) ======= if (field_To != null && result2 != null) >>>>>>> if (fieldTo != null && result2 != null) <<<<<<< if (fieldTo != null && result2 != null) ======= if (field_To != null && result2 != null) >>>>>>> if (fieldTo != null && result2 != null) <<<<<<< if (fieldTo != null) ======= if (field_To != null) >>>>>>> if (fieldTo != null) <<<<<<< if (fieldTo != null) ======= if (field_To != null) >>>>>>> if (fieldTo != null) <<<<<<< if (fieldTo != null) ======= if (field_To != null) >>>>>>> if (fieldTo != null) <<<<<<< if (fieldTo != null) ======= if (field_To != null) >>>>>>> if (fieldTo != null) <<<<<<< if (fieldTo != null) ======= if (field_To != null) >>>>>>> if (fieldTo != null) <<<<<<< ======= /** * Restore window context. * @author teo_sarca [ 1699826 ] * @see org.compiere.model.GridField#restoreValue() */ public void restoreContext() { for (int i = 0; i < fields.size(); i++) { // Get Values GridField mField = (GridField) fields.get(i); GridField mField_To = (GridField) fields_To.get(i); // Restore if (mField != null) mField.restoreValue(); // Restore To Value if (mField_To != null) mField_To.restoreValue(); } } /** * getIndex. Get m_mFields index from columnName * * @param columnName * @return int **/ public int getIndex(String columnName) { for (int i = 0; i < fields.size(); i++) { if (fields.get(i).getColumnName().equals(columnName)) { return i; } } return 0; } // getIndex /** * getIndex. Get m_mFields index from columnName_To * * @param columnName * @return int **/ public int getIndex_To(String columnName) { for (int i = 0; i < fields_To.size(); i++) { if (fields_To.get(i).getColumnName().equals(columnName)) { return i; } } return 0; } // getIndex /** * Get Grid Field * @param index * @return */ public GridField getField(int index) { return fields.get(index); } /** * Get Grid Field To * @param index * @return */ public GridField getField_To(int index) { return fields.get(index); } >>>>>>>
<<<<<<< void addObserver(String[] params) throws Exception; void removeNode(String[] params) throws Exception; void addAuthority(String[] params) throws Exception; void removeAuthority(String[] params) throws Exception; void queryAuthority(String[] params) throws Exception; ======= void addPbft(String[] params) throws Exception; void setSystemConfigByKey(String[] params) throws Exception; >>>>>>> void addObserver(String[] params) throws Exception; void removeNode(String[] params) throws Exception; void addAuthority(String[] params) throws Exception; void removeAuthority(String[] params) throws Exception; void queryAuthority(String[] params) throws Exception; void setSystemConfigByKey(String[] params) throws Exception;
<<<<<<< import org.fisco.bcos.web3j.protocol.core.methods.response.BcosBlock; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosFilter; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosLog; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosSubscribe; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosTransaction; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosTransactionReceipt; import org.fisco.bcos.web3j.protocol.core.methods.response.BlockHash; import org.fisco.bcos.web3j.protocol.core.methods.response.BlockNumber; import org.fisco.bcos.web3j.protocol.core.methods.response.Call; import org.fisco.bcos.web3j.protocol.core.methods.response.Code; import org.fisco.bcos.web3j.protocol.core.methods.response.ConsensusStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.GenerateGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.GroupList; import org.fisco.bcos.web3j.protocol.core.methods.response.GroupPeers; import org.fisco.bcos.web3j.protocol.core.methods.response.Log; import org.fisco.bcos.web3j.protocol.core.methods.response.NodeIDList; import org.fisco.bcos.web3j.protocol.core.methods.response.NodeVersion; import org.fisco.bcos.web3j.protocol.core.methods.response.ObserverList; import org.fisco.bcos.web3j.protocol.core.methods.response.PbftView; import org.fisco.bcos.web3j.protocol.core.methods.response.Peers; import org.fisco.bcos.web3j.protocol.core.methods.response.PendingTransactions; import org.fisco.bcos.web3j.protocol.core.methods.response.PendingTxSize; import org.fisco.bcos.web3j.protocol.core.methods.response.SealerList; import org.fisco.bcos.web3j.protocol.core.methods.response.SendTransaction; import org.fisco.bcos.web3j.protocol.core.methods.response.StartGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.SyncStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.SystemConfig; import org.fisco.bcos.web3j.protocol.core.methods.response.TotalTransactionCount; import org.fisco.bcos.web3j.protocol.core.methods.response.Transaction; import org.fisco.bcos.web3j.protocol.core.methods.response.TransactionReceiptWithProof; import org.fisco.bcos.web3j.protocol.core.methods.response.TransactionWithProof; import org.fisco.bcos.web3j.protocol.core.methods.response.UninstallFilter; ======= import org.fisco.bcos.web3j.protocol.core.methods.request.GenerateGroupParams; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosBlock; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosFilter; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosLog; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosSubscribe; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosTransaction; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosTransactionReceipt; import org.fisco.bcos.web3j.protocol.core.methods.response.BlockHash; import org.fisco.bcos.web3j.protocol.core.methods.response.BlockNumber; import org.fisco.bcos.web3j.protocol.core.methods.response.Call; import org.fisco.bcos.web3j.protocol.core.methods.response.Code; import org.fisco.bcos.web3j.protocol.core.methods.response.ConsensusStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.GenerateGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.GroupList; import org.fisco.bcos.web3j.protocol.core.methods.response.GroupPeers; import org.fisco.bcos.web3j.protocol.core.methods.response.Log; import 
org.fisco.bcos.web3j.protocol.core.methods.response.NodeIDList; import org.fisco.bcos.web3j.protocol.core.methods.response.NodeVersion; import org.fisco.bcos.web3j.protocol.core.methods.response.ObserverList; import org.fisco.bcos.web3j.protocol.core.methods.response.PbftView; import org.fisco.bcos.web3j.protocol.core.methods.response.Peers; import org.fisco.bcos.web3j.protocol.core.methods.response.PendingTransactions; import org.fisco.bcos.web3j.protocol.core.methods.response.PendingTxSize; import org.fisco.bcos.web3j.protocol.core.methods.response.QueryGroupStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.RecoverGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.RemoveGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.SealerList; import org.fisco.bcos.web3j.protocol.core.methods.response.SendTransaction; import org.fisco.bcos.web3j.protocol.core.methods.response.StartGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.StopGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.SyncStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.SystemConfig; import org.fisco.bcos.web3j.protocol.core.methods.response.TotalTransactionCount; import org.fisco.bcos.web3j.protocol.core.methods.response.TransactionReceiptWithProof; import org.fisco.bcos.web3j.protocol.core.methods.response.TransactionWithProof; import org.fisco.bcos.web3j.protocol.core.methods.response.UninstallFilter; import org.fisco.bcos.web3j.protocol.rx.JsonRpc2_0Rx; >>>>>>> import org.fisco.bcos.web3j.protocol.core.methods.request.GenerateGroupParams; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosBlock; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosFilter; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosLog; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosSubscribe; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosTransaction; import org.fisco.bcos.web3j.protocol.core.methods.response.BcosTransactionReceipt; import org.fisco.bcos.web3j.protocol.core.methods.response.BlockHash; import org.fisco.bcos.web3j.protocol.core.methods.response.BlockNumber; import org.fisco.bcos.web3j.protocol.core.methods.response.Call; import org.fisco.bcos.web3j.protocol.core.methods.response.Code; import org.fisco.bcos.web3j.protocol.core.methods.response.ConsensusStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.GenerateGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.GroupList; import org.fisco.bcos.web3j.protocol.core.methods.response.GroupPeers; import org.fisco.bcos.web3j.protocol.core.methods.response.Log; import org.fisco.bcos.web3j.protocol.core.methods.response.NodeIDList; import org.fisco.bcos.web3j.protocol.core.methods.response.NodeVersion; import org.fisco.bcos.web3j.protocol.core.methods.response.ObserverList; import org.fisco.bcos.web3j.protocol.core.methods.response.PbftView; import org.fisco.bcos.web3j.protocol.core.methods.response.Peers; import org.fisco.bcos.web3j.protocol.core.methods.response.PendingTransactions; import org.fisco.bcos.web3j.protocol.core.methods.response.PendingTxSize; import org.fisco.bcos.web3j.protocol.core.methods.response.QueryGroupStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.RecoverGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.RemoveGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.SealerList; import 
org.fisco.bcos.web3j.protocol.core.methods.response.SendTransaction; import org.fisco.bcos.web3j.protocol.core.methods.response.StartGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.StopGroup; import org.fisco.bcos.web3j.protocol.core.methods.response.SyncStatus; import org.fisco.bcos.web3j.protocol.core.methods.response.SystemConfig; import org.fisco.bcos.web3j.protocol.core.methods.response.TotalTransactionCount; import org.fisco.bcos.web3j.protocol.core.methods.response.Transaction; import org.fisco.bcos.web3j.protocol.core.methods.response.TransactionReceiptWithProof; import org.fisco.bcos.web3j.protocol.core.methods.response.TransactionWithProof; import org.fisco.bcos.web3j.protocol.core.methods.response.UninstallFilter; <<<<<<< public void sendRawTransaction(String signedTransactionData, TransactionSucCallback callback) throws IOException { Request<?, SendTransaction> request = sendRawTransaction(signedTransactionData); request.setNeedTransCallback(true); request.setTransactionSucCallback(callback); request.sendOnly(); } @Override ======= public void sendRawTransaction(String signedTransactionData, TransactionSucCallback callback) throws IOException { Request<?, SendTransaction> request = sendRawTransaction(signedTransactionData); request.setNeedTransCallback(true); request.setTransactionSucCallback(callback); request.sendOnly(); } @Override public Request<?, SendTransaction> sendRawTransactionAndGetProof(String signedTransactionData) { return new Request<>( "sendRawTransactionAndGetProof", Arrays.asList(groupId, signedTransactionData), web3jService, SendTransaction.class); } @Override public void sendRawTransactionAndGetProof( String signedTransactionData, TransactionSucCallback callback) throws IOException { Request<?, SendTransaction> request = sendRawTransactionAndGetProof(signedTransactionData); request.setNeedTransCallback(true); request.setTransactionSucCallback(callback); request.sendOnly(); } @Override >>>>>>> public void sendRawTransaction(String signedTransactionData, TransactionSucCallback callback) throws IOException { Request<?, SendTransaction> request = sendRawTransaction(signedTransactionData); request.setNeedTransCallback(true); request.setTransactionSucCallback(callback); request.sendOnly(); } @Override public Request<?, SendTransaction> sendRawTransactionAndGetProof(String signedTransactionData) { return new Request<>( "sendRawTransactionAndGetProof", Arrays.asList(groupId, signedTransactionData), web3jService, SendTransaction.class); } @Override public void sendRawTransactionAndGetProof( String signedTransactionData, TransactionSucCallback callback) throws IOException { Request<?, SendTransaction> request = sendRawTransactionAndGetProof(signedTransactionData); request.setNeedTransCallback(true); request.setTransactionSucCallback(callback); request.sendOnly(); } @Override <<<<<<< public Request<?, GenerateGroup> generateGroup( int groupID, int timestamp, List<String> nodeList) { return new Request<>( "generateGroup", Arrays.asList(groupID, timestamp, nodeList), web3jService, GenerateGroup.class); } @Override public Request<?, StartGroup> startGroup(int groupID) { return new Request<>("startGroup", Arrays.asList(groupID), web3jService, StartGroup.class); } @Override ======= public Request<?, GenerateGroup> generateGroup( int groupID, long timestamp, boolean enableFreeStorage, List<String> nodeList) { return new Request<>( "generateGroup", Arrays.asList( groupID, new GenerateGroupParams( String.valueOf(timestamp), enableFreeStorage, nodeList)), 
web3jService, GenerateGroup.class); } @Override public Request<?, StartGroup> startGroup(int groupID) { return new Request<>("startGroup", Arrays.asList(groupID), web3jService, StartGroup.class); } @Override public Request<?, StopGroup> stopGroup(int groupID) { return new Request<>("stopGroup", Arrays.asList(groupID), web3jService, StopGroup.class); } @Override public Request<?, RemoveGroup> removeGroup(int groupID) { return new Request<>( "removeGroup", Arrays.asList(groupID), web3jService, RemoveGroup.class); } @Override public Request<?, RecoverGroup> recoverGroup(int groupID) { return new Request<>( "recoverGroup", Arrays.asList(groupID), web3jService, RecoverGroup.class); } @Override public Request<?, QueryGroupStatus> queryGroupStatus(int groupID) { return new Request<>( "queryGroupStatus", Arrays.asList(groupID), web3jService, QueryGroupStatus.class); } @Override >>>>>>> public Request<?, GenerateGroup> generateGroup( int groupID, long timestamp, boolean enableFreeStorage, List<String> nodeList) { return new Request<>( "generateGroup", Arrays.asList( groupID, new GenerateGroupParams( String.valueOf(timestamp), enableFreeStorage, nodeList)), web3jService, GenerateGroup.class); } @Override public Request<?, StartGroup> startGroup(int groupID) { return new Request<>("startGroup", Arrays.asList(groupID), web3jService, StartGroup.class); } @Override public Request<?, StopGroup> stopGroup(int groupID) { return new Request<>("stopGroup", Arrays.asList(groupID), web3jService, StopGroup.class); } @Override public Request<?, RemoveGroup> removeGroup(int groupID) { return new Request<>( "removeGroup", Arrays.asList(groupID), web3jService, RemoveGroup.class); } @Override public Request<?, RecoverGroup> recoverGroup(int groupID) { return new Request<>( "recoverGroup", Arrays.asList(groupID), web3jService, RecoverGroup.class); } @Override public Request<?, QueryGroupStatus> queryGroupStatus(int groupID) { return new Request<>( "queryGroupStatus", Arrays.asList(groupID), web3jService, QueryGroupStatus.class); } @Override
<<<<<<<
  GroupMapping(Schema<InternalGroup> schema) {
    this.groups = ElasticMapping.createMapping(schema);
=======
  public GroupMapping(Schema<AccountGroup> schema, ElasticQueryAdapter adapter) {
    this.groups = ElasticMapping.createMapping(schema, adapter);
>>>>>>>
  GroupMapping(Schema<InternalGroup> schema, ElasticQueryAdapter adapter) {
    this.groups = ElasticMapping.createMapping(schema, adapter);
<<<<<<< ======= import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.math.BigInteger; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.*; >>>>>>> <<<<<<< import java.io.*; import java.math.BigInteger; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; ======= >>>>>>> import java.io.*; import java.math.BigInteger; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; <<<<<<< String percent = "0.00%"; System.out.print(dateFormat.format(new Date()) + " Querying account state..." + percent); ======= RateLimiter limiter = RateLimiter.create(qps.intValue()); String percent = 0 + "%"; System.out.print( dateFormat.format(new Date()) + " Querying account state..." + percent); >>>>>>> String percent = "0.00%"; System.out.print(dateFormat.format(new Date()) + " Querying account state..." + percent); <<<<<<< percent = "0.00%"; System.out.print(dateFormat.format(new Date()) + " Creating signed transactions..." + percent); for (int i = 0; i < segmentCount; ++i) { ======= percent = 0 + "%"; System.out.print( dateFormat.format(new Date()) + " Creating signed transactions..." + percent); for (int i = 0; i < segmentCount; ++i) { >>>>>>> percent = "0.00%"; System.out.print(dateFormat.format(new Date()) + " Creating signed transactions..." 
+ percent); for (int i = 0; i < segmentCount; ++i) { <<<<<<< new Runnable() { @Override public void run() { DagTransferUser from = dagUserMgr.getFrom(index); DagTransferUser to = dagUserMgr.getTo(index); if ((deci.intValue() > 0) && (deci.intValue() >= (index % 10 + 1))) { to = dagUserMgr.getNext(index); } Random random = new Random(); int r = random.nextInt(100) + 1; BigInteger amount = BigInteger.valueOf(r); try { String signedTransaction = dagTransfer.userTransferSeq( from.getUser(), to.getUser(), amount); String content = String.format("%s %d %d%n", signedTransaction, index, r); lock.lock(); writer.write(content); } catch (Exception e) { e.printStackTrace(); } finally { lock.unlock(); ======= new Runnable() { @Override public void run() { while (true) { DagTransferUser from = dagUserMgr.getFrom(index); DagTransferUser to = dagUserMgr.getTo(index); if ((deci.intValue() > 0) && (deci.intValue() >= (index % 10 + 1))) { to = dagUserMgr.getNext(index); } Random random = new Random(); int r = random.nextInt(100) + 1; BigInteger amount = BigInteger.valueOf(r); try { String signedTransaction = dagTransfer.userTransferSeq( from.getUser(), to.getUser(), amount); String content = String.format( "%s %d %d%n", signedTransaction, index, r); lock.lock(); writer.write(content); break; } catch (Exception e) { e.printStackTrace(); continue; } finally { lock.unlock(); } >>>>>>> new Runnable() { @Override public void run() { DagTransferUser from = dagUserMgr.getFrom(index); DagTransferUser to = dagUserMgr.getTo(index); if ((deci.intValue() > 0) && (deci.intValue() >= (index % 10 + 1))) { to = dagUserMgr.getNext(index); } Random random = new Random(); int r = random.nextInt(100) + 1; BigInteger amount = BigInteger.valueOf(r); try { String signedTransaction = dagTransfer.userTransferSeq( from.getUser(), to.getUser(), amount); String content = String.format("%s %d %d%n", signedTransaction, index, r); lock.lock(); writer.write(content); } catch (Exception e) { e.printStackTrace(); } finally { lock.unlock(); <<<<<<< percent = String.format("%.2f%%", (end - latchCount) / (double) count.intValue() * 100); ======= percent = (int) (((end - latchCount) / (double) count.intValue()) * 100) + "%"; >>>>>>> percent = String.format("%.2f%%", (end - latchCount) / (double) count.intValue() * 100); <<<<<<< for (int p = 0; p < percent.length(); ++p) { System.out.print('\b'); ======= for (int p = 0; p < percent.length(); ++p) { System.out.print('\b'); } System.out.print("100%"); >>>>>>> for (int p = 0; p < percent.length(); ++p) { System.out.print('\b'); <<<<<<< sent = new AtomicInteger(0); for (int i = 0; i < fileList.length; ++i) { ======= for (int i = 0; i < fileList.length; ++i) { >>>>>>> sent = new AtomicInteger(0); for (int i = 0; i < fileList.length; ++i) { <<<<<<< ======= long elapsed = System.currentTimeMillis() - startTime; sent += signedTransactions.size(); double sendSpeed = sent / ((double) elapsed / 1000); System.out.println( "Already sent: " + sent + "/" + count + " transactions" + ",QPS=" + sendSpeed); >>>>>>>
<<<<<<<
  /**
   * Workaround to ensure type does not come back as T due to erasure, this enables you to create
   * a TypeReference via {@link Class Class&lt;T&gt;}.
   *
   * @return the parameterized Class type if applicable, otherwise a regular class
   * @throws ClassNotFoundException if the class type cannot be determined
   */
  @SuppressWarnings("unchecked")
  public Class<T> getClassType() throws ClassNotFoundException {
    Type clsType = getType();
    if (getType() instanceof ParameterizedType) {
      return (Class<T>) ((ParameterizedType) clsType).getRawType();
    } else {
      return (Class<T>) Class.forName(clsType.getTypeName());
    }
  }
=======
  /**
   * Workaround to ensure type does not come back as T due to erasure, this enables you to create a
   * TypeReference via {@link Class Class&lt;T&gt;}.
   *
   * @return the parameterized Class type if applicable, otherwise a regular class
   * @throws ClassNotFoundException if the class type cannot be determined
   */
  @SuppressWarnings("unchecked")
  public Class<T> getClassType() throws ClassNotFoundException {
    return Utils.getClassType(getType());
  }
>>>>>>>
  /**
   * Workaround to ensure type does not come back as T due to erasure, this enables you to create
   * a TypeReference via {@link Class Class&lt;T&gt;}.
   *
   * @return the parameterized Class type if applicable, otherwise a regular class
   * @throws ClassNotFoundException if the class type cannot be determined
   */
  @SuppressWarnings("unchecked")
  public Class<T> getClassType() throws ClassNotFoundException {
    return Utils.getClassType(getType());
  }
<<<<<<< import com.google.gerrit.elasticsearch.builders.XContentBuilder; import com.google.gerrit.index.Index; import com.google.gerrit.index.Schema; import com.google.gerrit.index.Schema.Values; ======= import com.google.gerrit.elasticsearch.bulk.DeleteRequest; >>>>>>> import com.google.gerrit.elasticsearch.bulk.DeleteRequest; import com.google.gerrit.index.Index; import com.google.gerrit.index.Schema; <<<<<<< ======= import com.google.gerrit.server.index.Index; >>>>>>> <<<<<<< ======= import com.google.gerrit.server.index.Schema; >>>>>>> <<<<<<< return toAction(type, id, DELETE); } private static boolean shouldAddElement(Object element) { return !(element instanceof String) || !((String) element).isEmpty(); } protected String toDoc(V v) throws IOException { try (XContentBuilder closeable = new XContentBuilder()) { XContentBuilder builder = closeable.startObject(); for (Values<V> values : schema.buildFields(v)) { String name = values.getField().getName(); if (values.getField().isRepeatable()) { builder.field( name, Streams.stream(values.getValues()) .filter(e -> shouldAddElement(e)) .collect(toList())); } else { Object element = Iterables.getOnlyElement(values.getValues(), ""); if (shouldAddElement(element)) { builder.field(name, element); } } } return builder.endObject().string() + System.lineSeparator(); } } protected String toAction(String type, String id, String action) { JsonObject properties = new JsonObject(); properties.addProperty("_id", id); properties.addProperty("_index", indexName); properties.addProperty("_type", type); JsonObject jsonAction = new JsonObject(); jsonAction.add(action, properties); return jsonAction.toString() + System.lineSeparator(); ======= return new DeleteRequest(id, indexNameRaw, type).toString(); >>>>>>> return new DeleteRequest(id, indexNameRaw, type).toString();
<<<<<<< ======= import org.fisco.bcos.channel.test.parallel.parallelok.DagUserMgr; import org.fisco.bcos.channel.test.parallel.parallelok.DagTransferUser; >>>>>>> <<<<<<< import org.fisco.bcos.web3j.tx.Contract; import org.fisco.bcos.web3j.tx.TransactionManager; ======= >>>>>>> import org.fisco.bcos.web3j.tx.Contract; import org.fisco.bcos.web3j.tx.TransactionManager; <<<<<<< ======= import java.util.ArrayList; import java.util.Date; import java.util.concurrent.locks.*; import java.util.concurrent.CountDownLatch; >>>>>>> <<<<<<< final ParallelOk _parallelok = parallelok; final List<DagTransferUser> _allUser = allUser; ======= final List<DagTransferUser> _allUser = allUser; >>>>>>> final ParallelOk _parallelok = parallelok; final List<DagTransferUser> _allUser = allUser; <<<<<<< while (verify_success.get() + verify_failed.get() < total_user) { Thread.sleep(40); ; ======= while(verify_success.get() + verify_failed.get() < total_user) { Thread.sleep(40); >>>>>>> while (verify_success.get() + verify_failed.get() < total_user) { Thread.sleep(40); ; <<<<<<< private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); ======= >>>>>>> <<<<<<< int coreNum = Runtime.getRuntime().availableProcessors(); ======= >>>>>>>
<<<<<<<
public class InternalAccountQuery extends InternalQuery<AccountState, InternalAccountQuery> {
=======
public class InternalAccountQuery extends InternalQuery<AccountState> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
>>>>>>>
public class InternalAccountQuery extends InternalQuery<AccountState, InternalAccountQuery> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
<<<<<<<
  public List<AccountState> byFullName(String fullName) {
=======
  @UsedAt(UsedAt.Project.COLLABNET)
  public AccountState oneByExternalId(ExternalId.Key externalId) throws OrmException {
    List<AccountState> accountStates = byExternalId(externalId);
    if (accountStates.size() == 1) {
      return accountStates.get(0);
    } else if (accountStates.size() > 0) {
      StringBuilder msg = new StringBuilder();
      msg.append("Ambiguous external ID ").append(externalId).append(" for accounts: ");
      Joiner.on(", ")
          .appendTo(
              msg,
              accountStates.stream().map(AccountState.ACCOUNT_ID_FUNCTION).collect(toList()));
      logger.atWarning().log(msg.toString());
    }
    return null;
  }

  public List<AccountState> byFullName(String fullName) throws OrmException {
>>>>>>>
  @UsedAt(UsedAt.Project.COLLABNET)
  public AccountState oneByExternalId(ExternalId.Key externalId) {
    List<AccountState> accountStates = byExternalId(externalId);
    if (accountStates.size() == 1) {
      return accountStates.get(0);
    } else if (accountStates.size() > 0) {
      StringBuilder msg = new StringBuilder();
      msg.append("Ambiguous external ID ").append(externalId).append(" for accounts: ");
      Joiner.on(", ")
          .appendTo(
              msg,
              accountStates.stream().map(AccountState.ACCOUNT_ID_FUNCTION).collect(toList()));
      logger.atWarning().log(msg.toString());
    }
    return null;
  }

  public List<AccountState> byFullName(String fullName) {
<<<<<<<
    ColumnChunkMetaData column = new ColumnChunkMetaData(new String[] {"foo"}, PrimitiveTypeName.BINARY, CompressionCodecName.GZIP);
    column.setFirstDataPageOffset(start);
=======
    ColumnChunkMetaData column = new ColumnChunkMetaData(new String[] {"foo"}, PrimitiveTypeName.BINARY, CompressionCodecName.GZIP, Arrays.asList(Encoding.PLAIN));
    column.setFirstDataPage(start);
>>>>>>>
    ColumnChunkMetaData column = new ColumnChunkMetaData(new String[] {"foo"}, PrimitiveTypeName.BINARY, CompressionCodecName.GZIP, Arrays.asList(Encoding.PLAIN));
    column.setFirstDataPageOffset(start);
<<<<<<<
  @Override
  public void writeDictionaryPage(DictionaryPage dictionaryPage) throws IOException {
    if (this.dictionaryPage != null) {
      throw new ParquetEncodingException("Only one dictionary page per block");
    }
    this.memSize += dictionaryPage.getBytes().size();
    this.dictionaryPage = dictionaryPage.copy();
    if (DEBUG) LOG.debug("dictionary page written for " + dictionaryPage.getBytes().size() + " bytes and " + dictionaryPage.getDictionarySize() + " records");
  }
=======
  @Override
  public String memUsageString(String prefix) {
    return String.format("%s %,d bytes", prefix, memSize);
  }
>>>>>>>
  @Override
  public void writeDictionaryPage(DictionaryPage dictionaryPage) throws IOException {
    if (this.dictionaryPage != null) {
      throw new ParquetEncodingException("Only one dictionary page per block");
    }
    this.memSize += dictionaryPage.getBytes().size();
    this.dictionaryPage = dictionaryPage.copy();
    if (DEBUG) LOG.debug("dictionary page written for " + dictionaryPage.getBytes().size() + " bytes and " + dictionaryPage.getDictionarySize() + " records");
  }

  @Override
  public String memUsageString(String prefix) {
    return String.format("%s %,d bytes", prefix, memSize);
  }
<<<<<<<
  public boolean getBoolean() {
=======
  public long getLong() {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean getBool() {
>>>>>>>
  public boolean getBoolean() {
    throw new UnsupportedOperationException();
  }

  public long getLong() {
<<<<<<< public void read(RecordConsumer recordConsumer) { ======= public void read(T[] records, int count) { if (count > records.length) { throw new IllegalArgumentException("count is greater than records size"); } for (int i = 0; i < count; i++) { readOneRecord(); records[i] = recordMaterializer.getCurrentRecord(); } } private void readOneRecord() { >>>>>>> public void read(T[] records, int count) { if (count > records.length) { throw new IllegalArgumentException("count is greater than records size"); } for (int i = 0; i < count; i++) { readOneRecord(); records[i] = recordMaterializer.getCurrentRecord(); } } private void readOneRecord() { <<<<<<< int currentCol = 0; startMessage(recordConsumer); ======= State currentState = states[0]; startMessage(); >>>>>>> State currentState = states[0]; startMessage(); <<<<<<< for (; currentLevel < (primitiveColumnIO.getFieldPath().length - 1) && d > getDefinitionLevel(currentLevel, primitiveColumnIO); ++currentLevel) { startGroup(recordConsumer, currentLevel, primitiveColumnIO); ======= int depth = currentState.definitionLevelToDepth[d]; for (; currentLevel <= depth; ++currentLevel) { String field = fieldPath[currentLevel]; int fieldIndex = indexFieldPath[currentLevel]; if (DEBUG) log(field + "(" + currentLevel + ") = new Group()"); startGroup(field, fieldIndex); >>>>>>> int depth = currentState.definitionLevelToDepth[d]; for (; currentLevel <= depth; ++currentLevel) { String field = fieldPath[currentLevel]; int fieldIndex = indexFieldPath[currentLevel]; if (DEBUG) log(field + "(" + currentLevel + ") = new Group()"); startGroup(field, fieldIndex); <<<<<<< String field = primitiveColumnIO.getFieldPath(currentLevel); int fieldIndex = primitiveColumnIO.getIndexFieldPath(currentLevel); if (DEBUG) log(field+"(" + currentLevel + ") = "+primitiveColumnIO.getType().asPrimitiveType().getPrimitive().toString(columnReader)); addPrimitive(recordConsumer, columnReader, primitiveColumnIO.getPrimitive(), field, fieldIndex); ======= String field = fieldPath[currentLevel]; int fieldIndex = indexFieldPath[currentLevel]; if (DEBUG) log(field+"(" + currentLevel + ") = "+currentState.primitive.toString(columnReader)); addPrimitive(columnReader, currentState.primitive, field, fieldIndex); >>>>>>> String field = fieldPath[currentLevel]; int fieldIndex = indexFieldPath[currentLevel]; if (DEBUG) log(field+"(" + currentLevel + ") = "+currentState.primitive.toString(columnReader)); addPrimitive(columnReader, currentState.primitive, field, fieldIndex); <<<<<<< private int getDefinitionLevel(int currentLevel, PrimitiveColumnIO primitiveColumnIO) { return primitiveColumnIO.getPath()[currentLevel].getDefinitionLevel(); } protected void startMessage(RecordConsumer recordConsumer) { ======= private void startMessage() { >>>>>>> private int getDefinitionLevel(int currentLevel, PrimitiveColumnIO primitiveColumnIO) { return primitiveColumnIO.getPath()[currentLevel].getDefinitionLevel(); } private void startMessage() { <<<<<<< protected void endMessage(RecordConsumer recordConsumer) { ======= private void endMessage() { >>>>>>> private void endMessage() { <<<<<<< private void startGroup(RecordConsumer recordConsumer, int currentLevel, PrimitiveColumnIO primitiveColumnIO) { String field = primitiveColumnIO.getFieldPath()[currentLevel]; int fieldIndex = primitiveColumnIO.getIndexFieldPath()[currentLevel]; if (DEBUG) log(field + "(" + currentLevel + ") = new Group()"); startField(recordConsumer, field, fieldIndex); ======= private void startGroup(String field, int fieldIndex) { 
startField(field, fieldIndex); >>>>>>> private void startGroup(String field, int fieldIndex) { startField(field, fieldIndex);
<<<<<<<
import parquet.column.statistics.BinaryStatistics;
import parquet.hadoop.api.ReadSupport;
=======
>>>>>>>
import parquet.column.statistics.BinaryStatistics;
import parquet.hadoop.api.ReadSupport;
<<<<<<<
    ColumnChunkMetaData column = ColumnChunkMetaData.get(ColumnPath.get("foo"), PrimitiveTypeName.BINARY, CompressionCodecName.GZIP, new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)), new BinaryStatistics(), start, 0l, 0l, 2l, 0l);
=======
    long uncompressedSize = compressedBlockSize * 2;//assuming the compression ratio is 2
    ColumnChunkMetaData column = ColumnChunkMetaData.get(
        ColumnPath.get("foo"), PrimitiveTypeName.BINARY, CompressionCodecName.GZIP,
        new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
        start, 0l, 0l, compressedBlockSize, uncompressedSize);
>>>>>>>
    long uncompressedSize = compressedBlockSize * 2;//assuming the compression ratio is 2
    ColumnChunkMetaData column = ColumnChunkMetaData.get(ColumnPath.get("foo"), PrimitiveTypeName.BINARY, CompressionCodecName.GZIP, new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)), new BinaryStatistics(), start, 0l, 0l, compressedBlockSize, uncompressedSize);
<<<<<<< import java.io.DataOutput; import java.io.IOException; ======= import java.math.BigInteger; >>>>>>> import java.io.DataOutput; import java.io.IOException; import java.math.BigInteger; <<<<<<< System.out.println(columns); System.out.println("========="); columns.flip(); RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO, schema, columns); ======= =="); columns.flip(); RecordReader<Group> recordReader = getRecordReader(columnIO, schema, columns); ======= columns.flush(); log(columns); log("========="); RecordReader<Group> recordReader = getRecordReader(columnIO, schema, memPageStore); >>>>>>> columns.flush(); log(columns); log("========="); RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO, schema, memPageStore); <<<<<<< RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO2, schema2, columns); ======= RecordReader<Group> recordReader = getRecordReader(columnIO2, schema2, memPageStore); >>>>>>> RecordReaderImplementation<Group> recordReader = getRecordReader(columnIO2, schema2, memPageStore); <<<<<<< private RecordReaderImplementation<Group> getRecordReader(MessageColumnIO columnIO, MessageType schema, ColumnsStore columns) { ======= private void log(Object o) { LOG.info(o); } private RecordReader<Group> getRecordReader(MessageColumnIO columnIO, MessageType schema, PageReadStore pageReadStore) { >>>>>>> private RecordReaderImplementation<Group> getRecordReader(MessageColumnIO columnIO, MessageType schema, PageReadStore pageReadStore) { <<<<<<< ======= String[] expected = { "startMessage()", "startField(DocId, 0)", "addLong(10)", "endField(DocId, 0)", "startField(Links, 1)", "startGroup()", "startField(Forward, 1)", "addLong(20)", "addLong(40)", "addLong(60)", "endField(Forward, 1)", "endGroup()", "endField(Links, 1)", "startField(Name, 2)", "startGroup()", "startField(Language, 0)", "startGroup()", "startField(Code, 0)", "addBinary(en-us)", "endField(Code, 0)", "startField(Country, 1)", "addBinary(us)", "endField(Country, 1)", "endGroup()", "startGroup()", "startField(Code, 0)", "addBinary(en)", "endField(Code, 0)", "endGroup()", "endField(Language, 0)", "startField(Url, 1)", "addBinary(http://A)", "endField(Url, 1)", "endGroup()", "startGroup()", "startField(Url, 1)", "addBinary(http://B)", "endField(Url, 1)", "endGroup()", "startGroup()", "startField(Language, 0)", "startGroup()", "startField(Code, 0)", "addBinary(en-gb)", "endField(Code, 0)", "startField(Country, 1)", "addBinary(gb)", "endField(Country, 1)", "endGroup()", "endField(Language, 0)", "endGroup()", "endField(Name, 2)", "endMessage()" }; >>>>>>>
<<<<<<<
import static parquet.Log.DEBUG;
=======
import static parquet.bytes.BytesInput.concat;
>>>>>>>
import static parquet.Log.DEBUG;
import static parquet.bytes.BytesInput.concat;
<<<<<<<
    final int padded8ValuesBlocks = (totalValues + 7) / 8;
    if (DEBUG) LOG.debug("writing " + totalValues + " values padded to " + (padded8ValuesBlocks * 8));
    int header = padded8ValuesBlocks << 1 | 1;
=======
    int header = BytesUtils.paddedByteCountFromBits(totalValues) << 1 | 1;
>>>>>>>
    final int padded8ValuesBlocks = (totalValues + 7) / 8;
    if (DEBUG) LOG.debug("writing " + totalValues + " values padded to " + (padded8ValuesBlocks * 8));
    int header = padded8ValuesBlocks << 1 | 1;
<<<<<<<
    BytesInput bitPacked = bitPackingEncoder.toBytes();
    return BytesInput.fromSequence(
        BytesInput.from(size),
        bitPacked
        );
=======
    return concat(BytesInput.from(size), bitPackingEncoder.toBytes());
>>>>>>>
    BytesInput bitPacked = bitPackingEncoder.toBytes();
    return concat(BytesInput.from(size), bitPacked);