Columns:
  Unnamed: 0 (int64, values 0 to 6.45k)
  func (string, lengths 37 to 161k characters)
  target (class label, 2 classes)
  project (string, lengths 33 to 167 characters)
Each record below lists, in order: the Unnamed: 0 value, the func source text, the target label, and the project file path.
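The schema above is what a pandas or Hugging Face datasets viewer reports for a table like this. As a minimal sketch of how the summary numbers could be reproduced, assuming a local CSV copy of the records with the same four columns (the file name code_defect.csv is hypothetical):

import pandas as pd

# Hypothetical local copy of the table shown in this dump.
df = pd.read_csv("code_defect.csv")

# Recompute the header statistics: index range, string-length ranges,
# and the per-class row counts of the binary target column.
print(df["Unnamed: 0"].min(), df["Unnamed: 0"].max())
print(df["func"].str.len().min(), df["func"].str.len().max())
print(df["project"].str.len().min(), df["project"].str.len().max())
print(df["target"].value_counts())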
1,508
@Component("blCartStateFilter") /** * <p> * This filter should be configured after the BroadleafCommerce CustomerStateFilter listener from Spring Security. * Retrieves the cart for the current BroadleafCommerce Customer based using the authenticated user OR creates an empty non-modifiable cart and * stores it in the request. * </p> * * @author bpolster */ public class CartStateFilter extends GenericFilterBean implements Ordered { /** Logger for this class and subclasses */ protected final Log LOG = LogFactory.getLog(getClass()); @Resource(name = "blCartStateRequestProcessor") protected CartStateRequestProcessor cartStateProcessor; @Override @SuppressWarnings("unchecked") public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { cartStateProcessor.process(new ServletWebRequest((HttpServletRequest) request, (HttpServletResponse)response)); chain.doFilter(request, response); } @Override public int getOrder() { //FilterChainOrder has been dropped from Spring Security 3 //return FilterChainOrder.REMEMBER_ME_FILTER+1; return 1502; } }
1 (no label)
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_order_security_CartStateFilter.java
43
public class InvalidElementException extends TitanException { private final TitanElement element; /** * @param msg Exception message * @param element The invalid element causing the exception */ public InvalidElementException(String msg, TitanElement element) { super(msg); this.element = element; } /** * Returns the element causing the exception * * @return The element causing the exception */ public TitanElement getElement() { return element; } @Override public String toString() { return super.toString() + " [" + element.toString() + "]"; } public static IllegalStateException removedException(TitanElement element) { return new IllegalStateException("Element has been removed"); } }
0 (true)
titan-core_src_main_java_com_thinkaurelius_titan_core_InvalidElementException.java
223
private static class OffsetsEnum implements Comparable<OffsetsEnum> { DocsAndPositionsEnum dp; int pos; int id; OffsetsEnum(DocsAndPositionsEnum dp, int id) throws IOException { this.dp = dp; this.id = id; this.pos = 1; } @Override public int compareTo(OffsetsEnum other) { try { int off = dp.startOffset(); int otherOff = other.dp.startOffset(); if (off == otherOff) { return id - other.id; } else { return Long.signum(((long)off) - otherOff); } } catch (IOException e) { throw new RuntimeException(e); } } }
0 (true)
src_main_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighter.java
1,678
public class FsBlobStore extends AbstractComponent implements BlobStore { private final Executor executor; private final File path; private final int bufferSizeInBytes; public FsBlobStore(Settings settings, Executor executor, File path) { super(settings); this.path = path; if (!path.exists()) { boolean b = FileSystemUtils.mkdirs(path); if (!b) { throw new BlobStoreException("Failed to create directory at [" + path + "]"); } } if (!path.isDirectory()) { throw new BlobStoreException("Path is not a directory at [" + path + "]"); } this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes(); this.executor = executor; } @Override public String toString() { return path.toString(); } public File path() { return path; } public int bufferSizeInBytes() { return this.bufferSizeInBytes; } public Executor executor() { return executor; } @Override public ImmutableBlobContainer immutableBlobContainer(BlobPath path) { return new FsImmutableBlobContainer(this, path, buildAndCreate(path)); } @Override public void delete(BlobPath path) { FileSystemUtils.deleteRecursively(buildPath(path)); } @Override public void close() { // nothing to do here... } private synchronized File buildAndCreate(BlobPath path) { File f = buildPath(path); FileSystemUtils.mkdirs(f); return f; } private File buildPath(BlobPath path) { String[] paths = path.toArray(); if (paths.length == 0) { return path(); } File blobPath = new File(this.path, paths[0]); if (paths.length > 1) { for (int i = 1; i < paths.length; i++) { blobPath = new File(blobPath, paths[i]); } } return blobPath; } }
1 (no label)
src_main_java_org_elasticsearch_common_blobstore_fs_FsBlobStore.java
54
public class OAdaptiveLock extends OAbstractLock { private final ReentrantLock lock = new ReentrantLock(); private final boolean concurrent; private final int timeout; private final boolean ignoreThreadInterruption; public OAdaptiveLock() { this.concurrent = true; this.timeout = 0; this.ignoreThreadInterruption = false; } public OAdaptiveLock(final int iTimeout) { this.concurrent = true; this.timeout = iTimeout; this.ignoreThreadInterruption = false; } public OAdaptiveLock(final boolean iConcurrent) { this.concurrent = iConcurrent; this.timeout = 0; this.ignoreThreadInterruption = false; } public OAdaptiveLock(final boolean iConcurrent, final int iTimeout, boolean ignoreThreadInterruption) { this.concurrent = iConcurrent; this.timeout = iTimeout; this.ignoreThreadInterruption = ignoreThreadInterruption; } public void lock() { if (concurrent) if (timeout > 0) { try { if (lock.tryLock(timeout, TimeUnit.MILLISECONDS)) // OK return; } catch (InterruptedException e) { if (ignoreThreadInterruption) { // IGNORE THE THREAD IS INTERRUPTED: TRY TO RE-LOCK AGAIN try { if (lock.tryLock(timeout, TimeUnit.MILLISECONDS)) { // OK, RESET THE INTERRUPTED STATE Thread.currentThread().interrupt(); return; } } catch (InterruptedException e2) { Thread.currentThread().interrupt(); } } throw new OLockException("Thread interrupted while waiting for resource of class '" + getClass() + "' with timeout=" + timeout); } throw new OTimeoutException("Timeout on acquiring lock against resource of class: " + getClass() + " with timeout=" + timeout); } else lock.lock(); } public boolean tryAcquireLock() { return tryAcquireLock(timeout, TimeUnit.MILLISECONDS); } public boolean tryAcquireLock(final long iTimeout, final TimeUnit iUnit) { if (concurrent) if (timeout > 0) try { return lock.tryLock(iTimeout, iUnit); } catch (InterruptedException e) { throw new OLockException("Thread interrupted while waiting for resource of class '" + getClass() + "' with timeout=" + timeout); } else return lock.tryLock(); return true; } public void unlock() { if (concurrent) lock.unlock(); } public boolean isConcurrent() { return concurrent; } public ReentrantLock getUnderlying() { return lock; } }
1 (no label)
commons_src_main_java_com_orientechnologies_common_concur_lock_OAdaptiveLock.java
1,547
public class PropertyMapMap { public static final String CLASS = Tokens.makeNamespace(PropertyMapMap.class) + ".class"; public enum Counters { VERTICES_PROCESSED, OUT_EDGES_PROCESSED } public static Configuration createConfiguration(final Class<? extends Element> klass) { final Configuration configuration = new EmptyConfiguration(); configuration.setClass(CLASS, klass, Element.class); return configuration; } public static class Map extends Mapper<NullWritable, FaunusVertex, LongWritable, Text> { private boolean isVertex; private SafeMapperOutputs outputs; @Override public void setup(final Mapper.Context context) throws IOException, InterruptedException { this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class); this.outputs = new SafeMapperOutputs(context); } private LongWritable longWritable = new LongWritable(); private Text text = new Text(); @Override public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, LongWritable, Text>.Context context) throws IOException, InterruptedException { if (this.isVertex) { if (value.hasPaths()) { this.longWritable.set(value.getLongId()); this.text.set(ElementPicker.getPropertyAsString(value, Tokens._PROPERTIES)); for (int i = 0; i < value.pathCount(); i++) { this.outputs.write(Tokens.SIDEEFFECT, this.longWritable, this.text); } DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L); } } else { long edgesProcessed = 0; for (final Edge e : value.getEdges(Direction.OUT)) { final StandardFaunusEdge edge = (StandardFaunusEdge) e; if (edge.hasPaths()) { this.longWritable.set(edge.getLongId()); this.text.set(ElementPicker.getPropertyAsString(edge, Tokens._PROPERTIES)); for (int i = 0; i < edge.pathCount(); i++) { this.outputs.write(Tokens.SIDEEFFECT, this.longWritable, this.text); } edgesProcessed++; } } DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed); } this.outputs.write(Tokens.GRAPH, NullWritable.get(), value); } @Override public void cleanup(final Mapper<NullWritable, FaunusVertex, LongWritable, Text>.Context context) throws IOException, InterruptedException { this.outputs.close(); } } }
1 (no label)
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_PropertyMapMap.java
2,074
public class MultipleEntryOperation extends AbstractMapOperation implements BackupAwareOperation, PartitionAwareOperation { private static final EntryEventType __NO_NEED_TO_FIRE_EVENT = null; private EntryProcessor entryProcessor; private Set<Data> keys; MapEntrySet response; public MultipleEntryOperation() { } public MultipleEntryOperation(String name, Set<Data> keys, EntryProcessor entryProcessor) { super(name); this.keys = keys; this.entryProcessor = entryProcessor; } public void innerBeforeRun() { final ManagedContext managedContext = getNodeEngine().getSerializationService().getManagedContext(); managedContext.initialize(entryProcessor); } @Override public void run() throws Exception { response = new MapEntrySet(); final InternalPartitionService partitionService = getNodeEngine().getPartitionService(); final RecordStore recordStore = mapService.getRecordStore(getPartitionId(), name); final LocalMapStatsImpl mapStats = mapService.getLocalMapStatsImpl(name); MapEntrySimple entry; for (Data key : keys) { if (partitionService.getPartitionId(key) != getPartitionId()) continue; long start = System.currentTimeMillis(); Object objectKey = mapService.toObject(key); final Map.Entry<Data, Object> mapEntry = recordStore.getMapEntry(key); final Object valueBeforeProcess = mapEntry.getValue(); final Object valueBeforeProcessObject = mapService.toObject(valueBeforeProcess); entry = new MapEntrySimple(objectKey, valueBeforeProcessObject); final Object result = entryProcessor.process(entry); final Object valueAfterProcess = entry.getValue(); Data dataValue = null; if (result != null) { dataValue = mapService.toData(result); response.add(new AbstractMap.SimpleImmutableEntry<Data, Data>(key, dataValue)); } EntryEventType eventType; if (valueAfterProcess == null) { recordStore.remove(key); mapStats.incrementRemoves(getLatencyFrom(start)); eventType = EntryEventType.REMOVED; } else { if (valueBeforeProcessObject == null) { mapStats.incrementPuts(getLatencyFrom(start)); eventType = EntryEventType.ADDED; } // take this case as a read so no need to fire an event. else if (!entry.isModified()) { mapStats.incrementGets(getLatencyFrom(start)); eventType = __NO_NEED_TO_FIRE_EVENT; } else { mapStats.incrementPuts(getLatencyFrom(start)); eventType = EntryEventType.UPDATED; } // todo if this is a read only operation, record access operations should be done. 
if (eventType != __NO_NEED_TO_FIRE_EVENT) { recordStore.put(new AbstractMap.SimpleImmutableEntry<Data, Object>(key, valueAfterProcess)); } } if (eventType != __NO_NEED_TO_FIRE_EVENT) { final Data oldValue = mapService.toData(valueBeforeProcess); final Data value = mapService.toData(valueAfterProcess); mapService.publishEvent(getCallerAddress(), name, eventType, key, oldValue, value); if (mapService.isNearCacheAndInvalidationEnabled(name)) { mapService.invalidateAllNearCaches(name, key); } if (mapContainer.getWanReplicationPublisher() != null && mapContainer.getWanMergePolicy() != null) { if (EntryEventType.REMOVED.equals(eventType)) { mapService.publishWanReplicationRemove(name, key, Clock.currentTimeMillis()); } else { Record record = recordStore.getRecord(key); Data tempValue = mapService.toData(dataValue); final SimpleEntryView entryView = mapService.createSimpleEntryView(key, tempValue, record); mapService.publishWanReplicationUpdate(name, entryView); } } } } } @Override public boolean returnsResponse() { return true; } @Override public Object getResponse() { return response; } @Override public String toString() { return "MultipleEntryOperation{}"; } @Override public boolean shouldBackup() { return entryProcessor.getBackupProcessor() != null; } @Override public int getSyncBackupCount() { return 0; } @Override public int getAsyncBackupCount() { return mapContainer.getTotalBackupCount(); } @Override public Operation getBackupOperation() { EntryBackupProcessor backupProcessor = entryProcessor.getBackupProcessor(); return backupProcessor != null ? new MultipleEntryBackupOperation(name, keys, backupProcessor) : null; } @Override protected void readInternal(ObjectDataInput in) throws IOException { super.readInternal(in); entryProcessor = in.readObject(); int size = in.readInt(); keys = new HashSet<Data>(size); for (int i = 0; i < size; i++) { Data key = new Data(); key.readData(in); keys.add(key); } } @Override protected void writeInternal(ObjectDataOutput out) throws IOException { super.writeInternal(out); out.writeObject(entryProcessor); out.writeInt(keys.size()); for (Data key : keys) { key.writeData(out); } } private long getLatencyFrom(long begin) { return Clock.currentTimeMillis() - begin; } }
1 (no label)
hazelcast_src_main_java_com_hazelcast_map_operation_MultipleEntryOperation.java
3,701
public class UidFieldMapper extends AbstractFieldMapper<Uid> implements InternalMapper, RootMapper { public static final String NAME = "_uid"; public static final String CONTENT_TYPE = "_uid"; public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = UidFieldMapper.NAME; public static final String INDEX_NAME = UidFieldMapper.NAME; public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); public static final FieldType NESTED_FIELD_TYPE; static { FIELD_TYPE.setIndexed(true); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexOptions(FieldInfo.IndexOptions.DOCS_ONLY); FIELD_TYPE.freeze(); NESTED_FIELD_TYPE = new FieldType(FIELD_TYPE); NESTED_FIELD_TYPE.setStored(false); NESTED_FIELD_TYPE.freeze(); } } public static class Builder extends AbstractFieldMapper.Builder<Builder, UidFieldMapper> { public Builder() { super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.INDEX_NAME; } @Override public UidFieldMapper build(BuilderContext context) { return new UidFieldMapper(name, indexName, docValues, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings()); } } public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { Builder builder = uid(); parseField(builder, builder.name, node, parserContext); return builder; } } public UidFieldMapper() { this(Defaults.NAME); } protected UidFieldMapper(String name) { this(name, name, null, null, null, null, ImmutableSettings.EMPTY); } protected UidFieldMapper(String name, String indexName, Boolean docValues, PostingsFormatProvider postingsFormat, DocValuesFormatProvider docValuesFormat, @Nullable Settings fieldDataSettings, Settings indexSettings) { super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), docValues, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, postingsFormat, docValuesFormat, null, null, fieldDataSettings, indexSettings); } @Override public FieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @Override public FieldDataType defaultFieldDataType() { return new FieldDataType("string"); } @Override protected String defaultPostingFormat() { return "default"; } @Override public void preParse(ParseContext context) throws IOException { // if we have the id provided, fill it, and parse now if (context.sourceToParse().id() != null) { context.id(context.sourceToParse().id()); super.parse(context); } } @Override public void postParse(ParseContext context) throws IOException { if (context.id() == null && !context.sourceToParse().flyweight()) { throw new MapperParsingException("No id found while parsing the content source"); } // if we did not have the id as part of the sourceToParse, then we need to parse it here // it would have been filled in the _id parse phase if (context.sourceToParse().id() == null) { super.parse(context); // since we did not have the uid in the pre phase, we did not add it automatically to the nested docs // as they were created we need to make sure we add it to all the nested docs... if (context.docs().size() > 1) { final IndexableField uidField = context.rootDoc().getField(UidFieldMapper.NAME); assert uidField != null; // we need to go over the docs and add it... 
for (int i = 1; i < context.docs().size(); i++) { final Document doc = context.docs().get(i); doc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), Defaults.NESTED_FIELD_TYPE)); } } } } @Override public void parse(ParseContext context) throws IOException { // nothing to do here, we either do it in post parse, or in pre parse. } @Override public void validate(ParseContext context) throws MapperParsingException { } @Override public boolean includeInObject() { return false; } @Override protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException { Field uid = new Field(NAME, Uid.createUid(context.stringBuilder(), context.type(), context.id()), Defaults.FIELD_TYPE); context.uid(uid); fields.add(uid); if (hasDocValues()) { fields.add(new BinaryDocValuesField(NAME, new BytesRef(uid.stringValue()))); } } @Override public Uid value(Object value) { if (value == null) { return null; } return Uid.createUid(value.toString()); } public Term term(String type, String id) { return term(Uid.createUid(type, id)); } public Term term(String uid) { return names().createIndexNameTerm(uid); } @Override protected String contentType() { return CONTENT_TYPE; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { boolean includeDefaults = params.paramAsBoolean("include_defaults", false); // if defaults, don't output if (!includeDefaults && customFieldDataSettings == null && (postingsFormat == null || postingsFormat.name().equals(defaultPostingFormat())) && (docValuesFormat == null || docValuesFormat.name().equals(defaultDocValuesFormat()))) { return builder; } builder.startObject(CONTENT_TYPE); if (postingsFormat != null) { if (includeDefaults || !postingsFormat.name().equals(defaultPostingFormat())) { builder.field("postings_format", postingsFormat.name()); } } else if (includeDefaults) { String format = defaultPostingFormat(); if (format == null) { format = PostingsFormatService.DEFAULT_FORMAT; } builder.field("postings_format", format); } if (docValuesFormat != null) { if (includeDefaults || !docValuesFormat.name().equals(defaultDocValuesFormat())) { builder.field(DOC_VALUES_FORMAT, docValuesFormat.name()); } } else if (includeDefaults) { String format = defaultDocValuesFormat(); if (format == null) { format = DocValuesFormatService.DEFAULT_FORMAT; } builder.field(DOC_VALUES_FORMAT, format); } if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); } builder.endObject(); return builder; } @Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException { AbstractFieldMapper<?> fieldMergeWith = (AbstractFieldMapper<?>) mergeWith; // do nothing here, no merging, but also no exception if (!mergeContext.mergeFlags().simulate()) { // apply changeable values if (fieldMergeWith.postingsFormatProvider() != null) { this.postingsFormat = fieldMergeWith.postingsFormatProvider(); } } } }
1 (no label)
src_main_java_org_elasticsearch_index_mapper_internal_UidFieldMapper.java
464
final OIndex<?> indexTwo = makeDbCall(databaseDocumentTxTwo, new ODbRelatedCall<OIndex<?>>() { public OIndex<?> call() { return indexManagerTwo.getIndex(indexOne.getName()); } });
0 (true)
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseCompare.java
753
public class MultiGetRequest extends ActionRequest<MultiGetRequest> { /** * A single get item. */ public static class Item implements Streamable { private String index; private String type; private String id; private String routing; private String[] fields; private long version = Versions.MATCH_ANY; private VersionType versionType = VersionType.INTERNAL; private FetchSourceContext fetchSourceContext; Item() { } /** * Constructs a single get item. * * @param index The index name * @param type The type (can be null) * @param id The id */ public Item(String index, @Nullable String type, String id) { this.index = index; this.type = type; this.id = id; } public String index() { return this.index; } public Item index(String index) { this.index = index; return this; } public String type() { return this.type; } public String id() { return this.id; } /** * The routing associated with this document. */ public Item routing(String routing) { this.routing = routing; return this; } public String routing() { return this.routing; } public Item parent(String parent) { if (routing == null) { this.routing = parent; } return this; } public Item fields(String... fields) { this.fields = fields; return this; } public String[] fields() { return this.fields; } public long version() { return version; } public Item version(long version) { this.version = version; return this; } public VersionType versionType() { return versionType; } public Item versionType(VersionType versionType) { this.versionType = versionType; return this; } public FetchSourceContext fetchSourceContext() { return this.fetchSourceContext; } /** * Allows setting the {@link FetchSourceContext} for this request, controlling if and how _source should be returned. */ public Item fetchSourceContext(FetchSourceContext fetchSourceContext) { this.fetchSourceContext = fetchSourceContext; return this; } public static Item readItem(StreamInput in) throws IOException { Item item = new Item(); item.readFrom(in); return item; } @Override public void readFrom(StreamInput in) throws IOException { index = in.readSharedString(); type = in.readOptionalSharedString(); id = in.readString(); routing = in.readOptionalString(); int size = in.readVInt(); if (size > 0) { fields = new String[size]; for (int i = 0; i < size; i++) { fields[i] = in.readString(); } } version = in.readVLong(); versionType = VersionType.fromValue(in.readByte()); fetchSourceContext = FetchSourceContext.optionalReadFromStream(in); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeSharedString(index); out.writeOptionalSharedString(type); out.writeString(id); out.writeOptionalString(routing); if (fields == null) { out.writeVInt(0); } else { out.writeVInt(fields.length); for (String field : fields) { out.writeString(field); } } out.writeVLong(version); out.writeByte(versionType.getValue()); FetchSourceContext.optionalWriteToStream(fetchSourceContext, out); } } private boolean listenerThreaded = false; String preference; Boolean realtime; boolean refresh; List<Item> items = new ArrayList<Item>(); public MultiGetRequest add(Item item) { items.add(item); return this; } public MultiGetRequest add(String index, @Nullable String type, String id) { items.add(new Item(index, type, id)); return this; } @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (items.isEmpty()) { validationException = ValidateActions.addValidationError("no documents to get", validationException); } else { for (int i = 0; i < 
items.size(); i++) { Item item = items.get(i); if (item.index() == null) { validationException = ValidateActions.addValidationError("index is missing for doc " + i, validationException); } if (item.id() == null) { validationException = ValidateActions.addValidationError("id is missing for doc " + i, validationException); } } } return validationException; } /** * Sets the preference to execute the search. Defaults to randomize across shards. Can be set to * <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or * a custom value, which guarantees that the same order will be used across different requests. */ public MultiGetRequest preference(String preference) { this.preference = preference; return this; } public String preference() { return this.preference; } public boolean realtime() { return this.realtime == null ? true : this.realtime; } public MultiGetRequest realtime(Boolean realtime) { this.realtime = realtime; return this; } public boolean refresh() { return this.refresh; } public MultiGetRequest refresh(boolean refresh) { this.refresh = refresh; return this; } public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, byte[] data, int from, int length) throws Exception { return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, new BytesArray(data, from, length), true); } public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, BytesReference data) throws Exception { return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, data, true); } public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, BytesReference data, boolean allowExplicitIndex) throws Exception { return add(defaultIndex, defaultType, defaultFields, defaultFetchSource, null, data, allowExplicitIndex); } public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, BytesReference data, boolean allowExplicitIndex) throws Exception { XContentParser parser = XContentFactory.xContent(data).createParser(data); try { XContentParser.Token token; String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_ARRAY) { if ("docs".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchIllegalArgumentException("docs array element should include an object"); } String index = defaultIndex; String type = defaultType; String id = null; String routing = defaultRouting; String parent = null; List<String> fields = null; long version = Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; FetchSourceContext fetchSourceContext = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if ("_index".equals(currentFieldName)) { if (!allowExplicitIndex) { 
throw new ElasticsearchIllegalArgumentException("explicit index in multi get is not allowed"); } index = parser.text(); } else if ("_type".equals(currentFieldName)) { type = parser.text(); } else if ("_id".equals(currentFieldName)) { id = parser.text(); } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) { routing = parser.text(); } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { parent = parser.text(); } else if ("fields".equals(currentFieldName)) { fields = new ArrayList<String>(); fields.add(parser.text()); } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { version = parser.longValue(); } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { versionType = VersionType.fromString(parser.text()); } else if ("_source".equals(currentFieldName)) { if (parser.isBooleanValue()) { fetchSourceContext = new FetchSourceContext(parser.booleanValue()); } else if (token == XContentParser.Token.VALUE_STRING) { fetchSourceContext = new FetchSourceContext(new String[]{parser.text()}); } else { throw new ElasticsearchParseException("illegal type for _source: [" + token + "]"); } } } else if (token == XContentParser.Token.START_ARRAY) { if ("fields".equals(currentFieldName)) { fields = new ArrayList<String>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { fields.add(parser.text()); } } else if ("_source".equals(currentFieldName)) { ArrayList<String> includes = new ArrayList<String>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { includes.add(parser.text()); } fetchSourceContext = new FetchSourceContext(includes.toArray(Strings.EMPTY_ARRAY)); } } else if (token == XContentParser.Token.START_OBJECT) { if ("_source".equals(currentFieldName)) { List<String> currentList = null, includes = null, excludes = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); if ("includes".equals(currentFieldName) || "include".equals(currentFieldName)) { currentList = includes != null ? includes : (includes = new ArrayList<String>(2)); } else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) { currentList = excludes != null ? excludes : (excludes = new ArrayList<String>(2)); } else { throw new ElasticsearchParseException("Source definition may not contain " + parser.text()); } } else if (token == XContentParser.Token.START_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { currentList.add(parser.text()); } } else if (token.isValue()) { currentList.add(parser.text()); } else { throw new ElasticsearchParseException("unexpected token while parsing source settings"); } } fetchSourceContext = new FetchSourceContext( includes == null ? Strings.EMPTY_ARRAY : includes.toArray(new String[includes.size()]), excludes == null ? Strings.EMPTY_ARRAY : excludes.toArray(new String[excludes.size()])); } } } String[] aFields; if (fields != null) { aFields = fields.toArray(new String[fields.size()]); } else { aFields = defaultFields; } add(new Item(index, type, id).routing(routing).fields(aFields).parent(parent).version(version).versionType(versionType) .fetchSourceContext(fetchSourceContext == null ? 
defaultFetchSource : fetchSourceContext)); } } else if ("ids".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (!token.isValue()) { throw new ElasticsearchIllegalArgumentException("ids array element should only contain ids"); } add(new Item(defaultIndex, defaultType, parser.text()).fields(defaultFields).fetchSourceContext(defaultFetchSource).routing(defaultRouting)); } } } } } finally { parser.close(); } return this; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); preference = in.readOptionalString(); refresh = in.readBoolean(); byte realtime = in.readByte(); if (realtime == 0) { this.realtime = false; } else if (realtime == 1) { this.realtime = true; } int size = in.readVInt(); items = new ArrayList<Item>(size); for (int i = 0; i < size; i++) { items.add(Item.readItem(in)); } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(preference); out.writeBoolean(refresh); if (realtime == null) { out.writeByte((byte) -1); } else if (realtime == false) { out.writeByte((byte) 0); } else { out.writeByte((byte) 1); } out.writeVInt(items.size()); for (Item item : items) { item.writeTo(out); } } }
1 (no label)
src_main_java_org_elasticsearch_action_get_MultiGetRequest.java
1,852
nodeEngine.getExecutionService().submit("hz:map-merge", new Runnable() { public void run() { final SimpleEntryView entryView = createSimpleEntryView(record.getKey(), toData(record.getValue()), record); MergeOperation operation = new MergeOperation(mapContainer.getName(), record.getKey(), entryView, finalMergePolicy); try { int partitionId = nodeEngine.getPartitionService().getPartitionId(record.getKey()); Future f = nodeEngine.getOperationService().invokeOnPartition(SERVICE_NAME, operation, partitionId); f.get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } });
1 (no label)
hazelcast_src_main_java_com_hazelcast_map_MapService.java
812
@Entity @Table(name = "BLC_OFFER") @Inheritance(strategy=InheritanceType.JOINED) @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements") @AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "OfferImpl_baseOffer") @SQLDelete(sql="UPDATE BLC_OFFER SET ARCHIVED = 'Y' WHERE OFFER_ID = ?") public class OfferImpl implements Offer, Status, AdminMainEntity { public static final long serialVersionUID = 1L; @Id @GeneratedValue(generator= "OfferId") @GenericGenerator( name="OfferId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="OfferImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.core.offer.domain.OfferImpl") } ) @Column(name = "OFFER_ID") @AdminPresentation(friendlyName = "OfferImpl_Offer_Id", visibility = VisibilityEnum.HIDDEN_ALL) protected Long id; @OneToMany(mappedBy = "offer", targetEntity = OfferCodeImpl.class, cascade = { CascadeType.ALL }, orphanRemoval = true) @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "blStandardElements") @BatchSize(size = 50) @AdminPresentationCollection(addType = AddMethodType.PERSIST, friendlyName = "offerCodeTitle", order = 1, tab = Presentation.Tab.Name.Codes, tabOrder = Presentation.Tab.Order.Codes) protected List<OfferCode> offerCodes = new ArrayList<OfferCode>(100); @Column(name = "OFFER_NAME", nullable=false) @Index(name="OFFER_NAME_INDEX", columnNames={"OFFER_NAME"}) @AdminPresentation(friendlyName = "OfferImpl_Offer_Name", order = 1000, group = Presentation.Group.Name.Description, groupOrder = Presentation.Group.Order.Description, prominent = true, gridOrder = 1) protected String name; @Column(name = "OFFER_DESCRIPTION") @AdminPresentation(friendlyName = "OfferImpl_Offer_Description", order = 2000, group = Presentation.Group.Name.Description, groupOrder = Presentation.Group.Order.Description, prominent = true, gridOrder = 2, largeEntry = true) protected String description; @Column(name = "MARKETING_MESSASGE") @Index(name = "OFFER_MARKETING_MESSAGE_INDEX", columnNames = { "MARKETING_MESSASGE" }) @AdminPresentation(friendlyName = "OfferImpl_marketingMessage", order = 6000, group = Presentation.Group.Name.Description, groupOrder = Presentation.Group.Order.Description, translatable = true) protected String marketingMessage; @Column(name = "OFFER_TYPE", nullable=false) @Index(name="OFFER_TYPE_INDEX", columnNames={"OFFER_TYPE"}) @AdminPresentation(friendlyName = "OfferImpl_Offer_Type", order = 3000, group = Presentation.Group.Name.Description, groupOrder = Presentation.Group.Order.Description, prominent = true, gridOrder = 3, fieldType=SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration="org.broadleafcommerce.core.offer.service.type.OfferType") protected String type; @Column(name = "OFFER_DISCOUNT_TYPE") @Index(name="OFFER_DISCOUNT_INDEX", columnNames={"OFFER_DISCOUNT_TYPE"}) @AdminPresentation(friendlyName = "OfferImpl_Offer_Discount_Type", order = 1000, group = Presentation.Group.Name.Amount, groupOrder = Presentation.Group.Order.Amount, requiredOverride = RequiredOverride.REQUIRED, fieldType=SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration="org.broadleafcommerce.core.offer.service.type.OfferDiscountType") protected String discountType; @Column(name = "OFFER_VALUE", nullable=false, precision=19, scale=5) @AdminPresentation(friendlyName = "OfferImpl_Offer_Value", order = 2000, group = Presentation.Group.Name.Amount, groupOrder = 
Presentation.Group.Order.Amount, prominent = true, gridOrder = 4) protected BigDecimal value; @Column(name = "OFFER_PRIORITY") @AdminPresentation(friendlyName = "OfferImpl_Offer_Priority", order = 7, tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced) protected Integer priority; @Column(name = "START_DATE") @AdminPresentation(friendlyName = "OfferImpl_Offer_Start_Date", order = 1, group = Presentation.Group.Name.ActivityRange, groupOrder = Presentation.Group.Order.ActivityRange) protected Date startDate; @Column(name = "END_DATE") @AdminPresentation(friendlyName = "OfferImpl_Offer_End_Date", order = 2, group = Presentation.Group.Name.ActivityRange, groupOrder = Presentation.Group.Order.ActivityRange) protected Date endDate; @Column(name = "STACKABLE") protected Boolean stackable = true; @Column(name = "TARGET_SYSTEM") @AdminPresentation(friendlyName = "OfferImpl_Offer_Target_System", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced) protected String targetSystem; @Column(name = "APPLY_TO_SALE_PRICE") @AdminPresentation(friendlyName = "OfferImpl_Apply_To_Sale_Price", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced) protected Boolean applyToSalePrice = false; @Column(name = "APPLIES_TO_RULES", length = Integer.MAX_VALUE - 1) @AdminPresentation(excluded = true) @Lob @Type(type = "org.hibernate.type.StringClobType") @Deprecated protected String appliesToOrderRules; @Column(name = "APPLIES_WHEN_RULES", length = Integer.MAX_VALUE - 1) @AdminPresentation(excluded = true) @Lob @Type(type = "org.hibernate.type.StringClobType") @Deprecated protected String appliesToCustomerRules; @Column(name = "APPLY_OFFER_TO_MARKED_ITEMS") @AdminPresentation(excluded = true) @Deprecated protected boolean applyDiscountToMarkedItems; /** * No offers can be applied on top of this offer; * If false, stackable has to be false also */ @Column(name = "COMBINABLE_WITH_OTHER_OFFERS") @AdminPresentation(friendlyName = "OfferImpl_Offer_Combinable", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced) protected Boolean combinableWithOtherOffers = true; @Column(name = "OFFER_DELIVERY_TYPE") @AdminPresentation(excluded = true) protected String deliveryType; @Column(name = "AUTOMATICALLY_ADDED") @AdminPresentation(friendlyName = "OfferImpl_Offer_Automatically_Added", order = 5000, group = Presentation.Group.Name.Description, groupOrder = Presentation.Group.Order.Description, fieldType = SupportedFieldType.BOOLEAN) protected Boolean automaticallyAdded = false; @Column(name = "MAX_USES") @AdminPresentation(friendlyName = "OfferImpl_Offer_Max_Uses_Per_Order", order = 7, tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced) protected Integer maxUsesPerOrder; @Column(name = "MAX_USES_PER_CUSTOMER") @AdminPresentation(friendlyName = "OfferImpl_Max_Uses_Per_Customer", order = 8, tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = 
Presentation.Group.Order.Advanced) protected Long maxUsesPerCustomer; @Column(name = "USES") @AdminPresentation(friendlyName = "OfferImpl_Offer_Current_Uses", visibility = VisibilityEnum.HIDDEN_ALL) @Deprecated protected int uses; @Column(name = "OFFER_ITEM_QUALIFIER_RULE") @AdminPresentation(friendlyName = "OfferImpl_Item_Qualifier_Rule", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced, fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.core.offer.service.type.OfferItemRestrictionRuleType") protected String offerItemQualifierRuleType; @Column(name = "OFFER_ITEM_TARGET_RULE") @AdminPresentation(friendlyName = "OfferImpl_Item_Target_Rule", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced, fieldType = SupportedFieldType.BROADLEAF_ENUMERATION, broadleafEnumeration = "org.broadleafcommerce.core.offer.service.type.OfferItemRestrictionRuleType") protected String offerItemTargetRuleType; @OneToMany(fetch = FetchType.LAZY, targetEntity = OfferItemCriteriaImpl.class, cascade={CascadeType.ALL}) @JoinTable(name = "BLC_QUAL_CRIT_OFFER_XREF", joinColumns = @JoinColumn(name = "OFFER_ID"), inverseJoinColumns = @JoinColumn(name = "OFFER_ITEM_CRITERIA_ID")) @Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN}) @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements") @AdminPresentation(friendlyName = "OfferImpl_Qualifying_Item_Rule", group = Presentation.Group.Name.Qualifiers, groupOrder = Presentation.Group.Order.Qualifiers, fieldType = SupportedFieldType.RULE_WITH_QUANTITY, ruleIdentifier = RuleIdentifier.ORDERITEM) protected Set<OfferItemCriteria> qualifyingItemCriteria = new HashSet<OfferItemCriteria>(); @OneToMany(fetch = FetchType.LAZY, targetEntity = OfferItemCriteriaImpl.class, cascade={CascadeType.ALL}) @JoinTable(name = "BLC_TAR_CRIT_OFFER_XREF", joinColumns = @JoinColumn(name = "OFFER_ID"), inverseJoinColumns = @JoinColumn(name = "OFFER_ITEM_CRITERIA_ID")) @Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN}) @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements") @AdminPresentation(friendlyName = "OfferImpl_Target_Item_Rule", group = Presentation.Group.Name.ItemTarget, groupOrder = Presentation.Group.Order.ItemTarget, fieldType = SupportedFieldType.RULE_WITH_QUANTITY, ruleIdentifier = RuleIdentifier.ORDERITEM, validationConfigurations = {@ValidationConfiguration(validationImplementation="blTargetItemRulesValidator")}) protected Set<OfferItemCriteria> targetItemCriteria = new HashSet<OfferItemCriteria>(); @Column(name = "TOTALITARIAN_OFFER") @AdminPresentation(friendlyName = "OfferImpl_Totalitarian_Offer", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced, visibility = VisibilityEnum.HIDDEN_ALL) protected Boolean totalitarianOffer = false; @ManyToMany(targetEntity = OfferRuleImpl.class, cascade = {CascadeType.ALL}) @JoinTable(name = "BLC_OFFER_RULE_MAP", inverseJoinColumns = @JoinColumn(name = "OFFER_RULE_ID", referencedColumnName = "OFFER_RULE_ID")) @Cascade(value={org.hibernate.annotations.CascadeType.ALL, 
org.hibernate.annotations.CascadeType.DELETE_ORPHAN}) @MapKeyColumn(name = "MAP_KEY", nullable = false) @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements") @AdminPresentationMapFields( mapDisplayFields = { @AdminPresentationMapField( fieldName = RuleIdentifier.CUSTOMER_FIELD_KEY, fieldPresentation = @AdminPresentation(fieldType = SupportedFieldType.RULE_SIMPLE, group = Presentation.Group.Name.Qualifiers, groupOrder = Presentation.Group.Order.Qualifiers, ruleIdentifier = RuleIdentifier.CUSTOMER, friendlyName = "OfferImpl_Customer_Rule") ), @AdminPresentationMapField( fieldName = RuleIdentifier.TIME_FIELD_KEY, fieldPresentation = @AdminPresentation(fieldType = SupportedFieldType.RULE_SIMPLE, group = Presentation.Group.Name.ActivityRange, groupOrder = Presentation.Group.Order.ActivityRange, ruleIdentifier = RuleIdentifier.TIME, friendlyName = "OfferImpl_Time_Rule") ), @AdminPresentationMapField( fieldName = RuleIdentifier.ORDER_FIELD_KEY, fieldPresentation = @AdminPresentation(fieldType = SupportedFieldType.RULE_SIMPLE, group = Presentation.Group.Name.Qualifiers, groupOrder = Presentation.Group.Order.Qualifiers, ruleIdentifier = RuleIdentifier.ORDER, friendlyName = "OfferImpl_Order_Rule") ), @AdminPresentationMapField( fieldName = RuleIdentifier.FULFILLMENT_GROUP_FIELD_KEY, fieldPresentation = @AdminPresentation(fieldType = SupportedFieldType.RULE_SIMPLE, group = Presentation.Group.Name.Qualifiers, groupOrder = Presentation.Group.Order.Qualifiers, ruleIdentifier = RuleIdentifier.FULFILLMENTGROUP, friendlyName = "OfferImpl_FG_Rule") ) } ) Map<String, OfferRule> offerMatchRules = new HashMap<String, OfferRule>(); @Column(name = "USE_NEW_FORMAT") @AdminPresentation(friendlyName = "OfferImpl_Treat_As_New_Format", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced, visibility = VisibilityEnum.HIDDEN_ALL) protected Boolean treatAsNewFormat = false; @Column(name = "QUALIFYING_ITEM_MIN_TOTAL", precision=19, scale=5) @AdminPresentation(friendlyName="OfferImpl_Qualifying_Item_Subtotal", tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced, group = Presentation.Group.Name.Advanced, groupOrder = Presentation.Group.Order.Advanced) protected BigDecimal qualifyingItemSubTotal; @Embedded protected ArchiveStatus archiveStatus = new ArchiveStatus(); @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public String getName() { return name; } @Override public void setName(String name) { this.name = name; } @Override public String getDescription() { return description; } @Override public void setDescription(String description) { this.description = description; } @Override public OfferType getType() { return OfferType.getInstance(type); } @Override public void setType(OfferType type) { this.type = type.getType(); } @Override public OfferDiscountType getDiscountType() { return OfferDiscountType.getInstance(discountType); } @Override public void setDiscountType(OfferDiscountType discountType) { this.discountType = discountType.getType(); } @Override public OfferItemRestrictionRuleType getOfferItemQualifierRuleType() { OfferItemRestrictionRuleType returnType = OfferItemRestrictionRuleType.getInstance(offerItemQualifierRuleType); if (returnType == null) { return OfferItemRestrictionRuleType.NONE; } else { return returnType; } } @Override public void 
setOfferItemQualifierRuleType(OfferItemRestrictionRuleType restrictionRuleType) { this.offerItemQualifierRuleType = restrictionRuleType.getType(); } @Override public OfferItemRestrictionRuleType getOfferItemTargetRuleType() { OfferItemRestrictionRuleType returnType = OfferItemRestrictionRuleType.getInstance(offerItemTargetRuleType); if (returnType == null) { return OfferItemRestrictionRuleType.NONE; } else { return returnType; } } @Override public void setOfferItemTargetRuleType(OfferItemRestrictionRuleType restrictionRuleType) { this.offerItemTargetRuleType = restrictionRuleType.getType(); } @Override public BigDecimal getValue() { return value; } @Override public void setValue(BigDecimal value) { this.value = value; } @Override public int getPriority() { return priority == null ? 0 : priority; } @Override public void setPriority(int priority) { this.priority = priority; } @Override public Date getStartDate() { if ('Y'==getArchived()) { return null; } return startDate; } @Override public void setStartDate(Date startDate) { this.startDate = startDate; } @Override public Date getEndDate() { return endDate; } @Override public void setEndDate(Date endDate) { this.endDate = endDate; } /** * Returns true if this offer can be stacked on top of another offer. Stackable is evaluated * against offers with the same offer type. * * @return true if stackable, otherwise false */ @Override public boolean isStackable() { return stackable == null ? false : stackable; } /** * Sets the stackable value for this offer. * * @param stackable */ @Override public void setStackable(boolean stackable) { this.stackable = stackable; } @Deprecated @JsonIgnore public boolean getStackable(){ return stackable; } @Override public String getTargetSystem() { return targetSystem; } @Override public void setTargetSystem(String targetSystem) { this.targetSystem = targetSystem; } @Override public boolean getApplyDiscountToSalePrice() { return applyToSalePrice == null ? false : applyToSalePrice; } @Override public void setApplyDiscountToSalePrice(boolean applyToSalePrice) { this.applyToSalePrice=applyToSalePrice; } @Override @Deprecated public String getAppliesToOrderRules() { return appliesToOrderRules; } @Override @Deprecated public void setAppliesToOrderRules(String appliesToOrderRules) { this.appliesToOrderRules = appliesToOrderRules; } @Override @Deprecated public String getAppliesToCustomerRules() { return appliesToCustomerRules; } @Override @Deprecated public void setAppliesToCustomerRules(String appliesToCustomerRules) { this.appliesToCustomerRules = appliesToCustomerRules; } @Override @Deprecated public boolean isApplyDiscountToMarkedItems() { return applyDiscountToMarkedItems; } @Deprecated @JsonIgnore public boolean getApplyDiscountToMarkedItems() { return applyDiscountToMarkedItems; } @Override @Deprecated public void setApplyDiscountToMarkedItems(boolean applyDiscountToMarkedItems) { this.applyDiscountToMarkedItems = applyDiscountToMarkedItems; } /** * Returns true if this offer can be combined with other offers in the order. * * @return true if combinableWithOtherOffers, otherwise false */ @Override public boolean isCombinableWithOtherOffers() { return combinableWithOtherOffers == null ? false : combinableWithOtherOffers; } /** * Sets the combinableWithOtherOffers value for this offer. 
* * @param combinableWithOtherOffers */ @Override public void setCombinableWithOtherOffers(boolean combinableWithOtherOffers) { this.combinableWithOtherOffers = combinableWithOtherOffers; } @Deprecated @JsonIgnore public boolean getCombinableWithOtherOffers() { return combinableWithOtherOffers; } @Override public boolean isAutomaticallyAdded() { if (automaticallyAdded == null) { if (deliveryType != null) { OfferDeliveryType offerDeliveryType = OfferDeliveryType.getInstance(deliveryType); return OfferDeliveryType.AUTOMATIC.equals(offerDeliveryType); } return false; } return automaticallyAdded; } @Override public void setAutomaticallyAdded(boolean automaticallyAdded) { this.automaticallyAdded = automaticallyAdded; } @Override @Deprecated @JsonIgnore public OfferDeliveryType getDeliveryType() { if (deliveryType == null) { if (isAutomaticallyAdded()) { return OfferDeliveryType.AUTOMATIC; } else { return OfferDeliveryType.MANUAL; } } return OfferDeliveryType.getInstance(deliveryType); } @Override public void setDeliveryType(OfferDeliveryType deliveryType) { this.deliveryType = deliveryType.getType(); } @Override public Long getMaxUsesPerCustomer() { return maxUsesPerCustomer == null ? 0 : maxUsesPerCustomer; } @Override public void setMaxUsesPerCustomer(Long maxUsesPerCustomer) { this.maxUsesPerCustomer = maxUsesPerCustomer; } @Override public boolean isUnlimitedUsePerCustomer() { return getMaxUsesPerCustomer() == 0; } @Override public boolean isLimitedUsePerCustomer() { return getMaxUsesPerCustomer() > 0; } @Override public int getMaxUsesPerOrder() { return maxUsesPerOrder == null ? 0 : maxUsesPerOrder; } @Override public void setMaxUsesPerOrder(int maxUsesPerOrder) { this.maxUsesPerOrder = maxUsesPerOrder; } @Override public boolean isUnlimitedUsePerOrder() { return getMaxUsesPerOrder() == 0; } @Override public boolean isLimitedUsePerOrder() { return getMaxUsesPerOrder() > 0; } @Override @Deprecated public int getMaxUses() { return getMaxUsesPerOrder(); } @Override public void setMaxUses(int maxUses) { setMaxUsesPerOrder(maxUses); } @Override @Deprecated public int getUses() { return uses; } @Override public String getMarketingMessage() { return DynamicTranslationProvider.getValue(this, "marketingMessage", marketingMessage); } @Override public void setMarketingMessage(String marketingMessage) { this.marketingMessage = marketingMessage; } @Override @Deprecated public void setUses(int uses) { this.uses = uses; } @Override public Set<OfferItemCriteria> getQualifyingItemCriteria() { return qualifyingItemCriteria; } @Override public void setQualifyingItemCriteria(Set<OfferItemCriteria> qualifyingItemCriteria) { this.qualifyingItemCriteria = qualifyingItemCriteria; } @Override public Set<OfferItemCriteria> getTargetItemCriteria() { return targetItemCriteria; } @Override public void setTargetItemCriteria(Set<OfferItemCriteria> targetItemCriteria) { this.targetItemCriteria = targetItemCriteria; } @Override public Boolean isTotalitarianOffer() { if (totalitarianOffer == null) { return false; } else { return totalitarianOffer.booleanValue(); } } @Override public void setTotalitarianOffer(Boolean totalitarianOffer) { if (totalitarianOffer == null) { this.totalitarianOffer = false; } else { this.totalitarianOffer = totalitarianOffer; } } @Override public Map<String, OfferRule> getOfferMatchRules() { if (offerMatchRules == null) { offerMatchRules = new HashMap<String, OfferRule>(); } return offerMatchRules; } @Override public void setOfferMatchRules(Map<String, OfferRule> offerMatchRules) { 
this.offerMatchRules = offerMatchRules; } @Override public Boolean getTreatAsNewFormat() { return treatAsNewFormat; } @Override public void setTreatAsNewFormat(Boolean treatAsNewFormat) { this.treatAsNewFormat = treatAsNewFormat; } @Override public Character getArchived() { if (archiveStatus == null) { archiveStatus = new ArchiveStatus(); } return archiveStatus.getArchived(); } @Override public void setArchived(Character archived) { if (archiveStatus == null) { archiveStatus = new ArchiveStatus(); } archiveStatus.setArchived(archived); } @Override public boolean isActive() { return DateUtil.isActive(startDate, endDate, true) && 'Y'!=getArchived(); } @Override public Money getQualifyingItemSubTotal() { return qualifyingItemSubTotal == null ? null : BroadleafCurrencyUtils.getMoney(qualifyingItemSubTotal, null); } @Override public void setQualifyingItemSubTotal(Money qualifyingItemSubTotal) { this.qualifyingItemSubTotal = Money.toAmount(qualifyingItemSubTotal); } @Override public List<OfferCode> getOfferCodes() { return offerCodes; } @Override public void setOfferCodes(List<OfferCode> offerCodes) { this.offerCodes = offerCodes; } @Override public String getMainEntityName() { return getName(); } @Override public int hashCode() { return new HashCodeBuilder() .append(name) .append(startDate) .append(type) .append(value) .build(); } @Override public boolean equals(Object o) { if (o instanceof OfferImpl) { OfferImpl that = (OfferImpl) o; return new EqualsBuilder() .append(this.id, that.id) .append(this.name, that.name) .append(this.startDate, that.startDate) .append(this.type, that.type) .append(this.value, that.value) .build(); } return false; } public static class Presentation { public static class Tab { public static class Name { public static final String Codes = "OfferImpl_Codes_Tab"; public static final String Advanced = "OfferImpl_Advanced_Tab"; } public static class Order { public static final int Codes = 1000; public static final int Advanced = 2000; } } public static class Group { public static class Name { public static final String Description = "OfferImpl_Description"; public static final String Amount = "OfferImpl_Amount"; public static final String ActivityRange = "OfferImpl_Activity_Range"; public static final String Qualifiers = "OfferImpl_Qualifiers"; public static final String ItemTarget = "OfferImpl_Item_Target"; public static final String Advanced = "OfferImpl_Advanced"; } public static class Order { public static final int Description = 1000; public static final int Amount = 2000; public static final int ActivityRange = 3000; public static final int Qualifiers = 4000; public static final int ItemTarget = 5000; public static final int Advanced = 1000; } } } }
1 (no label)
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferImpl.java
1,546
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, WritableComparable> { private String key; private boolean isVertex; private WritableHandler handler; private SafeMapperOutputs outputs; @Override public void setup(final Mapper.Context context) throws IOException, InterruptedException { this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class); this.key = context.getConfiguration().get(KEY); this.handler = new WritableHandler(context.getConfiguration().getClass(TYPE, Text.class, WritableComparable.class)); this.outputs = new SafeMapperOutputs(context); } @Override public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, WritableComparable>.Context context) throws IOException, InterruptedException { this.outputs.write(Tokens.GRAPH, NullWritable.get(), value); if (this.isVertex) { if (value.hasPaths()) { WritableComparable writable = this.handler.set(ElementPicker.getProperty(value, this.key)); for (int i = 0; i < value.pathCount(); i++) { this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), writable); } DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L); } } else { long edgesProcessed = 0; for (final Edge e : value.getEdges(Direction.OUT)) { final StandardFaunusEdge edge = (StandardFaunusEdge) e; if (edge.hasPaths()) { WritableComparable writable = this.handler.set(ElementPicker.getProperty(edge, this.key)); for (int i = 0; i < edge.pathCount(); i++) { this.outputs.write(Tokens.SIDEEFFECT, NullWritable.get(), writable); } edgesProcessed++; } } DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed); } } @Override public void cleanup(final Mapper<NullWritable, FaunusVertex, NullWritable, WritableComparable>.Context context) throws IOException, InterruptedException { this.outputs.close(); } }
1 (no label)
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_PropertyMap.java
436
public enum SupportedCriteriaFieldType { RULE, RULE_MAP }
0 (true)
common_src_main_java_org_broadleafcommerce_common_presentation_client_SupportedCriteriaFieldType.java
1,704
public abstract class OServerCommandAbstract implements OServerCommand { protected OServer server; /** * Default constructor. Disable cache of content at HTTP level */ public OServerCommandAbstract() { } @Override public boolean beforeExecute(final OHttpRequest iRequest, OHttpResponse iResponse) throws IOException { setNoCache(iResponse); return true; } @Override public boolean afterExecute(final OHttpRequest iRequest, OHttpResponse iResponse) throws IOException { return true; } protected String[] checkSyntax(final String iURL, final int iArgumentCount, final String iSyntax) { final List<String> parts = OStringSerializerHelper.smartSplit(iURL, OHttpResponse.URL_SEPARATOR, 1, -1, true, true, false); if (parts.size() < iArgumentCount) throw new OHttpRequestException(iSyntax); final String[] array = new String[parts.size()]; return decodeParts(parts.toArray(array)); } /** * urldecode each request part return the same array instance * * @param parts * @return */ private String[] decodeParts(final String[] parts) { try { if (parts == null) return null; for (int i = 0; i < parts.length; i++) { String part = parts[i]; if (part == null) continue; // NEEDS DECODING part = java.net.URLDecoder.decode(part, "UTF-8"); parts[i] = part; } return parts; } catch (Exception ex) { throw new OException(ex); } } public OServer getServer() { return server; } public void configure(final OServer server) { this.server = server; } protected void setNoCache(final OHttpResponse iResponse) { // DEFAULT = DON'T CACHE iResponse.setHeader("Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache"); } }
1 (no label)
server_src_main_java_com_orientechnologies_orient_server_network_protocol_http_command_OServerCommandAbstract.java
649
public class GetIndexTemplatesAction extends IndicesAction<GetIndexTemplatesRequest, GetIndexTemplatesResponse, GetIndexTemplatesRequestBuilder> { public static final GetIndexTemplatesAction INSTANCE = new GetIndexTemplatesAction(); public static final String NAME = "indices/template/get"; protected GetIndexTemplatesAction() { super(NAME); } @Override public GetIndexTemplatesResponse newResponse() { return new GetIndexTemplatesResponse(); } @Override public GetIndexTemplatesRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new GetIndexTemplatesRequestBuilder(client); } }
0 (true)
src_main_java_org_elasticsearch_action_admin_indices_template_get_GetIndexTemplatesAction.java
201
public class OEngineRemote extends OEngineAbstract { public static final String NAME = "remote"; private static final Map<String, OStorageRemote> sharedStorages = new ConcurrentHashMap<String, OStorageRemote>(); public OEngineRemote() { } public OStorage createStorage(final String iURL, final Map<String, String> iConfiguration) { try { synchronized (sharedStorages) { OStorageRemote sharedStorage = sharedStorages.get(iURL); if (sharedStorage == null) { sharedStorage = new OStorageRemote(null, iURL, "rw"); sharedStorages.put(iURL, sharedStorage); } return new OStorageRemoteThread(sharedStorage); } } catch (Throwable t) { OLogManager.instance().error(this, "Error on opening database: " + iURL, t, ODatabaseException.class); } return null; } public void removeStorage(final String iURL) { synchronized (sharedStorages) { sharedStorages.remove(iURL); } } @Override public void removeStorage(final OStorage iStorage) { synchronized (sharedStorages) { for (Entry<String, OStorageRemote> entry : sharedStorages.entrySet()) { if (entry.getValue() == iStorage) { sharedStorages.remove(entry.getKey()); break; } } } } @Override public void shutdown() { super.shutdown(); sharedStorages.clear(); } public String getName() { return NAME; } public boolean isShared() { return false; } }
0 (true)
client_src_main_java_com_orientechnologies_orient_client_remote_OEngineRemote.java
801
public static class DocBuilder implements ToXContent { private BytesReference doc; public DocBuilder setDoc(BytesReference doc) { this.doc = doc; return this; } public DocBuilder setDoc(String field, Object value) { Map<String, Object> values = new HashMap<String, Object>(2); values.put(field, value); setDoc(values); return this; } public DocBuilder setDoc(String doc) { this.doc = new BytesArray(doc); return this; } public DocBuilder setDoc(XContentBuilder doc) { this.doc = doc.bytes(); return this; } public DocBuilder setDoc(Map doc) { return setDoc(doc, PercolateRequest.contentType); } public DocBuilder setDoc(Map doc, XContentType contentType) { try { return setDoc(XContentFactory.contentBuilder(contentType).map(doc)); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + doc + "]", e); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { XContentType contentType = XContentFactory.xContentType(doc); if (contentType == builder.contentType()) { builder.rawField("doc", doc); } else { XContentParser parser = XContentFactory.xContent(contentType).createParser(doc); try { parser.nextToken(); builder.field("doc"); builder.copyCurrentStructure(parser); } finally { parser.close(); } } return builder; } }
0 (true)
src_main_java_org_elasticsearch_action_percolate_PercolateSourceBuilder.java
1,131
public static class Factory implements NativeScriptFactory { @Override public ExecutableScript newScript(@Nullable Map<String, Object> params) { return new NativePayloadSumNoRecordScoreScript(params); } }
0 (true)
src_test_java_org_elasticsearch_benchmark_scripts_score_script_NativePayloadSumNoRecordScoreScript.java
11
@SuppressWarnings({ "unchecked", "serial" }) public abstract class OMVRBTree<K, V> extends AbstractMap<K, V> implements ONavigableMap<K, V>, Cloneable, java.io.Serializable { private static final OAlwaysLessKey ALWAYS_LESS_KEY = new OAlwaysLessKey(); private static final OAlwaysGreaterKey ALWAYS_GREATER_KEY = new OAlwaysGreaterKey(); protected boolean pageItemFound = false; protected int pageItemComparator = 0; protected int pageIndex = -1; protected float pageLoadFactor = 0.7f; /** * The comparator used to maintain order in this tree map, or null if it uses the natural ordering of its keys. * * @serial */ protected final Comparator<? super K> comparator; protected transient OMVRBTreeEntry<K, V> root = null; /** * The number of structural modifications to the tree. */ transient int modCount = 0; protected transient boolean runtimeCheckEnabled = false; protected transient boolean debug = false; protected Object lastSearchKey; protected OMVRBTreeEntry<K, V> lastSearchNode; protected boolean lastSearchFound = false; protected int lastSearchIndex = -1; protected int keySize = 1; /** * Indicates search behavior in case of {@link OCompositeKey} keys that have less amount of internal keys are used, whether lowest * or highest partially matched key should be used. Such keys is allowed to use only in * * @link OMVRBTree#subMap(K, boolean, K, boolean)}, {@link OMVRBTree#tailMap(Object, boolean)} and * {@link OMVRBTree#headMap(Object, boolean)} . */ public static enum PartialSearchMode { /** * Any partially matched key will be used as search result. */ NONE, /** * The biggest partially matched key will be used as search result. */ HIGHEST_BOUNDARY, /** * The smallest partially matched key will be used as search result. */ LOWEST_BOUNDARY } /** * Constructs a new, empty tree map, using the natural ordering of its keys. All keys inserted into the map must implement the * {@link Comparable} interface. Furthermore, all such keys must be <i>mutually comparable</i>: <tt>k1.compareTo(k2)</tt> must not * throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> and <tt>k2</tt> in the map. If the user attempts to put a key into * the map that violates this constraint (for example, the user attempts to put a string key into a map whose keys are integers), * the <tt>put(Object key, Object value)</tt> call will throw a <tt>ClassCastException</tt>. */ public OMVRBTree() { this(1); } public OMVRBTree(int keySize) { comparator = ODefaultComparator.INSTANCE; init(); this.keySize = keySize; } /** * Constructs a new, empty tree map, ordered according to the given comparator. All keys inserted into the map must be <i>mutually * comparable</i> by the given comparator: <tt>comparator.compare(k1, * k2)</tt> must not throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> and <tt>k2</tt> in the map. If the user attempts * to put a key into the map that violates this constraint, the <tt>put(Object * key, Object value)</tt> call will throw a <tt>ClassCastException</tt>. * * @param iComparator * the comparator that will be used to order this map. If <tt>null</tt>, the {@linkplain Comparable natural ordering} of * the keys will be used. */ public OMVRBTree(final Comparator<? super K> iComparator) { init(); this.comparator = iComparator; } /** * Constructs a new tree map containing the same mappings as the given map, ordered according to the <i>natural ordering</i> of * its keys. All keys inserted into the new map must implement the {@link Comparable} interface. 
Furthermore, all such keys must * be <i>mutually comparable</i>: <tt>k1.compareTo(k2)</tt> must not throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> * and <tt>k2</tt> in the map. This method runs in n*log(n) time. * * @param m * the map whose mappings are to be placed in this map * @throws ClassCastException * if the keys in m are not {@link Comparable}, or are not mutually comparable * @throws NullPointerException * if the specified map is null */ public OMVRBTree(final Map<? extends K, ? extends V> m) { comparator = ODefaultComparator.INSTANCE; init(); putAll(m); } /** * Constructs a new tree map containing the same mappings and using the same ordering as the specified sorted map. This method * runs in linear time. * * @param m * the sorted map whose mappings are to be placed in this map, and whose comparator is to be used to sort this map * @throws NullPointerException * if the specified map is null */ public OMVRBTree(final SortedMap<K, ? extends V> m) { init(); comparator = m.comparator(); try { buildFromSorted(m.size(), m.entrySet().iterator(), null, null); } catch (java.io.IOException cannotHappen) { } catch (ClassNotFoundException cannotHappen) { } } /** * Create a new entry with the first key/value to handle. */ protected abstract OMVRBTreeEntry<K, V> createEntry(final K key, final V value); /** * Create a new node with the same parent of the node is splitting. */ protected abstract OMVRBTreeEntry<K, V> createEntry(final OMVRBTreeEntry<K, V> parent); protected abstract int getTreeSize(); public int getNodes() { int counter = -1; OMVRBTreeEntry<K, V> entry = getFirstEntry(); while (entry != null) { entry = successor(entry); counter++; } return counter; } protected abstract void setSize(int iSize); public abstract int getDefaultPageSize(); /** * Returns <tt>true</tt> if this map contains a mapping for the specified key. * * @param key * key whose presence in this map is to be tested * @return <tt>true</tt> if this map contains a mapping for the specified key * @throws ClassCastException * if the specified key cannot be compared with the keys currently in the map * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys */ @Override public boolean containsKey(final Object key) { return getEntry(key, PartialSearchMode.NONE) != null; } /** * Returns <tt>true</tt> if this map maps one or more keys to the specified value. More formally, returns <tt>true</tt> if and * only if this map contains at least one mapping to a value <tt>v</tt> such that * <tt>(value==null ? v==null : value.equals(v))</tt>. This operation will probably require time linear in the map size for most * implementations. * * @param value * value whose presence in this map is to be tested * @return <tt>true</tt> if a mapping to <tt>value</tt> exists; <tt>false</tt> otherwise * @since 1.2 */ @Override public boolean containsValue(final Object value) { for (OMVRBTreeEntry<K, V> e = getFirstEntry(); e != null; e = next(e)) if (valEquals(value, e.getValue())) return true; return false; } @Override public int size() { return getTreeSize(); } /** * Returns the value to which the specified key is mapped, or {@code null} if this map contains no mapping for the key. * * <p> * More formally, if this map contains a mapping from a key {@code k} to a value {@code v} such that {@code key} compares equal to * {@code k} according to the map's ordering, then this method returns {@code v}; otherwise it returns {@code null}. 
(There can be * at most one such mapping.) * * <p> * A return value of {@code null} does not <i>necessarily</i> indicate that the map contains no mapping for the key; it's also * possible that the map explicitly maps the key to {@code null}. The {@link #containsKey containsKey} operation may be used to * distinguish these two cases. * * @throws ClassCastException * if the specified key cannot be compared with the keys currently in the map * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys */ @Override public V get(final Object key) { if (getTreeSize() == 0) return null; OMVRBTreeEntry<K, V> entry = null; // TRY TO GET LATEST SEARCH final OMVRBTreeEntry<K, V> node = getLastSearchNodeForSameKey(key); if (node != null) { // SAME SEARCH OF PREVIOUS ONE: REUSE LAST RESULT? if (lastSearchFound) // REUSE LAST RESULT, OTHERWISE THE KEY NOT EXISTS return node.getValue(lastSearchIndex); } else // SEARCH THE ITEM entry = getEntry(key, PartialSearchMode.NONE); return entry == null ? null : entry.getValue(); } public Comparator<? super K> comparator() { return comparator; } /** * @throws NoSuchElementException * {@inheritDoc} */ public K firstKey() { return key(getFirstEntry()); } /** * @throws NoSuchElementException * {@inheritDoc} */ public K lastKey() { return key(getLastEntry()); } /** * Copies all of the mappings from the specified map to this map. These mappings replace any mappings that this map had for any of * the keys currently in the specified map. * * @param map * mappings to be stored in this map * @throws ClassCastException * if the class of a key or value in the specified map prevents it from being stored in this map * @throws NullPointerException * if the specified map is null or the specified map contains a null key and this map does not permit null keys */ @Override public void putAll(final Map<? extends K, ? extends V> map) { int mapSize = map.size(); if (getTreeSize() == 0 && mapSize != 0 && map instanceof SortedMap) { Comparator<?> c = ((SortedMap<? extends K, ? extends V>) map).comparator(); if (c == comparator || (c != null && c.equals(comparator))) { ++modCount; try { buildFromSorted(mapSize, map.entrySet().iterator(), null, null); } catch (java.io.IOException cannotHappen) { } catch (ClassNotFoundException cannotHappen) { } return; } } super.putAll(map); } /** * Returns this map's entry for the given key, or <tt>null</tt> if the map does not contain an entry for the key. * * In case of {@link OCompositeKey} keys you can specify which key can be used: lowest, highest, any. * * @param key * Key to search. * @param partialSearchMode * Which key can be used in case of {@link OCompositeKey} key is passed in. * * @return this map's entry for the given key, or <tt>null</tt> if the map does not contain an entry for the key * @throws ClassCastException * if the specified key cannot be compared with the keys currently in the map * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys */ public final OMVRBTreeEntry<K, V> getEntry(final Object key, final PartialSearchMode partialSearchMode) { return getEntry(key, false, partialSearchMode); } final OMVRBTreeEntry<K, V> getEntry(final Object key, final boolean iGetContainer, final PartialSearchMode partialSearchMode) { if (key == null) return setLastSearchNode(null, null); pageItemFound = false; if (getTreeSize() == 0) { pageIndex = 0; return iGetContainer ? 
root : null; } final K k; if (keySize == 1) k = (K) key; else if (((OCompositeKey) key).getKeys().size() == keySize) k = (K) key; else if (partialSearchMode.equals(PartialSearchMode.NONE)) k = (K) key; else { final OCompositeKey fullKey = new OCompositeKey((Comparable<? super K>) key); int itemsToAdd = keySize - fullKey.getKeys().size(); final Comparable<?> keyItem; if (partialSearchMode.equals(PartialSearchMode.HIGHEST_BOUNDARY)) keyItem = ALWAYS_GREATER_KEY; else keyItem = ALWAYS_LESS_KEY; for (int i = 0; i < itemsToAdd; i++) fullKey.addKey(keyItem); k = (K) fullKey; } OMVRBTreeEntry<K, V> p = getBestEntryPoint(k); checkTreeStructure(p); if (p == null) return setLastSearchNode(key, null); OMVRBTreeEntry<K, V> lastNode = p; OMVRBTreeEntry<K, V> prevNode = null; OMVRBTreeEntry<K, V> tmpNode; int beginKey = -1; try { while (p != null && p.getSize() > 0) { searchNodeCallback(); lastNode = p; beginKey = compare(k, p.getFirstKey()); if (beginKey == 0) { // EXACT MATCH, YOU'RE VERY LUCKY: RETURN THE FIRST KEY WITHOUT SEARCH INSIDE THE NODE pageIndex = 0; pageItemFound = true; pageItemComparator = 0; return setLastSearchNode(key, p); } pageItemComparator = compare(k, p.getLastKey()); if (beginKey < 0) { if (pageItemComparator < 0) { tmpNode = predecessor(p); if (tmpNode != null && tmpNode != prevNode) { // MINOR THAN THE CURRENT: GET THE LEFT NODE prevNode = p; p = tmpNode; continue; } } } else if (beginKey > 0) { if (pageItemComparator > 0) { tmpNode = successor(p); if (tmpNode != null && tmpNode != prevNode) { // MAJOR THAN THE CURRENT: GET THE RIGHT NODE prevNode = p; p = tmpNode; continue; } } } // SEARCH INSIDE THE NODE final V value = lastNode.search(k); // PROBABLY PARTIAL KEY IS FOUND USE SEARCH MODE TO FIND PREFERRED ONE if (key instanceof OCompositeKey) { final OCompositeKey compositeKey = (OCompositeKey) key; if (value != null && compositeKey.getKeys().size() == keySize) { return setLastSearchNode(key, lastNode); } if (partialSearchMode.equals(PartialSearchMode.NONE)) { if (value != null || iGetContainer) return lastNode; else return null; } if (partialSearchMode.equals(PartialSearchMode.HIGHEST_BOUNDARY)) { // FOUNDED ENTRY EITHER GREATER THAN EXISTING ITEM OR ITEM DOES NOT EXIST return adjustHighestPartialSearchResult(iGetContainer, lastNode, compositeKey); } if (partialSearchMode.equals(PartialSearchMode.LOWEST_BOUNDARY)) { return adjustLowestPartialSearchResult(iGetContainer, lastNode, compositeKey); } } if (value != null) { setLastSearchNode(key, lastNode); } if (value != null || iGetContainer) // FOUND: RETURN CURRENT NODE OR AT LEAST THE CONTAINER NODE return lastNode; // NOT FOUND return null; } } finally { checkTreeStructure(p); } return setLastSearchNode(key, null); } private OMVRBTreeEntry<K, V> adjustHighestPartialSearchResult(final boolean iGetContainer, final OMVRBTreeEntry<K, V> lastNode, final OCompositeKey compositeKey) { final int oldPageIndex = pageIndex; final OMVRBTreeEntry<K, V> prevNd = previous(lastNode); if (prevNd == null) { pageIndex = oldPageIndex; pageItemFound = false; if (iGetContainer) return lastNode; return null; } pageItemComparator = compare(prevNd.getKey(), compositeKey); if (pageItemComparator == 0) { pageItemFound = true; return prevNd; } else if (pageItemComparator > 1) { pageItemFound = false; if (iGetContainer) return prevNd; return null; } else { pageIndex = oldPageIndex; pageItemFound = false; if (iGetContainer) return lastNode; return null; } } private OMVRBTreeEntry<K, V> adjustLowestPartialSearchResult(final boolean iGetContainer, 
OMVRBTreeEntry<K, V> lastNode, final OCompositeKey compositeKey) { // RARE CASE WHEN NODE ITSELF DOES CONTAIN KEY, BUT ALL KEYS LESS THAN GIVEN ONE final int oldPageIndex = pageIndex; final OMVRBTreeEntry<K, V> oldNode = lastNode; if (pageIndex >= lastNode.getSize()) { lastNode = next(lastNode); if (lastNode == null) { lastNode = oldNode; pageIndex = oldPageIndex; pageItemFound = false; if (iGetContainer) return lastNode; return null; } } pageItemComparator = compare(lastNode.getKey(), compositeKey); if (pageItemComparator == 0) { pageItemFound = true; return lastNode; } else { pageItemFound = false; if (iGetContainer) return lastNode; return null; } } /** * Basic implementation that returns the root node. */ protected OMVRBTreeEntry<K, V> getBestEntryPoint(final K key) { return root; } /** * Gets the entry corresponding to the specified key; if no such entry exists, returns the entry for the least key greater than * the specified key; if no such entry exists (i.e., the greatest key in the Tree is less than the specified key), returns * <tt>null</tt>. * * @param key * Key to search. * @param partialSearchMode * In case of {@link OCompositeKey} key is passed in this parameter will be used to find preferred one. */ public OMVRBTreeEntry<K, V> getCeilingEntry(final K key, final PartialSearchMode partialSearchMode) { OMVRBTreeEntry<K, V> p = getEntry(key, true, partialSearchMode); if (p == null) return null; if (pageItemFound) return p; // NOT MATCHED, POSITION IS ALREADY TO THE NEXT ONE else if (pageIndex < p.getSize()) { if (key instanceof OCompositeKey) return adjustSearchResult((OCompositeKey) key, partialSearchMode, p); else return p; } return null; } /** * Gets the entry corresponding to the specified key; if no such entry exists, returns the entry for the greatest key less than * the specified key; if no such entry exists, returns <tt>null</tt>. * * @param key * Key to search. * @param partialSearchMode * In case of {@link OCompositeKey} composite key is passed in this parameter will be used to find preferred one. 
*/ public OMVRBTreeEntry<K, V> getFloorEntry(final K key, final PartialSearchMode partialSearchMode) { OMVRBTreeEntry<K, V> p = getEntry(key, true, partialSearchMode); if (p == null) return null; if (pageItemFound) return p; final OMVRBTreeEntry<K, V> adjacentEntry = previous(p); if (adjacentEntry == null) return null; if (key instanceof OCompositeKey) { return adjustSearchResult((OCompositeKey) key, partialSearchMode, adjacentEntry); } return adjacentEntry; } private OMVRBTreeEntry<K, V> adjustSearchResult(final OCompositeKey key, final PartialSearchMode partialSearchMode, final OMVRBTreeEntry<K, V> foundEntry) { if (partialSearchMode.equals(PartialSearchMode.NONE)) return foundEntry; final OCompositeKey keyToSearch = key; final OCompositeKey foundKey = (OCompositeKey) foundEntry.getKey(); if (keyToSearch.getKeys().size() < keySize) { final OCompositeKey borderKey = new OCompositeKey(); final OCompositeKey keyToCompare = new OCompositeKey(); final List<Object> keyItems = foundKey.getKeys(); for (int i = 0; i < keySize - 1; i++) { final Object keyItem = keyItems.get(i); borderKey.addKey(keyItem); if (i < keyToSearch.getKeys().size()) keyToCompare.addKey(keyItem); } if (partialSearchMode.equals(PartialSearchMode.HIGHEST_BOUNDARY)) borderKey.addKey(ALWAYS_GREATER_KEY); else borderKey.addKey(ALWAYS_LESS_KEY); final OMVRBTreeEntry<K, V> adjustedNode = getEntry(borderKey, true, PartialSearchMode.NONE); if (partialSearchMode.equals(PartialSearchMode.HIGHEST_BOUNDARY)) return adjustHighestPartialSearchResult(false, adjustedNode, keyToCompare); else return adjustLowestPartialSearchResult(false, adjustedNode, keyToCompare); } return foundEntry; } /** * Gets the entry for the least key greater than the specified key; if no such entry exists, returns the entry for the least key * greater than the specified key; if no such entry exists returns <tt>null</tt>. */ public OMVRBTreeEntry<K, V> getHigherEntry(final K key) { final OMVRBTreeEntry<K, V> p = getEntry(key, true, PartialSearchMode.HIGHEST_BOUNDARY); if (p == null) return null; if (pageItemFound) // MATCH, RETURN THE NEXT ONE return next(p); else if (pageIndex < p.getSize()) // NOT MATCHED, POSITION IS ALREADY TO THE NEXT ONE return p; return null; } /** * Returns the entry for the greatest key less than the specified key; if no such entry exists (i.e., the least key in the Tree is * greater than the specified key), returns <tt>null</tt>. */ public OMVRBTreeEntry<K, V> getLowerEntry(final K key) { final OMVRBTreeEntry<K, V> p = getEntry(key, true, PartialSearchMode.LOWEST_BOUNDARY); if (p == null) return null; return previous(p); } /** * Associates the specified value with the specified key in this map. If the map previously contained a mapping for the key, the * old value is replaced. * * @param key * key with which the specified value is to be associated * @param value * value to be associated with the specified key * * @return the previous value associated with <tt>key</tt>, or <tt>null</tt> if there was no mapping for <tt>key</tt>. (A * <tt>null</tt> return can also indicate that the map previously associated <tt>null</tt> with <tt>key</tt>.) 
* @throws ClassCastException * if the specified key cannot be compared with the keys currently in the map * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys */ @Override public V put(final K key, final V value) { OMVRBTreeEntry<K, V> parentNode = null; try { if (root == null) { root = createEntry(key, value); root.setColor(BLACK); setSize(1); modCount++; return null; } // TRY TO GET LATEST SEARCH parentNode = getLastSearchNodeForSameKey(key); if (parentNode != null) { if (lastSearchFound) { // EXACT MATCH: UPDATE THE VALUE pageIndex = lastSearchIndex; modCount++; return parentNode.setValue(value); } } // SEARCH THE ITEM parentNode = getEntry(key, true, PartialSearchMode.NONE); if (pageItemFound) { modCount++; // EXACT MATCH: UPDATE THE VALUE return parentNode.setValue(value); } setLastSearchNode(null, null); if (parentNode == null) { parentNode = root; pageIndex = 0; } if (parentNode.getFreeSpace() > 0) { // INSERT INTO THE PAGE parentNode.insert(pageIndex, key, value); } else { // CREATE NEW NODE AND COPY HALF OF VALUES FROM THE ORIGIN TO THE NEW ONE IN ORDER TO GET VALUES BALANCED final OMVRBTreeEntry<K, V> newNode = createEntry(parentNode); if (pageIndex < parentNode.getPageSplitItems()) // INSERT IN THE ORIGINAL NODE parentNode.insert(pageIndex, key, value); else // INSERT IN THE NEW NODE newNode.insert(pageIndex - parentNode.getPageSplitItems(), key, value); OMVRBTreeEntry<K, V> node = parentNode.getRight(); OMVRBTreeEntry<K, V> prevNode = parentNode; int cmp = 0; final K fk = newNode.getFirstKey(); if (comparator != null) while (node != null) { cmp = comparator.compare(fk, node.getFirstKey()); if (cmp < 0) { prevNode = node; node = node.getLeft(); } else if (cmp > 0) { prevNode = node; node = node.getRight(); } else { throw new IllegalStateException("Duplicated keys were found in OMVRBTree."); } } else while (node != null) { cmp = compare(fk, node.getFirstKey()); if (cmp < 0) { prevNode = node; node = node.getLeft(); } else if (cmp > 0) { prevNode = node; node = node.getRight(); } else { throw new IllegalStateException("Duplicated keys were found in OMVRBTree."); } } if (prevNode == parentNode) parentNode.setRight(newNode); else if (cmp < 0) prevNode.setLeft(newNode); else if (cmp > 0) prevNode.setRight(newNode); else throw new IllegalStateException("Duplicated keys were found in OMVRBTree."); fixAfterInsertion(newNode); } modCount++; setSizeDelta(+1); } finally { checkTreeStructure(parentNode); } return null; } /** * Removes the mapping for this key from this OMVRBTree if present. * * @param key * key for which mapping should be removed * @return the previous value associated with <tt>key</tt>, or <tt>null</tt> if there was no mapping for <tt>key</tt>. (A * <tt>null</tt> return can also indicate that the map previously associated <tt>null</tt> with <tt>key</tt>.) * @throws ClassCastException * if the specified key cannot be compared with the keys currently in the map * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys */ @Override public V remove(final Object key) { OMVRBTreeEntry<K, V> p = getEntry(key, PartialSearchMode.NONE); setLastSearchNode(null, null); if (p == null) return null; V oldValue = p.getValue(); deleteEntry(p); return oldValue; } /** * Removes all of the mappings from this map. The map will be empty after this call returns. 
*/ @Override public void clear() { modCount++; setSize(0); setLastSearchNode(null, null); setRoot(null); } /** * Returns a shallow copy of this <tt>OMVRBTree</tt> instance. (The keys and values themselves are not cloned.) * * @return a shallow copy of this map */ @Override public Object clone() { OMVRBTree<K, V> clone = null; try { clone = (OMVRBTree<K, V>) super.clone(); } catch (CloneNotSupportedException e) { throw new InternalError(); } // Put clone into "virgin" state (except for comparator) clone.pageIndex = pageIndex; clone.pageItemFound = pageItemFound; clone.pageLoadFactor = pageLoadFactor; clone.root = null; clone.setSize(0); clone.modCount = 0; clone.entrySet = null; clone.navigableKeySet = null; clone.descendingMap = null; // Initialize clone with our mappings try { clone.buildFromSorted(getTreeSize(), entrySet().iterator(), null, null); } catch (java.io.IOException cannotHappen) { } catch (ClassNotFoundException cannotHappen) { } return clone; } // ONavigableMap API methods /** * @since 1.6 */ public Map.Entry<K, V> firstEntry() { return exportEntry(getFirstEntry()); } /** * @since 1.6 */ public Map.Entry<K, V> lastEntry() { return exportEntry(getLastEntry()); } /** * @since 1.6 */ public Entry<K, V> pollFirstEntry() { OMVRBTreeEntry<K, V> p = getFirstEntry(); Map.Entry<K, V> result = exportEntry(p); if (p != null) deleteEntry(p); return result; } /** * @since 1.6 */ public Entry<K, V> pollLastEntry() { OMVRBTreeEntry<K, V> p = getLastEntry(); Map.Entry<K, V> result = exportEntry(p); if (p != null) deleteEntry(p); return result; } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public Map.Entry<K, V> lowerEntry(final K key) { return exportEntry(getLowerEntry(key)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public K lowerKey(final K key) { return keyOrNull(getLowerEntry(key)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public Map.Entry<K, V> floorEntry(final K key) { return exportEntry(getFloorEntry(key, PartialSearchMode.NONE)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public K floorKey(final K key) { return keyOrNull(getFloorEntry(key, PartialSearchMode.NONE)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public Map.Entry<K, V> ceilingEntry(final K key) { return exportEntry(getCeilingEntry(key, PartialSearchMode.NONE)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public K ceilingKey(final K key) { return keyOrNull(getCeilingEntry(key, PartialSearchMode.NONE)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, 
or its comparator does not permit null keys * @since 1.6 */ public Map.Entry<K, V> higherEntry(final K key) { return exportEntry(getHigherEntry(key)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys * @since 1.6 */ public K higherKey(final K key) { return keyOrNull(getHigherEntry(key)); } // Views /** * Fields initialized to contain an instance of the entry set view the first time this view is requested. Views are stateless, so * there's no reason to create more than one. */ private transient EntrySet entrySet = null; private transient KeySet<K> navigableKeySet = null; private transient ONavigableMap<K, V> descendingMap = null; /** * Returns a {@link Set} view of the keys contained in this map. The set's iterator returns the keys in ascending order. The set * is backed by the map, so changes to the map are reflected in the set, and vice-versa. If the map is modified while an iteration * over the set is in progress (except through the iterator's own <tt>remove</tt> operation), the results of the iteration are * undefined. The set supports element removal, which removes the corresponding mapping from the map, via the * <tt>Iterator.remove</tt>, <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt> operations. It does * not support the <tt>add</tt> or <tt>addAll</tt> operations. */ @Override public Set<K> keySet() { return navigableKeySet(); } /** * @since 1.6 */ public ONavigableSet<K> navigableKeySet() { final KeySet<K> nks = navigableKeySet; return (nks != null) ? nks : (navigableKeySet = (KeySet<K>) new KeySet<Object>((ONavigableMap<Object, Object>) this)); } /** * @since 1.6 */ public ONavigableSet<K> descendingKeySet() { return descendingMap().navigableKeySet(); } /** * Returns a {@link Collection} view of the values contained in this map. The collection's iterator returns the values in * ascending order of the corresponding keys. The collection is backed by the map, so changes to the map are reflected in the * collection, and vice-versa. If the map is modified while an iteration over the collection is in progress (except through the * iterator's own <tt>remove</tt> operation), the results of the iteration are undefined. The collection supports element removal, * which removes the corresponding mapping from the map, via the <tt>Iterator.remove</tt>, <tt>Collection.remove</tt>, * <tt>removeAll</tt>, <tt>retainAll</tt> and <tt>clear</tt> operations. It does not support the <tt>add</tt> or <tt>addAll</tt> * operations. */ @Override public Collection<V> values() { final Collection<V> vs = new Values(); return (vs != null) ? vs : null; } /** * Returns a {@link Set} view of the mappings contained in this map. The set's iterator returns the entries in ascending key * order. The set is backed by the map, so changes to the map are reflected in the set, and vice-versa. If the map is modified * while an iteration over the set is in progress (except through the iterator's own <tt>remove</tt> operation, or through the * <tt>setValue</tt> operation on a map entry returned by the iterator) the results of the iteration are undefined. The set * supports element removal, which removes the corresponding mapping from the map, via the <tt>Iterator.remove</tt>, * <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt> and <tt>clear</tt> operations. It does not support the <tt>add</tt> * or <tt>addAll</tt> operations. 
*/ @Override public Set<Map.Entry<K, V>> entrySet() { final EntrySet es = entrySet; return (es != null) ? es : (entrySet = new EntrySet()); } /** * @since 1.6 */ public ONavigableMap<K, V> descendingMap() { final ONavigableMap<K, V> km = descendingMap; return (km != null) ? km : (descendingMap = new DescendingSubMap<K, V>(this, true, null, true, true, null, true)); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if <tt>fromKey</tt> or <tt>toKey</tt> is null and this map uses natural ordering, or its comparator does not permit * null keys * @throws IllegalArgumentException * {@inheritDoc} * @since 1.6 */ public ONavigableMap<K, V> subMap(final K fromKey, final boolean fromInclusive, final K toKey, final boolean toInclusive) { return new AscendingSubMap<K, V>(this, false, fromKey, fromInclusive, false, toKey, toInclusive); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if <tt>toKey</tt> is null and this map uses natural ordering, or its comparator does not permit null keys * @throws IllegalArgumentException * {@inheritDoc} * @since 1.6 */ public ONavigableMap<K, V> headMap(final K toKey, final boolean inclusive) { return new AscendingSubMap<K, V>(this, true, null, true, false, toKey, inclusive); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if <tt>fromKey</tt> is null and this map uses natural ordering, or its comparator does not permit null keys * @throws IllegalArgumentException * {@inheritDoc} * @since 1.6 */ public ONavigableMap<K, V> tailMap(final K fromKey, final boolean inclusive) { return new AscendingSubMap<K, V>(this, false, fromKey, inclusive, true, null, true); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if <tt>fromKey</tt> or <tt>toKey</tt> is null and this map uses natural ordering, or its comparator does not permit * null keys * @throws IllegalArgumentException * {@inheritDoc} */ public SortedMap<K, V> subMap(final K fromKey, final K toKey) { return subMap(fromKey, true, toKey, false); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if <tt>toKey</tt> is null and this map uses natural ordering, or its comparator does not permit null keys * @throws IllegalArgumentException * {@inheritDoc} */ public SortedMap<K, V> headMap(final K toKey) { return headMap(toKey, false); } /** * @throws ClassCastException * {@inheritDoc} * @throws NullPointerException * if <tt>fromKey</tt> is null and this map uses natural ordering, or its comparator does not permit null keys * @throws IllegalArgumentException * {@inheritDoc} */ public SortedMap<K, V> tailMap(final K fromKey) { return tailMap(fromKey, true); } // View class support public class Values extends AbstractCollection<V> { @Override public Iterator<V> iterator() { return new ValueIterator(getFirstEntry()); } public Iterator<V> inverseIterator() { return new ValueInverseIterator(getLastEntry()); } @Override public int size() { return OMVRBTree.this.size(); } @Override public boolean contains(final Object o) { return OMVRBTree.this.containsValue(o); } @Override public boolean remove(final Object o) { for (OMVRBTreeEntry<K, V> e = getFirstEntry(); e != null; e = next(e)) { if (valEquals(e.getValue(), o)) { deleteEntry(e); return true; } } return false; } @Override public void clear() { OMVRBTree.this.clear(); } } public class EntrySet extends AbstractSet<Map.Entry<K, V>> { @Override public Iterator<Map.Entry<K, V>> iterator() { return new 
EntryIterator(getFirstEntry()); } public Iterator<Map.Entry<K, V>> inverseIterator() { return new InverseEntryIterator(getLastEntry()); } @Override public boolean contains(final Object o) { if (!(o instanceof Map.Entry)) return false; OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final V value = entry.getValue(); final V p = get(entry.getKey()); return p != null && valEquals(p, value); } @Override public boolean remove(final Object o) { if (!(o instanceof Map.Entry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final V value = entry.getValue(); OMVRBTreeEntry<K, V> p = getEntry(entry.getKey(), PartialSearchMode.NONE); if (p != null && valEquals(p.getValue(), value)) { deleteEntry(p); return true; } return false; } @Override public int size() { return OMVRBTree.this.size(); } @Override public void clear() { OMVRBTree.this.clear(); } } /* * Unlike Values and EntrySet, the KeySet class is static, delegating to a ONavigableMap to allow use by SubMaps, which outweighs * the ugliness of needing type-tests for the following Iterator methods that are defined appropriately in main versus submap * classes. */ OLazyIterator<K> keyIterator() { return new KeyIterator(getFirstEntry()); } OLazyIterator<K> descendingKeyIterator() { return new DescendingKeyIterator(getLastEntry()); } @SuppressWarnings("rawtypes") static final class KeySet<E> extends AbstractSet<E> implements ONavigableSet<E> { private final ONavigableMap<E, Object> m; KeySet(ONavigableMap<E, Object> map) { m = map; } @Override public OLazyIterator<E> iterator() { if (m instanceof OMVRBTree) return ((OMVRBTree<E, Object>) m).keyIterator(); else return (((OMVRBTree.NavigableSubMap) m).keyIterator()); } public OLazyIterator<E> descendingIterator() { if (m instanceof OMVRBTree) return ((OMVRBTree<E, Object>) m).descendingKeyIterator(); else return (((OMVRBTree.NavigableSubMap) m).descendingKeyIterator()); } @Override public int size() { return m.size(); } @Override public boolean isEmpty() { return m.isEmpty(); } @Override public boolean contains(final Object o) { return m.containsKey(o); } @Override public void clear() { m.clear(); } public E lower(final E e) { return m.lowerKey(e); } public E floor(final E e) { return m.floorKey(e); } public E ceiling(final E e) { return m.ceilingKey(e); } public E higher(final E e) { return m.higherKey(e); } public E first() { return m.firstKey(); } public E last() { return m.lastKey(); } public Comparator<? super E> comparator() { return m.comparator(); } public E pollFirst() { final Map.Entry<E, Object> e = m.pollFirstEntry(); return e == null ? null : e.getKey(); } public E pollLast() { final Map.Entry<E, Object> e = m.pollLastEntry(); return e == null ? 
null : e.getKey(); } @Override public boolean remove(final Object o) { final int oldSize = size(); m.remove(o); return size() != oldSize; } public ONavigableSet<E> subSet(final E fromElement, final boolean fromInclusive, final E toElement, final boolean toInclusive) { return new OMVRBTreeSet<E>(m.subMap(fromElement, fromInclusive, toElement, toInclusive)); } public ONavigableSet<E> headSet(final E toElement, final boolean inclusive) { return new OMVRBTreeSet<E>(m.headMap(toElement, inclusive)); } public ONavigableSet<E> tailSet(final E fromElement, final boolean inclusive) { return new OMVRBTreeSet<E>(m.tailMap(fromElement, inclusive)); } public SortedSet<E> subSet(final E fromElement, final E toElement) { return subSet(fromElement, true, toElement, false); } public SortedSet<E> headSet(final E toElement) { return headSet(toElement, false); } public SortedSet<E> tailSet(final E fromElement) { return tailSet(fromElement, true); } public ONavigableSet<E> descendingSet() { return new OMVRBTreeSet<E>(m.descendingMap()); } } final class EntryIterator extends AbstractEntryIterator<K, V, Map.Entry<K, V>> { EntryIterator(final OMVRBTreeEntry<K, V> first) { super(first); } public Map.Entry<K, V> next() { return nextEntry(); } } final class InverseEntryIterator extends AbstractEntryIterator<K, V, Map.Entry<K, V>> { InverseEntryIterator(final OMVRBTreeEntry<K, V> last) { super(last); // we have to set ourselves after current index to make iterator work if (last != null) { pageIndex = last.getTree().getPageIndex() + 1; } } public Map.Entry<K, V> next() { return prevEntry(); } } final class ValueIterator extends AbstractEntryIterator<K, V, V> { ValueIterator(final OMVRBTreeEntry<K, V> first) { super(first); } @Override public V next() { return nextValue(); } } final class ValueInverseIterator extends AbstractEntryIterator<K, V, V> { ValueInverseIterator(final OMVRBTreeEntry<K, V> last) { super(last); // we have to set ourselves after current index to make iterator work if (last != null) { pageIndex = last.getTree().getPageIndex() + 1; } } @Override public boolean hasNext() { return hasPrevious(); } @Override public V next() { return prevValue(); } } final class KeyIterator extends AbstractEntryIterator<K, V, K> { KeyIterator(final OMVRBTreeEntry<K, V> first) { super(first); } @Override public K next() { return nextKey(); } } final class DescendingKeyIterator extends AbstractEntryIterator<K, V, K> { DescendingKeyIterator(final OMVRBTreeEntry<K, V> first) { super(first); } public K next() { return prevEntry().getKey(); } } // Little utilities /** * Compares two keys using the correct comparison method for this OMVRBTree. */ final int compare(final Object k1, final Object k2) { return comparator == null ? ((Comparable<? super K>) k1).compareTo((K) k2) : comparator.compare((K) k1, (K) k2); } /** * Test two values for equality. Differs from o1.equals(o2) only in that it copes with <tt>null</tt> o1 properly. */ final static boolean valEquals(final Object o1, final Object o2) { return (o1 == null ? o2 == null : o1.equals(o2)); } /** * Return SimpleImmutableEntry for entry, or null if null */ static <K, V> Map.Entry<K, V> exportEntry(final OMVRBTreeEntry<K, V> omvrbTreeEntryPosition) { return omvrbTreeEntryPosition == null ? null : new OSimpleImmutableEntry<K, V>(omvrbTreeEntryPosition); } /** * Return SimpleImmutableEntry for entry, or null if null */ static <K, V> Map.Entry<K, V> exportEntry(final OMVRBTreeEntryPosition<K, V> omvrbTreeEntryPosition) { return omvrbTreeEntryPosition == null ? 
null : new OSimpleImmutableEntry<K, V>(omvrbTreeEntryPosition.entry); } /** * Return key for entry, or null if null */ static <K, V> K keyOrNull(final OMVRBTreeEntry<K, V> e) { return e == null ? null : e.getKey(); } /** * Return key for entry, or null if null */ static <K, V> K keyOrNull(OMVRBTreeEntryPosition<K, V> e) { return e == null ? null : e.getKey(); } /** * Returns the key corresponding to the specified Entry. * * @throws NoSuchElementException * if the Entry is null */ static <K> K key(OMVRBTreeEntry<K, ?> e) { if (e == null) throw new NoSuchElementException(); return e.getKey(); } // SubMaps /** * @serial include */ static abstract class NavigableSubMap<K, V> extends AbstractMap<K, V> implements ONavigableMap<K, V>, java.io.Serializable { /** * The backing map. */ final OMVRBTree<K, V> m; /** * Endpoints are represented as triples (fromStart, lo, loInclusive) and (toEnd, hi, hiInclusive). If fromStart is true, then * the low (absolute) bound is the start of the backing map, and the other values are ignored. Otherwise, if loInclusive is * true, lo is the inclusive bound, else lo is the exclusive bound. Similarly for the upper bound. */ final K lo, hi; final boolean fromStart, toEnd; final boolean loInclusive, hiInclusive; NavigableSubMap(final OMVRBTree<K, V> m, final boolean fromStart, K lo, final boolean loInclusive, final boolean toEnd, K hi, final boolean hiInclusive) { if (!fromStart && !toEnd) { if (m.compare(lo, hi) > 0) throw new IllegalArgumentException("fromKey > toKey"); } else { if (!fromStart) // type check m.compare(lo, lo); if (!toEnd) m.compare(hi, hi); } this.m = m; this.fromStart = fromStart; this.lo = lo; this.loInclusive = loInclusive; this.toEnd = toEnd; this.hi = hi; this.hiInclusive = hiInclusive; } // internal utilities final boolean tooLow(final Object key) { if (!fromStart) { int c = m.compare(key, lo); if (c < 0 || (c == 0 && !loInclusive)) return true; } return false; } final boolean tooHigh(final Object key) { if (!toEnd) { int c = m.compare(key, hi); if (c > 0 || (c == 0 && !hiInclusive)) return true; } return false; } final boolean inRange(final Object key) { return !tooLow(key) && !tooHigh(key); } final boolean inClosedRange(final Object key) { return (fromStart || m.compare(key, lo) >= 0) && (toEnd || m.compare(hi, key) >= 0); } final boolean inRange(final Object key, final boolean inclusive) { return inclusive ? inRange(key) : inClosedRange(key); } /* * Absolute versions of relation operations. Subclasses map to these using like-named "sub" versions that invert senses for * descending maps */ final OMVRBTreeEntryPosition<K, V> absLowest() { OMVRBTreeEntry<K, V> e = (fromStart ? m.getFirstEntry() : (loInclusive ? m.getCeilingEntry(lo, PartialSearchMode.LOWEST_BOUNDARY) : m.getHigherEntry(lo))); return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absHighest() { OMVRBTreeEntry<K, V> e = (toEnd ? m.getLastEntry() : (hiInclusive ? m.getFloorEntry(hi, PartialSearchMode.HIGHEST_BOUNDARY) : m.getLowerEntry(hi))); return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absCeiling(K key) { if (tooLow(key)) return absLowest(); OMVRBTreeEntry<K, V> e = m.getCeilingEntry(key, PartialSearchMode.NONE); return (e == null || tooHigh(e.getKey())) ? 
null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absHigher(K key) { if (tooLow(key)) return absLowest(); OMVRBTreeEntry<K, V> e = m.getHigherEntry(key); return (e == null || tooHigh(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absFloor(K key) { if (tooHigh(key)) return absHighest(); OMVRBTreeEntry<K, V> e = m.getFloorEntry(key, PartialSearchMode.NONE); return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } final OMVRBTreeEntryPosition<K, V> absLower(K key) { if (tooHigh(key)) return absHighest(); OMVRBTreeEntry<K, V> e = m.getLowerEntry(key); return (e == null || tooLow(e.getKey())) ? null : new OMVRBTreeEntryPosition<K, V>(e); } /** Returns the absolute high fence for ascending traversal */ final OMVRBTreeEntryPosition<K, V> absHighFence() { return (toEnd ? null : new OMVRBTreeEntryPosition<K, V>(hiInclusive ? m.getHigherEntry(hi) : m.getCeilingEntry(hi, PartialSearchMode.LOWEST_BOUNDARY))); } /** Return the absolute low fence for descending traversal */ final OMVRBTreeEntryPosition<K, V> absLowFence() { return (fromStart ? null : new OMVRBTreeEntryPosition<K, V>(loInclusive ? m.getLowerEntry(lo) : m.getFloorEntry(lo, PartialSearchMode.HIGHEST_BOUNDARY))); } // Abstract methods defined in ascending vs descending classes // These relay to the appropriate absolute versions abstract OMVRBTreeEntry<K, V> subLowest(); abstract OMVRBTreeEntry<K, V> subHighest(); abstract OMVRBTreeEntry<K, V> subCeiling(K key); abstract OMVRBTreeEntry<K, V> subHigher(K key); abstract OMVRBTreeEntry<K, V> subFloor(K key); abstract OMVRBTreeEntry<K, V> subLower(K key); /** Returns ascending iterator from the perspective of this submap */ abstract OLazyIterator<K> keyIterator(); /** Returns descending iterator from the perspective of this submap */ abstract OLazyIterator<K> descendingKeyIterator(); // public methods @Override public boolean isEmpty() { return (fromStart && toEnd) ? m.isEmpty() : entrySet().isEmpty(); } @Override public int size() { return (fromStart && toEnd) ? m.size() : entrySet().size(); } @Override public final boolean containsKey(Object key) { return inRange(key) && m.containsKey(key); } @Override public final V put(K key, V value) { if (!inRange(key)) throw new IllegalArgumentException("key out of range"); return m.put(key, value); } @Override public final V get(Object key) { return !inRange(key) ? null : m.get(key); } @Override public final V remove(Object key) { return !inRange(key) ? 
null : m.remove(key); } public final Map.Entry<K, V> ceilingEntry(K key) { return exportEntry(subCeiling(key)); } public final K ceilingKey(K key) { return keyOrNull(subCeiling(key)); } public final Map.Entry<K, V> higherEntry(K key) { return exportEntry(subHigher(key)); } public final K higherKey(K key) { return keyOrNull(subHigher(key)); } public final Map.Entry<K, V> floorEntry(K key) { return exportEntry(subFloor(key)); } public final K floorKey(K key) { return keyOrNull(subFloor(key)); } public final Map.Entry<K, V> lowerEntry(K key) { return exportEntry(subLower(key)); } public final K lowerKey(K key) { return keyOrNull(subLower(key)); } public final K firstKey() { return key(subLowest()); } public final K lastKey() { return key(subHighest()); } public final Map.Entry<K, V> firstEntry() { return exportEntry(subLowest()); } public final Map.Entry<K, V> lastEntry() { return exportEntry(subHighest()); } public final Map.Entry<K, V> pollFirstEntry() { OMVRBTreeEntry<K, V> e = subLowest(); Map.Entry<K, V> result = exportEntry(e); if (e != null) m.deleteEntry(e); return result; } public final Map.Entry<K, V> pollLastEntry() { OMVRBTreeEntry<K, V> e = subHighest(); Map.Entry<K, V> result = exportEntry(e); if (e != null) m.deleteEntry(e); return result; } // Views transient ONavigableMap<K, V> descendingMapView = null; transient EntrySetView entrySetView = null; transient KeySet<K> navigableKeySetView = null; @SuppressWarnings("rawtypes") public final ONavigableSet<K> navigableKeySet() { KeySet<K> nksv = navigableKeySetView; return (nksv != null) ? nksv : (navigableKeySetView = new OMVRBTree.KeySet(this)); } @Override public final Set<K> keySet() { return navigableKeySet(); } public ONavigableSet<K> descendingKeySet() { return descendingMap().navigableKeySet(); } public final SortedMap<K, V> subMap(final K fromKey, final K toKey) { return subMap(fromKey, true, toKey, false); } public final SortedMap<K, V> headMap(final K toKey) { return headMap(toKey, false); } public final SortedMap<K, V> tailMap(final K fromKey) { return tailMap(fromKey, true); } // View classes abstract class EntrySetView extends AbstractSet<Map.Entry<K, V>> { private transient int size = -1, sizeModCount; @Override public int size() { if (fromStart && toEnd) return m.size(); if (size == -1 || sizeModCount != m.modCount) { sizeModCount = m.modCount; size = 0; Iterator<?> i = iterator(); while (i.hasNext()) { size++; i.next(); } } return size; } @Override public boolean isEmpty() { OMVRBTreeEntryPosition<K, V> n = absLowest(); return n == null || tooHigh(n.getKey()); } @Override public boolean contains(final Object o) { if (!(o instanceof OMVRBTreeEntry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final K key = entry.getKey(); if (!inRange(key)) return false; V nodeValue = m.get(key); return nodeValue != null && valEquals(nodeValue, entry.getValue()); } @Override public boolean remove(final Object o) { if (!(o instanceof OMVRBTreeEntry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final K key = entry.getKey(); if (!inRange(key)) return false; final OMVRBTreeEntry<K, V> node = m.getEntry(key, PartialSearchMode.NONE); if (node != null && valEquals(node.getValue(), entry.getValue())) { m.deleteEntry(node); return true; } return false; } } /** * Iterators for SubMaps */ abstract class SubMapIterator<T> implements OLazyIterator<T> { OMVRBTreeEntryPosition<K, V> lastReturned; OMVRBTreeEntryPosition<K, V> next; final K fenceKey; int expectedModCount; 
SubMapIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) { expectedModCount = m.modCount; lastReturned = null; next = first; fenceKey = fence == null ? null : fence.getKey(); } public final boolean hasNext() { if (next != null) { final K k = next.getKey(); return k != fenceKey && !k.equals(fenceKey); } return false; } final OMVRBTreeEntryPosition<K, V> nextEntry() { final OMVRBTreeEntryPosition<K, V> e; if (next != null) e = new OMVRBTreeEntryPosition<K, V>(next); else e = null; if (e == null || e.entry == null) throw new NoSuchElementException(); final K k = e.getKey(); if (k == fenceKey || k.equals(fenceKey)) throw new NoSuchElementException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); next.assign(OMVRBTree.next(e)); lastReturned = e; return e; } final OMVRBTreeEntryPosition<K, V> prevEntry() { final OMVRBTreeEntryPosition<K, V> e; if (next != null) e = new OMVRBTreeEntryPosition<K, V>(next); else e = null; if (e == null || e.entry == null) throw new NoSuchElementException(); final K k = e.getKey(); if (k == fenceKey || k.equals(fenceKey)) throw new NoSuchElementException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); next.assign(OMVRBTree.previous(e)); lastReturned = e; return e; } final public T update(final T iValue) { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); return (T) lastReturned.entry.setValue((V) iValue); } final void removeAscending() { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); // deleted entries are replaced by their successors if (lastReturned.entry.getLeft() != null && lastReturned.entry.getRight() != null) next = lastReturned; m.deleteEntry(lastReturned.entry); lastReturned = null; expectedModCount = m.modCount; } final void removeDescending() { if (lastReturned == null) throw new IllegalStateException(); if (m.modCount != expectedModCount) throw new ConcurrentModificationException(); m.deleteEntry(lastReturned.entry); lastReturned = null; expectedModCount = m.modCount; } } final class SubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> { SubMapEntryIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) { super(first, fence); } public Map.Entry<K, V> next() { final Map.Entry<K, V> e = OMVRBTree.exportEntry(next); nextEntry(); return e; } public void remove() { removeAscending(); } } final class SubMapKeyIterator extends SubMapIterator<K> { SubMapKeyIterator(final OMVRBTreeEntryPosition<K, V> first, final OMVRBTreeEntryPosition<K, V> fence) { super(first, fence); } public K next() { return nextEntry().getKey(); } public void remove() { removeAscending(); } } final class DescendingSubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> { DescendingSubMapEntryIterator(final OMVRBTreeEntryPosition<K, V> last, final OMVRBTreeEntryPosition<K, V> fence) { super(last, fence); } public Map.Entry<K, V> next() { final Map.Entry<K, V> e = OMVRBTree.exportEntry(next); prevEntry(); return e; } public void remove() { removeDescending(); } } final class DescendingSubMapKeyIterator extends SubMapIterator<K> { DescendingSubMapKeyIterator(final OMVRBTreeEntryPosition<K, V> last, final OMVRBTreeEntryPosition<K, V> fence) { super(last, fence); } public K next() { return prevEntry().getKey(); } public void remove() { 
removeDescending(); } } } /** * @serial include */ static final class AscendingSubMap<K, V> extends NavigableSubMap<K, V> { private static final long serialVersionUID = 912986545866124060L; AscendingSubMap(final OMVRBTree<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd, K hi, final boolean hiInclusive) { super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive); } public Comparator<? super K> comparator() { return m.comparator(); } public ONavigableMap<K, V> subMap(final K fromKey, final boolean fromInclusive, final K toKey, final boolean toInclusive) { if (!inRange(fromKey, fromInclusive)) throw new IllegalArgumentException("fromKey out of range"); if (!inRange(toKey, toInclusive)) throw new IllegalArgumentException("toKey out of range"); return new AscendingSubMap<K, V>(m, false, fromKey, fromInclusive, false, toKey, toInclusive); } public ONavigableMap<K, V> headMap(final K toKey, final boolean inclusive) { if (!inRange(toKey, inclusive)) throw new IllegalArgumentException("toKey out of range"); return new AscendingSubMap<K, V>(m, fromStart, lo, loInclusive, false, toKey, inclusive); } public ONavigableMap<K, V> tailMap(final K fromKey, final boolean inclusive) { if (!inRange(fromKey, inclusive)) throw new IllegalArgumentException("fromKey out of range"); return new AscendingSubMap<K, V>(m, false, fromKey, inclusive, toEnd, hi, hiInclusive); } public ONavigableMap<K, V> descendingMap() { ONavigableMap<K, V> mv = descendingMapView; return (mv != null) ? mv : (descendingMapView = new DescendingSubMap<K, V>(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive)); } @Override OLazyIterator<K> keyIterator() { return new SubMapKeyIterator(absLowest(), absHighFence()); } @Override OLazyIterator<K> descendingKeyIterator() { return new DescendingSubMapKeyIterator(absHighest(), absLowFence()); } final class AscendingEntrySetView extends EntrySetView { @Override public Iterator<Map.Entry<K, V>> iterator() { return new SubMapEntryIterator(absLowest(), absHighFence()); } } @Override public Set<Map.Entry<K, V>> entrySet() { EntrySetView es = entrySetView; return (es != null) ? es : new AscendingEntrySetView(); } @Override OMVRBTreeEntry<K, V> subLowest() { return absLowest().entry; } @Override OMVRBTreeEntry<K, V> subHighest() { return absHighest().entry; } @Override OMVRBTreeEntry<K, V> subCeiling(final K key) { return absCeiling(key).entry; } @Override OMVRBTreeEntry<K, V> subHigher(final K key) { return absHigher(key).entry; } @Override OMVRBTreeEntry<K, V> subFloor(final K key) { return absFloor(key).entry; } @Override OMVRBTreeEntry<K, V> subLower(final K key) { return absLower(key).entry; } } /** * @serial include */ static final class DescendingSubMap<K, V> extends NavigableSubMap<K, V> { private static final long serialVersionUID = 912986545866120460L; private final Comparator<? super K> reverseComparator = Collections.reverseOrder(m.comparator); DescendingSubMap(final OMVRBTree<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd, final K hi, final boolean hiInclusive) { super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive); } public Comparator<? 
super K> comparator() { return reverseComparator; } public ONavigableMap<K, V> subMap(final K fromKey, final boolean fromInclusive, final K toKey, final boolean toInclusive) { if (!inRange(fromKey, fromInclusive)) throw new IllegalArgumentException("fromKey out of range"); if (!inRange(toKey, toInclusive)) throw new IllegalArgumentException("toKey out of range"); return new DescendingSubMap<K, V>(m, false, toKey, toInclusive, false, fromKey, fromInclusive); } public ONavigableMap<K, V> headMap(final K toKey, final boolean inclusive) { if (!inRange(toKey, inclusive)) throw new IllegalArgumentException("toKey out of range"); return new DescendingSubMap<K, V>(m, false, toKey, inclusive, toEnd, hi, hiInclusive); } public ONavigableMap<K, V> tailMap(final K fromKey, final boolean inclusive) { if (!inRange(fromKey, inclusive)) throw new IllegalArgumentException("fromKey out of range"); return new DescendingSubMap<K, V>(m, fromStart, lo, loInclusive, false, fromKey, inclusive); } public ONavigableMap<K, V> descendingMap() { ONavigableMap<K, V> mv = descendingMapView; return (mv != null) ? mv : (descendingMapView = new AscendingSubMap<K, V>(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive)); } @Override OLazyIterator<K> keyIterator() { return new DescendingSubMapKeyIterator(absHighest(), absLowFence()); } @Override OLazyIterator<K> descendingKeyIterator() { return new SubMapKeyIterator(absLowest(), absHighFence()); } final class DescendingEntrySetView extends EntrySetView { @Override public Iterator<Map.Entry<K, V>> iterator() { return new DescendingSubMapEntryIterator(absHighest(), absLowFence()); } } @Override public Set<Map.Entry<K, V>> entrySet() { EntrySetView es = entrySetView; return (es != null) ? es : new DescendingEntrySetView(); } @Override OMVRBTreeEntry<K, V> subLowest() { return absHighest().entry; } @Override OMVRBTreeEntry<K, V> subHighest() { return absLowest().entry; } @Override OMVRBTreeEntry<K, V> subCeiling(final K key) { return absFloor(key).entry; } @Override OMVRBTreeEntry<K, V> subHigher(final K key) { return absLower(key).entry; } @Override OMVRBTreeEntry<K, V> subFloor(final K key) { return absCeiling(key).entry; } @Override OMVRBTreeEntry<K, V> subLower(final K key) { return absHigher(key).entry; } } // Red-black mechanics public static final boolean RED = false; public static final boolean BLACK = true; /** * Node in the Tree. Doubles as a means to pass key-value pairs back to user (see Map.Entry). */ /** * Returns the first Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is * empty. */ public OMVRBTreeEntry<K, V> getFirstEntry() { OMVRBTreeEntry<K, V> p = root; if (p != null) { if (p.getSize() > 0) pageIndex = 0; while (p.getLeft() != null) p = p.getLeft(); } return p; } /** * Returns the last Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is * empty. */ protected OMVRBTreeEntry<K, V> getLastEntry() { OMVRBTreeEntry<K, V> p = root; if (p != null) while (p.getRight() != null) p = p.getRight(); if (p != null) pageIndex = p.getSize() - 1; return p; } public static <K, V> OMVRBTreeEntry<K, V> successor(final OMVRBTreeEntryPosition<K, V> t) { t.entry.getTree().setPageIndex(t.position); return successor(t.entry); } /** * Returns the successor of the specified Entry, or null if no such. 
*/ public static <K, V> OMVRBTreeEntry<K, V> successor(final OMVRBTreeEntry<K, V> t) { if (t == null) return null; OMVRBTreeEntry<K, V> p = null; if (t.getRight() != null) { p = t.getRight(); while (p.getLeft() != null) p = p.getLeft(); } else { p = t.getParent(); OMVRBTreeEntry<K, V> ch = t; while (p != null && ch == p.getRight()) { ch = p; p = p.getParent(); } } return p; } public static <K, V> OMVRBTreeEntry<K, V> next(final OMVRBTreeEntryPosition<K, V> t) { t.entry.getTree().setPageIndex(t.position); return next(t.entry); } /** * Returns the next item of the tree. */ public static <K, V> OMVRBTreeEntry<K, V> next(final OMVRBTreeEntry<K, V> t) { if (t == null) return null; final OMVRBTreeEntry<K, V> succ; if (t.tree.pageIndex < t.getSize() - 1) { // ITERATE INSIDE THE NODE succ = t; t.tree.pageIndex++; } else { // GET THE NEXT NODE succ = OMVRBTree.successor(t); t.tree.pageIndex = 0; } return succ; } public static <K, V> OMVRBTreeEntry<K, V> predecessor(final OMVRBTreeEntryPosition<K, V> t) { t.entry.getTree().setPageIndex(t.position); return predecessor(t.entry); } /** * Returns the predecessor of the specified Entry, or null if no such. */ public static <K, V> OMVRBTreeEntry<K, V> predecessor(final OMVRBTreeEntry<K, V> t) { if (t == null) return null; else if (t.getLeft() != null) { OMVRBTreeEntry<K, V> p = t.getLeft(); while (p.getRight() != null) p = p.getRight(); return p; } else { OMVRBTreeEntry<K, V> p = t.getParent(); Entry<K, V> ch = t; while (p != null && ch == p.getLeft()) { ch = p; p = p.getParent(); } return p; } } public static <K, V> OMVRBTreeEntry<K, V> previous(final OMVRBTreeEntryPosition<K, V> t) { t.entry.getTree().setPageIndex(t.position); return previous(t.entry); } /** * Returns the previous item of the tree. */ public static <K, V> OMVRBTreeEntry<K, V> previous(final OMVRBTreeEntry<K, V> t) { if (t == null) return null; final int index = t.getTree().getPageIndex(); final OMVRBTreeEntry<K, V> prev; if (index <= 0) { prev = predecessor(t); if (prev != null) t.tree.pageIndex = prev.getSize() - 1; else t.tree.pageIndex = 0; } else { prev = t; t.tree.pageIndex = index - 1; } return prev; } /** * Balancing operations. * * Implementations of rebalancings during insertion and deletion are slightly different than the CLR version. Rather than using * dummy nilnodes, we use a set of accessors that deal properly with null. They are used to avoid messiness surrounding nullness * checks in the main algorithms. */ private static <K, V> boolean colorOf(final OMVRBTreeEntry<K, V> p) { return (p == null ? BLACK : p.getColor()); } private static <K, V> OMVRBTreeEntry<K, V> parentOf(final OMVRBTreeEntry<K, V> p) { return (p == null ? null : p.getParent()); } private static <K, V> void setColor(final OMVRBTreeEntry<K, V> p, final boolean c) { if (p != null) p.setColor(c); } private static <K, V> OMVRBTreeEntry<K, V> leftOf(final OMVRBTreeEntry<K, V> p) { return (p == null) ? null : p.getLeft(); } private static <K, V> OMVRBTreeEntry<K, V> rightOf(final OMVRBTreeEntry<K, V> p) { return (p == null) ? 
null : p.getRight(); } /** From CLR */ protected void rotateLeft(final OMVRBTreeEntry<K, V> p) { if (p != null) { OMVRBTreeEntry<K, V> r = p.getRight(); p.setRight(r.getLeft()); if (r.getLeft() != null) r.getLeft().setParent(p); r.setParent(p.getParent()); if (p.getParent() == null) setRoot(r); else if (p.getParent().getLeft() == p) p.getParent().setLeft(r); else p.getParent().setRight(r); p.setParent(r); r.setLeft(p); } } protected void setRoot(final OMVRBTreeEntry<K, V> iRoot) { root = iRoot; } /** From CLR */ protected void rotateRight(final OMVRBTreeEntry<K, V> p) { if (p != null) { OMVRBTreeEntry<K, V> l = p.getLeft(); p.setLeft(l.getRight()); if (l.getRight() != null) l.getRight().setParent(p); l.setParent(p.getParent()); if (p.getParent() == null) setRoot(l); else if (p.getParent().getRight() == p) p.getParent().setRight(l); else p.getParent().setLeft(l); l.setRight(p); p.setParent(l); } } private OMVRBTreeEntry<K, V> grandparent(final OMVRBTreeEntry<K, V> n) { return parentOf(parentOf(n)); } private OMVRBTreeEntry<K, V> uncle(final OMVRBTreeEntry<K, V> n) { if (parentOf(n) == leftOf(grandparent(n))) return rightOf(grandparent(n)); else return leftOf(grandparent(n)); } private void fixAfterInsertion(final OMVRBTreeEntry<K, V> n) { if (parentOf(n) == null) setColor(n, BLACK); else insert_case2(n); } private void insert_case2(final OMVRBTreeEntry<K, V> n) { if (colorOf(parentOf(n)) == BLACK) return; /* Tree is still valid */ else insert_case3(n); } private void insert_case3(final OMVRBTreeEntry<K, V> n) { if (uncle(n) != null && colorOf(uncle(n)) == RED) { setColor(parentOf(n), BLACK); setColor(uncle(n), BLACK); setColor(grandparent(n), RED); fixAfterInsertion(grandparent(n)); } else insert_case4(n); } private void insert_case4(OMVRBTreeEntry<K, V> n) { if (n == rightOf(parentOf(n)) && parentOf(n) == leftOf(grandparent(n))) { rotateLeft(parentOf(n)); n = leftOf(n); } else if (n == leftOf(parentOf(n)) && parentOf(n) == rightOf(grandparent(n))) { rotateRight(parentOf(n)); n = rightOf(n); } insert_case5(n); } private void insert_case5(final OMVRBTreeEntry<K, V> n) { setColor(parentOf(n), BLACK); setColor(grandparent(n), RED); if (n == leftOf(parentOf(n)) && parentOf(n) == leftOf(grandparent(n))) { rotateRight(grandparent(n)); } else { rotateLeft(grandparent(n)); } } /** * Delete node p, and then re-balance the tree. * * @param p * node to delete * @return */ OMVRBTreeEntry<K, V> deleteEntry(OMVRBTreeEntry<K, V> p) { setSizeDelta(-1); modCount++; if (pageIndex > -1) { // DELETE INSIDE THE NODE p.remove(); if (p.getSize() > 0) return p; } final OMVRBTreeEntry<K, V> next = successor(p); // DELETE THE ENTIRE NODE, RE-BUILDING THE STRUCTURE removeNode(p); // RETURN NEXT NODE return next; } /** * Remove a node from the tree. * * @param p * Node to remove * * @return Node that was removed. Passed and removed nodes may be different in case node to remove contains two children. In this * case node successor will be found and removed but it's content will be copied to the node that was passed in method. */ protected OMVRBTreeEntry<K, V> removeNode(OMVRBTreeEntry<K, V> p) { modCount++; // If strictly internal, copy successor's element to p and then make p // point to successor. if (p.getLeft() != null && p.getRight() != null) { OMVRBTreeEntry<K, V> s = next(p); p.copyFrom(s); p = s; } // p has 2 children // Start fixup at replacement node, if it exists. final OMVRBTreeEntry<K, V> replacement = (p.getLeft() != null ? 
p.getLeft() : p.getRight()); if (replacement != null) { // Link replacement to parent replacement.setParent(p.getParent()); if (p.getParent() == null) setRoot(replacement); else if (p == p.getParent().getLeft()) p.getParent().setLeft(replacement); else p.getParent().setRight(replacement); // Null out links so they are OK to use by fixAfterDeletion. p.setLeft(null); p.setRight(null); p.setParent(null); // Fix replacement if (p.getColor() == BLACK) fixAfterDeletion(replacement); } else if (p.getParent() == null && size() == 0) { // return if we are the only node. Check the size to be sure the map is empty clear(); } else { // No children. Use self as phantom replacement and unlink. if (p.getColor() == BLACK) fixAfterDeletion(p); if (p.getParent() != null) { if (p == p.getParent().getLeft()) p.getParent().setLeft(null); else if (p == p.getParent().getRight()) p.getParent().setRight(null); p.setParent(null); } } return p; } /** From CLR */ private void fixAfterDeletion(OMVRBTreeEntry<K, V> x) { while (x != root && colorOf(x) == BLACK) { if (x == leftOf(parentOf(x))) { OMVRBTreeEntry<K, V> sib = rightOf(parentOf(x)); if (colorOf(sib) == RED) { setColor(sib, BLACK); setColor(parentOf(x), RED); rotateLeft(parentOf(x)); sib = rightOf(parentOf(x)); } if (colorOf(leftOf(sib)) == BLACK && colorOf(rightOf(sib)) == BLACK) { setColor(sib, RED); x = parentOf(x); } else { if (colorOf(rightOf(sib)) == BLACK) { setColor(leftOf(sib), BLACK); setColor(sib, RED); rotateRight(sib); sib = rightOf(parentOf(x)); } setColor(sib, colorOf(parentOf(x))); setColor(parentOf(x), BLACK); setColor(rightOf(sib), BLACK); rotateLeft(parentOf(x)); x = root; } } else { // symmetric OMVRBTreeEntry<K, V> sib = leftOf(parentOf(x)); if (colorOf(sib) == RED) { setColor(sib, BLACK); setColor(parentOf(x), RED); rotateRight(parentOf(x)); sib = leftOf(parentOf(x)); } if (x != null && colorOf(rightOf(sib)) == BLACK && colorOf(leftOf(sib)) == BLACK) { setColor(sib, RED); x = parentOf(x); } else { if (colorOf(leftOf(sib)) == BLACK) { setColor(rightOf(sib), BLACK); setColor(sib, RED); rotateLeft(sib); sib = leftOf(parentOf(x)); } setColor(sib, colorOf(parentOf(x))); setColor(parentOf(x), BLACK); setColor(leftOf(sib), BLACK); rotateRight(parentOf(x)); x = root; } } } setColor(x, BLACK); } /** * Save the state of the <tt>OMVRBTree</tt> instance to a stream (i.e., serialize it). * * @serialData The <i>size</i> of the OMVRBTree (the number of key-value mappings) is emitted (int), followed by the key (Object) * and value (Object) for each key-value mapping represented by the OMVRBTree. The key-value mappings are emitted in * key-order (as determined by the OMVRBTree's Comparator, or by the keys' natural ordering if the OMVRBTree has no * Comparator). */ private void writeObject(final ObjectOutputStream s) throws java.io.IOException { // Write out the Comparator and any hidden stuff s.defaultWriteObject(); // Write out size (number of Mappings) s.writeInt(size()); // Write out keys and values (alternating) for (Iterator<Map.Entry<K, V>> i = entrySet().iterator(); i.hasNext();) { Entry<K, V> e = i.next(); s.writeObject(e.getKey()); s.writeObject(e.getValue()); } } /** * Reconstitute the <tt>OMVRBTree</tt> instance from a stream (i.e., deserialize it). 
*/ private void readObject(final java.io.ObjectInputStream s) throws IOException, ClassNotFoundException { // Read in the Comparator and any hidden stuff s.defaultReadObject(); // Read in size setSize(s.readInt()); buildFromSorted(size(), null, s, null); } /** Intended to be called only from OTreeSet.readObject */ void readOTreeSet(int iSize, ObjectInputStream s, V defaultVal) throws java.io.IOException, ClassNotFoundException { buildFromSorted(iSize, null, s, defaultVal); } /** Intended to be called only from OTreeSet.addAll */ void addAllForOTreeSet(SortedSet<? extends K> set, V defaultVal) { try { buildFromSorted(set.size(), set.iterator(), null, defaultVal); } catch (java.io.IOException cannotHappen) { } catch (ClassNotFoundException cannotHappen) { } } /** * Linear time tree building algorithm from sorted data. Can accept keys and/or values from iterator or stream. This leads to too * many parameters, but seems better than alternatives. The four formats that this method accepts are: * * 1) An iterator of Map.Entries. (it != null, defaultVal == null). 2) An iterator of keys. (it != null, defaultVal != null). 3) A * stream of alternating serialized keys and values. (it == null, defaultVal == null). 4) A stream of serialized keys. (it == * null, defaultVal != null). * * It is assumed that the comparator of the OMVRBTree is already set prior to calling this method. * * @param size * the number of keys (or key-value pairs) to be read from the iterator or stream * @param it * If non-null, new entries are created from entries or keys read from this iterator. * @param str * If non-null, new entries are created from keys and possibly values read from this stream in serialized form. Exactly * one of it and str should be non-null. * @param defaultVal * if non-null, this default value is used for each value in the map. If null, each value is read from iterator or * stream, as described above. * @throws IOException * propagated from stream reads. This cannot occur if str is null. * @throws ClassNotFoundException * propagated from readObject. This cannot occur if str is null. */ private void buildFromSorted(final int size, final Iterator<?> it, final java.io.ObjectInputStream str, final V defaultVal) throws java.io.IOException, ClassNotFoundException { setSize(size); root = buildFromSorted(0, 0, size - 1, computeRedLevel(size), it, str, defaultVal); } /** * Recursive "helper method" that does the real work of the previous method. Identically named parameters have identical * definitions. Additional parameters are documented below. It is assumed that the comparator and size fields of the OMVRBTree are * already set prior to calling this method. (It ignores both fields.) * * @param level * the current level of tree. Initial call should be 0. * @param lo * the first element index of this subtree. Initial should be 0. * @param hi * the last element index of this subtree. Initial should be size-1. * @param redLevel * the level at which nodes should be red. Must be equal to computeRedLevel for tree of this size. */ private final OMVRBTreeEntry<K, V> buildFromSorted(final int level, final int lo, final int hi, final int redLevel, final Iterator<?> it, final java.io.ObjectInputStream str, final V defaultVal) throws java.io.IOException, ClassNotFoundException { /* * Strategy: The root is the middlemost element. To get to it, we have to first recursively construct the entire left subtree, * so as to grab all of its elements. We can then proceed with right subtree. 
* * The lo and hi arguments are the minimum and maximum indices to pull out of the iterator or stream for current subtree. They * are not actually indexed, we just proceed sequentially, ensuring that items are extracted in corresponding order. */ if (hi < lo) return null; final int mid = (lo + hi) / 2; OMVRBTreeEntry<K, V> left = null; if (lo < mid) left = buildFromSorted(level + 1, lo, mid - 1, redLevel, it, str, defaultVal); // extract key and/or value from iterator or stream K key; V value; if (it != null) { if (defaultVal == null) { OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) it.next(); key = entry.getKey(); value = entry.getValue(); } else { key = (K) it.next(); value = defaultVal; } } else { // use stream key = (K) str.readObject(); value = (defaultVal != null ? defaultVal : (V) str.readObject()); } final OMVRBTreeEntry<K, V> middle = createEntry(key, value); // color nodes in non-full bottom most level red if (level == redLevel) middle.setColor(RED); if (left != null) { middle.setLeft(left); left.setParent(middle); } if (mid < hi) { OMVRBTreeEntry<K, V> right = buildFromSorted(level + 1, mid + 1, hi, redLevel, it, str, defaultVal); middle.setRight(right); right.setParent(middle); } return middle; } /** * Find the level down to which to assign all nodes BLACK. This is the last `full' level of the complete binary tree produced by * buildTree. The remaining nodes are colored RED. (This makes a `nice' set of color assignments wrt future insertions.) This * level number is computed by finding the number of splits needed to reach the zeroeth node. (The answer is ~lg(N), but in any * case must be computed by same quick O(lg(N)) loop.) */ private static int computeRedLevel(final int sz) { int level = 0; for (int m = sz - 1; m >= 0; m = m / 2 - 1) level++; return level; } public int getPageIndex() { return pageIndex; } public void setPageIndex(final int iPageIndex) { pageIndex = iPageIndex; } private void init() { } public OMVRBTreeEntry<K, V> getRoot() { return root; } protected void printInMemoryStructure(final OMVRBTreeEntry<K, V> iRootNode) { printInMemoryNode("root", iRootNode, 0); } private void printInMemoryNode(final String iLabel, OMVRBTreeEntry<K, V> iNode, int iLevel) { if (iNode == null) return; for (int i = 0; i < iLevel; ++i) System.out.print(' '); System.out.println(iLabel + ": " + iNode.toString() + " (" + (iNode.getColor() ? 
"B" : "R") + ")"); ++iLevel; printInMemoryNode(iLevel + ".left", iNode.getLeftInMemory(), iLevel); printInMemoryNode(iLevel + ".right", iNode.getRightInMemory(), iLevel); } public void checkTreeStructure(final OMVRBTreeEntry<K, V> iRootNode) { if (!runtimeCheckEnabled || iRootNode == null) return; int currPageIndex = pageIndex; OMVRBTreeEntry<K, V> prevNode = null; int i = 0; for (OMVRBTreeEntry<K, V> e = iRootNode.getFirstInMemory(); e != null; e = e.getNextInMemory()) { if (e.getSize() == 0) OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has 0 items\n", e); if (prevNode != null) { if (prevNode.getTree() == null) OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Freed record %d found in memory\n", i); if (compare(e.getFirstKey(), e.getLastKey()) > 0) { OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] begin key is > than last key\n", e.getFirstKey(), e.getLastKey()); printInMemoryStructure(iRootNode); } if (compare(e.getFirstKey(), prevNode.getLastKey()) < 0) { OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s starts with a key minor than the last key of the previous node %s\n", e, prevNode); printInMemoryStructure(e.getParentInMemory() != null ? e.getParentInMemory() : e); } } if (e.getLeftInMemory() != null && e.getLeftInMemory() == e) { OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has left that points to itself!\n", e); printInMemoryStructure(iRootNode); } if (e.getRightInMemory() != null && e.getRightInMemory() == e) { OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has right that points to itself!\n", e); printInMemoryStructure(iRootNode); } if (e.getLeftInMemory() != null && e.getLeftInMemory() == e.getRightInMemory()) { OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has left and right equals!\n", e); printInMemoryStructure(iRootNode); } if (e.getParentInMemory() != null && e.getParentInMemory().getRightInMemory() != e && e.getParentInMemory().getLeftInMemory() != e) { OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s is the children of node %s but the cross-reference is missed!\n", e, e.getParentInMemory()); printInMemoryStructure(iRootNode); } prevNode = e; ++i; } pageIndex = currPageIndex; } public boolean isRuntimeCheckEnabled() { return runtimeCheckEnabled; } public void setChecks(boolean checks) { this.runtimeCheckEnabled = checks; } public void setRuntimeCheckEnabled(boolean runtimeCheckEnabled) { this.runtimeCheckEnabled = runtimeCheckEnabled; } public boolean isDebug() { return debug; } public void setDebug(boolean debug) { this.debug = debug; } protected OMVRBTreeEntry<K, V> getLastSearchNodeForSameKey(final Object key) { if (key != null && lastSearchKey != null) { if (key instanceof OCompositeKey) return key.equals(lastSearchKey) ? lastSearchNode : null; if (comparator != null) return comparator.compare((K) key, (K) lastSearchKey) == 0 ? lastSearchNode : null; else try { return ((Comparable<? super K>) key).compareTo((K) lastSearchKey) == 0 ? lastSearchNode : null; } catch (Exception e) { // IGNORE IT } } return null; } protected OMVRBTreeEntry<K, V> setLastSearchNode(final Object iKey, final OMVRBTreeEntry<K, V> iNode) { lastSearchKey = iKey; lastSearchNode = iNode; lastSearchFound = iNode != null ? iNode.tree.pageItemFound : false; lastSearchIndex = iNode != null ? 
iNode.tree.pageIndex : -1; return iNode; } protected void searchNodeCallback() { } protected void setSizeDelta(final int iDelta) { setSize(size() + iDelta); } }
0 (true)
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
843
public class AlterAndGetRequest extends AbstractAlterRequest { public AlterAndGetRequest() { } public AlterAndGetRequest(String name, Data function) { super(name, function); } @Override protected Operation prepareOperation() { return new AlterAndGetOperation(name, function); } @Override public int getClassId() { return AtomicReferencePortableHook.ALTER_AND_GET; } }
0 (true)
hazelcast_src_main_java_com_hazelcast_concurrent_atomicreference_client_AlterAndGetRequest.java
470
public class SandBoxType implements Serializable, BroadleafEnumerationType { private static final long serialVersionUID = 1L; private static final Map<String, SandBoxType> TYPES = new LinkedHashMap<String, SandBoxType>(); public static final SandBoxType USER = new SandBoxType("USER", "User"); public static final SandBoxType APPROVAL = new SandBoxType("APPROVAL", "Approval"); public static final SandBoxType PRODUCTION = new SandBoxType("PRODUCTION", "Production"); public static SandBoxType getInstance(final String type) { return TYPES.get(type); } private String type; private String friendlyType; public SandBoxType() { //do nothing } public SandBoxType(final String type, final String friendlyType) { this.friendlyType = friendlyType; setType(type); } public String getType() { return type; } public String getFriendlyType() { return friendlyType; } private void setType(final String type) { this.type = type; if (!TYPES.containsKey(type)) { TYPES.put(type, this); } else { throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName()); } } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((type == null) ? 0 : type.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; SandBoxType other = (SandBoxType) obj; if (type == null) { if (other.type != null) return false; } else if (!type.equals(other.type)) return false; return true; } }
1 (no label)
common_src_main_java_org_broadleafcommerce_common_sandbox_domain_SandBoxType.java
50
@Test(enabled = false) public class UnsafeComparatorTest { public void testOneByteArray() { final byte[] keyOne = new byte[] { 1 }; final byte[] keyTwo = new byte[] { 2 }; Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyOne, keyTwo) < 0); Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyTwo, keyOne) > 0); Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyTwo, keyTwo) == 0); } public void testOneLongArray() { final byte[] keyOne = new byte[] { 0, 1, 0, 0, 0, 0, 0, 0 }; final byte[] keyTwo = new byte[] { 1, 0, 0, 0, 0, 0, 0, 0 }; Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyOne, keyTwo) < 0); Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyTwo, keyOne) > 0); Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyTwo, keyTwo) == 0); } public void testOneLongArrayAndByte() { final byte[] keyOne = new byte[] { 1, 1, 0, 0, 0, 0, 0, 0, 0 }; final byte[] keyTwo = new byte[] { 1, 1, 0, 0, 0, 0, 0, 0, 1 }; Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyOne, keyTwo) < 0); Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyTwo, keyOne) > 0); Assert.assertTrue(OUnsafeByteArrayComparator.INSTANCE.compare(keyTwo, keyTwo) == 0); } }
0 (true)
commons_src_test_java_com_orientechnologies_common_comparator_UnsafeComparatorTest.java
498
@Entity @Inheritance(strategy = InheritanceType.JOINED) @Table(name="BLC_CATALOG") @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements") public class CatalogImpl implements Catalog { private static final Log LOG = LogFactory.getLog(CatalogImpl.class); @Id @GeneratedValue(generator= "CatalogId") @GenericGenerator( name="CatalogId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="CatalogImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.common.site.domain.CatalogImpl") } ) @Column(name = "CATALOG_ID") protected Long id; @Column(name = "NAME") @AdminPresentation(friendlyName = "Catalog_Name", order=1, prominent = true) protected String name; @ManyToMany(targetEntity = SiteImpl.class) @JoinTable(name = "BLC_SITE_CATALOG", joinColumns = @JoinColumn(name = "CATALOG_ID"), inverseJoinColumns = @JoinColumn(name = "SITE_ID")) @BatchSize(size = 50) protected List<Site> sites = new ArrayList<Site>(); @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public String getName() { return name; } @Override public void setName(String name) { this.name = name; } @Override public List<Site> getSites() { return sites; } @Override public void setSites(List<Site> sites) { this.sites = sites; } public void checkCloneable(Catalog catalog) throws CloneNotSupportedException, SecurityException, NoSuchMethodException { Method cloneMethod = catalog.getClass().getMethod("clone", new Class[]{}); if (cloneMethod.getDeclaringClass().getName().startsWith("org.broadleafcommerce") && !catalog.getClass().getName().startsWith("org.broadleafcommerce")) { //subclass is not implementing the clone method throw new CloneNotSupportedException("Custom extensions and implementations should implement clone."); } } @Override public Catalog clone() { Catalog clone; try { clone = (Catalog) Class.forName(this.getClass().getName()).newInstance(); try { checkCloneable(clone); } catch (CloneNotSupportedException e) { LOG.warn("Clone implementation missing in inheritance hierarchy outside of Broadleaf: " + clone.getClass().getName(), e); } clone.setId(id); clone.setName(name); } catch (Exception e) { throw new RuntimeException(e); } return clone; } }
0 (true)
common_src_main_java_org_broadleafcommerce_common_site_domain_CatalogImpl.java
240
highlighter = new XPostingsHighlighter() { @Override protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { return new Passage[0]; } };
0 (true)
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
534
public interface OFetchListener { /** * Fetch the linked field. * * @param iRoot * @param iFieldName * @param iLinked * @return null if the fetching must stop, otherwise the current field value */ public Object fetchLinked(final ORecordSchemaAware<?> iRoot, final Object iUserObject, final String iFieldName, final ORecordSchemaAware<?> iLinked, final OFetchContext iContext) throws OFetchException; public void parseLinked(final ORecordSchemaAware<?> iRootRecord, final OIdentifiable iLinked, final Object iUserObject, final String iFieldName, final OFetchContext iContext) throws OFetchException; public void parseLinkedCollectionValue(final ORecordSchemaAware<?> iRootRecord, final OIdentifiable iLinked, final Object iUserObject, final String iFieldName, final OFetchContext iContext) throws OFetchException; public Object fetchLinkedMapEntry(final ORecordSchemaAware<?> iRoot, final Object iUserObject, final String iFieldName, final String iKey, final ORecordSchemaAware<?> iLinked, final OFetchContext iContext) throws OFetchException; public Object fetchLinkedCollectionValue(final ORecordSchemaAware<?> iRoot, final Object iUserObject, final String iFieldName, final ORecordSchemaAware<?> iLinked, final OFetchContext iContext) throws OFetchException; public void processStandardField(final ORecordSchemaAware<?> iRecord, final Object iFieldValue, final String iFieldName, final OFetchContext iContext, final Object iUserObject, String iFormat) throws OFetchException; }
0 (true)
core_src_main_java_com_orientechnologies_orient_core_fetch_OFetchListener.java
571
public class ODefaultIndexFactory implements OIndexFactory { public static final String SBTREE_ALGORITHM = "SBTREE"; public static final String MVRBTREE_ALGORITHM = "MVRBTREE"; public static final String MVRBTREE_VALUE_CONTAINER = "MVRBTREESET"; public static final String SBTREEBONSAI_VALUE_CONTAINER = "SBTREEBONSAISET"; public static final String NONE_VALUE_CONTAINER = "NONE"; private static final Set<String> TYPES; static { final Set<String> types = new HashSet<String>(); types.add(OClass.INDEX_TYPE.UNIQUE.toString()); types.add(OClass.INDEX_TYPE.NOTUNIQUE.toString()); types.add(OClass.INDEX_TYPE.FULLTEXT.toString()); types.add(OClass.INDEX_TYPE.DICTIONARY.toString()); TYPES = Collections.unmodifiableSet(types); } /** * Index types : * <ul> * <li>UNIQUE</li> * <li>NOTUNIQUE</li> * <li>FULLTEXT</li> * <li>DICTIONARY</li> * </ul> */ public Set<String> getTypes() { return TYPES; } public OIndexInternal<?> createIndex(ODatabaseRecord database, String indexType, String algorithm, String valueContainerAlgorithm) throws OConfigurationException { if (valueContainerAlgorithm == null) { if (OClass.INDEX_TYPE.NOTUNIQUE.toString().equals(indexType) || OClass.INDEX_TYPE.NOTUNIQUE_HASH_INDEX.toString().equals(indexType) || OClass.INDEX_TYPE.FULLTEXT_HASH_INDEX.toString().equals(indexType) || OClass.INDEX_TYPE.FULLTEXT.toString().equals(indexType)) valueContainerAlgorithm = MVRBTREE_VALUE_CONTAINER; else valueContainerAlgorithm = NONE_VALUE_CONTAINER; } if ((database.getStorage().getType().equals(OEngineLocalPaginated.NAME) || database.getStorage().getType() .equals(OEngineLocal.NAME)) && valueContainerAlgorithm.equals(ODefaultIndexFactory.MVRBTREE_VALUE_CONTAINER) && OGlobalConfiguration.INDEX_NOTUNIQUE_USE_SBTREE_CONTAINER_BY_DEFAULT.getValueAsBoolean()) { OLogManager .instance() .warn( this, "Index was created using %s as values container. " + "This container is deprecated and is not supported any more. 
To avoid this message please drop and recreate indexes or perform DB export/import.", valueContainerAlgorithm); } if (SBTREE_ALGORITHM.equals(algorithm)) return createSBTreeIndex(indexType, valueContainerAlgorithm); if (MVRBTREE_ALGORITHM.equals(algorithm) || algorithm == null) return createMRBTreeIndex(indexType, valueContainerAlgorithm); throw new OConfigurationException("Unsupported type : " + indexType); } private OIndexInternal<?> createMRBTreeIndex(String indexType, String valueContainerAlgorithm) { if (OClass.INDEX_TYPE.UNIQUE.toString().equals(indexType)) { return new OIndexUnique(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm); } else if (OClass.INDEX_TYPE.NOTUNIQUE.toString().equals(indexType)) { return new OIndexNotUnique(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<Set<OIdentifiable>>(), valueContainerAlgorithm); } else if (OClass.INDEX_TYPE.FULLTEXT.toString().equals(indexType)) { return new OIndexFullText(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<Set<OIdentifiable>>(), valueContainerAlgorithm); } else if (OClass.INDEX_TYPE.DICTIONARY.toString().equals(indexType)) { return new OIndexDictionary(indexType, MVRBTREE_ALGORITHM, new OMVRBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm); } throw new OConfigurationException("Unsupported type : " + indexType); } private OIndexInternal<?> createSBTreeIndex(String indexType, String valueContainerAlgorithm) { if (OClass.INDEX_TYPE.UNIQUE.toString().equals(indexType)) { return new OIndexUnique(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm); } else if (OClass.INDEX_TYPE.NOTUNIQUE.toString().equals(indexType)) { return new OIndexNotUnique(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<Set<OIdentifiable>>(), valueContainerAlgorithm); } else if (OClass.INDEX_TYPE.FULLTEXT.toString().equals(indexType)) { return new OIndexFullText(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<Set<OIdentifiable>>(), valueContainerAlgorithm); } else if (OClass.INDEX_TYPE.DICTIONARY.toString().equals(indexType)) { return new OIndexDictionary(indexType, SBTREE_ALGORITHM, new OSBTreeIndexEngine<OIdentifiable>(), valueContainerAlgorithm); } throw new OConfigurationException("Unsupported type : " + indexType); } }
1 (no label)
core_src_main_java_com_orientechnologies_orient_core_index_ODefaultIndexFactory.java
2576
clusterService.submitStateUpdateTask("zen-disco-master_receive_cluster_state_from_another_master [" + newState.nodes().masterNode() + "]", Priority.URGENT, new ProcessedClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { if (newState.version() > currentState.version()) { logger.warn("received cluster state from [{}] which is also master but with a newer cluster_state, rejoining to cluster...", newState.nodes().masterNode()); return rejoin(currentState, "zen-disco-master_receive_cluster_state_from_another_master [" + newState.nodes().masterNode() + "]"); } else { logger.warn("received cluster state from [{}] which is also master but with an older cluster_state, telling [{}] to rejoin the cluster", newState.nodes().masterNode(), newState.nodes().masterNode()); transportService.sendRequest(newState.nodes().masterNode(), RejoinClusterRequestHandler.ACTION, new RejoinClusterRequest(currentState.nodes().localNodeId()), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { @Override public void handleException(TransportException exp) { logger.warn("failed to send rejoin request to [{}]", exp, newState.nodes().masterNode()); } }); return currentState; } } @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { newStateProcessed.onNewClusterStateProcessed(); } @Override public void onFailure(String source, Throwable t) { logger.error("unexpected failure during [{}]", t, source); newStateProcessed.onNewClusterStateFailed(t); } });
1 (no label)
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
73
public abstract class BaseClientRemoveListenerRequest extends CallableClientRequest { protected String name; protected String registrationId; protected BaseClientRemoveListenerRequest() { } protected BaseClientRemoveListenerRequest(String name, String registrationId) { this.name = name; this.registrationId = registrationId; } public String getRegistrationId() { return registrationId; } public void setRegistrationId(String registrationId) { this.registrationId = registrationId; } public String getName() { return name; } public void setName(final String name) { this.name = name; } @Override public void write(PortableWriter writer) throws IOException { writer.writeUTF("n", name); writer.writeUTF("r", registrationId); } @Override public void read(PortableReader reader) throws IOException { name = reader.readUTF("n"); registrationId = reader.readUTF("r"); } }
0 (true)
hazelcast_src_main_java_com_hazelcast_client_BaseClientRemoveListenerRequest.java
454
private static class TreeKeyIterator implements Iterator<OIdentifiable> { private final boolean autoConvertToRecord; private OSBTreeMapEntryIterator<OIdentifiable, Boolean> entryIterator; public TreeKeyIterator(OTreeInternal<OIdentifiable, Boolean> tree, boolean autoConvertToRecord) { entryIterator = new OSBTreeMapEntryIterator<OIdentifiable, Boolean>(tree); this.autoConvertToRecord = autoConvertToRecord; } @Override public boolean hasNext() { return entryIterator.hasNext(); } @Override public OIdentifiable next() { final OIdentifiable identifiable = entryIterator.next().getKey(); if (autoConvertToRecord) return identifiable.getRecord(); else return identifiable; } @Override public void remove() { entryIterator.remove(); } }
0 (true)
core_src_main_java_com_orientechnologies_orient_core_db_record_ridset_sbtree_OIndexRIDContainerSBTree.java
490
client.getClientExecutionService().executeInternal(new Runnable() { public void run() { for (MembershipListener listener : listeners.values()) { if (event.getEventType() == MembershipEvent.MEMBER_ADDED) { listener.memberAdded(event); } else { listener.memberRemoved(event); } } } });
1 (no label)
hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientClusterServiceImpl.java
30
public class EmbeddedBlueprintsTest extends AbstractCassandraBlueprintsTest { @Override protected WriteConfiguration getGraphConfig() { return CassandraStorageSetup.getEmbeddedGraphConfiguration(getClass().getSimpleName()); } @Override public void extraCleanUp(String uid) throws BackendException { ModifiableConfiguration mc = new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS, getGraphConfig(), Restriction.NONE); StoreManager m = new CassandraEmbeddedStoreManager(mc); m.clearStorage(); m.close(); } }
0 (true)
titan-cassandra_src_test_java_com_thinkaurelius_titan_blueprints_EmbeddedBlueprintsTest.java
279
public interface ActionFuture<T> extends Future<T> { /** * Similar to {@link #get()}, just catching the {@link InterruptedException} with * restoring the interrupted state on the thread and throwing an {@link org.elasticsearch.ElasticsearchIllegalStateException}, * and throwing the actual cause of the {@link java.util.concurrent.ExecutionException}. * <p/> * <p>Note, the actual cause is unwrapped to the actual failure (for example, unwrapped * from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is * still accessible using {@link #getRootFailure()}. */ T actionGet() throws ElasticsearchException; /** * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} with * restoring the interrupted state on the thread and throwing an {@link org.elasticsearch.ElasticsearchIllegalStateException}, * and throwing the actual cause of the {@link java.util.concurrent.ExecutionException}. * <p/> * <p>Note, the actual cause is unwrapped to the actual failure (for example, unwrapped * from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is * still accessible using {@link #getRootFailure()}. */ T actionGet(String timeout) throws ElasticsearchException; /** * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} with * restoring the interrupted state on the thread and throwing an {@link org.elasticsearch.ElasticsearchIllegalStateException}, * and throwing the actual cause of the {@link java.util.concurrent.ExecutionException}. * <p/> * <p>Note, the actual cause is unwrapped to the actual failure (for example, unwrapped * from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is * still accessible using {@link #getRootFailure()}. * * @param timeoutMillis Timeout in millis */ T actionGet(long timeoutMillis) throws ElasticsearchException; /** * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} with * restoring the interrupted state on the thread and throwing an {@link org.elasticsearch.ElasticsearchIllegalStateException}, * and throwing the actual cause of the {@link java.util.concurrent.ExecutionException}. * <p/> * <p>Note, the actual cause is unwrapped to the actual failure (for example, unwrapped * from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is * still accessible using {@link #getRootFailure()}. */ T actionGet(long timeout, TimeUnit unit) throws ElasticsearchException; /** * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} with * restoring the interrupted state on the thread and throwing an {@link org.elasticsearch.ElasticsearchIllegalStateException}, * and throwing the actual cause of the {@link java.util.concurrent.ExecutionException}. * <p/> * <p>Note, the actual cause is unwrapped to the actual failure (for example, unwrapped * from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is * still accessible using {@link #getRootFailure()}. */ T actionGet(TimeValue timeout) throws ElasticsearchException; /** * The root (possibly) wrapped failure. */ @Nullable Throwable getRootFailure(); }
0 (true)
src_main_java_org_elasticsearch_action_ActionFuture.java
794
private static class AddOneFunction implements IFunction<Long, Long> { @Override public Long apply(Long input) { return input+1; } }
0 (true)
hazelcast_src_test_java_com_hazelcast_concurrent_atomiclong_AtomicLongTest.java
225
public enum OAccessType { FIELD, PROPERTY }
0 (true)
core_src_main_java_com_orientechnologies_orient_core_annotation_OAccess.java
627
public abstract class AbstractModelVariableModifierProcessor extends AbstractElementProcessor { public AbstractModelVariableModifierProcessor(String elementName) { super(elementName); } /** * This method will handle calling the modifyModelAttributes abstract method and return * an "OK" processor result */ @Override protected ProcessorResult processElement(final Arguments arguments, final Element element) { modifyModelAttributes(arguments, element); // Remove the tag from the DOM final NestableNode parent = element.getParent(); parent.removeChild(element); return ProcessorResult.OK; } /** * Helper method to add a value to the expression evaluation root (model) Map * @param key the key to add to the model * @param value the value represented by the key */ @SuppressWarnings("unchecked") protected void addToModel(Arguments arguments, String key, Object value) { ((Map<String, Object>) arguments.getExpressionEvaluationRoot()).put(key, value); } /** * This method must be overriding by a processor that wishes to modify the model. It will * be called by this abstract processor in the correct precendence in the evaluation chain. * @param arguments * @param element */ protected abstract void modifyModelAttributes(Arguments arguments, Element element); }
0 (true)
common_src_main_java_org_broadleafcommerce_common_web_dialect_AbstractModelVariableModifierProcessor.java
120
public class TxManager extends AbstractTransactionManager implements Lifecycle { public interface Monitor { void txStarted( Xid xid ); void txCommitted( Xid xid ); void txRolledBack( Xid xid ); void txManagerStopped(); public static class Adapter implements Monitor { @Override public void txStarted( Xid xid ) { } @Override public void txCommitted( Xid xid ) { } @Override public void txRolledBack( Xid xid ) { } @Override public void txManagerStopped() { } } } private ThreadLocalWithSize<TransactionImpl> txThreadMap; // private ThreadLocalWithSize<TransactionImpl> txThreadMap = new ThreadLocalWithSize<>(); private final File txLogDir; private File logSwitcherFileName = null; private String txLog1FileName = "tm_tx_log.1"; private String txLog2FileName = "tm_tx_log.2"; private final int maxTxLogRecordCount = 1000; private final AtomicInteger eventIdentifierCounter = new AtomicInteger( 0 ); private final Map<RecoveredBranchInfo, Boolean> branches = new HashMap<>(); private volatile TxLog txLog = null; private final AtomicInteger startedTxCount = new AtomicInteger( 0 ); private final AtomicInteger comittedTxCount = new AtomicInteger( 0 ); private final AtomicInteger rolledBackTxCount = new AtomicInteger( 0 ); private int peakConcurrentTransactions = 0; private final StringLogger log; private final XaDataSourceManager xaDataSourceManager; private final FileSystemAbstraction fileSystem; private TxManager.TxManagerDataSourceRegistrationListener dataSourceRegistrationListener; private Throwable recoveryError; private final TransactionStateFactory stateFactory; private final Factory<byte[]> xidGlobalIdFactory; private final KernelHealth kernelHealth; private final Monitors monitors; private final Monitor monitor; public TxManager( File txLogDir, XaDataSourceManager xaDataSourceManager, StringLogger log, FileSystemAbstraction fileSystem, TransactionStateFactory stateFactory, Factory<byte[]> xidGlobalIdFactory, KernelHealth kernelHealth, Monitors monitors ) { this( txLogDir, xaDataSourceManager, log, fileSystem, stateFactory, new Monitor.Adapter(), xidGlobalIdFactory, kernelHealth, monitors ); } public TxManager( File txLogDir, XaDataSourceManager xaDataSourceManager, StringLogger log, FileSystemAbstraction fileSystem, TransactionStateFactory stateFactory, Monitor monitor, Factory<byte[]> xidGlobalIdFactory, KernelHealth kernelHealth, Monitors monitors ) { this.txLogDir = txLogDir; this.xaDataSourceManager = xaDataSourceManager; this.fileSystem = fileSystem; this.log = log; this.stateFactory = stateFactory; this.monitor = monitor; this.xidGlobalIdFactory = xidGlobalIdFactory; this.kernelHealth = kernelHealth; this.monitors = monitors; } int getNextEventIdentifier() { return eventIdentifierCounter.incrementAndGet(); } private <E extends Exception> E logAndReturn( String msg, E exception ) { try { log.error( msg, exception ); return exception; } catch ( Throwable t ) { return exception; } } private volatile boolean recovered = false; @Override public void init() { } @Override public synchronized void start() throws Throwable { txThreadMap = new ThreadLocalWithSize<>(); openLog(); findPendingDatasources(); dataSourceRegistrationListener = new TxManagerDataSourceRegistrationListener(); xaDataSourceManager.addDataSourceRegistrationListener( dataSourceRegistrationListener ); } private void findPendingDatasources() { try { Iterable<List<TxLog.Record>> danglingRecordList = txLog.getDanglingRecords(); for ( List<TxLog.Record> tx : danglingRecordList ) { for ( TxLog.Record rec : tx ) { if ( rec.getType() == 
TxLog.BRANCH_ADD ) { RecoveredBranchInfo branchId = new RecoveredBranchInfo( rec.getBranchId()) ; if ( branches.containsKey( branchId ) ) { continue; } branches.put( branchId, false ); } } } } catch ( IOException e ) { throw logAndReturn( "Failed to start transaction manager: Unable to recover pending branches.", new TransactionFailureException( "Unable to start TM", e ) ); } } @Override public synchronized void stop() { recovered = false; xaDataSourceManager.removeDataSourceRegistrationListener( dataSourceRegistrationListener ); closeLog(); monitor.txManagerStopped(); } @Override public void shutdown() throws Throwable { } synchronized TxLog getTxLog() throws IOException { if ( txLog.getRecordCount() > maxTxLogRecordCount ) { if ( txLog.getName().endsWith( txLog1FileName ) ) { txLog.switchToLogFile( new File( txLogDir, txLog2FileName )); changeActiveLog( txLog2FileName ); } else if ( txLog.getName().endsWith( txLog2FileName ) ) { txLog.switchToLogFile( new File( txLogDir, txLog1FileName )); changeActiveLog( txLog1FileName ); } else { setTmNotOk( new Exception( "Unknown active tx log file[" + txLog.getName() + "], unable to switch." ) ); final IOException ex = new IOException( "Unknown txLogFile[" + txLog.getName() + "] not equals to either [" + txLog1FileName + "] or [" + txLog2FileName + "]" ); throw logAndReturn( "TM error accessing log file", ex ); } } return txLog; } private void closeLog() { if ( txLog != null ) { try { txLog.close(); txLog = null; recovered = false; } catch ( IOException e ) { log.error( "Unable to close tx log[" + txLog.getName() + "]", e ); } } log.info( "TM shutting down" ); } private void changeActiveLog( String newFileName ) throws IOException { // change active log StoreChannel fc = fileSystem.open( logSwitcherFileName, "rw" ); ByteBuffer buf = ByteBuffer.wrap( UTF8.encode( newFileName ) ); fc.truncate( 0 ); fc.write( buf ); fc.force( true ); fc.close(); } synchronized void setTmNotOk( Throwable cause ) { kernelHealth.panic( cause ); } @Override public void begin() throws NotSupportedException, SystemException { begin( ForceMode.forced ); } @Override public void begin( ForceMode forceMode ) throws NotSupportedException, SystemException { assertTmOk(); TransactionImpl tx = txThreadMap.get(); if ( tx != null ) { throw logAndReturn( "TM error tx begin", new NotSupportedException( "Nested transactions not supported. 
Thread: " + Thread.currentThread() ) ); } tx = new TransactionImpl( xidGlobalIdFactory.newInstance(), this, forceMode, stateFactory, log ); txThreadMap.set( tx ); int concurrentTxCount = txThreadMap.size(); if ( concurrentTxCount > peakConcurrentTransactions ) { peakConcurrentTransactions = concurrentTxCount; } startedTxCount.incrementAndGet(); monitor.txStarted( new XidImpl( tx.getGlobalId(), new byte[0] ) ); // start record written on resource enlistment } private void assertTmOk() throws SystemException { if ( !recovered ) { throw new SystemException( "TxManager not recovered" ); } kernelHealth.assertHealthy( SystemException.class ); } // called when a resource gets enlisted void writeStartRecord( byte globalId[] ) throws SystemException { try { getTxLog().txStart( globalId ); } catch ( IOException e ) { setTmNotOk( e ); throw logAndReturn( "Error writing start record", Exceptions.withCause( new SystemException( "TM " + "encountered a problem, " + " error writing transaction log," ), e ) ); } } @Override public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException, IllegalStateException, SystemException { TransactionImpl tx = txThreadMap.get(); if ( tx == null ) { throw logAndReturn( "TM error tx commit", new IllegalStateException( "Not in transaction. Thread: " + Thread.currentThread() ) ); } boolean successful = false; try { assertTmOk(); if ( tx.getStatus() != Status.STATUS_ACTIVE && tx.getStatus() != Status.STATUS_MARKED_ROLLBACK ) { throw logAndReturn( "TM error tx commit", new IllegalStateException( "Tx status is: " + getTxStatusAsString( tx.getStatus() ) ) ); } tx.doBeforeCompletion(); // delist resources? if ( tx.getStatus() == Status.STATUS_ACTIVE ) { comittedTxCount.incrementAndGet(); commit( tx ); } else if ( tx.getStatus() == Status.STATUS_MARKED_ROLLBACK ) { rolledBackTxCount.incrementAndGet(); rollbackCommit( tx ); } else { throw logAndReturn( "TM error tx commit", new IllegalStateException( "Tx status is: " + getTxStatusAsString( tx.getStatus() ) ) ); } successful = true; } finally { // Call after completion as a safety net tx.doAfterCompletion(); monitor.txCommitted( new XidImpl( tx.getGlobalId(), new byte[0] ) ); txThreadMap.remove(); if ( successful ) { tx.finish( true ); } else { try { tx.finish( false ); } catch ( RuntimeException e ) { log.error( "Failed to commit transaction, and was then subsequently unable to " + "finish the failed tx.", e ); } } } } private void commit( TransactionImpl tx ) throws SystemException, HeuristicMixedException, HeuristicRollbackException { // mark as commit in log done TxImpl.doCommit() Throwable commitFailureCause = null; int xaErrorCode = -1; /* * The attempt to commit and the corresponding rollback in case of failure happens under the same lock. * This is necessary for a transaction to be able to cleanup its state in case it fails to commit * without any other transaction coming in and disrupting things. Hooks will be called under this * lock in case of rollback but not if commit succeeds, which should be ok throughput wise. There is * some performance degradation related to this, since now we hold a lock over commit() for * (potentially) all resource managers, while without this monitor each commit() on each * XaResourceManager locks only that. */ if ( tx.getResourceCount() == 0 ) { tx.setStatus( Status.STATUS_COMMITTED ); } else { try { tx.doCommit(); } catch ( CommitNotificationFailedException e ) { // Let this pass through. 
Catching this exception here will still have the exception // propagate out to the user (wrapped in a TransactionFailureException), but will not // set this transaction manager in "not OK" state. // At the time of adding this, this approach was chosen over throwing an XAException // with a specific error code since no error code seemed suitable. log.warn( "Commit notification failed: " + e ); } catch ( XAException e ) { // Behold, the error handling decision maker of great power. // // The thinking behind the code below is that there are certain types of errors that we understand, // and know that we can safely roll back after they occur. An example would be a user trying to delete // a node that still has relationships. For these errors, we keep a whitelist (the switch below), // and roll back when they occur. // // For *all* errors that we don't know exactly what they mean, we panic and run around in circles. // Other errors could involve out of disk space (can't recover) or out of memory (can't recover) // or anything else. The point is that there is no way for us to trust the state of the system any // more, so we set transaction manager to not ok and expect the user to fix the problem and do recovery. switch(e.errorCode) { // These are error states that we can safely recover from /* * User tried to delete a node that still had relationships, or in some other way violated * data model constraints. */ case XAException.XA_RBINTEGRITY: /* * A network error occurred. */ case XAException.XA_HEURCOM: xaErrorCode = e.errorCode; commitFailureCause = e; log.error( "Commit failed, status=" + getTxStatusAsString( tx.getStatus() ) + ", errorCode=" + xaErrorCode, e ); break; // Error codes where we are not *certain* that we still know the state of the system default: setTmNotOk( e ); throw logAndReturn("TM error tx commit",new TransactionFailureException( "commit threw exception", e )); } } catch ( Throwable t ) { setTmNotOk( t ); // this should never be throw logAndReturn("Commit failed for " + tx, new TransactionFailureException( "commit threw exception but status is committed?", t )); } } if ( tx.getStatus() != Status.STATUS_COMMITTED ) { try { tx.doRollback(); } catch ( Throwable e ) { setTmNotOk( e ); String commitError = commitFailureCause != null ? "error in commit: " + commitFailureCause : "error code in commit: " + xaErrorCode; String rollbackErrorCode = "Unknown error code"; if ( e instanceof XAException ) { rollbackErrorCode = Integer.toString( ((XAException) e).errorCode ); } throw logAndReturn( "Unable to rollback transaction "+ tx +". " + "Some resources may be commited others not. 
" + "Neo4j kernel should be SHUTDOWN for " + "resource maintance and transaction recovery ---->", Exceptions.withCause( new HeuristicMixedException( "Unable to rollback "+tx+" ---> " + commitError + " ---> error code for rollback: " + rollbackErrorCode ), e ) ); } tx.doAfterCompletion(); try { if ( tx.isGlobalStartRecordWritten() ) { getTxLog().txDone( tx.getGlobalId() ); } } catch ( IOException e ) { setTmNotOk( e ); throw logAndReturn( "Error writing transaction log for " + tx, Exceptions.withCause( new SystemException( "TM encountered a problem, while committing transaction " + tx + ", error writing transaction log" ), e ) ); } tx.setStatus( Status.STATUS_NO_TRANSACTION ); if ( commitFailureCause == null ) { throw logAndReturn( "TM error tx commit", new HeuristicRollbackException( "Failed to commit, transaction "+ tx +" rolled back ---> " + "error code was: " + xaErrorCode ) ); } else { throw logAndReturn( "TM error tx commit", Exceptions.withCause( new HeuristicRollbackException( "Failed to commit transaction "+ tx +", transaction rolled back ---> " + commitFailureCause ), commitFailureCause ) ); } } tx.doAfterCompletion(); try { if ( tx.isGlobalStartRecordWritten() ) { getTxLog().txDone( tx.getGlobalId() ); } } catch ( IOException e ) { setTmNotOk( e ); throw logAndReturn( "Error writing transaction log for " + tx, Exceptions.withCause( new SystemException( "TM encountered a problem, " + " error writing transaction log for "+ tx ), e ) ); } tx.setStatus( Status.STATUS_NO_TRANSACTION ); } private void rollbackCommit( TransactionImpl tx ) throws HeuristicMixedException, RollbackException, SystemException { try { tx.doRollback(); } catch ( XAException e ) { setTmNotOk( e ); throw logAndReturn( "Unable to rollback marked transaction. " + "Some resources may be commited others not. " + "Neo4j kernel should be SHUTDOWN for " + "resource maintance and transaction recovery: " + tx, Exceptions.withCause( new HeuristicMixedException( "Unable to rollback " + tx + " ---> error code for rollback: " + e.errorCode ), e ) ); } tx.doAfterCompletion(); try { if ( tx.isGlobalStartRecordWritten() ) { getTxLog().txDone( tx.getGlobalId() ); } } catch ( IOException e ) { setTmNotOk( e ); throw logAndReturn( "Error writing transaction log for " + tx, Exceptions.withCause( new SystemException( "TM encountered a problem, error writing transaction log" ), e ) ); } tx.setStatus( Status.STATUS_NO_TRANSACTION ); RollbackException rollbackException = new RollbackException( "Failed to commit, transaction rolled back" ); ExceptionCauseSetter.setCause( rollbackException, tx.getRollbackCause() ); throw rollbackException; } @Override public void rollback() throws IllegalStateException, SystemException { TransactionImpl tx = txThreadMap.get(); if ( tx == null ) { throw logAndReturn( "TM error tx commit", new IllegalStateException( "Not in transaction. Thread: " + Thread.currentThread() ) ); } try { assertTmOk(); if ( tx.getStatus() == Status.STATUS_ACTIVE || tx.getStatus() == Status.STATUS_MARKED_ROLLBACK || tx.getStatus() == Status.STATUS_PREPARING ) { tx.setStatus( Status.STATUS_MARKED_ROLLBACK ); tx.doBeforeCompletion(); // delist resources? try { rolledBackTxCount.incrementAndGet(); tx.doRollback(); } catch ( XAException e ) { setTmNotOk( e ); throw logAndReturn( "Unable to rollback marked or active transaction "+ tx +". " + "Some resources may be commited others not. 
" + "Neo4j kernel should be SHUTDOWN for " + "resource maintance and transaction recovery ---->", Exceptions.withCause( new SystemException( "Unable to rollback " + tx + " ---> error code for rollback: " + e.errorCode ), e ) ); } tx.doAfterCompletion(); try { if ( tx.isGlobalStartRecordWritten() ) { getTxLog().txDone( tx.getGlobalId() ); } } catch ( IOException e ) { setTmNotOk( e ); throw logAndReturn( "Error writing transaction log for " + tx, Exceptions.withCause( new SystemException( "TM encountered a problem, " + " error writing transaction log" ), e ) ); } tx.setStatus( Status.STATUS_NO_TRANSACTION ); } else { throw new IllegalStateException( "Tx status is: " + getTxStatusAsString( tx.getStatus() ) ); } } finally { // Call after completion as a safety net tx.doAfterCompletion(); txThreadMap.remove(); tx.finish( false ); } monitor.txRolledBack( new XidImpl( tx.getGlobalId(), new byte[0] ) ); } @Override public int getStatus() { TransactionImpl tx = txThreadMap.get(); if ( tx != null ) { return tx.getStatus(); } return Status.STATUS_NO_TRANSACTION; } @Override public Transaction getTransaction() throws SystemException { // It's pretty important that we check the tmOk state here. This method is called from getForceMode // which in turn is called from XaResourceManager.commit(Xid, boolean) and so on all the way up to // TxManager.commit(Thread, TransactionImpl), wherein the monitor lock on TxManager is held! // It's very important that we check the tmOk state, during commit, while holding the lock on the // TxManager, as we could otherwise get into a situation where a transaction crashes the database // during commit, while another makes it past the check and then procedes to rotate the log, making // the crashed transaction unrecoverable. assertTmOk(); return txThreadMap.get(); } @Override public void resume( Transaction tx ) throws IllegalStateException, SystemException { assertTmOk(); Transaction associatedTx = txThreadMap.get(); if ( associatedTx != null ) { throw new ThreadAssociatedWithOtherTransactionException( Thread.currentThread(), associatedTx, tx ); } TransactionImpl txImpl = (TransactionImpl) tx; if ( txImpl.getStatus() != Status.STATUS_NO_TRANSACTION ) { if ( txImpl.isActive() ) { throw new TransactionAlreadyActiveException( Thread.currentThread(), tx ); } txImpl.markAsActive(); txThreadMap.set( txImpl ); } } @Override public Transaction suspend() throws SystemException { assertTmOk(); // check for ACTIVE/MARKED_ROLLBACK? TransactionImpl tx = txThreadMap.get(); if ( tx != null ) { txThreadMap.remove(); tx.markAsSuspended(); } // OK to return null here according to the JTA spec (at least 1.1) return tx; } @Override public void setRollbackOnly() throws IllegalStateException, SystemException { assertTmOk(); TransactionImpl tx = txThreadMap.get(); if ( tx == null ) { throw new IllegalStateException( "Not in transaction. Thread: " + Thread.currentThread() ); } tx.setRollbackOnly(); } @Override public void setTransactionTimeout( int seconds ) throws SystemException { assertTmOk(); // ... 
} private void openLog() { logSwitcherFileName = new File( txLogDir, "active_tx_log"); txLog1FileName = "tm_tx_log.1"; txLog2FileName = "tm_tx_log.2"; try { if ( fileSystem.fileExists( logSwitcherFileName ) ) { StoreChannel fc = fileSystem.open( logSwitcherFileName, "rw" ); byte fileName[] = new byte[256]; ByteBuffer buf = ByteBuffer.wrap( fileName ); fc.read( buf ); fc.close(); File currentTxLog = new File( txLogDir, UTF8.decode( fileName ).trim()); if ( !fileSystem.fileExists( currentTxLog ) ) { throw logAndReturn( "TM startup failure", new TransactionFailureException( "Unable to start TM, " + "active tx log file[" + currentTxLog + "] not found." ) ); } txLog = new TxLog( currentTxLog, fileSystem, monitors ); log.info( "TM opening log: " + currentTxLog ); } else { if ( fileSystem.fileExists( new File( txLogDir, txLog1FileName )) || fileSystem.fileExists( new File( txLogDir, txLog2FileName ) )) { throw logAndReturn( "TM startup failure", new TransactionFailureException( "Unable to start TM, " + "no active tx log file found but found either " + txLog1FileName + " or " + txLog2FileName + " file, please set one of them as active or " + "remove them." ) ); } ByteBuffer buf = ByteBuffer.wrap( txLog1FileName .getBytes( "UTF-8" ) ); StoreChannel fc = fileSystem.open( logSwitcherFileName, "rw" ); fc.write( buf ); txLog = new TxLog( new File( txLogDir, txLog1FileName), fileSystem, monitors ); log.info( "TM new log: " + txLog1FileName ); fc.force( true ); fc.close(); } } catch ( IOException e ) { throw logAndReturn( "TM startup failure", new TransactionFailureException( "Unable to start TM", e ) ); } } @Override public void doRecovery() { if ( txLog == null ) { openLog(); } if ( recovered ) { return; } try { // Assuming here that the last datasource to register is the Neo one // Do recovery on start - all Resources should be registered by now Iterable<List<TxLog.Record>> knownDanglingRecordList = txLog.getDanglingRecords(); boolean danglingRecordsFound = knownDanglingRecordList.iterator().hasNext(); if ( danglingRecordsFound ) { log.info( "Unresolved transactions found in " + txLog.getName() + ", recovery started... " ); } // Recover DataSources. Always call due to some internal state using it as a trigger. xaDataSourceManager.recover( knownDanglingRecordList.iterator() ); if ( danglingRecordsFound ) { log.info( "Recovery completed, all transactions have been " + "resolved to a consistent state." 
); } getTxLog().truncate(); recovered = true; kernelHealth.healed(); } catch ( Throwable t ) { setTmNotOk( t ); recoveryError = t; } } byte[] getBranchId( XAResource xaRes ) { if ( xaRes instanceof XaResource ) { byte branchId[] = ((XaResource) xaRes).getBranchId(); if ( branchId != null ) { return branchId; } } return xaDataSourceManager.getBranchId( xaRes ); } String getTxStatusAsString( int status ) { switch ( status ) { case Status.STATUS_ACTIVE: return "STATUS_ACTIVE"; case Status.STATUS_NO_TRANSACTION: return "STATUS_NO_TRANSACTION"; case Status.STATUS_PREPARING: return "STATUS_PREPARING"; case Status.STATUS_PREPARED: return "STATUS_PREPARED"; case Status.STATUS_COMMITTING: return "STATUS_COMMITING"; case Status.STATUS_COMMITTED: return "STATUS_COMMITED"; case Status.STATUS_ROLLING_BACK: return "STATUS_ROLLING_BACK"; case Status.STATUS_ROLLEDBACK: return "STATUS_ROLLEDBACK"; case Status.STATUS_UNKNOWN: return "STATUS_UNKNOWN"; case Status.STATUS_MARKED_ROLLBACK: return "STATUS_MARKED_ROLLBACK"; default: return "STATUS_UNKNOWN(" + status + ")"; } } /** * @return The current transaction's event identifier or -1 if no * transaction is currently running. */ @Override public int getEventIdentifier() { TransactionImpl tx; try { tx = (TransactionImpl) getTransaction(); } catch ( SystemException e ) { throw new RuntimeException( e ); } if ( tx != null ) { return tx.getEventIdentifier(); } return -1; } @Override public ForceMode getForceMode() { try { // The call to getTransaction() is important. See the comment in getTransaction(). return ((TransactionImpl)getTransaction()).getForceMode(); } catch ( SystemException e ) { throw new RuntimeException( e ); } } @Override public Throwable getRecoveryError() { return recoveryError; } public int getStartedTxCount() { return startedTxCount.get(); } public int getCommittedTxCount() { return comittedTxCount.get(); } public int getRolledbackTxCount() { return rolledBackTxCount.get(); } public int getActiveTxCount() { return txThreadMap.size(); } public int getPeakConcurrentTxCount() { return peakConcurrentTransactions; } @Override public TransactionState getTransactionState() { Transaction tx; try { tx = getTransaction(); } catch ( SystemException e ) { throw new RuntimeException( e ); } return tx != null ? ((TransactionImpl)tx).getState() : TransactionState.NO_STATE; } private class TxManagerDataSourceRegistrationListener implements DataSourceRegistrationListener { @Override public void registeredDataSource( XaDataSource ds ) { branches.put( new RecoveredBranchInfo( ds.getBranchId() ), true ); boolean everythingRegistered = true; for ( boolean dsRegistered : branches.values() ) { everythingRegistered &= dsRegistered; } if ( everythingRegistered ) { doRecovery(); } } @Override public void unregisteredDataSource( XaDataSource ds ) { branches.put( new RecoveredBranchInfo( ds.getBranchId() ), false ); boolean everythingUnregistered = true; for ( boolean dsRegistered : branches.values() ) { everythingUnregistered &= !dsRegistered; } if ( everythingUnregistered ) { closeLog(); } } } /* * We use a hash map to store the branch ids. byte[] however does not offer a useful implementation of equals() or * hashCode(), so we need a wrapper that does that. 
*/ private static final class RecoveredBranchInfo { final byte[] branchId; private RecoveredBranchInfo( byte[] branchId ) { this.branchId = branchId; } @Override public int hashCode() { return Arrays.hashCode( branchId ); } @Override public boolean equals( Object obj ) { if ( obj == null || obj.getClass() != RecoveredBranchInfo.class ) { return false; } return Arrays.equals( branchId, ( ( RecoveredBranchInfo )obj ).branchId ); } } }
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_TxManager.java
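A minimal sketch of the JTA lifecycle that the TxManager entry above implements, assuming an already started javax.transaction.TransactionManager instance is supplied by the caller; begin(), commit() and rollback() are the standard JTA calls the class overrides.

import javax.transaction.TransactionManager;

public class TxManagerUsageSketch {
    // txManager is assumed to be a started TxManager obtained from the kernel;
    // work is the unit to run transactionally.
    public static void runInTransaction(TransactionManager txManager, Runnable work) throws Exception {
        txManager.begin();        // associates a new transaction with the calling thread
        try {
            work.run();           // enlist resources and do the actual work here
            txManager.commit();   // writes the done record and clears the thread association
        } catch (RuntimeException e) {
            txManager.rollback(); // rolls back and clears the thread association
            throw e;
        }
    }
}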
251
public class BufferedChecksumIndexOutput extends BufferedIndexOutput { private final IndexOutput delegate; private final BufferedIndexOutput bufferedDelegate; private final Checksum digest; public BufferedChecksumIndexOutput(IndexOutput delegate, Checksum digest) { super(delegate instanceof BufferedIndexOutput ? ((BufferedIndexOutput) delegate).getBufferSize() : BufferedIndexOutput.DEFAULT_BUFFER_SIZE); if (delegate instanceof BufferedIndexOutput) { bufferedDelegate = (BufferedIndexOutput) delegate; this.delegate = delegate; } else { this.delegate = delegate; bufferedDelegate = null; } this.digest = digest; } public Checksum digest() { return digest; } public IndexOutput underlying() { return this.delegate; } // don't override it, base class method simple reads from input and writes to this output // @Override public void copyBytes(IndexInput input, long numBytes) throws IOException { // delegate.copyBytes(input, numBytes); // } @Override public void close() throws IOException { try { super.close(); } finally { delegate.close(); } } @Override protected void flushBuffer(byte[] b, int offset, int len) throws IOException { if (bufferedDelegate != null) { bufferedDelegate.flushBuffer(b, offset, len); } else { delegate.writeBytes(b, offset, len); } digest.update(b, offset, len); } // don't override it, base class method simple reads from input and writes to this output // @Override public void copyBytes(IndexInput input, long numBytes) throws IOException { // delegate.copyBytes(input, numBytes); // } @Override public void flush() throws IOException { try { super.flush(); } finally { delegate.flush(); } } @Override public void seek(long pos) throws IOException { // seek might be called on files, which means that the checksum is not file checksum // but a checksum of the bytes written to this stream, which is the same for each // type of file in lucene super.seek(pos); delegate.seek(pos); } @Override public long length() throws IOException { return delegate.length(); } @Override public void setLength(long length) throws IOException { delegate.setLength(length); } @Override public String toString() { return delegate.toString(); } }
0true
src_main_java_org_apache_lucene_store_BufferedChecksumIndexOutput.java
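The flush-time checksumming pattern above can be sketched with plain JDK types; this analogy uses java.util.zip rather than the Lucene IndexOutput API, so none of the class names below come from the entry itself.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedOutputStream;

public class ChecksumOnWriteSketch {
    public static void main(String[] args) throws IOException {
        CRC32 digest = new CRC32();
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        // Every byte written through the wrapper also updates the digest,
        // mirroring how flushBuffer() above updates the Checksum on each buffer flush.
        try (CheckedOutputStream out = new CheckedOutputStream(sink, digest)) {
            out.write("some index data".getBytes("UTF-8"));
        }
        System.out.printf("wrote %d bytes, checksum=%d%n", sink.size(), digest.getValue());
    }
}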
1243
public class NodeAdminClient extends AbstractComponent implements AdminClient { private final NodeIndicesAdminClient indicesAdminClient; private final NodeClusterAdminClient clusterAdminClient; @Inject public NodeAdminClient(Settings settings, NodeClusterAdminClient clusterAdminClient, NodeIndicesAdminClient indicesAdminClient) { super(settings); this.indicesAdminClient = indicesAdminClient; this.clusterAdminClient = clusterAdminClient; } @Override public IndicesAdminClient indices() { return indicesAdminClient; } @Override public ClusterAdminClient cluster() { return this.clusterAdminClient; } }
0true
src_main_java_org_elasticsearch_client_node_NodeAdminClient.java
64
public interface TitanGraphTransaction extends TransactionalGraph, KeyIndexableGraph, SchemaManager { /* --------------------------------------------------------------- * Modifications * --------------------------------------------------------------- */ /** * Creates a new vertex in the graph with the default vertex label. * * @return New vertex in the graph created in the context of this transaction. */ public TitanVertex addVertex(); /** * Creates a new vertex in the graph with the vertex label named by the argument. * * @param vertexLabel the name of the vertex label to use * @return a new vertex in the graph created in the context of this transaction */ public TitanVertex addVertexWithLabel(String vertexLabel); /** * Creates a new vertex in the graph with the given vertex label. * * @param vertexLabel the vertex label which will apply to the new vertex * @return a new vertex in the graph created in the context of this transaction */ public TitanVertex addVertexWithLabel(VertexLabel vertexLabel); /** * Retrieves the vertex for the specified id. * * @param id id of the vertex to retrieve * @return vertex with the given id if it exists, else null * @see #containsVertex */ public TitanVertex getVertex(long id); /** * Retrieves the vertices for the given ids and returns a map from those ids to the corresponding vertices. * If a given id does not identify a vertex, it is not included in the returned map * * @param ids array of ids for which to retrieve vertices * @return map from ids to corresponding vertices * does not identify a vertex */ public Map<Long,TitanVertex> getVertices(long... ids); /** * Checks whether a vertex with the specified id exists in the graph database. * * @param vertexid vertex id * @return true, if a vertex with that id exists, else false */ public boolean containsVertex(long vertexid); /** * @return * @see TitanGraph#query() */ public TitanGraphQuery<? extends TitanGraphQuery> query(); /** * Returns a {@link com.thinkaurelius.titan.core.TitanIndexQuery} to query for vertices or edges against the specified indexing backend using * the given query string. The query string is analyzed and answered by the underlying storage backend. * <p/> * Note, that using indexQuery will may ignore modifications in the current transaction. * * @param indexName Name of the indexing backend to query as configured * @param query Query string * @return TitanIndexQuery object to query the index directly */ public TitanIndexQuery indexQuery(String indexName, String query); /** * @return * @see TitanGraph#multiQuery(com.thinkaurelius.titan.core.TitanVertex...) */ public TitanMultiVertexQuery<? extends TitanMultiVertexQuery> multiQuery(TitanVertex... vertices); /** * @return * @see TitanGraph#multiQuery(java.util.Collection) */ public TitanMultiVertexQuery<? extends TitanMultiVertexQuery> multiQuery(Collection<TitanVertex> vertices); }
0true
titan-core_src_main_java_com_thinkaurelius_titan_core_TitanGraphTransaction.java
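A hedged usage sketch for the transaction interface above: addVertexWithLabel and the commit inherited from TransactionalGraph are declared in the entry, while graph.newTransaction() and the Blueprints-style setProperty call are assumptions about the surrounding Titan/Blueprints API.

import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanGraphTransaction;
import com.thinkaurelius.titan.core.TitanVertex;

public class GraphTxSketch {
    public static void addPerson(TitanGraph graph) {
        TitanGraphTransaction tx = graph.newTransaction(); // assumed entry point on TitanGraph
        TitanVertex v = tx.addVertexWithLabel("person");   // declared in the interface above
        v.setProperty("name", "alice");                    // assumed Blueprints property setter
        tx.commit();                                       // from the inherited TransactionalGraph contract
    }
}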
1328
@ClusterScope(scope=Scope.TEST, numNodes=0) public class SimpleDataNodesTests extends ElasticsearchIntegrationTest { @Test public void testDataNodes() throws Exception { cluster().startNode(settingsBuilder().put("node.data", false).build()); client().admin().indices().create(createIndexRequest("test")).actionGet(); try { client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test")).timeout(timeValueSeconds(1))).actionGet(); fail("no allocation should happen"); } catch (UnavailableShardsException e) { // all is well } cluster().startNode(settingsBuilder().put("node.data", false).build()); assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").setLocal(true).execute().actionGet().isTimedOut(), equalTo(false)); // still no shard should be allocated try { client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test")).timeout(timeValueSeconds(1))).actionGet(); fail("no allocation should happen"); } catch (UnavailableShardsException e) { // all is well } // now, start a node data, and see that it gets with shards cluster().startNode(settingsBuilder().put("node.data", true).build()); assertThat(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("3").setLocal(true).execute().actionGet().isTimedOut(), equalTo(false)); IndexResponse indexResponse = client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet(); assertThat(indexResponse.getId(), equalTo("1")); assertThat(indexResponse.getType(), equalTo("type1")); } private String source(String id, String nameValue) { return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }"; } }
0true
src_test_java_org_elasticsearch_cluster_SimpleDataNodesTests.java
739
public class TransportDeleteByQueryAction extends TransportIndicesReplicationOperationAction<DeleteByQueryRequest, DeleteByQueryResponse, IndexDeleteByQueryRequest, IndexDeleteByQueryResponse, ShardDeleteByQueryRequest, ShardDeleteByQueryRequest, ShardDeleteByQueryResponse> { private final DestructiveOperations destructiveOperations; @Inject public TransportDeleteByQueryAction(Settings settings, ClusterService clusterService, TransportService transportService, ThreadPool threadPool, TransportIndexDeleteByQueryAction indexDeleteByQueryAction, NodeSettingsService nodeSettingsService) { super(settings, transportService, clusterService, threadPool, indexDeleteByQueryAction); this.destructiveOperations = new DestructiveOperations(logger, settings, nodeSettingsService); } @Override protected void doExecute(DeleteByQueryRequest request, ActionListener<DeleteByQueryResponse> listener) { destructiveOperations.failDestructive(request.indices()); super.doExecute(request, listener); } @Override protected Map<String, Set<String>> resolveRouting(ClusterState clusterState, DeleteByQueryRequest request) throws ElasticsearchException { return clusterState.metaData().resolveSearchRouting(request.routing(), request.indices()); } @Override protected DeleteByQueryRequest newRequestInstance() { return new DeleteByQueryRequest(); } @Override protected DeleteByQueryResponse newResponseInstance(DeleteByQueryRequest request, AtomicReferenceArray indexResponses) { DeleteByQueryResponse response = new DeleteByQueryResponse(); for (int i = 0; i < indexResponses.length(); i++) { IndexDeleteByQueryResponse indexResponse = (IndexDeleteByQueryResponse) indexResponses.get(i); if (indexResponse != null) { response.getIndices().put(indexResponse.getIndex(), indexResponse); } } return response; } @Override protected boolean accumulateExceptions() { return false; } @Override protected String transportAction() { return DeleteByQueryAction.NAME; } @Override protected ClusterBlockException checkGlobalBlock(ClusterState state, DeleteByQueryRequest replicationPingRequest) { return state.blocks().globalBlockedException(ClusterBlockLevel.READ); } @Override protected ClusterBlockException checkRequestBlock(ClusterState state, DeleteByQueryRequest request, String[] concreteIndices) { return state.blocks().indicesBlockedException(ClusterBlockLevel.WRITE, concreteIndices); } @Override protected IndexDeleteByQueryRequest newIndexRequestInstance(DeleteByQueryRequest request, String index, Set<String> routing) { String[] filteringAliases = clusterService.state().metaData().filteringAliases(index, request.indices()); return new IndexDeleteByQueryRequest(request, index, routing, filteringAliases); } }
0true
src_main_java_org_elasticsearch_action_deletebyquery_TransportDeleteByQueryAction.java
60
@edu.umd.cs.findbugs.annotations.SuppressWarnings("EI_EXPOSE_REP") public class RestValue implements DataSerializable { private byte[] value; private byte[] contentType; public RestValue() { } public RestValue(byte[] value, byte[] contentType) { this.value = value; this.contentType = contentType; } public void readData(ObjectDataInput in) throws IOException { value = IOUtil.readByteArray(in); contentType = IOUtil.readByteArray(in); } public void writeData(ObjectDataOutput out) throws IOException { IOUtil.writeByteArray(out, value); IOUtil.writeByteArray(out, contentType); } public byte[] getContentType() { return contentType; } public void setContentType(byte[] contentType) { this.contentType = contentType; } public byte[] getValue() { return value; } public void setValue(byte[] value) { this.value = value; } @Override public String toString() { String contentTypeStr; if (contentType == null) { contentTypeStr = "unknown-content-type"; } else { contentTypeStr = bytesToString(contentType); } String valueStr; if (value == null) { valueStr = "value.length=0"; } else if (contentTypeStr.contains("text")) { valueStr = "value=\"" + bytesToString(value) + "\""; } else { valueStr = "value.length=" + value.length; } return "RestValue{" + "contentType='" + contentTypeStr + "', " + valueStr + '}'; } }
0true
hazelcast_src_main_java_com_hazelcast_ascii_rest_RestValue.java
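A short usage sketch for the RestValue holder above; it only exercises the constructor and toString() shown in the entry, with the UTF-8 literals chosen here purely for illustration.

import java.nio.charset.StandardCharsets;
import com.hazelcast.ascii.rest.RestValue;

public class RestValueUsageSketch {
    public static void main(String[] args) {
        byte[] body = "hello".getBytes(StandardCharsets.UTF_8);
        byte[] contentType = "text/plain".getBytes(StandardCharsets.UTF_8);
        RestValue value = new RestValue(body, contentType);
        // toString() spots the "text" content type and prints the payload inline
        // rather than just its length.
        System.out.println(value);
    }
}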
32
public class StateMachines implements MessageProcessor, MessageSource { private final Logger logger = LoggerFactory.getLogger( StateMachines.class ); private final MessageSender sender; private DelayedDirectExecutor executor; private Executor stateMachineExecutor; private Timeouts timeouts; private final Map<Class<? extends MessageType>, StateMachine> stateMachines = new LinkedHashMap<Class<? extends MessageType>, StateMachine>(); private final List<MessageProcessor> outgoingProcessors = new ArrayList<MessageProcessor>(); private final OutgoingMessageHolder outgoing; // This is used to ensure fairness of message delivery private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock( true ); private final String instanceIdHeaderValue; public StateMachines( MessageSource source, final MessageSender sender, Timeouts timeouts, DelayedDirectExecutor executor, Executor stateMachineExecutor, InstanceId instanceId ) { this.sender = sender; this.executor = executor; this.stateMachineExecutor = stateMachineExecutor; this.timeouts = timeouts; this.instanceIdHeaderValue = instanceId.toString(); outgoing = new OutgoingMessageHolder(); timeouts.addMessageProcessor( this ); source.addMessageProcessor( this ); } public Timeouts getTimeouts() { return timeouts; } public synchronized void addStateMachine( StateMachine stateMachine ) { stateMachines.put( stateMachine.getMessageType(), stateMachine ); } public synchronized void removeStateMachine( StateMachine stateMachine ) { stateMachines.remove( stateMachine.getMessageType() ); } public Iterable<StateMachine> getStateMachines() { return stateMachines.values(); } @Override public void addMessageProcessor( MessageProcessor messageProcessor ) { outgoingProcessors.add( messageProcessor ); } public OutgoingMessageHolder getOutgoing() { return outgoing; } @Override public boolean process( final Message<? extends MessageType> message ) { stateMachineExecutor.execute( new Runnable() { OutgoingMessageHolder temporaryOutgoing = new OutgoingMessageHolder(); @Override public void run() { lock.writeLock().lock(); try { // Lock timeouts while we are processing the message synchronized ( timeouts ) { StateMachine stateMachine = stateMachines.get( message.getMessageType().getClass() ); if ( stateMachine == null ) { return; // No StateMachine registered for this MessageType type - Ignore this } stateMachine.handle( message, temporaryOutgoing ); Message<? extends MessageType> tempMessage; while ((tempMessage = temporaryOutgoing.nextOutgoingMessage()) != null) { outgoing.offer( tempMessage ); } // Process and send messages // Allow state machines to send messages to each other as well in this loop Message<? extends MessageType> outgoingMessage; List<Message<? extends MessageType>> toSend = new LinkedList<Message<? 
extends MessageType>>(); try { while ( ( outgoingMessage = outgoing.nextOutgoingMessage() ) != null ) { message.copyHeadersTo( outgoingMessage, CONVERSATION_ID, CREATED_BY ); for ( MessageProcessor outgoingProcessor : outgoingProcessors ) { try { if ( !outgoingProcessor.process( outgoingMessage ) ) { break; } } catch ( Throwable e ) { logger.warn( "Outgoing message processor threw exception", e ); } } if ( outgoingMessage.hasHeader( Message.TO ) ) { outgoingMessage.setHeader( Message.INSTANCE_ID, instanceIdHeaderValue ); toSend.add( outgoingMessage ); } else { // Deliver internally if possible StateMachine internalStatemachine = stateMachines.get( outgoingMessage.getMessageType() .getClass() ); if ( internalStatemachine != null ) { internalStatemachine.handle( (Message) outgoingMessage, temporaryOutgoing ); while ((tempMessage = temporaryOutgoing.nextOutgoingMessage()) != null) { outgoing.offer( tempMessage ); } } } } if ( !toSend.isEmpty() ) // the check is necessary, sender may not have started yet { sender.process( toSend ); } } catch ( Exception e ) { logger.warn( "Error processing message " + message, e ); } } } finally { lock.writeLock().unlock(); } // Before returning, process delayed executions so that they are done before returning // This will effectively trigger all notifications created by contexts executor.drain(); } } ); return true; } public void addStateTransitionListener( StateTransitionListener stateTransitionListener ) { for ( StateMachine stateMachine : stateMachines.values() ) { stateMachine.addStateTransitionListener( stateTransitionListener ); } } public void removeStateTransitionListener( StateTransitionListener stateTransitionListener ) { for ( StateMachine stateMachine : stateMachines.values() ) { stateMachine.removeStateTransitionListener( stateTransitionListener ); } } @Override public String toString() { List<String> states = new ArrayList<String>(); for ( StateMachine stateMachine : stateMachines.values() ) { states.add( stateMachine.getState().getClass().getSuperclass().getSimpleName() + ":" + stateMachine .getState().toString() ); } return states.toString(); } public StateMachine getStateMachine( Class<? extends MessageType> messageType ) { return stateMachines.get( messageType ); } private class OutgoingMessageHolder implements MessageHolder { private Deque<Message<? extends MessageType>> outgoingMessages = new ArrayDeque<Message<? extends MessageType>>(); @Override public synchronized void offer( Message<? extends MessageType> message ) { outgoingMessages.addFirst( message ); } public synchronized Message<? extends MessageType> nextOutgoingMessage() { return outgoingMessages.pollFirst(); } } }
1no label
enterprise_cluster_src_main_java_org_neo4j_cluster_StateMachines.java
149
public abstract class XaCommand { private boolean isRecovered = false; /** * Default implementation of rollback that does nothing. This method is not * to undo any work done by the {@link #execute} method. Commands in a * {@link XaTransaction} are either all rolled back or all executed, they're * not linked together as usual execute/rollback methods. * <p> * Since a command only is in memory nothing has been made persistent so * rollback usually don't have to do anything. Sometimes however a command * needs to acquire resources when created (since the application thinks it * has done the work when the command is created). For example, if a command * creates some entity that has a primary id we need to generate that id * upon command creation. But if the command is rolled back we should * release that id. This is the place to do just that. */ public void rollback() { }; /** * Executes the command and makes it persistent. This method must succeed, * any protests about this command not being able to execute should be done * before execution of any command within the transaction. */ public abstract void execute(); /** * When a command is added to a transaction (usually when it is created) it * must be written to the {@link XaLogicalLog}. This method should write * all the data that is needed to re-create the command (see * {@link XaCommandFactory}). * <p> * Write the data to the <CODE>fileChannel</CODE>, you can use the * <CODE>buffer</CODE> supplied or create your own buffer since its capacity * is very small (137 bytes or something). Acccess to writing commands is * synchronized, only one command will be written at a time so if you need * to write larger data sets the commands can share the same buffer. * <p> * Don't throw an <CODE>IOException</CODE> to imply something is wrong * with the command. An exception should only be thrown here if there is a * real IO failure. If something is wrong with this command it should have * been detected when it was created. * <p> * Don't <CODE>force</CODE>, <CODE>position</CODE> or anything except * normal forward <CODE>write</CODE> with the file channel. * * @param fileChannel * The channel to the {@link XaLogicalLog} * @param buffer * A small byte buffer that can be used to write command data * @throws IOException * In case of *real* IO failure */ public abstract void writeToFile( LogBuffer buffer ) throws IOException; /** * If this command is created by the command factory during a recovery scan * of the logical log this method will be called to mark the command as a * "recovered command". */ protected void setRecovered() { isRecovered = true; } /** * Returns wether or not this is a "recovered command". * * @return <CODE>true</CODE> if command was created during a recovery else * <CODE>false</CODE> is returned */ public boolean isRecovered() { return isRecovered; } }
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_XaCommand.java
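A minimal subclass sketch following the contract in the javadoc above; only the two abstract methods are implemented, and the serialization hint in the comment is an assumption rather than part of the documented LogBuffer API.

import java.io.IOException;
import org.neo4j.kernel.impl.transaction.xaframework.LogBuffer;
import org.neo4j.kernel.impl.transaction.xaframework.XaCommand;

public class NoOpCommandSketch extends XaCommand {
    @Override
    public void execute() {
        // Apply the command's change to the store here; failures should have been
        // detected when the command was created, not at execution time.
    }

    @Override
    public void writeToFile(LogBuffer buffer) throws IOException {
        // Serialize just enough state to re-create this command during recovery,
        // e.g. a type byte followed by the record id (exact LogBuffer calls assumed).
    }
}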
209
@SuppressWarnings("unchecked") public class OStorageRemoteThread implements OStorageProxy { private static AtomicInteger sessionSerialId = new AtomicInteger(-1); private final OStorageRemote delegate; private String serverURL; private int sessionId; public OStorageRemoteThread(final OStorageRemote iSharedStorage) { delegate = iSharedStorage; serverURL = null; sessionId = sessionSerialId.decrementAndGet(); } public OStorageRemoteThread(final OStorageRemote iSharedStorage, final int iSessionId) { delegate = iSharedStorage; serverURL = null; sessionId = iSessionId; } public void open(final String iUserName, final String iUserPassword, final Map<String, Object> iOptions) { pushSession(); try { delegate.open(iUserName, iUserPassword, iOptions); } finally { popSession(); } } @Override public boolean isDistributed() { return delegate.isDistributed(); } public void create(final Map<String, Object> iOptions) { pushSession(); try { delegate.create(iOptions); } finally { popSession(); } } public void close(boolean iForce) { pushSession(); try { delegate.close(iForce); Orient.instance().unregisterStorage(this); } finally { popSession(); } } public boolean dropCluster(final String iClusterName, final boolean iTruncate) { pushSession(); try { return delegate.dropCluster(iClusterName, iTruncate); } finally { popSession(); } } public int getUsers() { pushSession(); try { return delegate.getUsers(); } finally { popSession(); } } public int addUser() { pushSession(); try { return delegate.addUser(); } finally { popSession(); } } public OSharedResourceAdaptiveExternal getLock() { pushSession(); try { return delegate.getLock(); } finally { popSession(); } } public void setSessionId(final String iServerURL, final int iSessionId) { serverURL = iServerURL; sessionId = iSessionId; delegate.setSessionId(serverURL, iSessionId); } public void reload() { pushSession(); try { delegate.reload(); } finally { popSession(); } } public boolean exists() { pushSession(); try { return delegate.exists(); } finally { popSession(); } } public int removeUser() { pushSession(); try { return delegate.removeUser(); } finally { popSession(); } } public void close() { pushSession(); try { delegate.close(); } finally { popSession(); } } public void delete() { pushSession(); try { delegate.delete(); Orient.instance().unregisterStorage(this); } finally { popSession(); } } @Override public OStorage getUnderlying() { return delegate; } public Set<String> getClusterNames() { pushSession(); try { return delegate.getClusterNames(); } finally { popSession(); } } @Override public void backup(OutputStream out, Map<String, Object> options, final Callable<Object> callable) throws IOException { throw new UnsupportedOperationException("backup"); } @Override public void restore(InputStream in, Map<String, Object> options, final Callable<Object> callable) throws IOException { throw new UnsupportedOperationException("restore"); } public OStorageOperationResult<OPhysicalPosition> createRecord(final int iDataSegmentId, final ORecordId iRid, final byte[] iContent, ORecordVersion iRecordVersion, final byte iRecordType, final int iMode, ORecordCallback<OClusterPosition> iCallback) { pushSession(); try { return delegate.createRecord(iDataSegmentId, iRid, iContent, OVersionFactory.instance().createVersion(), iRecordType, iMode, iCallback); } finally { popSession(); } } public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRid, final String iFetchPlan, boolean iIgnoreCache, ORecordCallback<ORawBuffer> iCallback, boolean loadTombstones) { 
pushSession(); try { return delegate.readRecord(iRid, iFetchPlan, iIgnoreCache, null, loadTombstones); } finally { popSession(); } } public OStorageOperationResult<ORecordVersion> updateRecord(final ORecordId iRid, final byte[] iContent, final ORecordVersion iVersion, final byte iRecordType, final int iMode, ORecordCallback<ORecordVersion> iCallback) { pushSession(); try { return delegate.updateRecord(iRid, iContent, iVersion, iRecordType, iMode, iCallback); } finally { popSession(); } } public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRid, final ORecordVersion iVersion, final int iMode, ORecordCallback<Boolean> iCallback) { pushSession(); try { return delegate.deleteRecord(iRid, iVersion, iMode, iCallback); } finally { popSession(); } } @Override public boolean updateReplica(int dataSegmentId, ORecordId rid, byte[] content, ORecordVersion recordVersion, byte recordType) throws IOException { pushSession(); try { return delegate.updateReplica(dataSegmentId, rid, content, recordVersion, recordType); } finally { popSession(); } } @Override public ORecordMetadata getRecordMetadata(ORID rid) { pushSession(); try { return delegate.getRecordMetadata(rid); } finally { popSession(); } } @Override public <V> V callInRecordLock(Callable<V> iCallable, ORID rid, boolean iExclusiveLock) { pushSession(); try { return delegate.callInRecordLock(iCallable, rid, iExclusiveLock); } finally { popSession(); } } @Override public boolean cleanOutRecord(ORecordId recordId, ORecordVersion recordVersion, int iMode, ORecordCallback<Boolean> callback) { pushSession(); try { return delegate.cleanOutRecord(recordId, recordVersion, iMode, callback); } finally { popSession(); } } public long count(final int iClusterId) { pushSession(); try { return delegate.count(iClusterId); } finally { popSession(); } } @Override public long count(int iClusterId, boolean countTombstones) { pushSession(); try { return delegate.count(iClusterId, countTombstones); } finally { popSession(); } } @Override public long count(int[] iClusterIds, boolean countTombstones) { pushSession(); try { return delegate.count(iClusterIds, countTombstones); } finally { popSession(); } } public String toString() { pushSession(); try { return delegate.toString(); } finally { popSession(); } } public OClusterPosition[] getClusterDataRange(final int iClusterId) { pushSession(); try { return delegate.getClusterDataRange(iClusterId); } finally { popSession(); } } @Override public OPhysicalPosition[] higherPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) { pushSession(); try { return delegate.higherPhysicalPositions(currentClusterId, physicalPosition); } finally { popSession(); } } @Override public OPhysicalPosition[] lowerPhysicalPositions(int currentClusterId, OPhysicalPosition physicalPosition) { pushSession(); try { return delegate.lowerPhysicalPositions(currentClusterId, physicalPosition); } finally { popSession(); } } @Override public OPhysicalPosition[] ceilingPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) { pushSession(); try { return delegate.ceilingPhysicalPositions(clusterId, physicalPosition); } finally { popSession(); } } @Override public OPhysicalPosition[] floorPhysicalPositions(int clusterId, OPhysicalPosition physicalPosition) { pushSession(); try { return delegate.floorPhysicalPositions(clusterId, physicalPosition); } finally { popSession(); } } public long getSize() { pushSession(); try { return delegate.getSize(); } finally { popSession(); } } public long countRecords() { 
pushSession(); try { return delegate.countRecords(); } finally { popSession(); } } public long count(final int[] iClusterIds) { pushSession(); try { return delegate.count(iClusterIds); } finally { popSession(); } } public Object command(final OCommandRequestText iCommand) { pushSession(); try { return delegate.command(iCommand); } finally { popSession(); } } public void commit(final OTransaction iTx, Runnable callback) { pushSession(); try { delegate.commit(iTx, null); } finally { popSession(); } } public void rollback(OTransaction iTx) { pushSession(); try { delegate.rollback(iTx); } finally { popSession(); } } public int getClusterIdByName(final String iClusterName) { pushSession(); try { return delegate.getClusterIdByName(iClusterName); } finally { popSession(); } } public String getClusterTypeByName(final String iClusterName) { pushSession(); try { return delegate.getClusterTypeByName(iClusterName); } finally { popSession(); } } public int getDefaultClusterId() { pushSession(); try { return delegate.getDefaultClusterId(); } finally { popSession(); } } public void setDefaultClusterId(final int defaultClusterId) { pushSession(); try { delegate.setDefaultClusterId(defaultClusterId); } finally { popSession(); } } public int addCluster(final String iClusterType, final String iClusterName, final String iLocation, final String iDataSegmentName, boolean forceListBased, final Object... iArguments) { pushSession(); try { return delegate.addCluster(iClusterType, iClusterName, iLocation, iDataSegmentName, false, iArguments); } finally { popSession(); } } public int addCluster(String iClusterType, String iClusterName, int iRequestedId, String iLocation, String iDataSegmentName, boolean forceListBased, Object... iParameters) { pushSession(); try { return delegate .addCluster(iClusterType, iClusterName, iRequestedId, iLocation, iDataSegmentName, forceListBased, iParameters); } finally { popSession(); } } public boolean dropCluster(final int iClusterId, final boolean iTruncate) { pushSession(); try { return delegate.dropCluster(iClusterId, iTruncate); } finally { popSession(); } } public ODataSegment getDataSegmentById(final int iDataSegmentId) { return delegate.getDataSegmentById(iDataSegmentId); } public int getDataSegmentIdByName(final String iDataSegmentName) { return delegate.getDataSegmentIdByName(iDataSegmentName); } public int addDataSegment(final String iDataSegmentName) { pushSession(); try { return delegate.addDataSegment(iDataSegmentName); } finally { popSession(); } } public int addDataSegment(final String iSegmentName, final String iSegmentFileName) { pushSession(); try { return delegate.addDataSegment(iSegmentName, iSegmentFileName); } finally { popSession(); } } public boolean dropDataSegment(final String iSegmentName) { pushSession(); try { return delegate.dropDataSegment(iSegmentName); } finally { popSession(); } } public void synch() { pushSession(); try { delegate.synch(); } finally { popSession(); } } public String getPhysicalClusterNameById(final int iClusterId) { pushSession(); try { return delegate.getPhysicalClusterNameById(iClusterId); } finally { popSession(); } } public int getClusters() { pushSession(); try { return delegate.getClusterMap(); } finally { popSession(); } } public Collection<OCluster> getClusterInstances() { pushSession(); try { return delegate.getClusterInstances(); } finally { popSession(); } } public OCluster getClusterById(final int iId) { pushSession(); try { return delegate.getClusterById(iId); } finally { popSession(); } } public long getVersion() { 
pushSession(); try { return delegate.getVersion(); } finally { popSession(); } } public boolean isPermanentRequester() { pushSession(); try { return delegate.isPermanentRequester(); } finally { popSession(); } } public void updateClusterConfiguration(final byte[] iContent) { pushSession(); try { delegate.updateClusterConfiguration(iContent); } finally { popSession(); } } public OStorageConfiguration getConfiguration() { pushSession(); try { return delegate.getConfiguration(); } finally { popSession(); } } public boolean isClosed() { return delegate.isClosed(); } public boolean checkForRecordValidity(final OPhysicalPosition ppos) { pushSession(); try { return delegate.checkForRecordValidity(ppos); } finally { popSession(); } } public String getName() { pushSession(); try { return delegate.getName(); } finally { popSession(); } } public String getURL() { return delegate.getURL(); } public void beginResponse(final OChannelBinaryAsynchClient iNetwork) throws IOException { pushSession(); try { delegate.beginResponse(iNetwork); } finally { popSession(); } } public OLevel2RecordCache getLevel2Cache() { return delegate.getLevel2Cache(); } public boolean existsResource(final String iName) { return delegate.existsResource(iName); } public synchronized <T> T getResource(final String iName, final Callable<T> iCallback) { return (T) delegate.getResource(iName, iCallback); } public <T> T removeResource(final String iName) { return (T) delegate.removeResource(iName); } public ODocument getClusterConfiguration() { return delegate.getClusterConfiguration(); } protected void handleException(final OChannelBinaryAsynchClient iNetwork, final String iMessage, final Exception iException) { delegate.handleException(iNetwork, iMessage, iException); } public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) { return delegate.callInLock(iCallable, iExclusiveLock); } public ORemoteServerEventListener getRemoteServerEventListener() { return delegate.getAsynchEventListener(); } public void setRemoteServerEventListener(final ORemoteServerEventListener iListener) { delegate.setAsynchEventListener(iListener); } public void removeRemoteServerEventListener() { delegate.removeRemoteServerEventListener(); } public static int getNextConnectionId() { return sessionSerialId.decrementAndGet(); } @Override public void checkForClusterPermissions(final String iClusterName) { delegate.checkForClusterPermissions(iClusterName); } public STATUS getStatus() { return delegate.getStatus(); } @Override public String getType() { return delegate.getType(); } @Override public boolean equals(final Object iOther) { return iOther == this || iOther == delegate; } protected void pushSession() { delegate.setSessionId(serverURL, sessionId); } protected void popSession() { serverURL = delegate.getServerURL(); sessionId = delegate.getSessionId(); } }
1no label
client_src_main_java_com_orientechnologies_orient_client_remote_OStorageRemoteThread.java
395
public interface ORecordLazyMultiValue extends ODetachable, OSizeable { public Iterator<OIdentifiable> rawIterator(); /** * Browse all the set to convert all the items into records. */ public void convertLinks2Records(); /** * Browse all the set to convert all the items into links. * * @return */ public boolean convertRecords2Links(); public boolean isAutoConvertToRecord(); public void setAutoConvertToRecord(boolean convertToRecord); }
0true
core_src_main_java_com_orientechnologies_orient_core_db_record_ORecordLazyMultiValue.java
57
@SuppressWarnings("serial") static final class ForEachTransformedKeyTask<K,V,U> extends BulkTask<K,V,Void> { final Fun<? super K, ? extends U> transformer; final Action<? super U> action; ForEachTransformedKeyTask (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t, Fun<? super K, ? extends U> transformer, Action<? super U> action) { super(p, b, i, f, t); this.transformer = transformer; this.action = action; } public final void compute() { final Fun<? super K, ? extends U> transformer; final Action<? super U> action; if ((transformer = this.transformer) != null && (action = this.action) != null) { for (int i = baseIndex, f, h; batch > 0 && (h = ((f = baseLimit) + i) >>> 1) > i;) { addToPendingCount(1); new ForEachTransformedKeyTask<K,V,U> (this, batch >>>= 1, baseLimit = h, f, tab, transformer, action).fork(); } for (Node<K,V> p; (p = advance()) != null; ) { U u; if ((u = transformer.apply(p.key)) != null) action.apply(u); } propagateCompletion(); } } }
0true
src_main_java_jsr166e_ConcurrentHashMapV8.java
84
public interface StaticAssetStorage { Long getId(); void setId(Long id); Blob getFileData(); void setFileData(Blob fileData); public Long getStaticAssetId(); public void setStaticAssetId(Long staticAssetId); }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_domain_StaticAssetStorage.java
1348
private class NodeIndexDeletedTransportHandler extends BaseTransportRequestHandler<NodeIndexDeletedMessage> { static final String ACTION = "cluster/nodeIndexDeleted"; @Override public NodeIndexDeletedMessage newInstance() { return new NodeIndexDeletedMessage(); } @Override public void messageReceived(NodeIndexDeletedMessage message, TransportChannel channel) throws Exception { innerNodeIndexDeleted(message.index, message.nodeId); channel.sendResponse(TransportResponse.Empty.INSTANCE); } @Override public String executor() { return ThreadPool.Names.SAME; } }
0true
src_main_java_org_elasticsearch_cluster_action_index_NodeIndexDeletedAction.java
18
public class EntrySet extends AbstractSet<Map.Entry<K, V>> { @Override public Iterator<Map.Entry<K, V>> iterator() { return new EntryIterator(getFirstEntry()); } public Iterator<Map.Entry<K, V>> inverseIterator() { return new InverseEntryIterator(getLastEntry()); } @Override public boolean contains(final Object o) { if (!(o instanceof Map.Entry)) return false; OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final V value = entry.getValue(); final V p = get(entry.getKey()); return p != null && valEquals(p, value); } @Override public boolean remove(final Object o) { if (!(o instanceof Map.Entry)) return false; final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o; final V value = entry.getValue(); OMVRBTreeEntry<K, V> p = getEntry(entry.getKey(), PartialSearchMode.NONE); if (p != null && valEquals(p.getValue(), value)) { deleteEntry(p); return true; } return false; } @Override public int size() { return OMVRBTree.this.size(); } @Override public void clear() { OMVRBTree.this.clear(); } }
0true
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java
174
public abstract class TargetClientRequest extends ClientRequest { private static final int TRY_COUNT = 100; @Override final void process() throws Exception { final ClientEndpoint endpoint = getEndpoint(); final Operation op = prepareOperation(); op.setCallerUuid(endpoint.getUuid()); final InvocationBuilder builder = clientEngine.createInvocationBuilder(getServiceName(), op, getTarget()) .setTryCount(TRY_COUNT) .setResultDeserialized(false) .setCallback(new Callback<Object>() { public void notify(Object object) { endpoint.sendResponse(filter(object), getCallId()); } }); builder.invoke(); } protected abstract Operation prepareOperation(); public abstract Address getTarget(); protected Object filter(Object response) { return response; } }
0true
hazelcast_src_main_java_com_hazelcast_client_TargetClientRequest.java
1,376
final Runnable callback = new Runnable() { @Override public void run() { final ODocument indexEntries = getIndexChanges(); if (indexEntries != null) { final Map<String, OIndexInternal<?>> indexesToCommit = new HashMap<String, OIndexInternal<?>>(); for (Entry<String, Object> indexEntry : indexEntries) { final OIndexInternal<?> index = indexes.get(indexEntry.getKey()).getInternal(); indexesToCommit.put(index.getName(), index.getInternal()); } for (OIndexInternal<?> indexInternal : indexesToCommit.values()) indexInternal.preCommit(); for (Entry<String, Object> indexEntry : indexEntries) { final OIndexInternal<?> index = indexesToCommit.get(indexEntry.getKey()).getInternal(); if (index == null) { OLogManager.instance().error(this, "Index with name " + indexEntry.getKey() + " was not found."); throw new OIndexException("Index with name " + indexEntry.getKey() + " was not found."); } else index.addTxOperation((ODocument) indexEntry.getValue()); } try { for (OIndexInternal<?> indexInternal : indexesToCommit.values()) indexInternal.commit(); } finally { for (OIndexInternal<?> indexInternal : indexesToCommit.values()) indexInternal.postCommit(); } } } };
1no label
core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionOptimistic.java
726
@RunWith(HazelcastParallelClassRunner.class) @Category(QuickTest.class) public class SetTransactionTest extends HazelcastTestSupport { static final String ELEMENT = "item"; @Test public void testAdd_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); assertTrue(txnSet.add(ELEMENT)); assertEquals(1, txnSet.size()); context.commitTransaction(); assertEquals(1, set.size()); } @Test public void testSetSizeAfterAdd_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); txnSet.add(ELEMENT); context.commitTransaction(); assertEquals(1, set.size()); } @Test public void testRemove_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); set.add(ELEMENT); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); assertTrue(txnSet.remove(ELEMENT)); assertFalse(txnSet.remove("NOT_THERE")); context.commitTransaction(); assertEquals(0, set.size()); } @Test public void testSetSizeAfterRemove_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); set.add(ELEMENT); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); txnSet.remove(ELEMENT); context.commitTransaction(); assertEquals(0, set.size()); } @Test public void testAddDuplicateElement_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); assertTrue(txnSet.add(ELEMENT)); assertFalse(txnSet.add(ELEMENT)); context.commitTransaction(); assertEquals(1, instance.getSet(setName).size()); } @Test public void testAddExistingElement_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); set.add(ELEMENT); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); assertFalse(txnSet.add(ELEMENT)); 
context.commitTransaction(); assertEquals(1, set.size()); } @Test public void testSetSizeAfterAddingDuplicateElement_withinTxn() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); set.add(ELEMENT); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> txnSet = context.getSet(setName); txnSet.add(ELEMENT); context.commitTransaction(); assertEquals(1, set.size()); } @Test public void testAddRollBack() throws Exception { final String setName = randomString(); final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2); final HazelcastInstance instance = factory.newInstances()[0]; final ISet<String> set = instance.getSet(setName); set.add(ELEMENT); final TransactionContext context = instance.newTransactionContext(); context.beginTransaction(); final TransactionalSet<Object> setTxn = context.getSet(setName); setTxn.add("itemWillGetRollBacked"); context.rollbackTransaction(); assertEquals(1, set.size()); } }
0true
hazelcast_src_test_java_com_hazelcast_collection_SetTransactionTest.java
412
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) { Assert.assertEquals(event.getChangeType(), OMultiValueChangeEvent.OChangeType.REMOVE); Assert.assertEquals(event.getOldValue(), "value2"); Assert.assertEquals(event.getKey().intValue(), 1); Assert.assertNull(event.getValue()); changed.value = true; } });
0true
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedListTest.java
1,222
intPage = build(type, maxCount(limit, BigArrays.INT_PAGE_SIZE, intsWeight, totalWeight), searchThreadPoolSize, availableProcessors, new Recycler.C<int[]>() { @Override public int[] newInstance(int sizing) { return new int[BigArrays.INT_PAGE_SIZE]; } @Override public void clear(int[] value) {} });
0true
src_main_java_org_elasticsearch_cache_recycler_PageCacheRecycler.java
196
public static class Name { public static final String Audit = "Auditable_Audit"; }
0true
common_src_main_java_org_broadleafcommerce_common_audit_Auditable.java
752
public class TxnSetRemoveRequest extends TxnCollectionRequest { public TxnSetRemoveRequest() { } public TxnSetRemoveRequest(String name, Data value) { super(name, value); } @Override public Object innerCall() throws Exception { return getEndpoint().getTransactionContext(txnId).getSet(name).remove(value); } @Override public String getServiceName() { return SetService.SERVICE_NAME; } @Override public int getClassId() { return CollectionPortableHook.TXN_SET_REMOVE; } @Override public Permission getRequiredPermission() { return new SetPermission(name, ActionConstants.ACTION_REMOVE); } }
0true
hazelcast_src_main_java_com_hazelcast_collection_client_TxnSetRemoveRequest.java
429
trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { changed.value = true; } });
0true
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedMapTest.java
1,062
public class MapConfig { public final static int MIN_BACKUP_COUNT = 0; public final static int DEFAULT_BACKUP_COUNT = 1; public final static int MAX_BACKUP_COUNT = 6; public final static int MIN_EVICTION_PERCENTAGE = 0; public final static int DEFAULT_EVICTION_PERCENTAGE = 25; public final static int MAX_EVICTION_PERCENTAGE = 100; public final static int DEFAULT_TTL_SECONDS = 0; public final static int DEFAULT_MAX_IDLE_SECONDS = 0; public final static int DEFAULT_MAX_SIZE = Integer.MAX_VALUE; public final static EvictionPolicy DEFAULT_EVICTION_POLICY = EvictionPolicy.NONE; public final static String DEFAULT_MAP_MERGE_POLICY = PutIfAbsentMapMergePolicy.class.getName(); public final static InMemoryFormat DEFAULT_IN_MEMORY_FORMAT = InMemoryFormat.BINARY; private String name = null; private int backupCount = DEFAULT_BACKUP_COUNT; private int asyncBackupCount = MIN_BACKUP_COUNT; private int evictionPercentage = DEFAULT_EVICTION_PERCENTAGE; private int timeToLiveSeconds = DEFAULT_TTL_SECONDS; private int maxIdleSeconds = DEFAULT_MAX_IDLE_SECONDS; private MaxSizeConfig maxSizeConfig = new MaxSizeConfig(); private EvictionPolicy evictionPolicy = DEFAULT_EVICTION_POLICY; private MapStoreConfig mapStoreConfig = null; private NearCacheConfig nearCacheConfig = null; private boolean readBackupData = false; private boolean optimizeQueries = false; private String mergePolicy = DEFAULT_MAP_MERGE_POLICY; private InMemoryFormat inMemoryFormat = DEFAULT_IN_MEMORY_FORMAT; private WanReplicationRef wanReplicationRef; private List<EntryListenerConfig> listenerConfigs; private List<MapIndexConfig> mapIndexConfigs; private boolean statisticsEnabled = true; private PartitioningStrategyConfig partitioningStrategyConfig; private MapConfigReadOnly readOnly; public enum EvictionPolicy { LRU, LFU, NONE } public MapConfig(String name) { this.name = name; } public MapConfig() { } public MapConfig(MapConfig config) { this.name = config.name; this.backupCount = config.backupCount; this.asyncBackupCount = config.asyncBackupCount; this.evictionPercentage = config.evictionPercentage; this.timeToLiveSeconds = config.timeToLiveSeconds; this.maxIdleSeconds = config.maxIdleSeconds; this.maxSizeConfig = config.maxSizeConfig != null ? new MaxSizeConfig(config.maxSizeConfig) : null; this.evictionPolicy = config.evictionPolicy; this.inMemoryFormat = config.inMemoryFormat; this.mapStoreConfig = config.mapStoreConfig != null ? new MapStoreConfig(config.mapStoreConfig) : null; this.nearCacheConfig = config.nearCacheConfig != null ? new NearCacheConfig(config.nearCacheConfig) : null; this.readBackupData = config.readBackupData; this.optimizeQueries = config.optimizeQueries; this.statisticsEnabled = config.statisticsEnabled; this.mergePolicy = config.mergePolicy; this.wanReplicationRef = config.wanReplicationRef != null ? new WanReplicationRef(config.wanReplicationRef) : null; this.listenerConfigs = new ArrayList<EntryListenerConfig>(config.getEntryListenerConfigs()); this.mapIndexConfigs = new ArrayList<MapIndexConfig>(config.getMapIndexConfigs()); this.partitioningStrategyConfig = config.partitioningStrategyConfig != null ? 
new PartitioningStrategyConfig(config.getPartitioningStrategyConfig()) : null; } public MapConfigReadOnly getAsReadOnly(){ if (readOnly == null){ readOnly = new MapConfigReadOnly(this); } return readOnly; } /** * @return the name */ public String getName() { return name; } /** * @param name the name to set */ public MapConfig setName(String name) { this.name = name; return this; } /** * @return data type that will be used for storing records. */ public InMemoryFormat getInMemoryFormat() { return inMemoryFormat; } /** * Data type that will be used for storing records. * Possible values: * BINARY (default): keys and values will be stored as binary data * OBJECT : values will be stored in their object forms * OFFHEAP : values will be stored in non-heap region of JVM * * @param inMemoryFormat the record type to set * @throws IllegalArgumentException if inMemoryFormat is null. */ public MapConfig setInMemoryFormat(InMemoryFormat inMemoryFormat) { this.inMemoryFormat = isNotNull(inMemoryFormat,"inMemoryFormat"); return this; } /** * @return the backupCount * @see #getAsyncBackupCount() */ public int getBackupCount() { return backupCount; } /** * Number of synchronous backups. If 1 is set as the backup-count for example, * then all entries of the map will be copied to another JVM for * fail-safety. 0 means no sync backup. * * @param backupCount the backupCount to set * @see #setAsyncBackupCount(int) */ public MapConfig setBackupCount(final int backupCount) { if (backupCount < MIN_BACKUP_COUNT) { throw new IllegalArgumentException("map backup count must be equal to or bigger than " + MIN_BACKUP_COUNT); } if ((backupCount + this.asyncBackupCount) > MAX_BACKUP_COUNT) { throw new IllegalArgumentException("total (sync + async) map backup count must be less than " + MAX_BACKUP_COUNT); } this.backupCount = backupCount; return this; } /** * @return the asyncBackupCount * @see #setBackupCount(int) */ public int getAsyncBackupCount() { return asyncBackupCount; } /** * Number of asynchronous backups. * 0 means no backup. * * @param asyncBackupCount the asyncBackupCount to set * @see #setBackupCount(int) */ public MapConfig setAsyncBackupCount(final int asyncBackupCount) { if (asyncBackupCount < MIN_BACKUP_COUNT) { throw new IllegalArgumentException("map async backup count must be equal to or bigger than " + MIN_BACKUP_COUNT); } if ((this.backupCount + asyncBackupCount) > MAX_BACKUP_COUNT) { throw new IllegalArgumentException("total (sync + async) map backup count must be less than " + MAX_BACKUP_COUNT); } this.asyncBackupCount = asyncBackupCount; return this; } public int getTotalBackupCount() { return backupCount + asyncBackupCount; } /** * @return the evictionPercentage */ public int getEvictionPercentage() { return evictionPercentage; } /** * When max. size is reached, specified percentage of the map will be evicted. * Any integer between 0 and 100 is allowed. * If 25 is set for example, 25% of the entries will get evicted. * * @param evictionPercentage the evictionPercentage to set * @throws IllegalArgumentException if evictionPercentage is not in the 0-100 range. 
*/ public MapConfig setEvictionPercentage(final int evictionPercentage) { if (evictionPercentage < MIN_EVICTION_PERCENTAGE) { throw new IllegalArgumentException("eviction percentage must be greater or equal than 0"); } if (evictionPercentage > MAX_EVICTION_PERCENTAGE) { throw new IllegalArgumentException("eviction percentage must be smaller or equal than 100"); } this.evictionPercentage = evictionPercentage; return this; } /** * @return the timeToLiveSeconds */ public int getTimeToLiveSeconds() { return timeToLiveSeconds; } /** * Maximum number of seconds for each entry to stay in the map. Entries that are * older than timeToLiveSeconds will get automatically evicted from the map. * Updates on the entry don't change the eviction time. * Any integer between 0 and Integer.MAX_VALUE. * 0 means infinite. Default is 0. * * @param timeToLiveSeconds the timeToLiveSeconds to set */ public MapConfig setTimeToLiveSeconds(int timeToLiveSeconds) { this.timeToLiveSeconds = timeToLiveSeconds; return this; } /** * @return the maxIdleSeconds */ public int getMaxIdleSeconds() { return maxIdleSeconds; } /** * Maximum number of seconds for each entry to stay idle in the map. Entries that are * idle(not touched) for more than maxIdleSeconds will get * automatically evicted from the map. Entry is touched if get, put or * containsKey is called. * Any integer between 0 and Integer.MAX_VALUE. * 0 means infinite. Default is 0. * * @param maxIdleSeconds the maxIdleSeconds to set */ public MapConfig setMaxIdleSeconds(int maxIdleSeconds) { this.maxIdleSeconds = maxIdleSeconds; return this; } public MaxSizeConfig getMaxSizeConfig() { return maxSizeConfig; } public MapConfig setMaxSizeConfig(MaxSizeConfig maxSizeConfig) { this.maxSizeConfig = maxSizeConfig; return this; } /** * @return the evictionPolicy */ public EvictionPolicy getEvictionPolicy() { return evictionPolicy; } /** * @param evictionPolicy the evictionPolicy to set */ public MapConfig setEvictionPolicy(EvictionPolicy evictionPolicy) { this.evictionPolicy = evictionPolicy; return this; } /** * Returns the map store configuration * * @return the mapStoreConfig */ public MapStoreConfig getMapStoreConfig() { return mapStoreConfig; } /** * Sets the mapStore configuration * * @param mapStoreConfig the mapStoreConfig to set */ public MapConfig setMapStoreConfig(MapStoreConfig mapStoreConfig) { this.mapStoreConfig = mapStoreConfig; return this; } public NearCacheConfig getNearCacheConfig() { return nearCacheConfig; } public MapConfig setNearCacheConfig(NearCacheConfig nearCacheConfig) { this.nearCacheConfig = nearCacheConfig; return this; } public String getMergePolicy() { return mergePolicy; } public MapConfig setMergePolicy(String mergePolicy) { this.mergePolicy = mergePolicy; return this; } public boolean isStatisticsEnabled() { return statisticsEnabled; } public MapConfig setStatisticsEnabled(boolean statisticsEnabled) { this.statisticsEnabled = statisticsEnabled; return this; } public boolean isReadBackupData() { return readBackupData; } public MapConfig setReadBackupData(boolean readBackupData) { this.readBackupData = readBackupData; return this; } public WanReplicationRef getWanReplicationRef() { return wanReplicationRef; } public MapConfig setWanReplicationRef(WanReplicationRef wanReplicationRef) { this.wanReplicationRef = wanReplicationRef; return this; } public MapConfig addEntryListenerConfig(EntryListenerConfig listenerConfig) { getEntryListenerConfigs().add(listenerConfig); return this; } public List<EntryListenerConfig> getEntryListenerConfigs() { if 
(listenerConfigs == null) { listenerConfigs = new ArrayList<EntryListenerConfig>(); } return listenerConfigs; } public MapConfig setEntryListenerConfigs(List<EntryListenerConfig> listenerConfigs) { this.listenerConfigs = listenerConfigs; return this; } public MapConfig addMapIndexConfig(MapIndexConfig mapIndexConfig) { getMapIndexConfigs().add(mapIndexConfig); return this; } public List<MapIndexConfig> getMapIndexConfigs() { if (mapIndexConfigs == null) { mapIndexConfigs = new ArrayList<MapIndexConfig>(); } return mapIndexConfigs; } public MapConfig setMapIndexConfigs(List<MapIndexConfig> mapIndexConfigs) { this.mapIndexConfigs = mapIndexConfigs; return this; } public PartitioningStrategyConfig getPartitioningStrategyConfig() { return partitioningStrategyConfig; } public MapConfig setPartitioningStrategyConfig(PartitioningStrategyConfig partitioningStrategyConfig) { this.partitioningStrategyConfig = partitioningStrategyConfig; return this; } public boolean isNearCacheEnabled() { return nearCacheConfig != null; } public boolean isOptimizeQueries() { return optimizeQueries; } public MapConfig setOptimizeQueries(boolean optimizeQueries) { this.optimizeQueries = optimizeQueries; return this; } public boolean isCompatible(MapConfig other) { if (this == other) { return true; } return other != null && (this.name != null ? this.name.equals(other.name) : other.name == null) && this.backupCount == other.backupCount && this.asyncBackupCount == other.asyncBackupCount && this.evictionPercentage == other.evictionPercentage && this.maxIdleSeconds == other.maxIdleSeconds && (this.maxSizeConfig.getSize() == other.maxSizeConfig.getSize() || (Math.min(maxSizeConfig.getSize(), other.maxSizeConfig.getSize()) == 0 && Math.max(maxSizeConfig.getSize(), other.maxSizeConfig.getSize()) == Integer.MAX_VALUE)) && this.timeToLiveSeconds == other.timeToLiveSeconds && this.readBackupData == other.readBackupData; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + this.backupCount; result = prime * result + this.asyncBackupCount; result = prime * result + this.evictionPercentage; result = prime * result + ((this.evictionPolicy == null) ? 0 : this.evictionPolicy .hashCode()); result = prime * result + ((this.mapStoreConfig == null) ? 0 : this.mapStoreConfig .hashCode()); result = prime * result + this.maxIdleSeconds; result = prime * result + this.maxSizeConfig.getSize(); result = prime * result + ((this.mergePolicy == null) ? 0 : this.mergePolicy.hashCode()); result = prime * result + ((this.name == null) ? 0 : this.name.hashCode()); result = prime * result + ((this.nearCacheConfig == null) ? 0 : this.nearCacheConfig .hashCode()); result = prime * result + this.timeToLiveSeconds; result = prime * result + (this.readBackupData ? 1231 : 1237); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof MapConfig)) { return false; } MapConfig other = (MapConfig) obj; return (this.name != null ? this.name.equals(other.name) : other.name == null) && this.backupCount == other.backupCount && this.asyncBackupCount == other.asyncBackupCount && this.evictionPercentage == other.evictionPercentage && this.maxIdleSeconds == other.maxIdleSeconds && this.maxSizeConfig.getSize() == other.maxSizeConfig.getSize() && this.timeToLiveSeconds == other.timeToLiveSeconds && this.readBackupData == other.readBackupData && (this.mergePolicy != null ? 
this.mergePolicy.equals(other.mergePolicy) : other.mergePolicy == null) && (this.inMemoryFormat != null ? this.inMemoryFormat.equals(other.inMemoryFormat) : other.inMemoryFormat == null) && (this.evictionPolicy != null ? this.evictionPolicy.equals(other.evictionPolicy) : other.evictionPolicy == null) && (this.mapStoreConfig != null ? this.mapStoreConfig.equals(other.mapStoreConfig) : other.mapStoreConfig == null) && (this.nearCacheConfig != null ? this.nearCacheConfig.equals(other.nearCacheConfig) : other.nearCacheConfig == null); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("MapConfig"); sb.append("{name='").append(name).append('\''); sb.append(", inMemoryFormat=").append(inMemoryFormat).append('\''); sb.append(", backupCount=").append(backupCount); sb.append(", asyncBackupCount=").append(asyncBackupCount); sb.append(", timeToLiveSeconds=").append(timeToLiveSeconds); sb.append(", maxIdleSeconds=").append(maxIdleSeconds); sb.append(", evictionPolicy='").append(evictionPolicy).append('\''); sb.append(", evictionPercentage=").append(evictionPercentage); sb.append(", maxSizeConfig=").append(maxSizeConfig); sb.append(", readBackupData=").append(readBackupData); sb.append(", nearCacheConfig=").append(nearCacheConfig); sb.append(", mapStoreConfig=").append(mapStoreConfig); sb.append(", mergePolicyConfig='").append(mergePolicy).append('\''); sb.append(", wanReplicationRef=").append(wanReplicationRef); sb.append(", listenerConfigs=").append(listenerConfigs); sb.append(", mapIndexConfigs=").append(mapIndexConfigs); sb.append('}'); return sb.toString(); } }
1no label
hazelcast_src_main_java_com_hazelcast_config_MapConfig.java
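Aside (not a row of the dataset): the MapConfig record above carries javadoc describing synchronous/asynchronous backup counts, eviction percentage, time-to-live and max-idle semantics. A minimal hedged sketch of how those fluent setters might be combined follows, using only the API visible in the record plus the standard Hazelcast Config.addMapConfig registration hook; the map name "orders" and the chosen values are arbitrary assumptions for illustration.

    import com.hazelcast.config.Config;
    import com.hazelcast.config.MapConfig;

    public class MapConfigSketch {
        public static void main(String[] args) {
            // Hypothetical map name and tuning values, chosen only for illustration.
            MapConfig mapConfig = new MapConfig("orders")
                    // One synchronous and one asynchronous backup copy per entry.
                    .setBackupCount(1)
                    .setAsyncBackupCount(1)
                    // Entries expire after one hour; idle entries after ten minutes.
                    .setTimeToLiveSeconds(3600)
                    .setMaxIdleSeconds(600)
                    // When the size limit is reached, evict 25% of entries using LRU.
                    .setEvictionPolicy(MapConfig.EvictionPolicy.LRU)
                    .setEvictionPercentage(25);

            // Register the map configuration with a Hazelcast Config instance.
            Config config = new Config();
            config.addMapConfig(mapConfig);
        }
    }

All setters shown return the MapConfig itself (as in the record), so the chained style above mirrors the class's intended fluent use.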
372
public class TransportGetRepositoriesAction extends TransportMasterNodeReadOperationAction<GetRepositoriesRequest, GetRepositoriesResponse> { @Inject public TransportGetRepositoriesAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) { super(settings, transportService, clusterService, threadPool); } @Override protected String executor() { return ThreadPool.Names.MANAGEMENT; } @Override protected String transportAction() { return GetRepositoriesAction.NAME; } @Override protected GetRepositoriesRequest newRequest() { return new GetRepositoriesRequest(); } @Override protected GetRepositoriesResponse newResponse() { return new GetRepositoriesResponse(); } @Override protected ClusterBlockException checkBlock(GetRepositoriesRequest request, ClusterState state) { return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA, ""); } @Override protected void masterOperation(final GetRepositoriesRequest request, ClusterState state, final ActionListener<GetRepositoriesResponse> listener) throws ElasticsearchException { MetaData metaData = state.metaData(); RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE); if (request.repositories().length == 0 || (request.repositories().length == 1 && "_all".equals(request.repositories()[0]))) { if (repositories != null) { listener.onResponse(new GetRepositoriesResponse(repositories.repositories())); } else { listener.onResponse(new GetRepositoriesResponse(ImmutableList.<RepositoryMetaData>of())); } } else { if (repositories != null) { ImmutableList.Builder<RepositoryMetaData> repositoryListBuilder = ImmutableList.builder(); for (String repository : request.repositories()) { RepositoryMetaData repositoryMetaData = repositories.repository(repository); if (repositoryMetaData == null) { listener.onFailure(new RepositoryMissingException(repository)); return; } repositoryListBuilder.add(repositoryMetaData); } listener.onResponse(new GetRepositoriesResponse(repositoryListBuilder.build())); } else { listener.onFailure(new RepositoryMissingException(request.repositories()[0])); } } } }
1no label
src_main_java_org_elasticsearch_action_admin_cluster_repositories_get_TransportGetRepositoriesAction.java
401
public enum ClientNearCacheType { /** * java.util.concurrent.ConcurrentMap implementation */ Map, /** * com.hazelcast.core.ReplicatedMap implementation */ ReplicatedMap }
0true
hazelcast-client_src_main_java_com_hazelcast_client_nearcache_ClientNearCacheType.java
66
{ @Override public boolean accept( File dir, String fileName ) { return fileName.startsWith( prefix ); } } );
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestXaFramework.java
1,226
public static enum Type { SOFT_THREAD_LOCAL { @Override <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) { return threadLocal(softFactory(dequeFactory(c, limit / estimatedThreadPoolSize))); } }, THREAD_LOCAL { @Override <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) { return threadLocal(dequeFactory(c, limit / estimatedThreadPoolSize)); } }, QUEUE { @Override <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) { return concurrentDeque(c, limit); } }, SOFT_CONCURRENT { @Override <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) { return concurrent(softFactory(dequeFactory(c, limit / availableProcessors)), availableProcessors); } }, CONCURRENT { @Override <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) { return concurrent(dequeFactory(c, limit / availableProcessors), availableProcessors); } }, NONE { @Override <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) { return none(c); } }; public static Type parse(String type) { if (Strings.isNullOrEmpty(type)) { return SOFT_CONCURRENT; } try { return Type.valueOf(type.toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException e) { throw new ElasticsearchIllegalArgumentException("no type support [" + type + "]"); } } abstract <T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors); }
0true
src_main_java_org_elasticsearch_cache_recycler_PageCacheRecycler.java
488
@SuppressWarnings("serial") public class ODatabaseExportException extends RuntimeException { public ODatabaseExportException() { super(); } public ODatabaseExportException(String message, Throwable cause) { super(message, cause); } public ODatabaseExportException(String message) { super(message); } public ODatabaseExportException(Throwable cause) { super(cause); } }
0true
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseExportException.java
270
public class MapPutPartitionAwareRunnable implements Runnable, DataSerializable, PartitionAware, HazelcastInstanceAware { private HazelcastInstance instance; public String mapName; public Object partitionKey; public MapPutPartitionAwareRunnable(){} public MapPutPartitionAwareRunnable(String mapName, Object partitionKey) { this.mapName = mapName; this.partitionKey = partitionKey; } public void writeData(ObjectDataOutput out) throws IOException { out.writeUTF(mapName); } public void readData(ObjectDataInput in) throws IOException { mapName = in.readUTF(); } public void run() { Member member = instance.getCluster().getLocalMember(); IMap map = instance.getMap(mapName); map.put(member.getUuid(), member.getUuid()+"value"); } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { instance = hazelcastInstance; } public String getMapName() { return mapName; } public void setMapName(String mapName) { this.mapName = mapName; } @Override public Object getPartitionKey() { return partitionKey; } }
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_tasks_MapPutPartitionAwareRunnable.java
1,054
private final class IndexResultListener implements OQueryOperator.IndexResultListener { private final Set<OIdentifiable> result = new HashSet<OIdentifiable>(); @Override public Object getResult() { return result; } @Override public boolean addResult(OIdentifiable value) { if (compiledFilter == null || Boolean.TRUE.equals(compiledFilter.evaluate(value.getRecord(), null, context))) result.add(value); return fetchLimit < 0 || fetchLimit >= result.size(); } }
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLSelect.java
213
Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { try { stty("echo"); } catch (Exception e) { } } });
0true
tools_src_main_java_com_orientechnologies_orient_console_OConsoleDatabaseApp.java
208
public class OStorageRemoteAsynchEventListener implements ORemoteServerEventListener { private OStorageRemote storage; public OStorageRemoteAsynchEventListener(final OStorageRemote storage) { this.storage = storage; } public void onRequest(final byte iRequestCode, final Object obj) { if (iRequestCode == OChannelBinaryProtocol.REQUEST_PUSH_RECORD) // ASYNCHRONOUS PUSH INTO THE LEVEL2 CACHE storage.getLevel2Cache().updateRecord((ORecordInternal<?>) obj); else if (iRequestCode == OChannelBinaryProtocol.REQUEST_PUSH_DISTRIB_CONFIG) { storage.updateClusterConfiguration((byte[]) obj); if (OLogManager.instance().isDebugEnabled()) { synchronized (storage.getClusterConfiguration()) { OLogManager.instance() .debug(this, "Received new cluster configuration: %s", storage.getClusterConfiguration().toJSON("")); } } } } public OStorageRemote getStorage() { return storage; } }
0true
client_src_main_java_com_orientechnologies_orient_client_remote_OStorageRemoteAsynchEventListener.java
148
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() { public sun.misc.Unsafe run() throws Exception { Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class; for (java.lang.reflect.Field f : k.getDeclaredFields()) { f.setAccessible(true); Object x = f.get(null); if (k.isInstance(x)) return k.cast(x); } throw new NoSuchFieldError("the Unsafe"); }});
0true
src_main_java_jsr166e_extra_AtomicDoubleArray.java
44
{ @Override public void notify( ClusterMemberListener listener ) { for ( MemberIsAvailable memberIsAvailable : clusterMembersSnapshot.getCurrentAvailable( member ) ) { listener.memberIsUnavailable( memberIsAvailable.getRole(), member ); } } } );
1no label
enterprise_cluster_src_main_java_org_neo4j_cluster_member_paxos_PaxosClusterMemberEvents.java
262
ex.schedule(new Runnable() { @Override public void run() { hzs.get(1).shutdown(); } }, 1000, TimeUnit.MILLISECONDS);
0true
hazelcast-client_src_test_java_com_hazelcast_client_executor_ExecutionDelayTest.java
482
public class AnalyzeRequestBuilder extends SingleCustomOperationRequestBuilder<AnalyzeRequest, AnalyzeResponse, AnalyzeRequestBuilder> { public AnalyzeRequestBuilder(IndicesAdminClient indicesClient) { super((InternalIndicesAdminClient) indicesClient, new AnalyzeRequest()); } public AnalyzeRequestBuilder(IndicesAdminClient indicesClient, String index, String text) { super((InternalIndicesAdminClient) indicesClient, new AnalyzeRequest(index, text)); } /** * Sets the index to use to analyzer the text (for example, if it holds specific analyzers * registered). */ public AnalyzeRequestBuilder setIndex(String index) { request.index(index); return this; } /** * Sets the analyzer name to use in order to analyze the text. * * @param analyzer The analyzer name. */ public AnalyzeRequestBuilder setAnalyzer(String analyzer) { request.analyzer(analyzer); return this; } /** * Sets the field that its analyzer will be used to analyze the text. Note, requires an index * to be set. */ public AnalyzeRequestBuilder setField(String field) { request.field(field); return this; } /** * Instead of setting the analyzer, sets the tokenizer that will be used as part of a custom * analyzer. */ public AnalyzeRequestBuilder setTokenizer(String tokenizer) { request.tokenizer(tokenizer); return this; } /** * Sets token filters that will be used on top of a tokenizer provided. */ public AnalyzeRequestBuilder setTokenFilters(String... tokenFilters) { request.tokenFilters(tokenFilters); return this; } @Override protected void doExecute(ActionListener<AnalyzeResponse> listener) { ((IndicesAdminClient) client).analyze(request, listener); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_analyze_AnalyzeRequestBuilder.java
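Aside (not a row of the dataset): the AnalyzeRequestBuilder record documents how an index, analyzer, field, tokenizer and token filters can be attached to an analyze request. A hedged sketch of driving the builder from an existing Elasticsearch Client follows; the client variable, the "articles" index, the sample text and the analyzer choice are assumptions, and execute().actionGet() is the generic action-builder idiom inherited from the base request builder rather than anything defined in this record.

    import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
    import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
    import org.elasticsearch.client.Client;

    public class AnalyzeSketch {
        public static AnalyzeResponse analyze(Client client) {
            // Build an analyze request against a hypothetical "articles" index.
            AnalyzeRequestBuilder builder =
                    new AnalyzeRequestBuilder(client.admin().indices(), "articles", "Quick brown fox")
                            .setAnalyzer("standard");
            // Synchronously execute and return the analyzed token response.
            return builder.execute().actionGet();
        }
    }

Per the record's javadoc, setTokenizer/setTokenFilters would be used instead of setAnalyzer when composing a custom analysis chain.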
1,623
@Component("blDynamicEntityDao") @Scope("prototype") public class DynamicEntityDaoImpl implements DynamicEntityDao { private static final Log LOG = LogFactory.getLog(DynamicEntityDaoImpl.class); protected static final Map<String,Map<String, FieldMetadata>> METADATA_CACHE = new LRUMap<String, Map<String, FieldMetadata>>(100, 1000); /* * This is the same as POLYMORPHIC_ENTITY_CACHE, except that it does not contain classes that are abstract or have been marked for exclusion * from polymorphism */ protected EntityManager standardEntityManager; @Resource(name="blMetadata") protected Metadata metadata; @Resource(name="blEJB3ConfigurationDao") protected EJB3ConfigurationDao ejb3ConfigurationDao; @Resource(name="blEntityConfiguration") protected EntityConfiguration entityConfiguration; @Resource(name="blMetadataProviders") protected List<FieldMetadataProvider> fieldMetadataProviders = new ArrayList<FieldMetadataProvider>(); @Resource(name= "blDefaultFieldMetadataProvider") protected FieldMetadataProvider defaultFieldMetadataProvider; @Resource(name="blAppConfigurationRemoteService") protected AppConfigurationService appConfigurationRemoteService; protected DynamicDaoHelper dynamicDaoHelper = new DynamicDaoHelperImpl(); @Value("${cache.entity.dao.metadata.ttl}") protected int cacheEntityMetaDataTtl; protected long lastCacheFlushTime = System.currentTimeMillis(); @Override public Criteria createCriteria(Class<?> entityClass) { return ((HibernateEntityManager) getStandardEntityManager()).getSession().createCriteria(entityClass); } @Override public Serializable persist(Serializable entity) { standardEntityManager.persist(entity); standardEntityManager.flush(); return entity; } @Override public Serializable merge(Serializable entity) { Serializable response = standardEntityManager.merge(entity); standardEntityManager.flush(); return response; } @Override public void flush() { standardEntityManager.flush(); } @Override public void detach(Serializable entity) { standardEntityManager.detach(entity); } @Override public void refresh(Serializable entity) { standardEntityManager.refresh(entity); } @Override public Serializable retrieve(Class<?> entityClass, Object primaryKey) { return (Serializable) standardEntityManager.find(entityClass, primaryKey); } @Override public void remove(Serializable entity) { boolean isArchivable = Status.class.isAssignableFrom(entity.getClass()); if (isArchivable) { ((Status) entity).setArchived('Y'); merge(entity); } else { standardEntityManager.remove(entity); standardEntityManager.flush(); } } @Override public void clear() { standardEntityManager.clear(); } @Override public PersistentClass getPersistentClass(String targetClassName) { return ejb3ConfigurationDao.getConfiguration().getClassMapping(targetClassName); } protected boolean useCache() { if (cacheEntityMetaDataTtl < 0) { return true; } if (cacheEntityMetaDataTtl == 0) { return false; } else { if ((System.currentTimeMillis() - lastCacheFlushTime) > cacheEntityMetaDataTtl) { lastCacheFlushTime = System.currentTimeMillis(); METADATA_CACHE.clear(); DynamicDaoHelperImpl.POLYMORPHIC_ENTITY_CACHE.clear(); DynamicDaoHelperImpl.POLYMORPHIC_ENTITY_CACHE_WO_EXCLUSIONS.clear(); return true; // cache is empty } else { return true; } } } @Override public Class<?>[] getAllPolymorphicEntitiesFromCeiling(Class<?> ceilingClass) { return getAllPolymorphicEntitiesFromCeiling(ceilingClass, true); } /* (non-Javadoc) * @see org.broadleafcommerce.openadmin.server.dao.DynamicEntityDao#getAllPolymorphicEntitiesFromCeiling(java.lang.Class) */ 
@Override public Class<?>[] getAllPolymorphicEntitiesFromCeiling(Class<?> ceilingClass, boolean includeUnqualifiedPolymorphicEntities) { return dynamicDaoHelper.getAllPolymorphicEntitiesFromCeiling(ceilingClass, getSessionFactory(), includeUnqualifiedPolymorphicEntities, useCache()); } public Class<?>[] sortEntities(Class<?> ceilingClass, List<Class<?>> entities) { return dynamicDaoHelper.sortEntities(ceilingClass, entities); } protected void addClassToTree(Class<?> clazz, ClassTree tree) { Class<?> testClass; try { testClass = Class.forName(tree.getFullyQualifiedClassname()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } if (clazz.equals(testClass)) { return; } if (clazz.getSuperclass().equals(testClass)) { ClassTree myTree = new ClassTree(clazz.getName(), isExcludeClassFromPolymorphism(clazz)); createClassTreeFromAnnotation(clazz, myTree); tree.setChildren((ClassTree[]) ArrayUtils.add(tree.getChildren(), myTree)); } else { for (ClassTree child : tree.getChildren()) { addClassToTree(clazz, child); } } } protected void createClassTreeFromAnnotation(Class<?> clazz, ClassTree myTree) { AdminPresentationClass classPresentation = clazz.getAnnotation(AdminPresentationClass.class); if (classPresentation != null) { String friendlyName = classPresentation.friendlyName(); if (!StringUtils.isEmpty(friendlyName)) { myTree.setFriendlyName(friendlyName); } } } @Override public ClassTree getClassTree(Class<?>[] polymorphicClasses) { String ceilingClass = null; for (Class<?> clazz : polymorphicClasses) { AdminPresentationClass classPresentation = clazz.getAnnotation(AdminPresentationClass.class); if (classPresentation != null) { String ceilingEntity = classPresentation.ceilingDisplayEntity(); if (!StringUtils.isEmpty(ceilingEntity)) { ceilingClass = ceilingEntity; break; } } } if (ceilingClass != null) { int pos = -1; int j = 0; for (Class<?> clazz : polymorphicClasses) { if (clazz.getName().equals(ceilingClass)) { pos = j; break; } j++; } if (pos >= 0) { Class<?>[] temp = new Class<?>[pos + 1]; System.arraycopy(polymorphicClasses, 0, temp, 0, j + 1); polymorphicClasses = temp; } } ClassTree classTree = null; if (!ArrayUtils.isEmpty(polymorphicClasses)) { Class<?> topClass = polymorphicClasses[polymorphicClasses.length-1]; classTree = new ClassTree(topClass.getName(), isExcludeClassFromPolymorphism(topClass)); createClassTreeFromAnnotation(topClass, classTree); for (int j=polymorphicClasses.length-1; j >= 0; j--) { addClassToTree(polymorphicClasses[j], classTree); } classTree.finalizeStructure(1); } return classTree; } @Override public ClassTree getClassTreeFromCeiling(Class<?> ceilingClass) { Class<?>[] sortedEntities = getAllPolymorphicEntitiesFromCeiling(ceilingClass); return getClassTree(sortedEntities); } @Override public Map<String, FieldMetadata> getSimpleMergedProperties(String entityName, PersistencePerspective persistencePerspective) { Class<?>[] entityClasses; try { entityClasses = getAllPolymorphicEntitiesFromCeiling(Class.forName(entityName)); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } if (!ArrayUtils.isEmpty(entityClasses)) { return getMergedProperties( entityName, entityClasses, (ForeignKey) persistencePerspective.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY), persistencePerspective.getAdditionalNonPersistentProperties(), persistencePerspective.getAdditionalForeignKeys(), MergedPropertyType.PRIMARY, persistencePerspective.getPopulateToOneFields(), persistencePerspective.getIncludeFields(), 
persistencePerspective.getExcludeFields(), persistencePerspective.getConfigurationKey(), "" ); } else { Map<String, FieldMetadata> mergedProperties = new HashMap<String, FieldMetadata>(); Class<?> targetClass; try { targetClass = Class.forName(entityName); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } Map<String, FieldMetadata> attributesMap = metadata.getFieldPresentationAttributes(null, targetClass, this, ""); for (String property : attributesMap.keySet()) { FieldMetadata presentationAttribute = attributesMap.get(property); if (!presentationAttribute.getExcluded()) { Field field = FieldManager.getSingleField(targetClass, property); if (!Modifier.isStatic(field.getModifiers())) { boolean handled = false; for (FieldMetadataProvider provider : fieldMetadataProviders) { FieldProviderResponse response = provider.addMetadataFromFieldType( new AddMetadataFromFieldTypeRequest(field, targetClass, null, new ForeignKey[]{}, MergedPropertyType.PRIMARY, null, null, "", property, null, false, 0, attributesMap, presentationAttribute, ((BasicFieldMetadata) presentationAttribute).getExplicitFieldType(), field.getType(), this), mergedProperties); if (FieldProviderResponse.NOT_HANDLED != response) { handled = true; } if (FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!handled) { //this provider is not included in the provider list on purpose - it is designed to handle basic //AdminPresentation fields, and those fields not admin presentation annotated at all defaultFieldMetadataProvider.addMetadataFromFieldType( new AddMetadataFromFieldTypeRequest(field, targetClass, null, new ForeignKey[]{}, MergedPropertyType.PRIMARY, null, null, "", property, null, false, 0, attributesMap, presentationAttribute, ((BasicFieldMetadata) presentationAttribute).getExplicitFieldType(), field.getType(), this), mergedProperties); } } } } return mergedProperties; } } @Override public Map<String, FieldMetadata> getMergedProperties( String ceilingEntityFullyQualifiedClassname, Class<?>[] entities, ForeignKey foreignField, String[] additionalNonPersistentProperties, ForeignKey[] additionalForeignFields, MergedPropertyType mergedPropertyType, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String prefix ) { Map<String, FieldMetadata> mergedProperties = getMergedPropertiesRecursively( ceilingEntityFullyQualifiedClassname, entities, foreignField, additionalNonPersistentProperties, additionalForeignFields, mergedPropertyType, populateManyToOneFields, includeFields, excludeFields, configurationKey, new ArrayList<Class<?>>(), prefix, false ); final List<String> removeKeys = new ArrayList<String>(); for (final String key : mergedProperties.keySet()) { if (mergedProperties.get(key).getExcluded() != null && mergedProperties.get(key).getExcluded()) { removeKeys.add(key); } } for (String removeKey : removeKeys) { mergedProperties.remove(removeKey); } // Allow field metadata providers to contribute additional fields here. These latestage handlers take place // after any cached lookups occur, and are ideal for adding in dynamic properties that are not globally cacheable // like properties gleaned from reflection typically are. 
Set<String> keys = new HashSet<String>(mergedProperties.keySet()); for (Class<?> targetClass : entities) { for (String key : keys) { LateStageAddMetadataRequest amr = new LateStageAddMetadataRequest(key, null, targetClass, this, ""); boolean foundOneOrMoreHandlers = false; for (FieldMetadataProvider fieldMetadataProvider : fieldMetadataProviders) { FieldProviderResponse response = fieldMetadataProvider.lateStageAddMetadata(amr, mergedProperties); if (FieldProviderResponse.NOT_HANDLED != response) { foundOneOrMoreHandlers = true; } if (FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!foundOneOrMoreHandlers) { defaultFieldMetadataProvider.lateStageAddMetadata(amr, mergedProperties); } } } return mergedProperties; } protected Map<String, FieldMetadata> getMergedPropertiesRecursively( final String ceilingEntityFullyQualifiedClassname, final Class<?>[] entities, final ForeignKey foreignField, final String[] additionalNonPersistentProperties, final ForeignKey[] additionalForeignFields, final MergedPropertyType mergedPropertyType, final Boolean populateManyToOneFields, final String[] includeFields, final String[] excludeFields, final String configurationKey, final List<Class<?>> parentClasses, final String prefix, final Boolean isParentExcluded ) { PropertyBuilder propertyBuilder = new PropertyBuilder() { @Override public Map<String, FieldMetadata> execute(Boolean overridePopulateManyToOne) { Map<String, FieldMetadata> mergedProperties = new HashMap<String, FieldMetadata>(); Boolean classAnnotatedPopulateManyToOneFields; if (overridePopulateManyToOne != null) { classAnnotatedPopulateManyToOneFields = overridePopulateManyToOne; } else { classAnnotatedPopulateManyToOneFields = populateManyToOneFields; } buildPropertiesFromPolymorphicEntities( entities, foreignField, additionalNonPersistentProperties, additionalForeignFields, mergedPropertyType, classAnnotatedPopulateManyToOneFields, includeFields, excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, mergedProperties, parentClasses, prefix, isParentExcluded ); return mergedProperties; } }; Map<String, FieldMetadata> mergedProperties = metadata.overrideMetadata(entities, propertyBuilder, prefix, isParentExcluded, ceilingEntityFullyQualifiedClassname, configurationKey, this); applyIncludesAndExcludes(includeFields, excludeFields, prefix, isParentExcluded, mergedProperties); applyForeignKeyPrecedence(foreignField, additionalForeignFields, mergedProperties); return mergedProperties; } protected void applyForeignKeyPrecedence(ForeignKey foreignField, ForeignKey[] additionalForeignFields, Map<String, FieldMetadata> mergedProperties) { for (String key : mergedProperties.keySet()) { boolean isForeign = false; if (foreignField != null) { isForeign = foreignField.getManyToField().equals(key); } if (!isForeign && !ArrayUtils.isEmpty(additionalForeignFields)) { for (ForeignKey foreignKey : additionalForeignFields) { isForeign = foreignKey.getManyToField().equals(key); if (isForeign) { break; } } } if (isForeign) { FieldMetadata metadata = mergedProperties.get(key); metadata.setExcluded(false); } } } protected void applyIncludesAndExcludes(String[] includeFields, String[] excludeFields, String prefix, Boolean isParentExcluded, Map<String, FieldMetadata> mergedProperties) { //check includes if (!ArrayUtils.isEmpty(includeFields)) { for (String include : includeFields) { for (String key : mergedProperties.keySet()) { String testKey = prefix + key; if (!(testKey.startsWith(include + ".") || testKey.equals(include))) { 
FieldMetadata metadata = mergedProperties.get(key); LOG.debug("applyIncludesAndExcludes:Excluding " + key + " because this field did not appear in the explicit includeFields list"); metadata.setExcluded(true); } else { FieldMetadata metadata = mergedProperties.get(key); if (!isParentExcluded) { LOG.debug("applyIncludesAndExcludes:Showing " + key + " because this field appears in the explicit includeFields list"); metadata.setExcluded(false); } } } } } else if (!ArrayUtils.isEmpty(excludeFields)) { //check excludes for (String exclude : excludeFields) { for (String key : mergedProperties.keySet()) { String testKey = prefix + key; if (testKey.startsWith(exclude + ".") || testKey.equals(exclude)) { FieldMetadata metadata = mergedProperties.get(key); LOG.debug("applyIncludesAndExcludes:Excluding " + key + " because this field appears in the explicit excludeFields list"); metadata.setExcluded(true); } else { FieldMetadata metadata = mergedProperties.get(key); if (!isParentExcluded) { LOG.debug("applyIncludesAndExcludes:Showing " + key + " because this field did not appear in the explicit excludeFields list"); metadata.setExcluded(false); } } } } } } protected String pad(String s, int length, char pad) { StringBuilder buffer = new StringBuilder(s); while (buffer.length() < length) { buffer.insert(0, pad); } return buffer.toString(); } protected String getCacheKey(ForeignKey foreignField, String[] additionalNonPersistentProperties, ForeignKey[] additionalForeignFields, MergedPropertyType mergedPropertyType, Boolean populateManyToOneFields, Class<?> clazz, String configurationKey, Boolean isParentExcluded) { StringBuilder sb = new StringBuilder(150); sb.append(clazz.hashCode()); sb.append(foreignField==null?"":foreignField.toString()); sb.append(configurationKey); sb.append(isParentExcluded); if (additionalNonPersistentProperties != null) { for (String prop : additionalNonPersistentProperties) { sb.append(prop); } } if (additionalForeignFields != null) { for (ForeignKey key : additionalForeignFields) { sb.append(key.toString()); } } sb.append(mergedPropertyType); sb.append(populateManyToOneFields); String digest; try { MessageDigest md = MessageDigest.getInstance("MD5"); byte[] messageDigest = md.digest(sb.toString().getBytes()); BigInteger number = new BigInteger(1,messageDigest); digest = number.toString(16); } catch(NoSuchAlgorithmException e) { throw new RuntimeException(e); } return pad(digest, 32, '0'); } protected void buildPropertiesFromPolymorphicEntities( Class<?>[] entities, ForeignKey foreignField, String[] additionalNonPersistentProperties, ForeignKey[] additionalForeignFields, MergedPropertyType mergedPropertyType, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String ceilingEntityFullyQualifiedClassname, Map<String, FieldMetadata> mergedProperties, List<Class<?>> parentClasses, String prefix, Boolean isParentExcluded ) { for (Class<?> clazz : entities) { String cacheKey = getCacheKey(foreignField, additionalNonPersistentProperties, additionalForeignFields, mergedPropertyType, populateManyToOneFields, clazz, configurationKey, isParentExcluded); Map<String, FieldMetadata> cacheData = null; synchronized(DynamicDaoHelperImpl.LOCK_OBJECT) { if (useCache()) { cacheData = METADATA_CACHE.get(cacheKey); } if (cacheData == null) { Map<String, FieldMetadata> props = getPropertiesForEntityClass( clazz, foreignField, additionalNonPersistentProperties, additionalForeignFields, mergedPropertyType, populateManyToOneFields, includeFields, 
excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, parentClasses, prefix, isParentExcluded ); //first check all the properties currently in there to see if my entity inherits from them for (Class<?> clazz2 : entities) { if (!clazz2.getName().equals(clazz.getName())) { for (Map.Entry<String, FieldMetadata> entry : props.entrySet()) { FieldMetadata metadata = entry.getValue(); try { if (Class.forName(metadata.getInheritedFromType()).isAssignableFrom(clazz2)) { String[] both = (String[]) ArrayUtils.addAll(metadata.getAvailableToTypes(), new String[]{clazz2.getName()}); metadata.setAvailableToTypes(both); } } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } } } METADATA_CACHE.put(cacheKey, props); cacheData = props; } } //clone the metadata before passing to the system Map<String, FieldMetadata> clonedCache = new HashMap<String, FieldMetadata>(cacheData.size()); for (Map.Entry<String, FieldMetadata> entry : cacheData.entrySet()) { clonedCache.put(entry.getKey(), entry.getValue().cloneFieldMetadata()); } mergedProperties.putAll(clonedCache); } } @Override public Field[] getAllFields(Class<?> targetClass) { Field[] allFields = new Field[]{}; boolean eof = false; Class<?> currentClass = targetClass; while (!eof) { Field[] fields = currentClass.getDeclaredFields(); allFields = (Field[]) ArrayUtils.addAll(allFields, fields); if (currentClass.getSuperclass() != null) { currentClass = currentClass.getSuperclass(); } else { eof = true; } } return allFields; } @Override public Map<String, FieldMetadata> getPropertiesForPrimitiveClass( String propertyName, String friendlyPropertyName, Class<?> targetClass, Class<?> parentClass, MergedPropertyType mergedPropertyType ) { Map<String, FieldMetadata> fields = new HashMap<String, FieldMetadata>(); BasicFieldMetadata presentationAttribute = new BasicFieldMetadata(); presentationAttribute.setFriendlyName(friendlyPropertyName); if (String.class.isAssignableFrom(targetClass)) { presentationAttribute.setExplicitFieldType(SupportedFieldType.STRING); presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL); fields.put(propertyName, metadata.getFieldMetadata("", propertyName, null, SupportedFieldType.STRING, null, parentClass, presentationAttribute, mergedPropertyType, this)); } else if (Boolean.class.isAssignableFrom(targetClass)) { presentationAttribute.setExplicitFieldType(SupportedFieldType.BOOLEAN); presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL); fields.put(propertyName, metadata.getFieldMetadata("", propertyName, null, SupportedFieldType.BOOLEAN, null, parentClass, presentationAttribute, mergedPropertyType, this)); } else if (Date.class.isAssignableFrom(targetClass)) { presentationAttribute.setExplicitFieldType(SupportedFieldType.DATE); presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL); fields.put(propertyName, metadata.getFieldMetadata("", propertyName, null, SupportedFieldType.DATE, null, parentClass, presentationAttribute, mergedPropertyType, this)); } else if (Money.class.isAssignableFrom(targetClass)) { presentationAttribute.setExplicitFieldType(SupportedFieldType.MONEY); presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL); fields.put(propertyName, metadata.getFieldMetadata("", propertyName, null, SupportedFieldType.MONEY, null, parentClass, presentationAttribute, mergedPropertyType, this)); } else if ( Byte.class.isAssignableFrom(targetClass) || Integer.class.isAssignableFrom(targetClass) || Long.class.isAssignableFrom(targetClass) || 
Short.class.isAssignableFrom(targetClass) ) { presentationAttribute.setExplicitFieldType(SupportedFieldType.INTEGER); presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL); fields.put(propertyName, metadata.getFieldMetadata("", propertyName, null, SupportedFieldType.INTEGER, null, parentClass, presentationAttribute, mergedPropertyType, this)); } else if ( Double.class.isAssignableFrom(targetClass) || BigDecimal.class.isAssignableFrom(targetClass) ) { presentationAttribute.setExplicitFieldType(SupportedFieldType.DECIMAL); presentationAttribute.setVisibility(VisibilityEnum.VISIBLE_ALL); fields.put(propertyName, metadata.getFieldMetadata("", propertyName, null, SupportedFieldType.DECIMAL, null, parentClass, presentationAttribute, mergedPropertyType, this)); } ((BasicFieldMetadata) fields.get(propertyName)).setLength(255); ((BasicFieldMetadata) fields.get(propertyName)).setForeignKeyCollection(false); ((BasicFieldMetadata) fields.get(propertyName)).setRequired(true); ((BasicFieldMetadata) fields.get(propertyName)).setUnique(true); ((BasicFieldMetadata) fields.get(propertyName)).setScale(100); ((BasicFieldMetadata) fields.get(propertyName)).setPrecision(100); return fields; } @Override public SessionFactory getSessionFactory() { return dynamicDaoHelper.getSessionFactory((HibernateEntityManager) standardEntityManager); } @Override public Map<String, Object> getIdMetadata(Class<?> entityClass) { return dynamicDaoHelper.getIdMetadata(entityClass, (HibernateEntityManager) standardEntityManager); } @Override public List<String> getPropertyNames(Class<?> entityClass) { return dynamicDaoHelper.getPropertyNames(entityClass, (HibernateEntityManager) standardEntityManager); } @Override public List<Type> getPropertyTypes(Class<?> entityClass) { return dynamicDaoHelper.getPropertyTypes(entityClass, (HibernateEntityManager) standardEntityManager); } protected Map<String, FieldMetadata> getPropertiesForEntityClass( Class<?> targetClass, ForeignKey foreignField, String[] additionalNonPersistentProperties, ForeignKey[] additionalForeignFields, MergedPropertyType mergedPropertyType, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String ceilingEntityFullyQualifiedClassname, List<Class<?>> parentClasses, String prefix, Boolean isParentExcluded ) { Map<String, FieldMetadata> presentationAttributes = metadata.getFieldPresentationAttributes(null, targetClass, this, ""); if (isParentExcluded) { for (String key : presentationAttributes.keySet()) { LOG.debug("getPropertiesForEntityClass:Excluding " + key + " because parent is excluded."); presentationAttributes.get(key).setExcluded(true); } } Map idMetadata = getIdMetadata(targetClass); Map<String, FieldMetadata> fields = new HashMap<String, FieldMetadata>(); String idProperty = (String) idMetadata.get("name"); List<String> propertyNames = getPropertyNames(targetClass); propertyNames.add(idProperty); Type idType = (Type) idMetadata.get("type"); List<Type> propertyTypes = getPropertyTypes(targetClass); propertyTypes.add(idType); PersistentClass persistentClass = getPersistentClass(targetClass.getName()); Iterator testIter = persistentClass.getPropertyIterator(); List<Property> propertyList = new ArrayList<Property>(); //check the properties for problems while(testIter.hasNext()) { Property property = (Property) testIter.next(); if (property.getName().contains(".")) { throw new IllegalArgumentException("Properties from entities that utilize a period character ('.') in their name are incompatible with 
this system. The property name in question is: (" + property.getName() + ") from the class: (" + targetClass.getName() + ")"); } propertyList.add(property); } buildProperties( targetClass, foreignField, additionalForeignFields, additionalNonPersistentProperties, mergedPropertyType, presentationAttributes, propertyList, fields, propertyNames, propertyTypes, idProperty, populateManyToOneFields, includeFields, excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, parentClasses, prefix, isParentExcluded ); BasicFieldMetadata presentationAttribute = new BasicFieldMetadata(); presentationAttribute.setExplicitFieldType(SupportedFieldType.STRING); presentationAttribute.setVisibility(VisibilityEnum.HIDDEN_ALL); if (!ArrayUtils.isEmpty(additionalNonPersistentProperties)) { Class<?>[] entities = getAllPolymorphicEntitiesFromCeiling(targetClass); for (String additionalNonPersistentProperty : additionalNonPersistentProperties) { if (StringUtils.isEmpty(prefix) || (!StringUtils.isEmpty(prefix) && additionalNonPersistentProperty.startsWith(prefix))) { String myAdditionalNonPersistentProperty = additionalNonPersistentProperty; //get final property if this is a dot delimited property int finalDotPos = additionalNonPersistentProperty.lastIndexOf('.'); if (finalDotPos >= 0) { myAdditionalNonPersistentProperty = myAdditionalNonPersistentProperty.substring(finalDotPos + 1, myAdditionalNonPersistentProperty.length()); } //check all the polymorphic types on this target class to see if the end property exists Field testField = null; Method testMethod = null; for (Class<?> clazz : entities) { try { testMethod = clazz.getMethod(myAdditionalNonPersistentProperty); if (testMethod != null) { break; } } catch (NoSuchMethodException e) { //do nothing - method does not exist } testField = getFieldManager().getField(clazz, myAdditionalNonPersistentProperty); if (testField != null) { break; } } //if the property exists, add it to the metadata for this class if (testField != null || testMethod != null) { fields.put(additionalNonPersistentProperty, metadata.getFieldMetadata(prefix, additionalNonPersistentProperty, propertyList, SupportedFieldType.STRING, null, targetClass, presentationAttribute, mergedPropertyType, this)); } } } } return fields; } protected void buildProperties( Class<?> targetClass, ForeignKey foreignField, ForeignKey[] additionalForeignFields, String[] additionalNonPersistentProperties, MergedPropertyType mergedPropertyType, Map<String, FieldMetadata> presentationAttributes, List<Property> componentProperties, Map<String, FieldMetadata> fields, List<String> propertyNames, List<Type> propertyTypes, String idProperty, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String ceilingEntityFullyQualifiedClassname, List<Class<?>> parentClasses, String prefix, Boolean isParentExcluded ) { int j = 0; Comparator<String> propertyComparator = new Comparator<String>() { @Override public int compare(String o1, String o2) { //check for property name equality and for map field properties if (o1.equals(o2) || o1.startsWith(o2 + FieldManager.MAPFIELDSEPARATOR) || o2.startsWith(o1 + FieldManager.MAPFIELDSEPARATOR)) { return 0; } return o1.compareTo(o2); } }; List<String> presentationKeyList = new ArrayList<String>(presentationAttributes.keySet()); Collections.sort(presentationKeyList); for (String propertyName : propertyNames) { final Type type = propertyTypes.get(j); boolean isPropertyForeignKey = testForeignProperty(foreignField, prefix, 
propertyName); int additionalForeignKeyIndexPosition = findAdditionalForeignKeyIndex(additionalForeignFields, prefix, propertyName); j++; Field myField = getFieldManager().getField(targetClass, propertyName); if (myField == null) { //try to get the field with the prefix - needed for advanced collections that appear in @Embedded classes myField = getFieldManager().getField(targetClass, prefix + propertyName); } if ( !type.isAnyType() && !type.isCollectionType() || isPropertyForeignKey || additionalForeignKeyIndexPosition >= 0 || Collections.binarySearch(presentationKeyList, propertyName, propertyComparator) >= 0 ) { if (myField != null) { boolean handled = false; for (FieldMetadataProvider provider : fieldMetadataProviders) { FieldMetadata presentationAttribute = presentationAttributes.get(propertyName); if (presentationAttribute != null) { setExcludedBasedOnShowIfProperty(presentationAttribute); } FieldProviderResponse response = provider.addMetadataFromFieldType( new AddMetadataFromFieldTypeRequest(myField, targetClass, foreignField, additionalForeignFields, mergedPropertyType, componentProperties, idProperty, prefix, propertyName, type, isPropertyForeignKey, additionalForeignKeyIndexPosition, presentationAttributes, presentationAttribute, null, type.getReturnedClass(), this), fields); if (FieldProviderResponse.NOT_HANDLED != response) { handled = true; } if (FieldProviderResponse.HANDLED_BREAK == response) { break; } } if (!handled) { buildBasicProperty(myField, targetClass, foreignField, additionalForeignFields, additionalNonPersistentProperties, mergedPropertyType, presentationAttributes, componentProperties, fields, idProperty, populateManyToOneFields, includeFields, excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, parentClasses, prefix, isParentExcluded, propertyName, type, isPropertyForeignKey, additionalForeignKeyIndexPosition); } } } } } public Boolean testPropertyInclusion(FieldMetadata presentationAttribute) { setExcludedBasedOnShowIfProperty(presentationAttribute); return !(presentationAttribute != null && ((presentationAttribute.getExcluded() != null && presentationAttribute.getExcluded()) || (presentationAttribute.getChildrenExcluded() != null && presentationAttribute.getChildrenExcluded()))); } protected boolean setExcludedBasedOnShowIfProperty(FieldMetadata fieldMetadata) { if(fieldMetadata != null && fieldMetadata.getShowIfProperty()!=null && !fieldMetadata.getShowIfProperty().equals("") && appConfigurationRemoteService.getBooleanPropertyValue(fieldMetadata.getShowIfProperty())!=null && !appConfigurationRemoteService.getBooleanPropertyValue(fieldMetadata.getShowIfProperty()) ) { //do not include this in the display if it returns false. 
fieldMetadata.setExcluded(true); return false; } return true; } protected Boolean testPropertyRecursion(String prefix, List<Class<?>> parentClasses, String propertyName, Class<?> targetClass, String ceilingEntityFullyQualifiedClassname) { Boolean includeField = true; if (!StringUtils.isEmpty(prefix)) { Field testField = getFieldManager().getField(targetClass, propertyName); if (testField == null) { Class<?>[] entities; try { entities = getAllPolymorphicEntitiesFromCeiling(Class.forName(ceilingEntityFullyQualifiedClassname)); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } for (Class<?> clazz : entities) { testField = getFieldManager().getField(clazz, propertyName); if (testField != null) { break; } } String testProperty = prefix + propertyName; if (testField == null) { testField = getFieldManager().getField(targetClass, testProperty); } if (testField == null) { for (Class<?> clazz : entities) { testField = getFieldManager().getField(clazz, testProperty); if (testField != null) { break; } } } } if (testField != null) { Class<?> testType = testField.getType(); for (Class<?> parentClass : parentClasses) { if (parentClass.isAssignableFrom(testType) || testType.isAssignableFrom(parentClass)) { includeField = false; break; } } if (includeField && (targetClass.isAssignableFrom(testType) || testType.isAssignableFrom(targetClass))) { includeField = false; } } } return includeField; } protected void buildBasicProperty( Field field, Class<?> targetClass, ForeignKey foreignField, ForeignKey[] additionalForeignFields, String[] additionalNonPersistentProperties, MergedPropertyType mergedPropertyType, Map<String, FieldMetadata> presentationAttributes, List<Property> componentProperties, Map<String, FieldMetadata> fields, String idProperty, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String ceilingEntityFullyQualifiedClassname, List<Class<?>> parentClasses, String prefix, Boolean isParentExcluded, String propertyName, Type type, boolean propertyForeignKey, int additionalForeignKeyIndexPosition) { FieldMetadata presentationAttribute = presentationAttributes.get(propertyName); Boolean amIExcluded = isParentExcluded || !testPropertyInclusion(presentationAttribute); Boolean includeField = testPropertyRecursion(prefix, parentClasses, propertyName, targetClass, ceilingEntityFullyQualifiedClassname); SupportedFieldType explicitType = null; if (presentationAttribute != null && presentationAttribute instanceof BasicFieldMetadata) { explicitType = ((BasicFieldMetadata) presentationAttribute).getExplicitFieldType(); } Class<?> returnedClass = type.getReturnedClass(); checkProp: { if (type.isComponentType() && includeField) { buildComponentProperties( targetClass, foreignField, additionalForeignFields, additionalNonPersistentProperties, mergedPropertyType, fields, idProperty, populateManyToOneFields, includeFields, excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, propertyName, type, returnedClass, parentClasses, amIExcluded, prefix ); break checkProp; } /* * Currently we do not support ManyToOne fields whose class type is the same * as the target type, since this forms an infinite loop and will cause a stack overflow. 
*/ if ( type.isEntityType() && !returnedClass.isAssignableFrom(targetClass) && populateManyToOneFields && includeField ) { buildEntityProperties( fields, foreignField, additionalForeignFields, additionalNonPersistentProperties, populateManyToOneFields, includeFields, excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, propertyName, returnedClass, targetClass, parentClasses, prefix, amIExcluded ); break checkProp; } } //Don't include this property if it failed manyToOne inclusion and is not a specified foreign key if (includeField || propertyForeignKey || additionalForeignKeyIndexPosition >= 0) { defaultFieldMetadataProvider.addMetadataFromFieldType( new AddMetadataFromFieldTypeRequest(field, targetClass, foreignField, additionalForeignFields, mergedPropertyType, componentProperties, idProperty, prefix, propertyName, type, propertyForeignKey, additionalForeignKeyIndexPosition, presentationAttributes, presentationAttribute, explicitType, returnedClass, this), fields); } } protected boolean testForeignProperty(ForeignKey foreignField, String prefix, String propertyName) { boolean isPropertyForeignKey = false; if (foreignField != null) { isPropertyForeignKey = foreignField.getManyToField().equals(prefix + propertyName); } return isPropertyForeignKey; } protected int findAdditionalForeignKeyIndex(ForeignKey[] additionalForeignFields, String prefix, String propertyName) { int additionalForeignKeyIndexPosition = -1; if (additionalForeignFields != null) { additionalForeignKeyIndexPosition = Arrays.binarySearch(additionalForeignFields, new ForeignKey(prefix + propertyName, null, null), new Comparator<ForeignKey>() { @Override public int compare(ForeignKey o1, ForeignKey o2) { return o1.getManyToField().compareTo(o2.getManyToField()); } }); } return additionalForeignKeyIndexPosition; } protected void buildEntityProperties( Map<String, FieldMetadata> fields, ForeignKey foreignField, ForeignKey[] additionalForeignFields, String[] additionalNonPersistentProperties, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String ceilingEntityFullyQualifiedClassname, String propertyName, Class<?> returnedClass, Class<?> targetClass, List<Class<?>> parentClasses, String prefix, Boolean isParentExcluded ) { Class<?>[] polymorphicEntities = getAllPolymorphicEntitiesFromCeiling(returnedClass); List<Class<?>> clonedParentClasses = new ArrayList<Class<?>>(); for (Class<?> parentClass : parentClasses) { clonedParentClasses.add(parentClass); } clonedParentClasses.add(targetClass); Map<String, FieldMetadata> newFields = getMergedPropertiesRecursively( ceilingEntityFullyQualifiedClassname, polymorphicEntities, foreignField, additionalNonPersistentProperties, additionalForeignFields, MergedPropertyType.PRIMARY, populateManyToOneFields, includeFields, excludeFields, configurationKey, clonedParentClasses, prefix + propertyName + '.', isParentExcluded ); for (FieldMetadata newMetadata : newFields.values()) { newMetadata.setInheritedFromType(targetClass.getName()); newMetadata.setAvailableToTypes(new String[]{targetClass.getName()}); } Map<String, FieldMetadata> convertedFields = new HashMap<String, FieldMetadata>(newFields.size()); for (Map.Entry<String, FieldMetadata> key : newFields.entrySet()) { convertedFields.put(propertyName + '.' 
+ key.getKey(), key.getValue()); } fields.putAll(convertedFields); } protected void buildComponentProperties( Class<?> targetClass, ForeignKey foreignField, ForeignKey[] additionalForeignFields, String[] additionalNonPersistentProperties, MergedPropertyType mergedPropertyType, Map<String, FieldMetadata> fields, String idProperty, Boolean populateManyToOneFields, String[] includeFields, String[] excludeFields, String configurationKey, String ceilingEntityFullyQualifiedClassname, String propertyName, Type type, Class<?> returnedClass, List<Class<?>> parentClasses, Boolean isParentExcluded, String prefix ) { String[] componentProperties = ((ComponentType) type).getPropertyNames(); List<String> componentPropertyNames = Arrays.asList(componentProperties); Type[] componentTypes = ((ComponentType) type).getSubtypes(); List<Type> componentPropertyTypes = Arrays.asList(componentTypes); String tempPrefix = ""; int pos = prefix.indexOf("."); if (pos > 0 && pos < prefix.length()-1) { //only use part of the prefix if it's more than one layer deep tempPrefix = prefix.substring(pos + 1, prefix.length()); } Map<String, FieldMetadata> componentPresentationAttributes = metadata.getFieldPresentationAttributes(targetClass, returnedClass, this, tempPrefix + propertyName + "."); if (isParentExcluded) { for (String key : componentPresentationAttributes.keySet()) { LOG.debug("buildComponentProperties:Excluding " + key + " because the parent was excluded"); componentPresentationAttributes.get(key).setExcluded(true); } } PersistentClass persistentClass = getPersistentClass(targetClass.getName()); Property property; try { property = persistentClass.getProperty(propertyName); } catch (MappingException e) { property = persistentClass.getProperty(prefix + propertyName); } Iterator componentPropertyIterator = ((org.hibernate.mapping.Component) property.getValue()).getPropertyIterator(); List<Property> componentPropertyList = new ArrayList<Property>(); while(componentPropertyIterator.hasNext()) { componentPropertyList.add((Property) componentPropertyIterator.next()); } Map<String, FieldMetadata> newFields = new HashMap<String, FieldMetadata>(); buildProperties( targetClass, foreignField, additionalForeignFields, additionalNonPersistentProperties, mergedPropertyType, componentPresentationAttributes, componentPropertyList, newFields, componentPropertyNames, componentPropertyTypes, idProperty, populateManyToOneFields, includeFields, excludeFields, configurationKey, ceilingEntityFullyQualifiedClassname, parentClasses, propertyName + ".", isParentExcluded ); Map<String, FieldMetadata> convertedFields = new HashMap<String, FieldMetadata>(); for (String key : newFields.keySet()) { convertedFields.put(propertyName + "." 
+ key, newFields.get(key)); } fields.putAll(convertedFields); } @Override public EntityManager getStandardEntityManager() { return standardEntityManager; } @Override public void setStandardEntityManager(EntityManager entityManager) { this.standardEntityManager = entityManager; } public EJB3ConfigurationDao getEjb3ConfigurationDao() { return ejb3ConfigurationDao; } public void setEjb3ConfigurationDao(EJB3ConfigurationDao ejb3ConfigurationDao) { this.ejb3ConfigurationDao = ejb3ConfigurationDao; } @Override public FieldManager getFieldManager() { return new FieldManager(entityConfiguration, this); } @Override public EntityConfiguration getEntityConfiguration() { return entityConfiguration; } @Override public void setEntityConfiguration(EntityConfiguration entityConfiguration) { this.entityConfiguration = entityConfiguration; } @Override public Metadata getMetadata() { return metadata; } @Override public void setMetadata(Metadata metadata) { this.metadata = metadata; } public List<FieldMetadataProvider> getFieldMetadataProviders() { return fieldMetadataProviders; } public void setFieldMetadataProviders(List<FieldMetadataProvider> fieldMetadataProviders) { this.fieldMetadataProviders = fieldMetadataProviders; } @Override public FieldMetadataProvider getDefaultFieldMetadataProvider() { return defaultFieldMetadataProvider; } public void setDefaultFieldMetadataProvider(FieldMetadataProvider defaultFieldMetadataProvider) { this.defaultFieldMetadataProvider = defaultFieldMetadataProvider; } protected boolean isExcludeClassFromPolymorphism(Class<?> clazz) { return dynamicDaoHelper.isExcludeClassFromPolymorphism(clazz); } public DynamicDaoHelper getDynamicDaoHelper() { return dynamicDaoHelper; } public void setDynamicDaoHelper(DynamicDaoHelper dynamicDaoHelper) { this.dynamicDaoHelper = dynamicDaoHelper; } }
1no label
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_DynamicEntityDaoImpl.java
427
@Test public class TrackedMapTest { public void testPutOne() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { Assert.assertEquals(event.getChangeType(), OMultiValueChangeEvent.OChangeType.ADD); Assert.assertNull(event.getOldValue()); Assert.assertEquals(event.getKey(), "key1"); Assert.assertEquals(event.getValue(), "value1"); changed.value = true; } }); map.put("key1", "value1"); Assert.assertTrue(changed.value); Assert.assertTrue(doc.isDirty()); } public void testPutTwo() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); map.put("key1", "value1"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { Assert.assertEquals(event.getChangeType(), OMultiValueChangeEvent.OChangeType.UPDATE); Assert.assertEquals(event.getOldValue(), "value1"); Assert.assertEquals(event.getKey(), "key1"); Assert.assertEquals(event.getValue(), "value2"); changed.value = true; } }); map.put("key1", "value2"); Assert.assertTrue(changed.value); Assert.assertTrue(doc.isDirty()); } public void testPutThree() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); map.put("key1", "value1"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { changed.value = true; } }); map.put("key1", "value1"); Assert.assertFalse(changed.value); Assert.assertFalse(doc.isDirty()); } public void testPutFour() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); map.put("key1", "value1"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { changed.value = true; } }); map.put("key1", "value1"); Assert.assertFalse(changed.value); Assert.assertFalse(doc.isDirty()); } public void testPutFive() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); map.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { changed.value = true; } }); map.put("key1", "value1"); Assert.assertFalse(changed.value); Assert.assertFalse(doc.isDirty()); } public void testRemoveOne() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); map.put("key1", 
"value1"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { Assert.assertEquals(event.getChangeType(), OMultiValueChangeEvent.OChangeType.REMOVE); Assert.assertEquals(event.getOldValue(), "value1"); Assert.assertEquals(event.getKey(), "key1"); Assert.assertEquals(event.getValue(), null); changed.value = true; } }); map.remove("key1"); Assert.assertTrue(changed.value); Assert.assertTrue(doc.isDirty()); } public void testRemoveTwo() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); map.put("key1", "value1"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { changed.value = true; } }); map.remove("key2"); Assert.assertFalse(changed.value); Assert.assertFalse(doc.isDirty()); } public void testRemoveThree() { final ODocument doc = new ODocument(); final OTrackedMap<String> map = new OTrackedMap<String>(doc); map.put("key1", "value1"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); map.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); final ORef<Boolean> changed = new ORef<Boolean>(false); map.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { changed.value = true; } }); map.remove("key1"); Assert.assertFalse(changed.value); Assert.assertFalse(doc.isDirty()); } public void testClearOne() { final ODocument doc = new ODocument(); final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc); trackedMap.put("key1", "value1"); trackedMap.put("key2", "value2"); trackedMap.put("key3", "value3"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final Set<OMultiValueChangeEvent<Object, String>> firedEvents = new HashSet<OMultiValueChangeEvent<Object, String>>(); firedEvents.add(new OMultiValueChangeEvent<Object, String>(OMultiValueChangeEvent.OChangeType.REMOVE, "key1", null, "value1")); firedEvents.add(new OMultiValueChangeEvent<Object, String>(OMultiValueChangeEvent.OChangeType.REMOVE, "key2", null, "value2")); firedEvents.add(new OMultiValueChangeEvent<Object, String>(OMultiValueChangeEvent.OChangeType.REMOVE, "key3", null, "value3")); final ORef<Boolean> changed = new ORef<Boolean>(false); trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { if (!firedEvents.remove(event)) Assert.fail(); changed.value = true; } }); trackedMap.clear(); Assert.assertEquals(firedEvents.size(), 0); Assert.assertTrue(changed.value); Assert.assertTrue(doc.isDirty()); } public void testClearTwo() { final ODocument doc = new ODocument(); final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc); trackedMap.put("key1", "value1"); trackedMap.put("key2", "value2"); trackedMap.put("key3", "value3"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); final ORef<Boolean> changed = new ORef<Boolean>(false); trackedMap.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING); trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final 
OMultiValueChangeEvent<Object, String> event) { changed.value = true; } }); trackedMap.clear(); Assert.assertFalse(changed.value); Assert.assertFalse(doc.isDirty()); } public void testClearThree() { final ODocument doc = new ODocument(); final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc); trackedMap.put("key1", "value1"); trackedMap.put("key2", "value2"); trackedMap.put("key3", "value3"); doc.unsetDirty(); Assert.assertFalse(doc.isDirty()); trackedMap.clear(); Assert.assertTrue(doc.isDirty()); } public void testReturnOriginalStateOne() { final ODocument doc = new ODocument(); final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc); trackedMap.put("key1", "value1"); trackedMap.put("key2", "value2"); trackedMap.put("key3", "value3"); trackedMap.put("key4", "value4"); trackedMap.put("key5", "value5"); trackedMap.put("key6", "value6"); trackedMap.put("key7", "value7"); final Map<Object, String> original = new HashMap<Object, String>(trackedMap); final List<OMultiValueChangeEvent<Object, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Object, String>>(); trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { firedEvents.add(event); } }); trackedMap.put("key8", "value8"); trackedMap.put("key9", "value9"); trackedMap.put("key2", "value10"); trackedMap.put("key11", "value11"); trackedMap.remove("key5"); trackedMap.remove("key5"); trackedMap.put("key3", "value12"); trackedMap.remove("key8"); trackedMap.remove("key3"); Assert.assertEquals(trackedMap.returnOriginalState(firedEvents), original); } public void testReturnOriginalStateTwo() { final ODocument doc = new ODocument(); final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc); trackedMap.put("key1", "value1"); trackedMap.put("key2", "value2"); trackedMap.put("key3", "value3"); trackedMap.put("key4", "value4"); trackedMap.put("key5", "value5"); trackedMap.put("key6", "value6"); trackedMap.put("key7", "value7"); final Map<Object, String> original = new HashMap<Object, String>(trackedMap); final List<OMultiValueChangeEvent<Object, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Object, String>>(); trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() { public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) { firedEvents.add(event); } }); trackedMap.put("key8", "value8"); trackedMap.put("key9", "value9"); trackedMap.put("key2", "value10"); trackedMap.put("key11", "value11"); trackedMap.remove("key5"); trackedMap.remove("key5"); trackedMap.clear(); trackedMap.put("key3", "value12"); trackedMap.remove("key8"); trackedMap.remove("key3"); Assert.assertEquals(trackedMap.returnOriginalState(firedEvents), original); } /** * Test that {@link OTrackedMap} is serialised correctly. 
*/ @Test public void testMapSerialization() throws Exception { class NotSerializableDocument extends ODocument { private static final long serialVersionUID = 1L; private void writeObject(ObjectOutputStream oos) throws IOException { throw new NotSerializableException(); } } final OTrackedMap<String> beforeSerialization = new OTrackedMap<String>(new NotSerializableDocument()); beforeSerialization.put(0, "firstVal"); beforeSerialization.put(1, "secondVal"); final OMemoryStream memoryStream = new OMemoryStream(); final ObjectOutputStream out = new ObjectOutputStream(memoryStream); out.writeObject(beforeSerialization); out.close(); final ObjectInputStream input = new ObjectInputStream(new OMemoryInputStream(memoryStream.copy())); @SuppressWarnings("unchecked") final Map<Object, String> afterSerialization = (Map<Object, String>) input.readObject(); Assert.assertEquals(afterSerialization.size(), beforeSerialization.size(), "Map size"); for (int i = 0; i < afterSerialization.size(); i++) { Assert.assertEquals(afterSerialization.get(i), beforeSerialization.get(i)); } } }
0true
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedMapTest.java
95
public class NamedOperationManagerImpl implements NamedOperationManager {

    protected List<NamedOperationComponent> namedOperationComponents = new ArrayList<NamedOperationComponent>();

    @Override
    public Map<String, String> manageNamedParameters(Map<String, String> parameterMap) {
        List<String> utilizedNames = new ArrayList<String>();
        Map<String, String> derivedMap = new LinkedHashMap<String, String>();
        for (NamedOperationComponent namedOperationComponent : namedOperationComponents) {
            utilizedNames.addAll(namedOperationComponent.setOperationValues(parameterMap, derivedMap));
        }
        for (String utilizedName : utilizedNames) {
            parameterMap.remove(utilizedName);
        }
        derivedMap.putAll(parameterMap);
        return derivedMap;
    }

    public List<NamedOperationComponent> getNamedOperationComponents() {
        return namedOperationComponents;
    }

    public void setNamedOperationComponents(List<NamedOperationComponent> namedOperationComponents) {
        this.namedOperationComponents = namedOperationComponents;
    }
}
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_operation_NamedOperationManagerImpl.java
62
public class AddSatisfiesProposal extends CorrectionProposal { public static void addSatisfiesProposals(Tree.CompilationUnit cu, Node node, Collection<ICompletionProposal> proposals, IProject project) { node = determineNode(node); if (node == null) { return; } TypeDeclaration typeDec = determineTypeDeclaration(node); if (typeDec == null) { return; } boolean isTypeParam = typeDec instanceof TypeParameter; List<ProducedType> missingSatisfiedTypes = determineMissingSatisfiedTypes(cu, node, typeDec); if (!isTypeParam) { for (Iterator<ProducedType> it = missingSatisfiedTypes.iterator(); it.hasNext();) { ProducedType pt = it.next(); if (!(pt.getDeclaration() instanceof Interface)) { it.remove(); } //TODO: add extends clause for if the type is a Class } } if (missingSatisfiedTypes.isEmpty()) { return; } String changeText = asIntersectionTypeString(missingSatisfiedTypes); for (PhasedUnit unit: getUnits(project)) { if (!isTypeParam || typeDec.getUnit().equals(unit.getUnit())) { Node declaration = determineContainer(unit.getCompilationUnit(), typeDec); if (declaration==null) { continue; } createProposals(proposals, typeDec, isTypeParam, changeText, unit, declaration); break; } } } private static void createProposals(Collection<ICompletionProposal> proposals, TypeDeclaration typeDec, boolean isTypeParam, String changeText, PhasedUnit unit, Node declaration) { if (isTypeParam) { if (declaration instanceof Tree.ClassDefinition) { Tree.ClassDefinition classDefinition = (Tree.ClassDefinition) declaration; addConstraintSatisfiesProposals(typeDec, changeText, unit, proposals, classDefinition.getTypeConstraintList(), classDefinition.getClassBody().getStartIndex()); } else if (declaration instanceof Tree.InterfaceDefinition) { Tree.InterfaceDefinition interfaceDefinition = (Tree.InterfaceDefinition) declaration; addConstraintSatisfiesProposals(typeDec, changeText, unit, proposals, interfaceDefinition.getTypeConstraintList(), interfaceDefinition.getInterfaceBody().getStartIndex()); } else if (declaration instanceof Tree.MethodDefinition) { Tree.MethodDefinition methodDefinition = (Tree.MethodDefinition)declaration; addConstraintSatisfiesProposals(typeDec, changeText, unit, proposals, methodDefinition.getTypeConstraintList(), methodDefinition.getBlock().getStartIndex()); } else if (declaration instanceof Tree.ClassDeclaration) { Tree.ClassDeclaration classDefinition = (Tree.ClassDeclaration) declaration; addConstraintSatisfiesProposals(typeDec, changeText, unit, proposals, classDefinition.getTypeConstraintList(), classDefinition.getClassSpecifier().getStartIndex()); } else if (declaration instanceof Tree.InterfaceDefinition) { Tree.InterfaceDeclaration interfaceDefinition = (Tree.InterfaceDeclaration) declaration; addConstraintSatisfiesProposals(typeDec, changeText, unit, proposals, interfaceDefinition.getTypeConstraintList(), interfaceDefinition.getTypeSpecifier().getStartIndex()); } else if (declaration instanceof Tree.MethodDeclaration) { Tree.MethodDeclaration methodDefinition = (Tree.MethodDeclaration)declaration; addConstraintSatisfiesProposals(typeDec, changeText, unit, proposals, methodDefinition.getTypeConstraintList(), methodDefinition.getSpecifierExpression().getStartIndex()); } } else { if (declaration instanceof Tree.ClassDefinition) { Tree.ClassDefinition classDefinition = (Tree.ClassDefinition) declaration; addSatisfiesProposals(typeDec, changeText, unit, proposals, classDefinition.getSatisfiedTypes(), classDefinition.getTypeConstraintList()==null ? 
classDefinition.getClassBody().getStartIndex() : classDefinition.getTypeConstraintList().getStartIndex()); } else if (declaration instanceof Tree.ObjectDefinition) { Tree.ObjectDefinition objectDefinition = (Tree.ObjectDefinition) declaration; addSatisfiesProposals(typeDec, changeText, unit, proposals, objectDefinition.getSatisfiedTypes(), objectDefinition.getClassBody().getStartIndex()); } else if (declaration instanceof Tree.InterfaceDefinition) { Tree.InterfaceDefinition interfaceDefinition = (Tree.InterfaceDefinition) declaration; addSatisfiesProposals(typeDec, changeText, unit, proposals, interfaceDefinition.getSatisfiedTypes(), interfaceDefinition.getTypeConstraintList()==null ? interfaceDefinition.getInterfaceBody().getStartIndex() : interfaceDefinition.getTypeConstraintList().getStartIndex()); } } } private static void addConstraintSatisfiesProposals(TypeDeclaration typeParam, String missingSatisfiedType, PhasedUnit unit, Collection<ICompletionProposal> proposals, TypeConstraintList typeConstraints, Integer typeContainerBodyStartIndex) { String changeText = null; Integer changeIndex = null; if (typeConstraints != null) { for (TypeConstraint typeConstraint: typeConstraints.getTypeConstraints()) { if (typeConstraint.getDeclarationModel().equals(typeParam)) { changeText = " & " + missingSatisfiedType; changeIndex = typeConstraint.getStopIndex() + 1; break; } } } if (changeText == null) { changeText = "given "+ typeParam.getName() + " satisfies " + missingSatisfiedType + " "; changeIndex = typeContainerBodyStartIndex; } if (changeText != null) { IFile file = CeylonBuilder.getFile(unit); TextFileChange change = new TextFileChange("Add generic type constraint", file); change.setEdit(new InsertEdit(changeIndex, changeText)); String desc = "Add generic type constraint '" + typeParam.getName() + " satisfies " + missingSatisfiedType + "'"; AddSatisfiesProposal p = new AddSatisfiesProposal(typeParam, desc, missingSatisfiedType, change); if ( !proposals.contains(p)) { proposals.add(p); } } } private static void addSatisfiesProposals(TypeDeclaration typeParam, String missingSatisfiedType, PhasedUnit unit, Collection<ICompletionProposal> proposals, Tree.SatisfiedTypes typeConstraints, Integer typeContainerBodyStartIndex) { String changeText = null; Integer changeIndex = null; if (typeConstraints != null) { changeText = " & " + missingSatisfiedType; changeIndex = typeConstraints.getStopIndex() + 1; } else if (changeText == null) { changeText = "satisfies " + missingSatisfiedType + " "; changeIndex = typeContainerBodyStartIndex; } if (changeText != null) { IFile file = CeylonBuilder.getFile(unit); TextFileChange change = new TextFileChange("Add satisfies type", file); change.setEdit(new InsertEdit(changeIndex, changeText)); String desc = "Add inherited interface '" + typeParam.getName() + " satisfies " + missingSatisfiedType + "'"; AddSatisfiesProposal p = new AddSatisfiesProposal(typeParam, desc, missingSatisfiedType, change); if (!proposals.contains(p)) { proposals.add(p); } } } private static Node determineNode(Node node) { if (node instanceof Tree.SpecifierExpression) { node = ((Tree.SpecifierExpression) node).getExpression(); } if (node instanceof Tree.Expression) { node = ((Tree.Expression) node).getTerm(); } return node; } private static TypeDeclaration determineTypeDeclaration(Node node) { TypeDeclaration typeDec = null; if (node instanceof Tree.ClassOrInterface || node instanceof Tree.TypeParameterDeclaration) { Declaration declaration = ((Tree.Declaration) node).getDeclarationModel(); if 
(declaration instanceof ClassOrInterface) { typeDec = (TypeDeclaration) declaration; } } else if (node instanceof Tree.ObjectDefinition) { Value val = ((Tree.ObjectDefinition) node).getDeclarationModel(); return val.getType().getDeclaration(); } else if (node instanceof Tree.BaseType) { TypeDeclaration baseTypeDecl = ((Tree.BaseType) node).getDeclarationModel(); if (baseTypeDecl instanceof TypeDeclaration) { typeDec = baseTypeDecl; } } else if (node instanceof Tree.Term) { // ProducedType type = node.getUnit() // .denotableType(((Tree.Term)node).getTypeModel()); ProducedType type = ((Tree.Term) node).getTypeModel(); if (type != null) { typeDec = type.getDeclaration(); } } return typeDec; } private static Node determineContainer(CompilationUnit cu, final TypeDeclaration typeDec) { FindDeclarationNodeVisitor fdv = new FindDeclarationNodeVisitor(typeDec) { @Override public void visit(Tree.ObjectDefinition that) { if (that.getDeclarationModel().getType().getDeclaration().equals(typeDec)) { declarationNode = that; } super.visit(that); } }; fdv.visit(cu); Tree.Declaration dec = (Tree.Declaration) fdv.getDeclarationNode(); if (dec != null) { FindContainerVisitor fcv = new FindContainerVisitor(dec); fcv.visit(cu); return fcv.getStatementOrArgument(); } return null; } private static List<ProducedType> determineMissingSatisfiedTypes(CompilationUnit cu, Node node, TypeDeclaration typeDec) { List<ProducedType> missingSatisfiedTypes = new ArrayList<ProducedType>(); if (node instanceof Tree.Term) { FindInvocationVisitor fav = new FindInvocationVisitor(node); fav.visit(cu); if (fav.parameter != null) { ProducedType type = fav.parameter.getType(); if (type!=null && type.getDeclaration()!=null) { if (type.getDeclaration() instanceof ClassOrInterface) { missingSatisfiedTypes.add(type); } else if (type.getDeclaration() instanceof IntersectionType) { for (ProducedType it: type.getDeclaration().getSatisfiedTypes()) { if (!typeDec.inherits(it.getDeclaration())) { missingSatisfiedTypes.add(it); } } } } } } else { List<TypeParameter> stTypeParams = determineSatisfiedTypesTypeParams(cu, node, typeDec); if (!stTypeParams.isEmpty()) { ProducedType typeParamType = typeDec.getType(); Map<TypeParameter, ProducedType> substitutions = new HashMap<TypeParameter, ProducedType>(); for (TypeParameter stTypeParam : stTypeParams) { substitutions.put(stTypeParam, typeParamType); } for (TypeParameter stTypeParam : stTypeParams) { for (ProducedType stTypeParamSatisfiedType: stTypeParam.getSatisfiedTypes()) { stTypeParamSatisfiedType = stTypeParamSatisfiedType.substitute(substitutions); boolean isMissing = true; for (ProducedType typeParamSatisfiedType: typeDec.getSatisfiedTypes()) { if (stTypeParamSatisfiedType.isSupertypeOf(typeParamSatisfiedType)) { isMissing = false; break; } } if (isMissing) { for(ProducedType missingSatisfiedType: missingSatisfiedTypes) { if( missingSatisfiedType.isExactly(stTypeParamSatisfiedType) ) { isMissing = false; break; } } } if (isMissing) { missingSatisfiedTypes.add(stTypeParamSatisfiedType); } } } } } return missingSatisfiedTypes; } private static List<TypeParameter> determineSatisfiedTypesTypeParams( Tree.CompilationUnit cu, Node typeParamNode, final TypeDeclaration typeDec) { final List<TypeParameter> stTypeParams = new ArrayList<TypeParameter>(); FindContainerVisitor fcv = new FindContainerVisitor(typeParamNode); fcv.visit(cu); Tree.StatementOrArgument soa = fcv.getStatementOrArgument(); soa.visit(new Visitor() { @Override public void visit(Tree.SimpleType that) { super.visit(that); 
determineSatisfiedTypesTypeParams(typeDec, that, stTypeParams); } @Override public void visit(Tree.StaticMemberOrTypeExpression that) { super.visit(that); determineSatisfiedTypesTypeParams(typeDec, that, stTypeParams); } }); // if (soa instanceof Tree.ClassOrInterface) { // Tree.ClassOrInterface coi = (Tree.ClassOrInterface) soa; // if (coi.getSatisfiedTypes() != null) { // for (Tree.StaticType st: coi.getSatisfiedTypes().getTypes()) { // // FIXME: gavin this needs checking // if (st instanceof Tree.SimpleType) { // } // } // } // } // else if (soa instanceof Tree.AttributeDeclaration) { // Tree.AttributeDeclaration ad = (Tree.AttributeDeclaration) soa; // Tree.Type at = ad.getType(); // if (at instanceof Tree.SimpleType) { // determineSatisfiedTypesTypeParams(typeDec, // (Tree.SimpleType) at, stTypeParams); // } // } return stTypeParams; } private static void determineSatisfiedTypesTypeParams(TypeDeclaration typeParam, Tree.SimpleType st, List<TypeParameter> stTypeParams) { Tree.TypeArgumentList args = st.getTypeArgumentList(); if (args != null) { List<Tree.Type> stTypeArguments = args.getTypes(); for (int i=0; i<stTypeArguments.size(); i++) { ProducedType stTypeArgument = stTypeArguments.get(i).getTypeModel(); if (stTypeArgument!=null && typeParam.equals(stTypeArgument.getDeclaration())) { TypeDeclaration stDecl = st.getDeclarationModel(); if (stDecl!=null) { if (stDecl.getTypeParameters()!=null && stDecl.getTypeParameters().size()>i) { stTypeParams.add(stDecl.getTypeParameters().get(i)); } } } } } } private static void determineSatisfiedTypesTypeParams(TypeDeclaration typeParam, Tree.StaticMemberOrTypeExpression st, List<TypeParameter> stTypeParams) { Tree.TypeArguments args = st.getTypeArguments(); if (args instanceof Tree.TypeArgumentList) { List<Tree.Type> stTypeArguments = ((Tree.TypeArgumentList) args).getTypes(); for (int i=0; i<stTypeArguments.size(); i++) { ProducedType stTypeArgument = stTypeArguments.get(i).getTypeModel(); if (stTypeArgument!=null && typeParam.equals(stTypeArgument.getDeclaration())) { Declaration stDecl = st.getDeclaration(); if (stDecl instanceof TypeDeclaration) { TypeDeclaration td = (TypeDeclaration)stDecl; if (td.getTypeParameters()!=null && td.getTypeParameters().size()>i) { stTypeParams.add(td.getTypeParameters().get(i)); } } } } } } private final TypeDeclaration typeParam; private final String missingSatisfiedTypeText; private AddSatisfiesProposal(TypeDeclaration typeParam, String description, String missingSatisfiedTypeText, TextFileChange change) { super(description, change, new Region(change.getEdit().getOffset(), 0)); this.typeParam = typeParam; this.missingSatisfiedTypeText = missingSatisfiedTypeText; } @Override public boolean equals(Object obj) { if (obj instanceof AddSatisfiesProposal) { AddSatisfiesProposal that = (AddSatisfiesProposal) obj; return that.typeParam.equals(typeParam) && that.missingSatisfiedTypeText .equals(missingSatisfiedTypeText); } return false; } @Override public int hashCode() { return typeParam.hashCode() + missingSatisfiedTypeText.hashCode(); } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddSatisfiesProposal.java
264
public class EmailTest extends BaseTest {

    @Resource
    EmailService emailService;

    private GreenMail greenMail;

    @BeforeClass
    protected void setupEmailTest() {
        greenMail = new GreenMail(
            new ServerSetup[] { new ServerSetup(30000, "127.0.0.1", ServerSetup.PROTOCOL_SMTP) }
        );
        greenMail.start();
    }

    @AfterClass
    protected void tearDownEmailTest() {
        greenMail.stop();
    }

    @Test
    public void testSynchronousEmail() throws Exception {
        EmailInfo info = new EmailInfo();
        info.setFromAddress("[email protected]");
        info.setSubject("test");
        info.setEmailTemplate("org/broadleafcommerce/common/email/service/template/default.vm");
        info.setSendEmailReliableAsync("false");
        emailService.sendTemplateEmail("to@localhost", info, null);
    }
}
0true
integration_src_test_java_org_broadleafcommerce_common_email_service_EmailTest.java
1,195
public class Message<E> extends EventObject {

    private final E messageObject;
    private final long publishTime;
    private final Member publishingMember;

    public Message(String topicName, E messageObject, long publishTime, Member publishingMember) {
        super(topicName);
        this.messageObject = messageObject;
        this.publishTime = publishTime;
        this.publishingMember = publishingMember;
    }

    /**
     * Returns the published message
     *
     * @return message object
     */
    public E getMessageObject() {
        return messageObject;
    }

    /**
     * Returns the time when the message was published
     *
     * @return publish time
     */
    public long getPublishTime() {
        return publishTime;
    }

    /**
     * Returns the member that published the message
     *
     * @return publishing member
     */
    public Member getPublishingMember() {
        return publishingMember;
    }
}
1no label
hazelcast_src_main_java_com_hazelcast_core_Message.java
217
public class ClientReadHandler extends ClientAbstractSelectionHandler {

    private final ByteBuffer buffer;

    private volatile long lastHandle;

    private ClientPacket packet;

    public ClientReadHandler(ClientConnection connection, IOSelector ioSelector, int bufferSize) {
        super(connection, ioSelector);
        buffer = ByteBuffer.allocate(bufferSize);
    }

    @Override
    public void run() {
        registerOp(SelectionKey.OP_READ);
    }

    @Override
    public void handle() {
        lastHandle = Clock.currentTimeMillis();
        if (!connection.live()) {
            if (logger.isFinestEnabled()) {
                String message = "We are being asked to read, but connection is not live so we won't";
                logger.finest(message);
            }
            return;
        }
        try {
            int readBytes = socketChannel.read(buffer);
            if (readBytes == -1) {
                throw new EOFException("Remote socket closed!");
            }
        } catch (IOException e) {
            handleSocketException(e);
            return;
        }
        try {
            if (buffer.position() == 0) {
                return;
            }
            buffer.flip();
            while (buffer.hasRemaining()) {
                if (packet == null) {
                    packet = new ClientPacket(connection.getConnectionManager().getSerializationContext());
                }
                boolean complete = packet.readFrom(buffer);
                if (complete) {
                    packet.setConn(connection);
                    connectionManager.handlePacket(packet);
                    packet = null;
                } else {
                    break;
                }
            }
            if (buffer.hasRemaining()) {
                buffer.compact();
            } else {
                buffer.clear();
            }
        } catch (Throwable t) {
            handleSocketException(t);
        }
    }

    long getLastHandle() {
        return lastHandle;
    }
}
1no label
hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientReadHandler.java
741
public class ListAddRequest extends CollectionAddRequest {

    private int index;

    public ListAddRequest() {
    }

    public ListAddRequest(String name, Data value, int index) {
        super(name, value);
        this.index = index;
    }

    @Override
    protected Operation prepareOperation() {
        return new ListAddOperation(name, index, value);
    }

    @Override
    public int getClassId() {
        return CollectionPortableHook.LIST_ADD;
    }

    public void write(PortableWriter writer) throws IOException {
        writer.writeInt("i", index);
        super.write(writer);
    }

    public void read(PortableReader reader) throws IOException {
        index = reader.readInt("i");
        super.read(reader);
    }
}
0true
hazelcast_src_main_java_com_hazelcast_collection_client_ListAddRequest.java
604
public class TransportGetSettingsAction extends TransportMasterNodeReadOperationAction<GetSettingsRequest, GetSettingsResponse> {

    private final SettingsFilter settingsFilter;

    @Inject
    public TransportGetSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                      ThreadPool threadPool, SettingsFilter settingsFilter) {
        super(settings, transportService, clusterService, threadPool);
        this.settingsFilter = settingsFilter;
    }

    @Override
    protected String transportAction() {
        return GetSettingsAction.NAME;
    }

    @Override
    protected String executor() {
        // Very lightweight operation
        return ThreadPool.Names.SAME;
    }

    @Override
    protected GetSettingsRequest newRequest() {
        return new GetSettingsRequest();
    }

    @Override
    protected GetSettingsResponse newResponse() {
        return new GetSettingsResponse();
    }

    @Override
    protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener<GetSettingsResponse> listener) throws ElasticsearchException {
        request.indices(state.metaData().concreteIndices(request.indices(), request.indicesOptions()));
        ImmutableOpenMap.Builder<String, Settings> indexToSettingsBuilder = ImmutableOpenMap.builder();
        for (String concreteIndex : request.indices()) {
            IndexMetaData indexMetaData = state.getMetaData().index(concreteIndex);
            if (indexMetaData == null) {
                continue;
            }
            Settings settings = settingsFilter.filterSettings(indexMetaData.settings());
            if (!CollectionUtils.isEmpty(request.names())) {
                ImmutableSettings.Builder settingsBuilder = ImmutableSettings.builder();
                for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
                    if (Regex.simpleMatch(request.names(), entry.getKey())) {
                        settingsBuilder.put(entry.getKey(), entry.getValue());
                    }
                }
                settings = settingsBuilder.build();
            }
            indexToSettingsBuilder.put(concreteIndex, settings);
        }
        listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build()));
    }
}
1no label
src_main_java_org_elasticsearch_action_admin_indices_settings_get_TransportGetSettingsAction.java
744
public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<ExplainRequest, ExplainResponse, ExplainRequestBuilder> { private QuerySourceBuilder sourceBuilder; ExplainRequestBuilder(Client client) { super((InternalClient) client, new ExplainRequest()); } public ExplainRequestBuilder(Client client, String index, String type, String id) { super((InternalClient) client, new ExplainRequest().index(index).type(type).id(id)); } /** * Sets the type to get a score explanation for. */ public ExplainRequestBuilder setType(String type) { request().type(type); return this; } /** * Sets the id to get a score explanation for. */ public ExplainRequestBuilder setId(String id) { request().id(id); return this; } /** * Sets the routing for sharding. */ public ExplainRequestBuilder setRouting(String routing) { request().routing(routing); return this; } /** * Simple sets the routing. Since the parent is only used to get to the right shard. */ public ExplainRequestBuilder setParent(String parent) { request().parent(parent); return this; } /** * Sets the shard preference. */ public ExplainRequestBuilder setPreference(String preference) { request().preference(preference); return this; } /** * Sets the query to get a score explanation for. */ public ExplainRequestBuilder setQuery(QueryBuilder query) { sourceBuilder().setQuery(query); return this; } /** * Sets the query to get a score explanation for. */ public ExplainRequestBuilder setQuery(BytesReference query) { sourceBuilder().setQuery(query); return this; } /** * Explicitly specify the fields that will be returned for the explained document. By default, nothing is returned. */ public ExplainRequestBuilder setFields(String... fields) { request.fields(fields); return this; } /** * Indicates whether the response should contain the stored _source * * * @param fetch * @return */ public ExplainRequestBuilder setFetchSource(boolean fetch) { FetchSourceContext context = request.fetchSourceContext(); if (context == null) { request.fetchSourceContext(new FetchSourceContext(fetch)); } else { context.fetchSource(fetch); } return this; } /** * Indicate that _source should be returned, with an "include" and/or "exclude" set which can include simple wildcard * elements. * * @param include An optional include (optionally wildcarded) pattern to filter the returned _source * @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source */ public ExplainRequestBuilder setFetchSource(@Nullable String include, @Nullable String exclude) { return setFetchSource( include == null? Strings.EMPTY_ARRAY : new String[] {include}, exclude == null? Strings.EMPTY_ARRAY : new String[] {exclude}); } /** * Indicate that _source should be returned, with an "include" and/or "exclude" set which can include simple wildcard * elements. * * @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source * @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source */ public ExplainRequestBuilder setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) { FetchSourceContext context = request.fetchSourceContext(); if (context == null) { request.fetchSourceContext(new FetchSourceContext(includes, excludes)); } else { context.fetchSource(true); context.includes(includes); context.excludes(excludes); } return this; } /** * Sets the full source of the explain request (for example, wrapping an actual query). 
*/ public ExplainRequestBuilder setSource(BytesReference source, boolean unsafe) { request().source(source, unsafe); return this; } /** * Sets whether the actual explain action should occur in a different thread if executed locally. */ public ExplainRequestBuilder operationThreaded(boolean threadedOperation) { request().operationThreaded(threadedOperation); return this; } protected void doExecute(ActionListener<ExplainResponse> listener) { if (sourceBuilder != null) { request.source(sourceBuilder); } ((Client) client).explain(request, listener); } private QuerySourceBuilder sourceBuilder() { if (sourceBuilder == null) { sourceBuilder = new QuerySourceBuilder(); } return sourceBuilder; } }
0true
src_main_java_org_elasticsearch_action_explain_ExplainRequestBuilder.java
285
public class ActionRequestValidationException extends ElasticsearchException {

    private final List<String> validationErrors = new ArrayList<String>();

    public ActionRequestValidationException() {
        super(null);
    }

    public void addValidationError(String error) {
        validationErrors.add(error);
    }

    public void addValidationErrors(Iterable<String> errors) {
        for (String error : errors) {
            validationErrors.add(error);
        }
    }

    public List<String> validationErrors() {
        return validationErrors;
    }

    @Override
    public String getMessage() {
        StringBuilder sb = new StringBuilder();
        sb.append("Validation Failed: ");
        int index = 0;
        for (String error : validationErrors) {
            sb.append(++index).append(": ").append(error).append(";");
        }
        return sb.toString();
    }
}
0true
src_main_java_org_elasticsearch_action_ActionRequestValidationException.java
743
public class ProductType implements Serializable, BroadleafEnumerationType {

    private static final long serialVersionUID = 1L;

    private static final Map<String, ProductType> TYPES = new LinkedHashMap<String, ProductType>();

    public static final ProductType PRODUCT = new ProductType("org.broadleafcommerce.core.catalog.domain.Product", "Normal Product");
    public static final ProductType BUNDLE = new ProductType("org.broadleafcommerce.core.catalog.domain.ProductBundle", "Product Bundle");

    public static ProductType getInstance(final String type) {
        return TYPES.get(type);
    }

    private String type;
    private String friendlyType;

    public ProductType() {
        //do nothing
    }

    public ProductType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }

    public String getType() {
        return type;
    }

    public String getFriendlyType() {
        return friendlyType;
    }

    private void setType(final String type) {
        this.type = type;
        if (!TYPES.containsKey(type)) {
            TYPES.put(type, this);
        }
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        ProductType other = (ProductType) obj;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        return true;
    }
}
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_service_type_ProductType.java
2,859
public class ReplicaSyncRetryResponse extends Operation
        implements PartitionAwareOperation, BackupOperation, UrgentSystemOperation {

    public ReplicaSyncRetryResponse() {
    }

    public void beforeRun() throws Exception {
    }

    public void run() throws Exception {
        final NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
        final InternalPartitionServiceImpl partitionService = (InternalPartitionServiceImpl) nodeEngine.getPartitionService();
        final int partitionId = getPartitionId();
        final int replicaIndex = getReplicaIndex();
        partitionService.schedulePartitionReplicaSync(partitionId, replicaIndex,
                InternalPartitionService.REPLICA_SYNC_RETRY_DELAY);
    }

    public void afterRun() throws Exception {
    }

    public boolean returnsResponse() {
        return false;
    }

    public Object getResponse() {
        return null;
    }

    public boolean validatesTarget() {
        return true;
    }

    public void logError(Throwable e) {
        ReplicaErrorLogger.log(e, getLogger());
    }

    protected void writeInternal(ObjectDataOutput out) throws IOException {
    }

    protected void readInternal(ObjectDataInput in) throws IOException {
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("ReplicaSyncRetryResponse");
        sb.append("{partition=").append(getPartitionId());
        sb.append(", replica=").append(getReplicaIndex());
        sb.append('}');
        return sb.toString();
    }
}
1no label
hazelcast_src_main_java_com_hazelcast_partition_impl_ReplicaSyncRetryResponse.java
3,711
private class ManagedThread extends Thread { public ManagedThread(Runnable target) { super(threadGroup, target, threadName); } @Override public void run() { try { super.run(); } catch (OutOfMemoryError e) { OutOfMemoryErrorDispatcher.onOutOfMemory(e); } } }
1no label
hazelcast_src_main_java_com_hazelcast_util_executor_SingleExecutorThreadFactory.java
480
public class AnalyzeAction extends IndicesAction<AnalyzeRequest, AnalyzeResponse, AnalyzeRequestBuilder> { public static final AnalyzeAction INSTANCE = new AnalyzeAction(); public static final String NAME = "indices/analyze"; private AnalyzeAction() { super(NAME); } @Override public AnalyzeResponse newResponse() { return new AnalyzeResponse(); } @Override public AnalyzeRequestBuilder newRequestBuilder(IndicesAdminClient client) { return new AnalyzeRequestBuilder(client); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_analyze_AnalyzeAction.java
1,289
public class LaunchHelper { static void addFiles(List<IFile> files, IResource resource) { switch (resource.getType()) { case IResource.FILE: IFile file = (IFile) resource; IPath path = file.getFullPath(); //getProjectRelativePath(); if (path!=null && "ceylon".equals(path.getFileExtension()) ) { files.add(file); } break; case IResource.FOLDER: case IResource.PROJECT: IContainer folder = (IContainer) resource; try { for (IResource child: folder.members()) { addFiles(files, child); } } catch (CoreException e) { e.printStackTrace(); } break; } } static Object[] findDeclarationFromFiles(List<IFile> files) { List<Declaration> topLevelDeclarations = new LinkedList<Declaration>(); List<IFile> correspondingfiles = new LinkedList<IFile>(); for (IFile file : files) { IProject project = file.getProject(); TypeChecker typeChecker = getProjectTypeChecker(project); if (typeChecker != null) { PhasedUnit phasedUnit = typeChecker.getPhasedUnits() .getPhasedUnit(createResourceVirtualFile(file)); if (phasedUnit!=null) { List<Declaration> declarations = phasedUnit.getDeclarations(); for (Declaration d : declarations) { if (isRunnable(d)) { topLevelDeclarations.add(d); correspondingfiles.add(file); } } } } } Declaration declarationToRun = null; IFile fileToRun = null; if (topLevelDeclarations.size() == 0) { MessageDialog.openError(EditorUtil.getShell(), "Ceylon Launcher", "No ceylon runnable element"); } else if (topLevelDeclarations.size() > 1) { declarationToRun = chooseDeclaration(topLevelDeclarations); if (declarationToRun!=null) { fileToRun = correspondingfiles.get(topLevelDeclarations.indexOf(declarationToRun)); } } else { declarationToRun = topLevelDeclarations.get(0); fileToRun = correspondingfiles.get(0); } return new Object[] {declarationToRun, fileToRun}; } private static boolean isRunnable(Declaration d) { boolean candidateDeclaration = true; if (!d.isToplevel() || !d.isShared()) { candidateDeclaration = false; } if (d instanceof Method) { Method methodDecl = (Method) d; if (!methodDecl.getParameterLists().isEmpty() && !methodDecl.getParameterLists().get(0).getParameters().isEmpty()) { candidateDeclaration = false; } } else if (d instanceof Class) { Class classDecl = (Class) d; if (classDecl.isAbstract() || classDecl.getParameterList()==null || !classDecl.getParameterList().getParameters().isEmpty()) { candidateDeclaration = false; } } else { candidateDeclaration = false; } return candidateDeclaration; } static Module getModule(IProject project, String fullModuleName) { fullModuleName = normalizeFullModuleName(fullModuleName); if (fullModuleName != null) { String[] parts = fullModuleName.split("/"); if (parts != null && parts.length != 2) { return null; } for (Module module: getProjectDeclaredSourceModules(project)) { if (module.getNameAsString().equals(parts[0]) && module.getVersion().equals(parts[1])) { return module; } } if (isDefaultModulePresent(project)) { return getDefaultModule(project); } } return null; } private static String normalizeFullModuleName(String fullModuleName) { if (Module.DEFAULT_MODULE_NAME.equals(fullModuleName)) { return getFullModuleName(getEmptyDefaultModule()); } else { return fullModuleName; } } private static Module getDefaultModule(IProject project) { Module defaultModule = getProjectModules(project).getDefaultModule(); if (defaultModule == null) { defaultModule = getEmptyDefaultModule(); } return defaultModule; } private static Module getEmptyDefaultModule() { Module defaultModule = new Module(); defaultModule.setName(Arrays.asList(new 
String[]{Module.DEFAULT_MODULE_NAME})); defaultModule.setVersion("unversioned"); defaultModule.setDefault(true); return defaultModule; } static Module getModule(Declaration decl) { if (decl.getUnit().getPackage() != null) { if (decl.getUnit().getPackage().getModule() != null) { return decl.getUnit().getPackage().getModule(); } } return getEmptyDefaultModule(); } static String getModuleFullName(Declaration decl) { Module module = getModule(decl); if (module.isDefault()) { return Module.DEFAULT_MODULE_NAME; } else { return getFullModuleName(module); } } static Set<Module> getModules(IProject project, boolean includeDefault) { Set<Module> modules = new HashSet<Module>(); for(Module module: getProjectDeclaredSourceModules(project)) { if (module.isAvailable() && !module.getNameAsString().startsWith(Module.LANGUAGE_MODULE_NAME) && !module.isJava() ) { if ((module.isDefault() && includeDefault) // TODO : this is *never* true : the default module is not in the requested list || (!module.isDefault() && module.getPackage(module.getNameAsString()) != null)){ modules.add(module); } } } if (modules.isEmpty() || isDefaultModulePresent(project)) { modules.add(getDefaultModule(project)); } return modules; } private static boolean isDefaultModulePresent(IProject project) { Module defaultModule = getProjectModules(project).getDefaultModule(); if (defaultModule != null) { List<Declaration> decls = getDeclarationsForModule(project, defaultModule); if (!decls.isEmpty()) { return true; } } return false; } static boolean isModuleInProject(IProject project, String fullModuleName) { if (fullModuleName.equals(Module.DEFAULT_MODULE_NAME) && isDefaultModulePresent(project)) { return true; } for (Module module : getModules(project, false)) { if (fullModuleName != null && fullModuleName.equals(getFullModuleName(module))) { return true; } } return false; } static String getFullModuleName(Module module) { return module.getNameAsString()+"/"+module.getVersion(); } static List<Declaration> getDeclarationsForModule(IProject project, Module module) { List<Declaration> modDecls = new LinkedList<Declaration>(); if (module != null) { List<Package> pkgs = module.getPackages(); // avoid concurrent exception for (Package pkg : pkgs) { if (pkg.getModule() != null && isPackageInProject(project, pkg)) for (Declaration decl : pkg.getMembers()) { if (isRunnable(decl)) { modDecls.add(decl); } } } } return modDecls; } private static boolean isPackageInProject(IProject project, Package pkg) { TypeChecker typeChecker = getProjectTypeChecker(project); List<PhasedUnit> pus = typeChecker.getPhasedUnits().getPhasedUnits(); for (PhasedUnit phasedUnit : pus) { if (pkg.equals(phasedUnit.getPackage())) { return true; } } return false; } static List<Declaration> getDeclarationsForModule(String projectName, String fullModuleName) { IProject project = getProjectFromName(projectName); Module module = getModule(project, fullModuleName); return getDeclarationsForModule(project, module); } /** * Does not attempt to get all declarations before it returns true * @param project * @param fullModuleName * @param topLevelName * @return boolean if a top-level is contained in a module */ static boolean isModuleContainsTopLevel(IProject project, String fullModuleName, String topLevelName) { if (!isModuleInProject(project, fullModuleName)) { return false; } if (Module.DEFAULT_MODULE_NAME.equals(fullModuleName)) { fullModuleName = getFullModuleName(getDefaultModule(project)); } Module mod = getModule(project, fullModuleName); if (mod == null) { return false; } for 
(Package pkg : mod.getPackages()) { for (Declaration decl : pkg.getMembers()) { if (getRunnableName(decl).equals(topLevelName)) { return true; } } } return false; } static String getRunnableName(Declaration d) { return d.getQualifiedNameString().replace("::", "."); } static Declaration chooseDeclaration(final List<Declaration> decls) { FilteredItemsSelectionDialog sd = new CeylonTopLevelSelectionDialog(EditorUtil.getShell(), false, decls); if (sd.open() == Window.OK) { return (Declaration)sd.getFirstResult(); } return null; } static Module chooseModule(String projectName, boolean includeDefault) { return chooseModule(getProjectFromName(projectName), includeDefault); } static Module chooseModule(IProject project, boolean includeDefault) { if (getDefaultOrOnlyModule(project, includeDefault) != null) { return getDefaultOrOnlyModule(project, includeDefault); } Set<Module> modules = getModules(project, true); FilteredItemsSelectionDialog cmsd = new CeylonModuleSelectionDialog(EditorUtil.getShell(), modules, "Choose Ceylon Module"); if (cmsd.open() == Window.OK) { return (Module)cmsd.getFirstResult(); } return null; } static IProject getProjectFromName(String projectName) { if (projectName != null && projectName.length() > 0) { IWorkspace workspace = getWorkspace(); IStatus status = workspace.validateName(projectName, IResource.PROJECT); if (status.isOK()) { return workspace.getRoot().getProject(projectName); } } return null; } static String getTopLevelNormalName(String moduleFullName, String displayName) { if (displayName.contains(DEFAULT_RUN_MARKER) && moduleFullName.indexOf('/') != -1) { return moduleFullName.substring(0, moduleFullName.indexOf('/')) + ".run"; } return displayName; } static String getTopLevelDisplayName(Declaration decl) { String topLevelName = getRunnableName(decl); if (getModule(decl) != null && decl.equals(getDefaultRunnableForModule(getModule(decl)))) { topLevelName = "run" + DEFAULT_RUN_MARKER; } return topLevelName; } static Module getDefaultOrOnlyModule(IProject project, boolean includeDefault) { Set<Module> modules = getModules(project, true); //if only one real module or just one default module, just send it back if (modules.size() == 1) { return modules.iterator().next(); } if (modules.size() ==2 && !includeDefault) { Iterator<Module> modIterator = modules.iterator(); while (modIterator.hasNext()) { Module realMod = modIterator.next(); if (!realMod.isDefault()) { return realMod; } } } return null; } static Declaration getDefaultRunnableForModule(Module mod) { Declaration decl = null; if (mod.getRootPackage() != null) { decl = mod.getRootPackage() .getDirectMember("run", null, false); } return decl; } static Module getModule(IFolder folder) { Package pkg = getPackage(folder); if (pkg != null) { return pkg.getModule(); } return null; } static boolean isBuilderEnabled(IProject project, String property) { if (CAN_LAUNCH_AS_CEYLON_JAVA_MODULE.equals(property)) { return CeylonBuilder.compileToJava(project); } else if (CAN_LAUNCH_AS_CEYLON_JAVASCIPT_MODULE.equals(property)) { return CeylonBuilder.compileToJs(project); } return false; } }
1no label
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_launch_LaunchHelper.java
535
class ShardGatewaySnapshotRequest extends BroadcastShardOperationRequest { ShardGatewaySnapshotRequest() { } public ShardGatewaySnapshotRequest(String index, int shardId, GatewaySnapshotRequest request) { super(index, shardId, request); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_gateway_snapshot_ShardGatewaySnapshotRequest.java
254
public class OCaseInsensitiveCollate extends ODefaultComparator implements OCollate { public String getName() { return "ci"; } public Object transform(final Object obj) { if (obj instanceof String) return ((String) obj).toLowerCase(); return obj; } }
1no label
core_src_main_java_com_orientechnologies_orient_core_collate_OCaseInsensitiveCollate.java
47
static final class PackageProposal extends CompletionProposal { private final boolean withBody; private final int len; private final Package p; private final String completed; private final CeylonParseController cpc; PackageProposal(int offset, String prefix, boolean withBody, int len, Package p, String completed, CeylonParseController cpc) { super(offset, prefix, PACKAGE, completed, completed.substring(len)); this.withBody = withBody; this.len = len; this.p = p; this.completed = completed; this.cpc = cpc; } @Override public Point getSelection(IDocument document) { if (withBody) { return new Point(offset+completed.length()-prefix.length()-len-5, 3); } else { return new Point(offset+completed.length()-prefix.length()-len, 0); } } @Override public void apply(IDocument document) { super.apply(document); if (withBody && EditorsUI.getPreferenceStore() .getBoolean(LINKED_MODE)) { final LinkedModeModel linkedModeModel = new LinkedModeModel(); final Point selection = getSelection(document); List<ICompletionProposal> proposals = new ArrayList<ICompletionProposal>(); for (final Declaration d: p.getMembers()) { if (Util.isResolvable(d) && d.isShared() && !isOverloadedVersion(d)) { proposals.add(new ICompletionProposal() { @Override public Point getSelection(IDocument document) { return null; } @Override public Image getImage() { return getImageForDeclaration(d); } @Override public String getDisplayString() { return d.getName(); } @Override public IContextInformation getContextInformation() { return null; } @Override public String getAdditionalProposalInfo() { return null; } @Override public void apply(IDocument document) { try { document.replace(selection.x, selection.y, d.getName()); } catch (BadLocationException e) { e.printStackTrace(); } linkedModeModel.exit(ILinkedModeListener.UPDATE_CARET); } }); } } if (!proposals.isEmpty()) { ProposalPosition linkedPosition = new ProposalPosition(document, selection.x, selection.y, 0, proposals.toArray(NO_COMPLETIONS)); try { LinkedMode.addLinkedPosition(linkedModeModel, linkedPosition); LinkedMode.installLinkedMode((CeylonEditor) EditorUtil.getCurrentEditor(), document, linkedModeModel, this, new LinkedMode.NullExitPolicy(), -1, 0); } catch (BadLocationException ble) { ble.printStackTrace(); } } } } @Override public String getAdditionalProposalInfo() { return getDocumentationFor(cpc, p); } @Override protected boolean qualifiedNameIsPath() { return true; } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_PackageCompletions.java
8
public interface OLazyIterator<T> extends Iterator<T> { public T update(T iValue); }
0true
commons_src_main_java_com_orientechnologies_common_collection_OLazyIterator.java
1,862
@Controller("blAdminBasicEntityController") @RequestMapping("/{sectionKey:.+}") public class AdminBasicEntityController extends AdminAbstractController { protected static final Log LOG = LogFactory.getLog(AdminBasicEntityController.class); // ****************************************** // REQUEST-MAPPING BOUND CONTROLLER METHODS * // ****************************************** /** * Renders the main entity listing for the specified class, which is based on the current sectionKey with some optional * criteria. * * @param request * @param response * @param model * @param pathVars * @param criteria a Map of property name -> list critiera values * @return the return view path * @throws Exception */ @RequestMapping(value = "", method = RequestMethod.GET) public String viewEntityList(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @RequestParam MultiValueMap<String, String> requestParams) throws Exception { String sectionKey = getSectionKey(pathVars); String sectionClassName = getClassNameForSection(sectionKey); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName, requestParams); ClassMetadata cmd = service.getClassMetadata(ppr); DynamicResultSet drs = service.getRecords(ppr); ListGrid listGrid = formService.buildMainListGrid(drs, cmd, sectionKey); List<EntityFormAction> mainActions = new ArrayList<EntityFormAction>(); addAddActionIfAllowed(sectionClassName, cmd, mainActions); Field firstField = listGrid.getHeaderFields().iterator().next(); if (requestParams.containsKey(firstField.getName())) { model.addAttribute("mainSearchTerm", requestParams.get(firstField.getName()).get(0)); } model.addAttribute("entityFriendlyName", cmd.getPolymorphicEntities().getFriendlyName()); model.addAttribute("currentUrl", request.getRequestURL().toString()); model.addAttribute("listGrid", listGrid); model.addAttribute("mainActions", mainActions); model.addAttribute("viewType", "entityList"); setModelAttributes(model, sectionKey); return "modules/defaultContainer"; } /** * Adds the "Add" button to the main entity form if the current user has permissions to create new instances * of the entity and all of the fields in the entity aren't marked as read only. * * @param sectionClassName * @param cmd * @param mainActions */ protected void addAddActionIfAllowed(String sectionClassName, ClassMetadata cmd, List<EntityFormAction> mainActions) { // If the user does not have create permissions, we will not add the "Add New" button boolean canCreate = true; try { adminRemoteSecurityService.securityCheck(sectionClassName, EntityOperationType.ADD); } catch (ServiceException e) { if (e instanceof SecurityServiceException) { canCreate = false; } } if (canCreate) { checkReadOnly: { //check if all the metadata is read only for (Property property : cmd.getProperties()) { if (property.getMetadata() instanceof BasicFieldMetadata) { if (((BasicFieldMetadata) property.getMetadata()).getReadOnly() == null || !((BasicFieldMetadata) property.getMetadata()).getReadOnly()) { break checkReadOnly; } } } canCreate = false; } } if (canCreate) { mainActions.add(DefaultMainActions.ADD); } mainEntityActionsExtensionManager.modifyMainActions(cmd, mainActions); } /** * Renders the modal form that is used to add a new parent level entity. Note that this form cannot render any * subcollections as operations on those collections require the parent level entity to first be saved and have * and id. 
Once the entity is initially saved, we will redirect the user to the normal manage entity screen where * they can then perform operations on sub collections. * * @param request * @param response * @param model * @param pathVars * @param entityType * @return the return view path * @throws Exception */ @RequestMapping(value = "/add", method = RequestMethod.GET) public String viewAddEntityForm(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @RequestParam(defaultValue = "") String entityType) throws Exception { String sectionKey = getSectionKey(pathVars); String sectionClassName = getClassNameForSection(sectionKey); ClassMetadata cmd = service.getClassMetadata(getSectionPersistencePackageRequest(sectionClassName)); // If the entity type isn't specified, we need to determine if there are various polymorphic types for this entity. if (StringUtils.isBlank(entityType)) { if (cmd.getPolymorphicEntities().getChildren().length == 0) { entityType = cmd.getPolymorphicEntities().getFullyQualifiedClassname(); } else { entityType = getDefaultEntityType(); } } else { entityType = URLDecoder.decode(entityType, "UTF-8"); } // If we still don't have a type selected, that means that there were indeed multiple possible types and we // will be allowing the user to pick his desired type. if (StringUtils.isBlank(entityType)) { List<ClassTree> entityTypes = getAddEntityTypes(cmd.getPolymorphicEntities()); model.addAttribute("entityTypes", entityTypes); model.addAttribute("viewType", "modal/entityTypeSelection"); String requestUri = request.getRequestURI(); if (!request.getContextPath().equals("/") && requestUri.startsWith(request.getContextPath())) { requestUri = requestUri.substring(request.getContextPath().length() + 1, requestUri.length()); } model.addAttribute("currentUri", requestUri); } else { EntityForm entityForm = formService.createEntityForm(cmd); // We need to make sure that the ceiling entity is set to the interface and the specific entity type // is set to the type we're going to be creating. entityForm.setCeilingEntityClassname(cmd.getCeilingType()); entityForm.setEntityType(entityType); // When we initially build the class metadata (and thus, the entity form), we had all of the possible // polymorphic fields built out. Now that we have a concrete entity type to render, we can remove the // fields that are not applicable for this given entity type. formService.removeNonApplicableFields(cmd, entityForm, entityType); model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/entityAdd"); } model.addAttribute("entityFriendlyName", cmd.getPolymorphicEntities().getFriendlyName()); model.addAttribute("currentUrl", request.getRequestURL().toString()); model.addAttribute("modalHeaderType", "addEntity"); setModelAttributes(model, sectionKey); return "modules/modalContainer"; } /** * Processes the request to add a new entity. If successful, returns a redirect to the newly created entity. 
* * @param request * @param response * @param model * @param pathVars * @param entityForm * @param result * @return the return view path * @throws Exception */ @RequestMapping(value = "/add", method = RequestMethod.POST) public String addEntity(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception { String sectionKey = getSectionKey(pathVars); extractDynamicFormFields(entityForm); Entity entity = service.addEntity(entityForm, getSectionCustomCriteria()); entityFormValidator.validate(entityForm, entity, result); if (result.hasErrors()) { String sectionClassName = getClassNameForSection(sectionKey); ClassMetadata cmd = service.getClassMetadata(getSectionPersistencePackageRequest(sectionClassName)); entityForm.clearFieldsMap(); formService.populateEntityForm(cmd, entity, entityForm); formService.removeNonApplicableFields(cmd, entityForm, entityForm.getEntityType()); model.addAttribute("viewType", "modal/entityAdd"); model.addAttribute("currentUrl", request.getRequestURL().toString()); model.addAttribute("modalHeaderType", "addEntity"); setModelAttributes(model, sectionKey); return "modules/modalContainer"; } // Note that AJAX Redirects need the context path prepended to them return "ajaxredirect:" + getContextPath(request) + sectionKey + "/" + entity.getPMap().get("id").getValue(); } /** * Renders the main entity form for the specified entity * * @param request * @param response * @param model * @param pathVars * @param id * @param modal - whether or not to show the entity in a read-only modal * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}", method = RequestMethod.GET) public String viewEntityForm(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable("id") String id) throws Exception { String sectionKey = getSectionKey(pathVars); String sectionClassName = getClassNameForSection(sectionKey); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName); ClassMetadata cmd = service.getClassMetadata(ppr); Entity entity = service.getRecord(ppr, id, cmd, false); Map<String, DynamicResultSet> subRecordsMap = service.getRecordsForAllSubCollections(ppr, entity); EntityForm entityForm = formService.createEntityForm(cmd, entity, subRecordsMap); model.addAttribute("entity", entity); model.addAttribute("entityForm", entityForm); model.addAttribute("currentUrl", request.getRequestURL().toString()); setModelAttributes(model, sectionKey); boolean readable = false; for (Property property : cmd.getProperties()) { FieldMetadata fieldMetadata = property.getMetadata(); if (fieldMetadata instanceof BasicFieldMetadata) { if (!((BasicFieldMetadata) fieldMetadata).getReadOnly()) { readable = true; break; } } else { if (((CollectionMetadata) fieldMetadata).isMutable()) { readable = true; break; } } } if (!readable) { entityForm.setReadOnly(); } // If the user does not have edit permissions, we will go ahead and make the form read only to prevent confusion try { adminRemoteSecurityService.securityCheck(sectionClassName, EntityOperationType.UPDATE); } catch (ServiceException e) { if (e instanceof SecurityServiceException) { entityForm.setReadOnly(); } } if (isAjaxRequest(request)) { entityForm.setReadOnly(); model.addAttribute("viewType", "modal/entityView"); model.addAttribute("modalHeaderType", "viewEntity"); return 
"modules/modalContainer"; } else { model.addAttribute("viewType", "entityEdit"); return "modules/defaultContainer"; } } /** * Attempts to save the given entity. If validation is unsuccessful, it will re-render the entity form with * error fields highlighted. On a successful save, it will refresh the entity page. * * @param request * @param response * @param model * @param pathVars * @param id * @param entityForm * @param result * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}", method = RequestMethod.POST) public String saveEntity(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result, RedirectAttributes ra) throws Exception { String sectionKey = getSectionKey(pathVars); String sectionClassName = getClassNameForSection(sectionKey); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName); extractDynamicFormFields(entityForm); Entity entity = service.updateEntity(entityForm, getSectionCustomCriteria()); entityFormValidator.validate(entityForm, entity, result); if (result.hasErrors()) { model.addAttribute("headerFlash", "save.unsuccessful"); model.addAttribute("headerFlashAlert", true); Map<String, DynamicResultSet> subRecordsMap = service.getRecordsForAllSubCollections(ppr, entity); ClassMetadata cmd = service.getClassMetadata(ppr); entityForm.clearFieldsMap(); formService.populateEntityForm(cmd, entity, subRecordsMap, entityForm); model.addAttribute("entity", entity); model.addAttribute("currentUrl", request.getRequestURL().toString()); setModelAttributes(model, sectionKey); if (isAjaxRequest(request)) { entityForm.setReadOnly(); model.addAttribute("viewType", "modal/entityView"); model.addAttribute("modalHeaderType", "viewEntity"); return "modules/modalContainer"; } else { model.addAttribute("viewType", "entityEdit"); return "modules/defaultContainer"; } } ra.addFlashAttribute("headerFlash", "save.successful"); return "redirect:/" + sectionKey + "/" + id; } /** * Attempts to remove the given entity. * * @param request * @param response * @param model * @param pathVars * @param id * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/delete", method = RequestMethod.POST) public String removeEntity(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception { String sectionKey = getSectionKey(pathVars); try { service.removeEntity(entityForm, getSectionCustomCriteria()); } catch (ServiceException e) { if (e.containsCause(ConstraintViolationException.class)) { // Create a flash attribute for the unsuccessful delete FlashMap fm = new FlashMap(); fm.put("headerFlash", "delete.unsuccessful"); fm.put("headerFlashAlert", true); request.setAttribute(DispatcherServlet.OUTPUT_FLASH_MAP_ATTRIBUTE, fm); // Make sure we have this error show up in our logs LOG.error("Could not delete record", e); // Refresh the page return "redirect:/" + sectionKey + "/" + id; } throw e; } return "redirect:/" + sectionKey; } /** * Shows the modal dialog that is used to select a "to-one" collection item. For example, this could be used to show * a list of categories for the ManyToOne field "defaultCategory" in Product. 
* * @param request * @param response * @param model * @param pathVars * @param owningClass * @param collectionField * @return the return view path * @throws Exception */ @RequestMapping(value = "/{owningClass:.*}/{collectionField:.*}/select", method = RequestMethod.GET) public String showSelectCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value = "owningClass") String owningClass, @PathVariable(value="collectionField") String collectionField, @RequestParam MultiValueMap<String, String> requestParams) throws Exception { PersistencePackageRequest ppr = getSectionPersistencePackageRequest(owningClass, requestParams); ClassMetadata mainMetadata = service.getClassMetadata(ppr); Property collectionProperty = mainMetadata.getPMap().get(collectionField); FieldMetadata md = collectionProperty.getMetadata(); ppr = PersistencePackageRequest.fromMetadata(md); ppr.addFilterAndSortCriteria(getCriteria(requestParams)); ppr.setStartIndex(getStartIndex(requestParams)); ppr.setMaxIndex(getMaxIndex(requestParams)); if (md instanceof BasicFieldMetadata) { DynamicResultSet drs = service.getRecords(ppr); ListGrid listGrid = formService.buildCollectionListGrid(null, drs, collectionProperty, owningClass); model.addAttribute("listGrid", listGrid); model.addAttribute("viewType", "modal/simpleSelectEntity"); } model.addAttribute("currentUrl", request.getRequestURL().toString()); model.addAttribute("modalHeaderType", "selectCollectionItem"); model.addAttribute("collectionProperty", collectionProperty); setModelAttributes(model, owningClass); return "modules/modalContainer"; } @RequestMapping(value = "/{collectionField:.*}/details", method = RequestMethod.GET) public @ResponseBody Map<String, String> getCollectionValueDetails(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="collectionField") String collectionField, @RequestParam String ids, @RequestParam MultiValueMap<String, String> requestParams) throws Exception { String sectionKey = getSectionKey(pathVars); String sectionClassName = getClassNameForSection(sectionKey); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(sectionClassName, requestParams); ClassMetadata mainMetadata = service.getClassMetadata(ppr); Property collectionProperty = mainMetadata.getPMap().get(collectionField); FieldMetadata md = collectionProperty.getMetadata(); ppr = PersistencePackageRequest.fromMetadata(md); ppr.setStartIndex(getStartIndex(requestParams)); ppr.setMaxIndex(getMaxIndex(requestParams)); if (md instanceof BasicFieldMetadata) { String idProp = ((BasicFieldMetadata) md).getForeignKeyProperty(); String displayProp = ((BasicFieldMetadata) md).getForeignKeyDisplayValueProperty(); List<String> filterValues = Arrays.asList(ids.split(FILTER_VALUE_SEPARATOR_REGEX)); ppr.addFilterAndSortCriteria(new FilterAndSortCriteria(idProp, filterValues)); DynamicResultSet drs = service.getRecords(ppr); Map<String, String> returnMap = new HashMap<String, String>(); for (Entity e : drs.getRecords()) { String id = e.getPMap().get(idProp).getValue(); String disp = e.getPMap().get(displayProp).getDisplayValue(); if (StringUtils.isBlank(disp)) { disp = e.getPMap().get(displayProp).getValue(); } returnMap.put(id, disp); } return returnMap; } return null; } /** * Shows the modal popup for the current selected "to-one" field. 
For instance, if you are viewing a list of products * then this method is invoked when a user clicks on the name of the default category field. * * @param request * @param response * @param model * @param pathVars * @param collectionField * @param id * @return * @throws Exception */ @RequestMapping(value = "/{collectionField:.*}/{id}/view", method = RequestMethod.GET) public String viewCollectionItemDetails(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="collectionField") String collectionField, @PathVariable(value="id") String id) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); BasicFieldMetadata md = (BasicFieldMetadata) collectionProperty.getMetadata(); AdminSection section = adminNavigationService.findAdminSectionByClass(md.getForeignKeyClass()); String sectionUrlKey = (section.getUrl().startsWith("/")) ? section.getUrl().substring(1) : section.getUrl(); Map<String, String> varsForField = new HashMap<String, String>(); varsForField.put("sectionKey", sectionUrlKey); return viewEntityForm(request, response, model, varsForField, id); } /** * Returns the records for a given collectionField filtered by a particular criteria * * @param request * @param response * @param model * @param pathVars * @param collectionField * @param criteriaForm * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}", method = RequestMethod.GET) public String getCollectionFieldRecords(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @RequestParam MultiValueMap<String, String> requestParams) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName, requestParams); ClassMetadata mainMetadata = service.getClassMetadata(ppr); Property collectionProperty = mainMetadata.getPMap().get(collectionField); ppr = getSectionPersistencePackageRequest(mainClassName); Entity entity = service.getRecord(ppr, id, mainMetadata, false); // Next, we must get the new list grid that represents this collection ListGrid listGrid = getCollectionListGrid(mainMetadata, entity, collectionProperty, requestParams, sectionKey); model.addAttribute("listGrid", listGrid); model.addAttribute("currentParams", new ObjectMapper().writeValueAsString(requestParams)); // We return the new list grid so that it can replace the currently visible one setModelAttributes(model, sectionKey); return "views/standaloneListGrid"; } /** * Shows the modal dialog that is used to add an item to a given collection. There are several possible outcomes * of this call depending on the type of the specified collection field. * * <ul> * <li> * <b>Basic Collection (Persist)</b> - Renders a blank form for the specified target entity so that the user may * enter information and associate the record with this collection. Used by fields such as ProductAttribute. 
* </li> * <li> * <b>Basic Collection (Lookup)</b> - Renders a list grid that allows the user to click on an entity and select it. * Used by fields such as "allParentCategories". * </li> * <li> * <b>Adorned Collection (without form)</b> - Renders a list grid that allows the user to click on an entity and * select it. The view rendered by this is identical to basic collection (lookup), but will perform the operation * on an adorned field, which may carry extra meta-information about the created relationship, such as order. * </li> * <li> * <b>Adorned Collection (with form)</b> - Renders a list grid that allows the user to click on an entity and * select it. Once the user selects the entity, he will be presented with an empty form based on the specified * "maintainedAdornedTargetFields" for this field. Used by fields such as "crossSellProducts", which in addition * to linking an entity, provide extra fields, such as a promotional message. * </li> * <li> * <b>Map Collection</b> - Renders a form for the target entity that has an additional key field. This field is * populated either from the configured map keys, or as a result of a lookup in the case of a key based on another * entity. Used by fields such as the mediaMap on a Sku. * </li> * * @param request * @param response * @param model * @param sectionKey * @param id * @param collectionField * @param requestParams * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/add", method = RequestMethod.GET) public String showAddCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value = "id") String id, @PathVariable(value = "collectionField") String collectionField, @RequestParam MultiValueMap<String, String> requestParams) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); FieldMetadata md = collectionProperty.getMetadata(); PersistencePackageRequest ppr = PersistencePackageRequest.fromMetadata(md) .withFilterAndSortCriteria(getCriteria(requestParams)) .withStartIndex(getStartIndex(requestParams)) .withMaxIndex(getMaxIndex(requestParams)); if (md instanceof BasicCollectionMetadata) { BasicCollectionMetadata fmd = (BasicCollectionMetadata) md; if (fmd.getAddMethodType().equals(AddMethodType.PERSIST)) { ClassMetadata cmd = service.getClassMetadata(ppr); // If the entity type isn't specified, we need to determine if there are various polymorphic types // for this entity. 
String entityType = null; if (requestParams.containsKey("entityType")) { entityType = requestParams.get("entityType").get(0); } if (StringUtils.isBlank(entityType)) { if (cmd.getPolymorphicEntities().getChildren().length == 0) { entityType = cmd.getPolymorphicEntities().getFullyQualifiedClassname(); } else { entityType = getDefaultEntityType(); } } else { entityType = URLDecoder.decode(entityType, "UTF-8"); } if (StringUtils.isBlank(entityType)) { List<ClassTree> entityTypes = getAddEntityTypes(cmd.getPolymorphicEntities()); model.addAttribute("entityTypes", entityTypes); model.addAttribute("viewType", "modal/entityTypeSelection"); model.addAttribute("entityFriendlyName", cmd.getPolymorphicEntities().getFriendlyName()); String requestUri = request.getRequestURI(); if (!request.getContextPath().equals("/") && requestUri.startsWith(request.getContextPath())) { requestUri = requestUri.substring(request.getContextPath().length() + 1, requestUri.length()); } model.addAttribute("currentUri", requestUri); model.addAttribute("modalHeaderType", "addEntity"); setModelAttributes(model, sectionKey); return "modules/modalContainer"; } else { ppr = ppr.withCeilingEntityClassname(entityType); } } } //service.getContextSpecificRelationshipId(mainMetadata, entity, prefix); model.addAttribute("currentParams", new ObjectMapper().writeValueAsString(requestParams)); return buildAddCollectionItemModel(request, response, model, id, collectionField, sectionKey, collectionProperty, md, ppr, null, null); } /** * Adds the requested collection item * * @param request * @param response * @param model * @param pathVars * @param id * @param collectionField * @param entityForm * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/add", method = RequestMethod.POST) public String addCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName); Entity entity = service.getRecord(ppr, id, mainMetadata, false); // First, we must save the collection entity Entity savedEntity = service.addSubCollectionEntity(entityForm, mainMetadata, collectionProperty, entity); entityFormValidator.validate(entityForm, savedEntity, result); if (result.hasErrors()) { FieldMetadata md = collectionProperty.getMetadata(); ppr = PersistencePackageRequest.fromMetadata(md); return buildAddCollectionItemModel(request, response, model, id, collectionField, sectionKey, collectionProperty, md, ppr, entityForm, savedEntity); } // Next, we must get the new list grid that represents this collection ListGrid listGrid = getCollectionListGrid(mainMetadata, entity, collectionProperty, null, sectionKey); model.addAttribute("listGrid", listGrid); // We return the new list grid so that it can replace the currently visible one setModelAttributes(model, sectionKey); return "views/standaloneListGrid"; } /** * Builds out all of the model information needed for showing the add modal for 
collection items on both the initial GET * as well as after a POST with validation errors * * @param request * @param model * @param id * @param collectionField * @param sectionKey * @param collectionProperty * @param md * @param ppr * @return the appropriate view to display for the modal * @see {@link #addCollectionItem(HttpServletRequest, HttpServletResponse, Model, Map, String, String, EntityForm, BindingResult)} * @see {@link #showAddCollectionItem(HttpServletRequest, HttpServletResponse, Model, Map, String, String, MultiValueMap)} * @throws ServiceException */ protected String buildAddCollectionItemModel(HttpServletRequest request, HttpServletResponse response, Model model, String id, String collectionField, String sectionKey, Property collectionProperty, FieldMetadata md, PersistencePackageRequest ppr, EntityForm entityForm, Entity entity) throws ServiceException { if (entityForm != null) { entityForm.clearFieldsMap(); } if (md instanceof BasicCollectionMetadata) { BasicCollectionMetadata fmd = (BasicCollectionMetadata) md; // When adding items to basic collections, we will sometimes show a form to persist a new record // and sometimes show a list grid to allow the user to associate an existing record. if (fmd.getAddMethodType().equals(AddMethodType.PERSIST)) { ClassMetadata collectionMetadata = service.getClassMetadata(ppr); if (entityForm == null) { entityForm = formService.createEntityForm(collectionMetadata); entityForm.setCeilingEntityClassname(ppr.getCeilingEntityClassname()); entityForm.setEntityType(ppr.getCeilingEntityClassname()); } else { formService.populateEntityForm(collectionMetadata, entityForm); formService.populateEntityFormFieldValues(collectionMetadata, entity, entityForm); } formService.removeNonApplicableFields(collectionMetadata, entityForm, ppr.getCeilingEntityClassname()); entityForm.getTabs().iterator().next().getIsVisible(); model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/simpleAddEntity"); } else { DynamicResultSet drs = service.getRecords(ppr); ListGrid listGrid = formService.buildCollectionListGrid(id, drs, collectionProperty, sectionKey); listGrid.setPathOverride(request.getRequestURL().toString()); model.addAttribute("listGrid", listGrid); model.addAttribute("viewType", "modal/simpleSelectEntity"); } } else if (md instanceof AdornedTargetCollectionMetadata) { AdornedTargetCollectionMetadata fmd = (AdornedTargetCollectionMetadata) md; // Even though this field represents an adorned target collection, the list we want to show in the modal // is the standard list grid for the target entity of this field ppr.setOperationTypesOverride(null); ppr.setType(PersistencePackageRequest.Type.STANDARD); ClassMetadata collectionMetadata = service.getClassMetadata(ppr); DynamicResultSet drs = service.getRecords(ppr); ListGrid listGrid = formService.buildMainListGrid(drs, collectionMetadata, sectionKey); listGrid.setSubCollectionFieldName(collectionField); listGrid.setPathOverride(request.getRequestURL().toString()); listGrid.setFriendlyName(collectionMetadata.getPolymorphicEntities().getFriendlyName()); if (entityForm == null) { entityForm = formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id); } else { formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id, entityForm); formService.populateEntityFormFieldValues(collectionMetadata, entity, entityForm); } listGrid.setListGridType(ListGrid.Type.ADORNED); for (Entry<String, Field> entry : entityForm.getFields().entrySet()) { if (entry.getValue().getIsVisible()) { 
listGrid.setListGridType(ListGrid.Type.ADORNED_WITH_FORM); break; } } model.addAttribute("listGrid", listGrid); model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/adornedSelectEntity"); } else if (md instanceof MapMetadata) { MapMetadata fmd = (MapMetadata) md; ClassMetadata collectionMetadata = service.getClassMetadata(ppr); if (entityForm == null) { entityForm = formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id); } else { formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id, entityForm); formService.populateEntityFormFieldValues(collectionMetadata, entity, entityForm); } model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/mapAddEntity"); } model.addAttribute("currentUrl", request.getRequestURL().toString()); model.addAttribute("modalHeaderType", "addCollectionItem"); model.addAttribute("collectionProperty", collectionProperty); setModelAttributes(model, sectionKey); return "modules/modalContainer"; } /** * Shows the appropriate modal dialog to edit the selected collection item * * @param request * @param response * @param model * @param pathVars * @param id * @param collectionField * @param collectionItemId * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}", method = RequestMethod.GET) public String showUpdateCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @PathVariable(value="collectionItemId") String collectionItemId) throws Exception { return showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId, "updateCollectionItem"); } /** * Shows the appropriate modal dialog to view the selected collection item. This will display the modal as readonly * * @param request * @param response * @param model * @param pathVars * @param id * @param collectionField * @param collectionItemId * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}/view", method = RequestMethod.GET) public String showViewCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @PathVariable(value="collectionItemId") String collectionItemId) throws Exception { String returnPath = showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId, "viewCollectionItem"); // Since this is a read-only view, actions don't make sense in this context EntityForm ef = (EntityForm) model.asMap().get("entityForm"); ef.removeAllActions(); return returnPath; } protected String showViewUpdateCollection(HttpServletRequest request, Model model, Map<String, String> pathVars, String id, String collectionField, String collectionItemId, String modalHeaderType) throws ServiceException { return showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId, modalHeaderType, null, null); } /** * Shows the view and populates the model for updating a collection item. You can also pass in an entityform and entity * which are optional. 
If they are not passed in then they are automatically looked up * * @param request * @param model * @param pathVars * @param id * @param collectionField * @param collectionItemId * @param modalHeaderType * @param ef * @param entity * @return * @throws ServiceException */ protected String showViewUpdateCollection(HttpServletRequest request, Model model, Map<String, String> pathVars, String id, String collectionField, String collectionItemId, String modalHeaderType, EntityForm entityForm, Entity entity) throws ServiceException { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); FieldMetadata md = collectionProperty.getMetadata(); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName); Entity parentEntity = service.getRecord(ppr, id, mainMetadata, false); ppr = PersistencePackageRequest.fromMetadata(md); if (md instanceof BasicCollectionMetadata && ((BasicCollectionMetadata) md).getAddMethodType().equals(AddMethodType.PERSIST)) { BasicCollectionMetadata fmd = (BasicCollectionMetadata) md; ClassMetadata collectionMetadata = service.getClassMetadata(ppr); if (entity == null) { entity = service.getRecord(ppr, collectionItemId, collectionMetadata, true); } Map<String, DynamicResultSet> subRecordsMap = service.getRecordsForAllSubCollections(ppr, entity); if (entityForm == null) { entityForm = formService.createEntityForm(collectionMetadata, entity, subRecordsMap); } else { entityForm.clearFieldsMap(); formService.populateEntityForm(collectionMetadata, entity, subRecordsMap, entityForm); //remove all the actions since we're not trying to redisplay them on the form entityForm.removeAllActions(); } entityForm.removeAction(DefaultEntityFormActions.DELETE); model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/simpleEditEntity"); } else if (md instanceof AdornedTargetCollectionMetadata && ((AdornedTargetCollectionMetadata) md).getMaintainedAdornedTargetFields().length > 0) { AdornedTargetCollectionMetadata fmd = (AdornedTargetCollectionMetadata) md; if (entity == null) { entity = service.getAdvancedCollectionRecord(mainMetadata, parentEntity, collectionProperty, collectionItemId); } boolean populateTypeAndId = true; if (entityForm == null) { entityForm = formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id); } else { entityForm.clearFieldsMap(); String entityType = entityForm.getEntityType(); formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id, entityForm); entityForm.setEntityType(entityType); populateTypeAndId = false; } ClassMetadata cmd = service.getClassMetadata(ppr); for (String field : fmd.getMaintainedAdornedTargetFields()) { Property p = cmd.getPMap().get(field); if (p != null && p.getMetadata() instanceof AdornedTargetCollectionMetadata) { // Because we're dealing with a nested adorned target collection, this particular request must act // directly on the first adorned target collection. Because of this, we need the actual id property // from the entity that models the adorned target relationship, and not the id of the target object. 
Property alternateIdProperty = entity.getPMap().get(BasicPersistenceModule.ALTERNATE_ID_PROPERTY); DynamicResultSet drs = service.getRecordsForCollection(cmd, entity, p, null, null, null, alternateIdProperty.getValue()); ListGrid listGrid = formService.buildCollectionListGrid(alternateIdProperty.getValue(), drs, p, ppr.getAdornedList().getAdornedTargetEntityClassname()); listGrid.setListGridType(ListGrid.Type.INLINE); listGrid.getToolbarActions().add(DefaultListGridActions.ADD); entityForm.addListGrid(listGrid, EntityForm.DEFAULT_TAB_NAME, EntityForm.DEFAULT_TAB_ORDER); } else if (p != null && p.getMetadata() instanceof MapMetadata) { // See above comment for AdornedTargetCollectionMetadata MapMetadata mmd = (MapMetadata) p.getMetadata(); Property alternateIdProperty = entity.getPMap().get(BasicPersistenceModule.ALTERNATE_ID_PROPERTY); DynamicResultSet drs = service.getRecordsForCollection(cmd, entity, p, null, null, null, alternateIdProperty.getValue()); ListGrid listGrid = formService.buildCollectionListGrid(alternateIdProperty.getValue(), drs, p, mmd.getTargetClass()); listGrid.setListGridType(ListGrid.Type.INLINE); listGrid.getToolbarActions().add(DefaultListGridActions.ADD); entityForm.addListGrid(listGrid, EntityForm.DEFAULT_TAB_NAME, EntityForm.DEFAULT_TAB_ORDER); } } formService.populateEntityFormFields(entityForm, entity, populateTypeAndId, populateTypeAndId); formService.populateAdornedEntityFormFields(entityForm, entity, ppr.getAdornedList()); boolean atLeastOneBasicField = false; for (Entry<String, Field> entry : entityForm.getFields().entrySet()) { if (entry.getValue().getIsVisible()) { atLeastOneBasicField = true; break; } } if (!atLeastOneBasicField) { entityForm.removeAction(DefaultEntityFormActions.SAVE); } model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/adornedEditEntity"); } else if (md instanceof MapMetadata) { MapMetadata fmd = (MapMetadata) md; ClassMetadata collectionMetadata = service.getClassMetadata(ppr); if (entity == null) { entity = service.getAdvancedCollectionRecord(mainMetadata, parentEntity, collectionProperty, collectionItemId); } boolean populateTypeAndId = true; if (entityForm == null) { entityForm = formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id); } else { //save off the prior key before clearing out the fields map as it will not appear //back on the saved entity String priorKey = entityForm.getFields().get("priorKey").getValue(); entityForm.clearFieldsMap(); formService.buildMapForm(fmd, ppr.getMapStructure(), collectionMetadata, id, entityForm); entityForm.getFields().get("priorKey").setValue(priorKey); populateTypeAndId = false; } formService.populateEntityFormFields(entityForm, entity, populateTypeAndId, populateTypeAndId); formService.populateMapEntityFormFields(entityForm, entity); model.addAttribute("entityForm", entityForm); model.addAttribute("viewType", "modal/mapEditEntity"); } model.addAttribute("currentUrl", request.getRequestURL().toString()); model.addAttribute("modalHeaderType", modalHeaderType); model.addAttribute("collectionProperty", collectionProperty); setModelAttributes(model, sectionKey); return "modules/modalContainer"; } /** * Updates the specified collection item * * @param request * @param response * @param model * @param pathVars * @param id * @param collectionField * @param entityForm * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}", method = RequestMethod.POST) public String 
updateCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @PathVariable(value="collectionItemId") String collectionItemId, @ModelAttribute(value="entityForm") EntityForm entityForm, BindingResult result) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName); Entity entity = service.getRecord(ppr, id, mainMetadata, false); // First, we must save the collection entity Entity savedEntity = service.updateSubCollectionEntity(entityForm, mainMetadata, collectionProperty, entity, collectionItemId); entityFormValidator.validate(entityForm, savedEntity, result); if (result.hasErrors()) { return showViewUpdateCollection(request, model, pathVars, id, collectionField, collectionItemId, "updateCollectionItem", entityForm, savedEntity); } // Next, we must get the new list grid that represents this collection ListGrid listGrid = getCollectionListGrid(mainMetadata, entity, collectionProperty, null, sectionKey); model.addAttribute("listGrid", listGrid); // We return the new list grid so that it can replace the currently visible one setModelAttributes(model, sectionKey); return "views/standaloneListGrid"; } /** * Updates the given collection item's sequence. This should only be triggered for adorned target collections * where a sort field is specified -- any other invocation is incorrect and will result in an exception. 
* * @param request * @param response * @param model * @param pathVars * @param id * @param collectionField * @param collectionItemId * @return an object explaining the state of the operation * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}/sequence", method = RequestMethod.POST) public @ResponseBody Map<String, Object> updateCollectionItemSequence(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @PathVariable(value="collectionItemId") String collectionItemId, @RequestParam(value="newSequence") String newSequence) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); FieldMetadata md = collectionProperty.getMetadata(); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName); Entity parentEntity = service.getRecord(ppr, id, mainMetadata, false); ppr = PersistencePackageRequest.fromMetadata(md); if (md instanceof AdornedTargetCollectionMetadata) { AdornedTargetCollectionMetadata fmd = (AdornedTargetCollectionMetadata) md; AdornedTargetList atl = ppr.getAdornedList(); // Get an entity form for the entity EntityForm entityForm = formService.buildAdornedListForm(fmd, ppr.getAdornedList(), id); Entity entity = service.getAdvancedCollectionRecord(mainMetadata, parentEntity, collectionProperty, collectionItemId); formService.populateEntityFormFields(entityForm, entity); formService.populateAdornedEntityFormFields(entityForm, entity, ppr.getAdornedList()); // Set the new sequence (note that it will come in 0-indexed but the persistence module expects 1-indexed) int sequenceValue = Integer.parseInt(newSequence) + 1; Field field = entityForm.findField(atl.getSortField()); field.setValue(String.valueOf(sequenceValue)); Map<String, Object> responseMap = new HashMap<String, Object>(); service.updateSubCollectionEntity(entityForm, mainMetadata, collectionProperty, entity, collectionItemId); responseMap.put("status", "ok"); responseMap.put("field", collectionField); return responseMap; } else { throw new UnsupportedOperationException("Cannot handle sequencing for non adorned target collection fields."); } } /** * Removes the requested collection item * * Note that the request must contain a parameter called "key" when attempting to remove a collection item from a * map collection. 
* * @param request * @param response * @param model * @param pathVars * @param id * @param collectionField * @param collectionItemId * @return the return view path * @throws Exception */ @RequestMapping(value = "/{id}/{collectionField:.*}/{collectionItemId}/delete", method = RequestMethod.POST) public String removeCollectionItem(HttpServletRequest request, HttpServletResponse response, Model model, @PathVariable Map<String, String> pathVars, @PathVariable(value="id") String id, @PathVariable(value="collectionField") String collectionField, @PathVariable(value="collectionItemId") String collectionItemId) throws Exception { String sectionKey = getSectionKey(pathVars); String mainClassName = getClassNameForSection(sectionKey); ClassMetadata mainMetadata = service.getClassMetadata(getSectionPersistencePackageRequest(mainClassName)); Property collectionProperty = mainMetadata.getPMap().get(collectionField); String priorKey = request.getParameter("key"); PersistencePackageRequest ppr = getSectionPersistencePackageRequest(mainClassName); Entity entity = service.getRecord(ppr, id, mainMetadata, false); // First, we must remove the collection entity service.removeSubCollectionEntity(mainMetadata, collectionProperty, entity, collectionItemId, priorKey); // Next, we must get the new list grid that represents this collection ListGrid listGrid = getCollectionListGrid(mainMetadata, entity, collectionProperty, null, sectionKey); model.addAttribute("listGrid", listGrid); // We return the new list grid so that it can replace the currently visible one setModelAttributes(model, sectionKey); return "views/standaloneListGrid"; } // ********************************* // ADDITIONAL SPRING-BOUND METHODS * // ********************************* /** * Invoked on every request to provide the ability to register specific binders for Spring's binding process. * By default, we register a binder that treats empty Strings as null and a Boolean editor that supports either true * or false. If the value is passed in as null, it will treat it as false. * * @param binder */ @InitBinder public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String.class, new StringTrimmerEditor(true)); binder.registerCustomEditor(Boolean.class, new NonNullBooleanEditor()); } }
1 no label
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_web_controller_entity_AdminBasicEntityController.java
45
public class TouchCommandParser extends TypeAwareCommandParser {

    public TouchCommandParser(TextCommandType type) {
        super(type);
    }

    public TextCommand parser(SocketTextReader socketTextReader, String cmd, int space) {
        StringTokenizer st = new StringTokenizer(cmd);
        st.nextToken();
        String key = null;
        int expiration = 0;
        boolean noReply = false;
        if (st.hasMoreTokens()) {
            key = st.nextToken();
        } else {
            return new ErrorCommand(ERROR_CLIENT);
        }
        if (st.hasMoreTokens()) {
            expiration = Integer.parseInt(st.nextToken());
        } else {
            return new ErrorCommand(ERROR_CLIENT);
        }
        if (st.hasMoreTokens()) {
            noReply = "noreply".equals(st.nextToken());
        }
        return new TouchCommand(type, key, expiration, noReply);
    }
}
0 true
hazelcast_src_main_java_com_hazelcast_ascii_memcache_TouchCommandParser.java
118
public interface ArchivedPagePublisher {

    void processPageArchive(Page page, String basePageKey);
}
0 true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_message_ArchivedPagePublisher.java
1,536
public class OrderMapReduce {

    public static final String CLASS = Tokens.makeNamespace(OrderMapReduce.class) + ".class";
    public static final String KEY = Tokens.makeNamespace(OrderMapReduce.class) + ".key";
    public static final String TYPE = Tokens.makeNamespace(OrderMapReduce.class) + ".type";
    public static final String ELEMENT_KEY = Tokens.makeNamespace(OrderMapReduce.class) + ".elementKey";

    public enum Counters {
        VERTICES_PROCESSED,
        OUT_EDGES_PROCESSED
    }

    public static Configuration createConfiguration(final Class<? extends Element> klass, final String key,
                                                    final Class<? extends WritableComparable> type, final String elementKey) {
        final Configuration configuration = new EmptyConfiguration();
        configuration.setClass(OrderMapReduce.CLASS, klass, Element.class);
        configuration.set(OrderMapReduce.KEY, key);
        configuration.setClass(OrderMapReduce.TYPE, type, WritableComparable.class);
        configuration.set(OrderMapReduce.ELEMENT_KEY, elementKey);
        return configuration;
    }

    public static Class<? extends WritableComparator> createComparator(final TransformPipe.Order order,
                                                                       final Class<? extends WritableComparable> comparable) {
        Class<? extends WritableComparator> comparatorClass = null;
        if (comparable.equals(LongWritable.class))
            comparatorClass = order.equals(TransformPipe.Order.INCR) ? LongWritable.Comparator.class : LongWritable.DecreasingComparator.class;
        else if (comparable.equals(IntWritable.class))
            comparatorClass = order.equals(TransformPipe.Order.INCR) ? IntWritable.Comparator.class : WritableComparators.DecreasingIntComparator.class;
        else if (comparable.equals(FloatWritable.class))
            comparatorClass = order.equals(TransformPipe.Order.INCR) ? FloatWritable.Comparator.class : WritableComparators.DecreasingFloatComparator.class;
        else if (comparable.equals(DoubleWritable.class))
            comparatorClass = order.equals(TransformPipe.Order.INCR) ? DoubleWritable.Comparator.class : WritableComparators.DecreasingDoubleComparator.class;
        else if (comparable.equals(Text.class))
            comparatorClass = order.equals(TransformPipe.Order.INCR) ? Text.Comparator.class : WritableComparators.DecreasingTextComparator.class;
        return comparatorClass;
    }

    public static class Map extends Mapper<NullWritable, FaunusVertex, WritableComparable, Text> {

        private String key;
        private boolean isVertex;
        private WritableHandler handler;
        private String elementKey;
        private SafeMapperOutputs outputs;

        @Override
        public void setup(final Mapper.Context context) throws IOException, InterruptedException {
            this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
            this.key = context.getConfiguration().get(KEY);
            this.handler = new WritableHandler(context.getConfiguration().getClass(TYPE, Text.class, WritableComparable.class));
            this.elementKey = context.getConfiguration().get(ELEMENT_KEY);
            this.outputs = new SafeMapperOutputs(context);
        }

        private Text text = new Text();
        private WritableComparable writable;

        @Override
        public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, WritableComparable, Text>.Context context) throws IOException, InterruptedException {
            if (this.isVertex) {
                if (value.hasPaths()) {
                    this.text.set(ElementPicker.getPropertyAsString(value, this.elementKey));
                    final Object temp = ElementPicker.getProperty(value, this.key);
                    if (this.key.equals(Tokens._COUNT)) {
                        this.writable = this.handler.set(temp);
                        context.write(this.writable, this.text);
                    } else if (temp instanceof Number) {
                        this.writable = this.handler.set(multiplyPathCount((Number) temp, value.pathCount()));
                        context.write(this.writable, this.text);
                    } else {
                        this.writable = this.handler.set(temp);
                        for (int i = 0; i < value.pathCount(); i++) {
                            context.write(this.writable, this.text);
                        }
                    }
                    DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
                }
            } else {
                long edgesProcessed = 0;
                for (final Edge e : value.getEdges(Direction.OUT)) {
                    final StandardFaunusEdge edge = (StandardFaunusEdge) e;
                    if (edge.hasPaths()) {
                        this.text.set(ElementPicker.getPropertyAsString(edge, this.elementKey));
                        final Object temp = ElementPicker.getProperty(edge, this.key);
                        if (this.key.equals(Tokens._COUNT)) {
                            this.writable = this.handler.set(temp);
                            context.write(this.writable, this.text);
                        } else if (temp instanceof Number) {
                            this.writable = this.handler.set(multiplyPathCount((Number) temp, edge.pathCount()));
                            context.write(this.writable, this.text);
                        } else {
                            this.writable = this.handler.set(temp);
                            for (int i = 0; i < edge.pathCount(); i++) {
                                context.write(this.writable, this.text);
                            }
                        }
                        edgesProcessed++;
                    }
                }
                DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
            }
            this.outputs.write(Tokens.GRAPH, NullWritable.get(), value);
        }

        @Override
        public void cleanup(final Mapper<NullWritable, FaunusVertex, WritableComparable, Text>.Context context) throws IOException, InterruptedException {
            this.outputs.close();
        }
    }

    public static class Reduce extends Reducer<WritableComparable, Text, Text, WritableComparable> {

        private SafeReducerOutputs outputs;

        @Override
        public void setup(final Reducer<WritableComparable, Text, Text, WritableComparable>.Context context) throws IOException, InterruptedException {
            this.outputs = new SafeReducerOutputs(context);
        }

        @Override
        public void reduce(final WritableComparable key, final Iterable<Text> values, final Reducer<WritableComparable, Text, Text, WritableComparable>.Context context) throws IOException, InterruptedException {
            for (final Text value : values) {
                this.outputs.write(Tokens.SIDEEFFECT, value, key);
            }
        }

        @Override
        public void cleanup(final Reducer<WritableComparable, Text, Text, WritableComparable>.Context context) throws IOException, InterruptedException {
            this.outputs.close();
        }
    }

    private static Number multiplyPathCount(final Number value, final Long pathCount) {
        if (value instanceof Long)
            return (Long) value * pathCount;
        else if (value instanceof Integer)
            return (Integer) value * pathCount;
        else if (value instanceof Double)
            return (Double) value * pathCount;
        else if (value instanceof Float)
            return (Float) value * pathCount;
        else
            return value.doubleValue() * pathCount;
    }
}
1 no label
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_OrderMapReduce.java
46
public static interface ConcurrentHashMapSpliterator<T> {

    /**
     * If possible, returns a new spliterator covering
     * approximately one half of the elements, which will not be
     * covered by this spliterator. Returns null if cannot be
     * split.
     */
    ConcurrentHashMapSpliterator<T> trySplit();

    /**
     * Returns an estimate of the number of elements covered by
     * this Spliterator.
     */
    long estimateSize();

    /** Applies the action to each untraversed element */
    void forEachRemaining(Action<? super T> action);

    /** If an element remains, applies the action and returns true. */
    boolean tryAdvance(Action<? super T> action);
}
0 true
src_main_java_jsr166e_ConcurrentHashMapV8.java
813
public static class Response extends ActionResponse {

    private List<Item> items;

    public List<Item> items() {
        return items;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeVInt(items.size());
        for (Item item : items) {
            out.writeVInt(item.slot);
            if (item.response != null) {
                out.writeBoolean(true);
                item.response.writeTo(out);
            } else {
                out.writeBoolean(false);
                out.writeText(item.error);
            }
        }
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        int size = in.readVInt();
        items = new ArrayList<Item>(size);
        for (int i = 0; i < size; i++) {
            int slot = in.readVInt();
            if (in.readBoolean()) {
                PercolateShardResponse shardResponse = new PercolateShardResponse();
                shardResponse.readFrom(in);
                items.add(new Item(slot, shardResponse));
            } else {
                items.add(new Item(slot, in.readText()));
            }
        }
    }

    public static class Item {

        private final int slot;
        private final PercolateShardResponse response;
        private final Text error;

        public Item(Integer slot, PercolateShardResponse response) {
            this.slot = slot;
            this.response = response;
            this.error = null;
        }

        public Item(Integer slot, Text error) {
            this.slot = slot;
            this.error = error;
            this.response = null;
        }

        public int slot() {
            return slot;
        }

        public PercolateShardResponse response() {
            return response;
        }

        public Text error() {
            return error;
        }

        public boolean failed() {
            return error != null;
        }
    }
}
0 true
src_main_java_org_elasticsearch_action_percolate_TransportShardMultiPercolateAction.java
371
public interface OObjectLazyMultivalueElement<T> {

    public void detach(boolean nonProxiedInstance);

    public void detachAll(boolean nonProxiedInstance);

    public T getNonOrientInstance();

    public Object getUnderlying();
}
0 true
core_src_main_java_com_orientechnologies_orient_core_db_object_OObjectLazyMultivalueElement.java