Unnamed: 0 (int64, 0-6.45k) | func (string, lengths 29-253k) | target (class label, 2 classes) | project (string, lengths 36-167)
---|---|---|---|
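Each row below pairs one Java snippet (func) with a class label (target, shown per row as e.g. 0 (true) or 1 (no label)) and the underscore-flattened path of its source file (project). As a reading aid, a minimal sketch of how one row could be modeled; the class and field names here are hypothetical, not part of the dataset:

```java
// Hypothetical row model; fields mirror the four columns above.
public final class CodeSampleRow {
    public final long id;        // "Unnamed: 0" running index
    public final String func;    // one function or class, flattened into a single cell
    public final int target;     // class label: 0 (true) or 1 (no label)
    public final String project; // underscore-flattened path of the source file

    public CodeSampleRow(long id, String func, int target, String project) {
        this.id = id;
        this.func = func;
        this.target = target;
        this.project = project;
    }
}
```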
3,467 | public static class Stats implements Streamable, ToXContent {
private long indexCount;
private long indexTimeInMillis;
private long indexCurrent;
private long deleteCount;
private long deleteTimeInMillis;
private long deleteCurrent;
Stats() {
}
public Stats(long indexCount, long indexTimeInMillis, long indexCurrent, long deleteCount, long deleteTimeInMillis, long deleteCurrent) {
this.indexCount = indexCount;
this.indexTimeInMillis = indexTimeInMillis;
this.indexCurrent = indexCurrent;
this.deleteCount = deleteCount;
this.deleteTimeInMillis = deleteTimeInMillis;
this.deleteCurrent = deleteCurrent;
}
public void add(Stats stats) {
indexCount += stats.indexCount;
indexTimeInMillis += stats.indexTimeInMillis;
indexCurrent += stats.indexCurrent;
deleteCount += stats.deleteCount;
deleteTimeInMillis += stats.deleteTimeInMillis;
deleteCurrent += stats.deleteCurrent;
}
public long getIndexCount() {
return indexCount;
}
public TimeValue getIndexTime() {
return new TimeValue(indexTimeInMillis);
}
public long getIndexTimeInMillis() {
return indexTimeInMillis;
}
public long getIndexCurrent() {
return indexCurrent;
}
public long getDeleteCount() {
return deleteCount;
}
public TimeValue getDeleteTime() {
return new TimeValue(deleteTimeInMillis);
}
public long getDeleteTimeInMillis() {
return deleteTimeInMillis;
}
public long getDeleteCurrent() {
return deleteCurrent;
}
public static Stats readStats(StreamInput in) throws IOException {
Stats stats = new Stats();
stats.readFrom(in);
return stats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
indexCount = in.readVLong();
indexTimeInMillis = in.readVLong();
indexCurrent = in.readVLong();
deleteCount = in.readVLong();
deleteTimeInMillis = in.readVLong();
deleteCurrent = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(indexCount);
out.writeVLong(indexTimeInMillis);
out.writeVLong(indexCurrent);
out.writeVLong(deleteCount);
out.writeVLong(deleteTimeInMillis);
out.writeVLong(deleteCurrent);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.INDEX_TOTAL, indexCount);
builder.timeValueField(Fields.INDEX_TIME_IN_MILLIS, Fields.INDEX_TIME, indexTimeInMillis);
builder.field(Fields.INDEX_CURRENT, indexCurrent);
builder.field(Fields.DELETE_TOTAL, deleteCount);
builder.timeValueField(Fields.DELETE_TIME_IN_MILLIS, Fields.DELETE_TIME, deleteTimeInMillis);
builder.field(Fields.DELETE_CURRENT, deleteCurrent);
return builder;
}
} | 0 (true)
| src_main_java_org_elasticsearch_index_indexing_IndexingStats.java |
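The Stats class above follows the usual Streamable contract: writeTo and readFrom must emit and consume the same fields in the same order, while add() merges per-shard counters into totals. A minimal sketch of the same pattern for a single counter, using plain java.io streams in place of StreamInput/StreamOutput; the class is illustrative, not Elasticsearch code:

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Illustrative only: DataInput/DataOutput stand in for StreamInput/StreamOutput.
public class CounterStats {
    private long count;

    public void add(CounterStats other) {
        count += other.count; // merge shard-level stats into a total, as Stats.add() does
    }

    public void writeTo(DataOutputStream out) throws IOException {
        out.writeLong(count); // write order...
    }

    public void readFrom(DataInputStream in) throws IOException {
        count = in.readLong(); // ...must match read order exactly
    }
}
```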
3,673 | public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements InternalMapper, RootMapper {
public static final String NAME = "_parent";
public static final String CONTENT_TYPE = "_parent";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = ParentFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
FIELD_TYPE.freeze();
}
}
public static class Builder extends Mapper.Builder<Builder, ParentFieldMapper> {
protected String indexName;
private String type;
protected PostingsFormatProvider postingsFormat;
public Builder() {
super(Defaults.NAME);
this.indexName = name;
}
public Builder type(String type) {
this.type = type;
return builder;
}
protected Builder postingsFormat(PostingsFormatProvider postingsFormat) {
this.postingsFormat = postingsFormat;
return builder;
}
@Override
public ParentFieldMapper build(BuilderContext context) {
if (type == null) {
throw new MapperParsingException("Parent mapping must contain the parent type");
}
return new ParentFieldMapper(name, indexName, type, postingsFormat, null, context.indexSettings());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ParentFieldMapper.Builder builder = parent();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
} else if (fieldName.equals("postings_format")) {
String postingFormatName = fieldNode.toString();
builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
}
}
return builder;
}
}
private final String type;
private final BytesRef typeAsBytes;
protected ParentFieldMapper(String name, String indexName, String type, PostingsFormatProvider postingsFormat, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, postingsFormat, null, null, null, fieldDataSettings, indexSettings);
this.type = type;
this.typeAsBytes = type == null ? null : new BytesRef(type);
}
public ParentFieldMapper() {
this(Defaults.NAME, Defaults.NAME, null, null, null, null);
}
public String type() {
return type;
}
@Override
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
return new FieldDataType("string");
}
@Override
public boolean hasDocValues() {
return false;
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
parse(context);
}
@Override
public void validate(ParseContext context) throws MapperParsingException {
}
@Override
public boolean includeInObject() {
return true;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!active()) {
return;
}
if (context.parser().currentName() != null && context.parser().currentName().equals(Defaults.NAME)) {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
} else {
// otherwise, we are running post-processing of the xcontent
String parsedParentId = context.doc().get(Defaults.NAME);
if (context.sourceToParse().parent() != null) {
String parentId = context.sourceToParse().parent();
if (parsedParentId == null) {
if (parentId == null) {
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
}
}
}
// we have parent mapping, yet no value was set, ignore it...
}
@Override
public Uid value(Object value) {
if (value == null) {
return null;
}
return Uid.createUid(value.toString());
}
@Override
public Object valueForSearch(Object value) {
if (value == null) {
return null;
}
String sValue = value.toString();
if (sValue == null) {
return null;
}
int index = sValue.indexOf(Uid.DELIMITER);
if (index == -1) {
return sValue;
}
return sValue.substring(index + 1);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value instanceof BytesRef) {
BytesRef bytesRef = (BytesRef) value;
if (Uid.hasDelimiter(bytesRef)) {
return bytesRef;
}
return Uid.createUidAsBytes(typeAsBytes, bytesRef);
}
String sValue = value.toString();
if (sValue.indexOf(Uid.DELIMITER) == -1) {
return Uid.createUidAsBytes(type, sValue);
}
return super.indexedValueForSearch(value);
}
@Override
public Query termQuery(Object value, @Nullable QueryParseContext context) {
if (context == null) {
return super.termQuery(value, context);
}
return new ConstantScoreQuery(termFilter(value, context));
}
@Override
public Filter termFilter(Object value, @Nullable QueryParseContext context) {
if (context == null) {
return super.termFilter(value, context);
}
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
return new TermFilter(new Term(names.indexName(), bValue));
}
List<String> types = new ArrayList<String>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService()) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
if (types.isEmpty()) {
return Queries.MATCH_NO_FILTER;
} else if (types.size() == 1) {
return new TermFilter(new Term(names.indexName(), Uid.createUidAsBytes(types.get(0), bValue)));
} else {
// we use all non-child types, because we don't know if it's exact or not...
List<BytesRef> typesValues = new ArrayList<BytesRef>(types.size());
for (String type : context.mapperService().types()) {
typesValues.add(Uid.createUidAsBytes(type, bValue));
}
return new TermsFilter(names.indexName(), typesValues);
}
}
@Override
public Filter termsFilter(List values, @Nullable QueryParseContext context) {
if (context == null) {
return super.termsFilter(values, context);
}
// This will not be invoked if values is empty, so don't check for empty
if (values.size() == 1) {
return termFilter(values.get(0), context);
}
List<String> types = new ArrayList<String>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService()) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
List<BytesRef> bValues = new ArrayList<BytesRef>(values.size());
for (Object value : values) {
BytesRef bValue = BytesRefs.toBytesRef(value);
if (Uid.hasDelimiter(bValue)) {
bValues.add(bValue);
} else {
// we use all non-child types, because we don't know if it's exact or not...
for (String type : types) {
bValues.add(Uid.createUidAsBytes(type, bValue));
}
}
}
return new TermsFilter(names.indexName(), bValues);
}
/**
* We don't need to analyze the text, and we need to convert it to a UID...
*/
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!active()) {
return builder;
}
builder.startObject(CONTENT_TYPE);
builder.field("type", type);
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
if (active() == other.active()) {
return;
}
if (active() != other.active() || !type.equals(other.type)) {
mergeContext.addConflict("The _parent field can't be added or updated");
}
}
/**
* @return Whether the _parent field is actually used.
*/
public boolean active() {
return type != null;
}
} | 1 (no label)
| src_main_java_org_elasticsearch_index_mapper_internal_ParentFieldMapper.java |
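valueForSearch above strips everything up to and including the uid delimiter, so a stored `type#id` value comes back as the bare id. A self-contained sketch of that behavior; treating '#' as the value of Uid.DELIMITER is an assumption here:

```java
public final class UidDemo {
    private static final char DELIMITER = '#'; // assumed value of Uid.DELIMITER

    // Mirrors ParentFieldMapper.valueForSearch: return the id part of "type#id".
    static String idPart(String uid) {
        int index = uid.indexOf(DELIMITER);
        return index == -1 ? uid : uid.substring(index + 1);
    }

    public static void main(String[] args) {
        System.out.println(idPart("blog#42")); // prints "42"
        System.out.println(idPart("42"));      // no delimiter: returned unchanged
    }
}
```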
1,334 | public class OFuzzyCheckpointEndRecord implements OWALRecord {
private OLogSequenceNumber lsn;
public OFuzzyCheckpointEndRecord() {
}
@Override
public int toStream(byte[] content, int offset) {
return offset;
}
@Override
public int fromStream(byte[] content, int offset) {
return offset;
}
@Override
public int serializedSize() {
return 0;
}
@Override
public boolean isUpdateMasterRecord() {
return false;
}
@Override
public OLogSequenceNumber getLsn() {
return lsn;
}
@Override
public void setLsn(OLogSequenceNumber lsn) {
this.lsn = lsn;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
return true;
}
@Override
public String toString() {
return "OFuzzyCheckpointEndRecord{" + "lsn=" + lsn + '}';
}
} | 0 (true)
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_OFuzzyCheckpointEndRecord.java |
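OFuzzyCheckpointEndRecord carries no payload, so serializedSize() is 0 and both stream methods return the offset unchanged. For contrast, a hedged sketch of a record with a single long payload, using hand-rolled big-endian packing rather than OrientDB's serializer helpers; the class is illustrative, not part of OrientDB:

```java
// Illustrative WAL-style record with one long payload.
public class LongPayloadRecord {
    private long value;

    public int serializedSize() {
        return 8; // one long
    }

    public int toStream(byte[] content, int offset) {
        for (int i = 7; i >= 0; i--) {
            content[offset++] = (byte) (value >>> (i * 8)); // pack big-endian
        }
        return offset; // new offset, 8 bytes past the old one
    }

    public int fromStream(byte[] content, int offset) {
        value = 0;
        for (int i = 0; i < 8; i++) {
            value = (value << 8) | (content[offset++] & 0xFF); // unpack big-endian
        }
        return offset;
    }
}
```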
33 | {
@Override
public void run()
{
Channel channel = getChannel( to );
try
{
if ( channel == null )
{
channel = openChannel( to );
openedChannel( to, channel );
// Instance could be connected to, remove any marker of it being failed
failedInstances.remove( to );
}
}
catch ( Exception e )
{
// Only print out failure message on first fail
if ( !failedInstances.contains( to ) )
{
msgLog.warn( e.getMessage() );
failedInstances.add( to );
}
return;
}
try
{
// Set FROM header
message.setHeader( Message.FROM, me.toASCIIString() );
msgLog.debug( "Sending to " + to + ": " + message );
ChannelFuture future = channel.write( message );
future.addListener( new ChannelFutureListener()
{
@Override
public void operationComplete( ChannelFuture future ) throws Exception
{
if ( !future.isSuccess() )
{
msgLog.debug( "Unable to write " + message + " to " + future.getChannel(),
future.getCause() );
}
}
} );
}
catch ( Exception e )
{
msgLog.warn( "Could not send message", e );
channel.close();
}
}
} ); | 1 (no label)
| enterprise_cluster_src_main_java_org_neo4j_cluster_com_NetworkSender.java |
1,638 | public static final Validator NON_NEGATIVE_DOUBLE = new Validator() {
@Override
public String validate(String setting, String value) {
try {
if (Double.parseDouble(value) < 0.0) {
return "the value of the setting " + setting + " must be a non negative double";
}
} catch (NumberFormatException ex) {
return "cannot parse value [" + value + "] as a double";
}
return null;
}
}; | 0 (true)
| src_main_java_org_elasticsearch_cluster_settings_Validator.java |
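A validator of this shape returns null on success and an error message on failure, so callers simply branch on null. A short usage sketch, assuming the constant above is reachable as Validator.NON_NEGATIVE_DOUBLE; the setting name is made up for illustration:

```java
// Usage sketch: a null return means the value passed validation.
public class ValidatorDemo {
    public static void main(String[] args) {
        String error = Validator.NON_NEGATIVE_DOUBLE.validate("indices.recovery.max_rate", "-1.5");
        if (error != null) {
            System.err.println(error); // only non-negative doubles are accepted
        }
    }
}
```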
public interface BiAction<A,B> { void apply(A a, B b); } | 0 (true)
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
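BiAction is a two-argument callback, a pre-Java-8 analogue of BiConsumer; since jsr166e targets Java 6/7, a usage sketch with an anonymous class:

```java
// Usage sketch: implement and invoke the two-argument callback, Java 6/7 style.
public class BiActionDemo {
    public static void main(String[] args) {
        BiAction<String, Integer> printer = new BiAction<String, Integer>() {
            public void apply(String key, Integer value) {
                System.out.println(key + " -> " + value);
            }
        };
        printer.apply("answer", 42);
    }
}
```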
226 | public class BerkeleyVariableLengthKCVSTest extends KeyColumnValueStoreTest {
public KeyColumnValueStoreManager openStorageManager() throws BackendException {
BerkeleyJEStoreManager sm = new BerkeleyJEStoreManager(BerkeleyStorageSetup.getBerkeleyJEConfiguration());
return new OrderedKeyValueStoreManagerAdapter(sm);
}
@Test
public void testGetKeysWithKeyRange() throws Exception {
super.testGetKeysWithKeyRange();
}
@Test @Override
public void testConcurrentGetSlice() throws ExecutionException, InterruptedException, BackendException {
}
@Test @Override
public void testConcurrentGetSliceAndMutate() throws BackendException, ExecutionException, InterruptedException {
}
} | 0 (true)
| titan-berkeleyje_src_test_java_com_thinkaurelius_titan_diskstorage_berkeleyje_BerkeleyVariableLengthKCVSTest.java |
6,421 | targetTransport.threadPool().generic().execute(new Runnable() {
@Override
public void run() {
targetTransport.messageReceived(data, action, sourceTransport, version, null);
}
}); | 1 (no label)
| src_main_java_org_elasticsearch_transport_local_LocalTransportChannel.java |
1,007 | public static class Presentation {
public static class Group {
public static class Name {
}
public static class Order {
}
}
public static class FieldOrder {
public static final int NAME = 1000;
public static final int DESCRIPTION = 2000;
public static final int FLATRATES = 9000;
}
} | 0 (true)
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_FulfillmentOptionImpl.java |
27 | class DataValueTupleBinding extends TupleBinding<DataValue> {
@Override
public void objectToEntry(DataValue dv, TupleOutput to) {
Map<String, String> data = dv.getData();
for (String key: data.keySet()) {
String value = data.get(key);
to.writeString(key);
to.writeString(value);
}
}
@Override
public DataValue entryToObject(TupleInput ti) {
Map<String, String> data = new HashMap<String, String>();
while (ti.available() > 0) {
String key = ti.readString();
String value = ti.readString();
data.put(key, value);
}
return new DataValue(data);
}
} | 0 (true)
| timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_DataValueTupleBinding.java |
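objectToEntry and entryToObject above are inverses, so a round trip through a DatabaseEntry should reproduce the map. A usage sketch relying on Berkeley DB JE's standard TupleBinding wrappers objectToEntry(Object, DatabaseEntry) and entryToObject(DatabaseEntry); the sample data is made up:

```java
import com.sleepycat.je.DatabaseEntry;
import java.util.HashMap;
import java.util.Map;

// Round-trip sketch: serialize a DataValue and read it back.
public class DataValueRoundTripDemo {
    public static void main(String[] args) {
        Map<String, String> data = new HashMap<String, String>();
        data.put("telemetry", "42.0");
        DataValueTupleBinding binding = new DataValueTupleBinding();
        DatabaseEntry entry = new DatabaseEntry();
        binding.objectToEntry(new DataValue(data), entry);  // writes alternating key/value strings
        DataValue roundTrip = binding.entryToObject(entry); // reads pairs until input runs out
        System.out.println(roundTrip.getData());
    }
}
```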
2,117 | public interface ESLogger {
String getPrefix();
String getName();
/**
* Allows setting the logger level.
* If the new level is null, the logger will inherit its level
* from its nearest ancestor with a specific (non-null) level value.
* @param level the new level
*/
void setLevel(String level);
/**
* Returns the current logger level
* If the level is null, it means that the logger inherits its level
* from its nearest ancestor with a specific (non-null) level value.
* @return the logger level
*/
String getLevel();
/**
* Returns {@code true} if a TRACE level message is logged.
*/
boolean isTraceEnabled();
/**
* Returns {@code true} if a DEBUG level message is logged.
*/
boolean isDebugEnabled();
/**
* Returns {@code true} if an INFO level message is logged.
*/
boolean isInfoEnabled();
/**
* Returns {@code true} if a WARN level message is logged.
*/
boolean isWarnEnabled();
/**
* Returns {@code true} if an ERROR level message is logged.
*/
boolean isErrorEnabled();
/**
* Logs a TRACE level message.
*/
void trace(String msg, Object... params);
/**
* Logs a TRACE level message.
*/
void trace(String msg, Throwable cause, Object... params);
/**
* Logs a DEBUG level message.
*/
void debug(String msg, Object... params);
/**
* Logs a DEBUG level message.
*/
void debug(String msg, Throwable cause, Object... params);
/**
* Logs an INFO level message.
*/
void info(String msg, Object... params);
/**
* Logs an INFO level message.
*/
void info(String msg, Throwable cause, Object... params);
/**
* Logs a WARN level message.
*/
void warn(String msg, Object... params);
/**
* Logs a WARN level message.
*/
void warn(String msg, Throwable cause, Object... params);
/**
* Logs an ERROR level message.
*/
void error(String msg, Object... params);
/**
* Logs an ERROR level message.
*/
void error(String msg, Throwable cause, Object... params);
} | 0 (true)
| src_main_java_org_elasticsearch_common_logging_ESLogger.java |
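All of the logging methods pair a message with varargs that fill `{}` placeholders, and the isXxxEnabled() checks let callers skip building expensive arguments. A method-level sketch against the interface above; how the logger instance is obtained is left out, and the message values are illustrative:

```java
// Usage sketch: guard expensive logging and use {} placeholder parameters.
void logIndexing(ESLogger logger) {
    if (logger.isDebugEnabled()) {
        logger.debug("indexed [{}] docs in [{}] ms", 1024, 37);
    }
    // warn(String, Throwable, Object...) takes the cause before the parameters
    logger.warn("slow indexing on shard [{}]", new RuntimeException("simulated"), 3);
}
```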
1,069 | public class TransportSingleShardTermVectorAction extends TransportShardSingleOperationAction<TermVectorRequest, TermVectorResponse> {
private final IndicesService indicesService;
@Inject
public TransportSingleShardTermVectorAction(Settings settings, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ThreadPool threadPool) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
}
@Override
protected String executor() {
// TODO: Is this the right pool to execute this on?
return ThreadPool.Names.GET;
}
@Override
protected String transportAction() {
return TermVectorAction.NAME;
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, TermVectorRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, TermVectorRequest request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, request.index());
}
@Override
protected ShardIterator shards(ClusterState state, TermVectorRequest request) {
return clusterService.operationRouting().getShards(clusterService.state(), request.index(), request.type(), request.id(),
request.routing(), request.preference());
}
@Override
protected void resolveRequest(ClusterState state, TermVectorRequest request) {
// update the routing (request#index here is possibly an alias)
request.routing(state.metaData().resolveIndexRouting(request.routing(), request.index()));
request.index(state.metaData().concreteIndex(request.index()));
// Fail fast on the node that received the request.
if (request.routing() == null && state.getMetaData().routingRequired(request.index(), request.type())) {
throw new RoutingMissingException(request.index(), request.type(), request.id());
}
}
@Override
protected TermVectorResponse shardOperation(TermVectorRequest request, int shardId) throws ElasticsearchException {
IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.shardSafe(shardId);
return indexShard.termVectorService().getTermVector(request);
}
@Override
protected TermVectorRequest newRequest() {
return new TermVectorRequest();
}
@Override
protected TermVectorResponse newResponse() {
return new TermVectorResponse();
}
} | 0 (true)
| src_main_java_org_elasticsearch_action_termvector_TransportSingleShardTermVectorAction.java |
3,000 | public class IdCacheModule extends AbstractModule {
public static final class IdCacheSettings {
public static final String ID_CACHE_TYPE = "index.cache.id.type";
}
private final Settings settings;
public IdCacheModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
bind(IdCache.class)
.to(settings.getAsClass(IdCacheSettings.ID_CACHE_TYPE, SimpleIdCache.class, "org.elasticsearch.index.cache.id.", "IdCache"))
.in(Scopes.SINGLETON);
}
} | 0 (true)
| src_main_java_org_elasticsearch_index_cache_id_IdCacheModule.java |
1,437 | public class Invalidation implements DataSerializable {
private Object key;
private Object version;
public Invalidation() {
}
public Invalidation(final Object key, final Object version) {
this.key = key;
this.version = version;
}
public Object getKey() {
return key;
}
public Object getVersion() {
return version;
}
public void writeData(final ObjectDataOutput out) throws IOException {
out.writeObject(key);
out.writeObject(version);
}
public void readData(final ObjectDataInput in) throws IOException {
key = in.readObject();
version = in.readObject();
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Invalidation");
sb.append("{key=").append(key);
sb.append(", version=").append(version);
sb.append('}');
return sb.toString();
}
} | 0 (true)
| hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_local_Invalidation.java |
425 | restoreService.addListener(new RestoreService.RestoreCompletionListener() {
SnapshotId snapshotId = new SnapshotId(request.repository(), request.snapshot());
@Override
public void onRestoreCompletion(SnapshotId snapshotId, RestoreInfo snapshot) {
if (this.snapshotId.equals(snapshotId)) {
listener.onResponse(new RestoreSnapshotResponse(snapshot));
restoreService.removeListener(this);
}
}
}); | 0 (true)
| src_main_java_org_elasticsearch_action_admin_cluster_snapshots_restore_TransportRestoreSnapshotAction.java |
1,061 | public class OCommandExecutorSQLUpdate extends OCommandExecutorSQLSetAware implements OCommandResultListener {
public static final String KEYWORD_UPDATE = "UPDATE";
private static final String KEYWORD_ADD = "ADD";
private static final String KEYWORD_PUT = "PUT";
private static final String KEYWORD_REMOVE = "REMOVE";
private static final String KEYWORD_INCREMENT = "INCREMENT";
private static final String KEYWORD_MERGE = "MERGE";
private Map<String, Object> setEntries = new LinkedHashMap<String, Object>();
private List<OPair<String, Object>> addEntries = new ArrayList<OPair<String, Object>>();
private Map<String, OPair<String, Object>> putEntries = new LinkedHashMap<String, OPair<String, Object>>();
private List<OPair<String, Object>> removeEntries = new ArrayList<OPair<String, Object>>();
private Map<String, Number> incrementEntries = new LinkedHashMap<String, Number>();
private ODocument merge = null;
private OQuery<?> query;
private OSQLFilter compiledFilter;
private int recordCount = 0;
private String subjectName;
private static final Object EMPTY_VALUE = new Object();
private OCommandParameters parameters;
@SuppressWarnings("unchecked")
public OCommandExecutorSQLUpdate parse(final OCommandRequest iRequest) {
final ODatabaseRecord database = getDatabase();
init((OCommandRequestText) iRequest);
setEntries.clear();
addEntries.clear();
putEntries.clear();
removeEntries.clear();
incrementEntries.clear();
content = null;
merge = null;
query = null;
recordCount = 0;
parserRequiredKeyword(KEYWORD_UPDATE);
subjectName = parserRequiredWord(false, "Invalid target", " =><,\r\n");
if (subjectName == null)
throwSyntaxErrorException("Invalid subject name. Expected cluster, class, index or sub-query");
parserNextWord(true);
String word = parserGetLastWord();
if (parserIsEnded()
|| (!word.equals(KEYWORD_SET) && !word.equals(KEYWORD_ADD) && !word.equals(KEYWORD_PUT) && !word.equals(KEYWORD_REMOVE)
&& !word.equals(KEYWORD_INCREMENT) && !word.equals(KEYWORD_CONTENT) && !word.equals(KEYWORD_MERGE)))
throwSyntaxErrorException("Expected keyword " + KEYWORD_SET + "," + KEYWORD_ADD + "," + KEYWORD_CONTENT + "," + KEYWORD_MERGE
+ "," + KEYWORD_PUT + "," + KEYWORD_REMOVE + " or " + KEYWORD_INCREMENT);
while (!parserIsEnded() && !parserGetLastWord().equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE)) {
word = parserGetLastWord();
if (word.equals(KEYWORD_CONTENT))
parseContent();
else if (word.equals(KEYWORD_MERGE))
parseMerge();
else if (word.equals(KEYWORD_SET))
parseSetFields(setEntries);
else if (word.equals(KEYWORD_ADD))
parseAddFields();
else if (word.equals(KEYWORD_PUT))
parsePutFields();
else if (word.equals(KEYWORD_REMOVE))
parseRemoveFields();
else if (word.equals(KEYWORD_INCREMENT))
parseIncrementFields();
else
break;
parserNextWord(true);
}
final String additionalStatement = parserGetLastWord();
if (subjectName.startsWith("(")) {
subjectName = subjectName.trim();
query = database.command(new OSQLAsynchQuery<ODocument>(subjectName.substring(1, subjectName.length() - 1), this)
.setContext(context));
if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE)
|| additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_LIMIT))
compiledFilter = OSQLEngine.getInstance().parseCondition(parserText.substring(parserGetCurrentPosition()), getContext(),
KEYWORD_WHERE);
} else if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE)
|| additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_LIMIT))
query = new OSQLAsynchQuery<ODocument>("select from " + subjectName + " " + additionalStatement + " "
+ parserText.substring(parserGetCurrentPosition()), this);
else if (additionalStatement != null && !additionalStatement.isEmpty())
throwSyntaxErrorException("Invalid keyword " + additionalStatement);
else
query = new OSQLAsynchQuery<ODocument>("select from " + subjectName, this);
return this;
}
public Object execute(final Map<Object, Object> iArgs) {
if (subjectName == null)
throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
parameters = new OCommandParameters(iArgs);
Map<Object, Object> queryArgs;
if(parameters.size() > 0 && parameters.getByName(0) != null ){
queryArgs = new HashMap<Object, Object>();
for (int i = parameterCounter; i < parameters.size(); i++) {
if (parameters.getByName(i) != null)
queryArgs.put(i - parameterCounter, parameters.getByName(i));
}
} else {
queryArgs = iArgs;
}
query.setUseCache(false);
query.setContext(context);
getDatabase().query(query, queryArgs);
return recordCount;
}
/**
* Update current record.
*/
@SuppressWarnings("unchecked")
public boolean result(final Object iRecord) {
final ODocument record = (ODocument) ((OIdentifiable) iRecord).getRecord();
if (compiledFilter != null) {
// ADDITIONAL FILTERING
if (!(Boolean) compiledFilter.evaluate(record, null, context))
return false;
}
final Set<ODocument> updatedRecords = new HashSet<ODocument>();
parameters.reset();
if (content != null) {
// REPLACE ALL THE CONTENT
record.clear();
record.merge(content, false, false);
updatedRecords.add(record);
}
if (merge != null) {
// MERGE THE CONTENT
record.merge(merge, true, false);
updatedRecords.add(record);
}
// BIND VALUES TO UPDATE
if (!setEntries.isEmpty()) {
Set<ODocument> changedDocuments = OSQLHelper.bindParameters(record, setEntries, parameters, context);
if (changedDocuments != null)
updatedRecords.addAll(changedDocuments);
}
// BIND VALUES TO INCREMENT
for (Map.Entry<String, Number> entry : incrementEntries.entrySet()) {
final Number prevValue = record.field(entry.getKey());
if (prevValue == null)
// NO PREVIOUS VALUE: CONSIDER AS 0
record.field(entry.getKey(), entry.getValue());
else
// COMPUTING INCREMENT
record.field(entry.getKey(), OType.increment(prevValue, entry.getValue()));
updatedRecords.add(record);
}
Object v;
// BIND VALUES TO ADD
Collection<Object> coll;
Object fieldValue;
for (OPair<String, Object> entry : addEntries) {
coll = null;
if (!record.containsField(entry.getKey())) {
// GET THE TYPE IF ANY
if (record.getSchemaClass() != null) {
OProperty prop = record.getSchemaClass().getProperty(entry.getKey());
if (prop != null && prop.getType() == OType.LINKSET)
// SET TYPE
coll = new HashSet<Object>();
}
if (coll == null)
// IN ALL OTHER CASES USE A LIST
coll = new ArrayList<Object>();
record.field(entry.getKey(), coll);
} else {
fieldValue = record.field(entry.getKey());
if (fieldValue instanceof Collection<?>)
coll = (Collection<Object>) fieldValue;
else
continue;
}
v = entry.getValue();
if (v instanceof OSQLFilterItem)
v = ((OSQLFilterItem) v).getValue(record, context);
else if (v instanceof OSQLFunctionRuntime)
v = ((OSQLFunctionRuntime) v).execute(record, null, context);
coll.add(v);
updatedRecords.add(record);
}
// BIND VALUES TO PUT (AS MAP)
Map<String, Object> map;
OPair<String, Object> pair;
for (Entry<String, OPair<String, Object>> entry : putEntries.entrySet()) {
fieldValue = record.field(entry.getKey());
if (fieldValue == null) {
if (record.getSchemaClass() != null) {
final OProperty property = record.getSchemaClass().getProperty(entry.getKey());
if (property != null
&& (property.getType() != null && (!property.getType().equals(OType.EMBEDDEDMAP) && !property.getType().equals(
OType.LINKMAP)))) {
throw new OCommandExecutionException("field " + entry.getKey() + " is not defined as a map");
}
}
fieldValue = new HashMap<String, Object>();
record.field(entry.getKey(), fieldValue);
}
if (fieldValue instanceof Map<?, ?>) {
map = (Map<String, Object>) fieldValue;
pair = entry.getValue();
v = pair.getValue();
if (v instanceof OSQLFilterItem)
v = ((OSQLFilterItem) v).getValue(record, context);
else if (pair.getValue() instanceof OSQLFunctionRuntime)
v = ((OSQLFunctionRuntime) v).execute(record, null, context);
map.put(pair.getKey(), v);
updatedRecords.add(record);
}
}
// REMOVE FIELD IF ANY
for (OPair<String, Object> entry : removeEntries) {
v = entry.getValue();
if (v == EMPTY_VALUE) {
record.removeField(entry.getKey());
updatedRecords.add(record);
} else {
fieldValue = record.field(entry.getKey());
if (fieldValue instanceof Collection<?>) {
coll = (Collection<Object>) fieldValue;
if (coll.remove(v))
updatedRecords.add(record);
} else if (fieldValue instanceof Map<?, ?>) {
map = (Map<String, Object>) fieldValue;
if (map.remove(v) != null)
updatedRecords.add(record);
}
}
}
for (ODocument d : updatedRecords) {
d.setDirty();
d.save();
recordCount++;
}
return true;
}
protected void parseMerge() {
if (!parserIsEnded() && !parserGetLastWord().equals(KEYWORD_WHERE)) {
final String contentAsString = parserRequiredWord(false, "document to merge expected").trim();
merge = new ODocument().fromJSON(contentAsString);
parserSkipWhiteSpaces();
}
if (merge == null)
throwSyntaxErrorException("Document to merge not provided. Example: MERGE { \"name\": \"Jay\" }");
}
private void parseAddFields() {
String fieldName;
String fieldValue;
while (!parserIsEnded() && (addEntries.size() == 0 || parserGetLastSeparator() == ',' || parserGetCurrentChar() == ',')
&& !parserGetLastWord().equals(KEYWORD_WHERE)) {
fieldName = parserRequiredWord(false, "Field name expected");
parserRequiredKeyword("=");
fieldValue = parserRequiredWord(false, "Value expected", " =><,\r\n");
// INSERT TRANSFORMED FIELD VALUE
addEntries.add(new OPair<String, Object>(fieldName, getFieldValueCountingParameters(fieldValue)));
parserSkipWhiteSpaces();
}
if (addEntries.size() == 0)
throwSyntaxErrorException("Entries to add <field> = <value> are missed. Example: name = 'Bill', salary = 300.2.");
}
private void parsePutFields() {
String fieldName;
String fieldKey;
String fieldValue;
while (!parserIsEnded() && (putEntries.size() == 0 || parserGetLastSeparator() == ',' || parserGetCurrentChar() == ',')
&& !parserGetLastWord().equals(KEYWORD_WHERE)) {
fieldName = parserRequiredWord(false, "Field name expected");
parserRequiredKeyword("=");
fieldKey = parserRequiredWord(false, "Key expected");
fieldValue = getBlock(parserRequiredWord(false, "Value expected", " =><,\r\n"));
// INSERT TRANSFORMED FIELD VALUE
putEntries.put(fieldName, new OPair<String, Object>((String) getFieldValueCountingParameters(fieldKey),
getFieldValueCountingParameters(fieldValue)));
parserSkipWhiteSpaces();
}
if (putEntries.size() == 0)
throwSyntaxErrorException("Entries to put <field> = <key>, <value> are missed. Example: name = 'Bill', 30");
}
private void parseRemoveFields() {
String fieldName;
String fieldValue;
Object value;
while (!parserIsEnded() && (removeEntries.size() == 0 || parserGetLastSeparator() == ',' || parserGetCurrentChar() == ',')
&& !parserGetLastWord().equals(KEYWORD_WHERE)) {
fieldName = parserRequiredWord(false, "Field name expected");
final boolean found = parserOptionalKeyword("=", "WHERE");
if (found)
if (parserGetLastWord().equals("WHERE")) {
parserGoBack();
value = EMPTY_VALUE;
} else {
fieldValue = getBlock(parserRequiredWord(false, "Value expected"));
value = getFieldValueCountingParameters(fieldValue);
}
else
value = EMPTY_VALUE;
// INSERT FIELD NAME TO BE REMOVED
removeEntries.add(new OPair<String, Object>(fieldName, value));
parserSkipWhiteSpaces();
}
if (removeEntries.size() == 0)
throwSyntaxErrorException("Field(s) to remove are missed. Example: name, salary");
}
private void parseIncrementFields() {
String fieldName;
String fieldValue;
while (!parserIsEnded() && (incrementEntries.size() == 0 || parserGetLastSeparator() == ',')
&& !parserGetLastWord().equals(KEYWORD_WHERE)) {
fieldName = parserRequiredWord(false, "Field name expected");
parserRequiredKeyword("=");
fieldValue = getBlock(parserRequiredWord(false, "Value expected"));
// INSERT TRANSFORMED FIELD VALUE
incrementEntries.put(fieldName, (Number) getFieldValueCountingParameters(fieldValue));
parserSkipWhiteSpaces();
}
if (incrementEntries.size() == 0)
throwSyntaxErrorException("Entries to increment <field> = <value> are missed. Example: salary = -100");
}
@Override
public String getSyntax() {
return "UPDATE <class>|cluster:<cluster>> [SET|ADD|PUT|REMOVE|INCREMENT|CONTENT {<JSON>}|MERGE {<JSON>}] [[,] <field-name> = <expression>|<sub-command>]* [WHERE <conditions>]";
}
@Override
public void end() {
}
protected String getBlock(String fieldValue) {
if (fieldValue.startsWith("{") || fieldValue.startsWith("[")) {
parserSkipWhiteSpaces();
final StringBuilder buffer = new StringBuilder();
parserSetCurrentPosition(OStringSerializerHelper.parse(parserText, buffer, parserGetCurrentPosition(), -1,
OStringSerializerHelper.DEFAULT_FIELD_SEPARATOR, true, true, false, OStringSerializerHelper.DEFAULT_IGNORE_CHARS));
fieldValue = buffer.toString();
}
return fieldValue;
}
} | 1 (no label)
| core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLUpdate.java |
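The parser above recognizes the clauses listed in getSyntax(). A few statements it would accept, issued through OrientDB's SQL command API; the database URL and the Person class are made up for illustration, and execute() returning the updated-record count mirrors recordCount above:

```java
// Usage sketch: UPDATE statements exercising SET, INCREMENT and REMOVE.
ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:demo").create();
db.command(new OCommandSQL("UPDATE Person SET name = 'Jay' WHERE name = 'J'")).execute();
db.command(new OCommandSQL("UPDATE Person INCREMENT salary = 100 WHERE name = 'Jay'")).execute();
Integer updated = db.command(new OCommandSQL("UPDATE Person REMOVE nickname WHERE name = 'Jay'")).execute();
db.close();
```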
467 | @Repository("blSandBoxDao")
public class SandBoxDaoImpl implements SandBoxDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager sandBoxEntityManager;
@Resource(name = "blTransactionManager")
JpaTransactionManager transactionManager;
@Override
public SandBox retrieve(Long id) {
return sandBoxEntityManager.find(SandBoxImpl.class, id);
}
@Override
public SandBox retrieveSandBoxByType(Site site, SandBoxType sandboxType) {
TypedQuery<SandBox> query = sandBoxEntityManager.createNamedQuery("BC_READ_SANDBOX_BY_TYPE", SandBox.class);
//query.setParameter("site", site);
query.setParameter("sandboxType", sandboxType.getType());
SandBox response = null;
try {
response = query.getSingleResult();
} catch (NoResultException e) {
//do nothing - there is no sandbox
}
return response;
}
@Override
public SandBox retrieveNamedSandBox(Site site, SandBoxType sandboxType, String sandboxName) {
Query query = sandBoxEntityManager.createNamedQuery("BC_READ_SANDBOX_BY_TYPE_AND_NAME");
//query.setParameter("site", site);
query.setParameter("sandboxType", sandboxType.getType());
query.setParameter("sandboxName", sandboxName);
SandBox response = null;
try {
response = (SandBox) query.getSingleResult();
} catch (NoResultException e) {
//do nothing - there is no sandbox
}
return response;
}
@Override
public SandBox persist(SandBox entity) {
sandBoxEntityManager.persist(entity);
sandBoxEntityManager.flush();
return entity;
}
public SandBox createSandBox(Site site, String sandBoxName, SandBoxType sandBoxType) {
TransactionStatus status = TransactionUtils.createTransaction("createSandBox",
TransactionDefinition.PROPAGATION_REQUIRES_NEW, transactionManager);
try {
SandBox approvalSandbox = retrieveNamedSandBox(site, sandBoxType, sandBoxName);
if (approvalSandbox == null) {
approvalSandbox = new SandBoxImpl();
approvalSandbox.setSite(site);
approvalSandbox.setName(sandBoxName);
approvalSandbox.setSandBoxType(sandBoxType);
approvalSandbox = persist(approvalSandbox);
}
TransactionUtils.finalizeTransaction(status, transactionManager, false);
return approvalSandbox;
} catch (Exception ex) {
TransactionUtils.finalizeTransaction(status, transactionManager, true);
throw new RuntimeException(ex);
}
}
} | 0 (true)
| common_src_main_java_org_broadleafcommerce_common_sandbox_dao_SandBoxDaoImpl.java |
343 | dbPool = new ODatabasePoolAbstract<DB>(this, iMinSize, iMaxSize, idleTimeout, timeBetweenEvictionRunsMillis) {
public void onShutdown() {
if (owner instanceof ODatabasePoolBase<?>)
((ODatabasePoolBase<?>) owner).close();
}
public DB createNewResource(final String iDatabaseName, final Object... iAdditionalArgs) {
if (iAdditionalArgs.length < 2)
throw new OSecurityAccessException("Username and/or password missed");
return createResource(owner, iDatabaseName, iAdditionalArgs);
}
public boolean reuseResource(final String iKey, final Object[] iAdditionalArgs, final DB iValue) {
if (((ODatabasePooled) iValue).isUnderlyingOpen()) {
((ODatabasePooled) iValue).reuse(owner, iAdditionalArgs);
if (iValue.getStorage().isClosed())
// STORAGE HAS BEEN CLOSED: REOPEN IT
iValue.getStorage().open((String) iAdditionalArgs[0], (String) iAdditionalArgs[1], null);
else if (!((ODatabaseComplex<?>) iValue).getUser().checkPassword((String) iAdditionalArgs[1]))
throw new OSecurityAccessException(iValue.getName(), "User or password not valid for database: '"
+ iValue.getName() + "'");
return true;
}
return false;
}
}; | 0 (true)
| core_src_main_java_com_orientechnologies_orient_core_db_ODatabasePoolBase.java |
runnable = new Runnable() { public void run() { map.isLocked(null); } }; | 0 (true)
| hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java |
1,268 | return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<Response>>() {
@Override
public ActionFuture<Response> doWithNode(DiscoveryNode node) throws ElasticsearchException {
return proxy.execute(node, request);
}
}); | 1 (no label)
| src_main_java_org_elasticsearch_client_transport_support_InternalTransportClient.java |
182 | public class BroadleafPageController extends BroadleafAbstractController implements Controller {
protected static String MODEL_ATTRIBUTE_NAME="page";
@Override
public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception {
ModelAndView model = new ModelAndView();
PageDTO page = (PageDTO) request.getAttribute(PageHandlerMapping.PAGE_ATTRIBUTE_NAME);
assert page != null;
model.addObject(MODEL_ATTRIBUTE_NAME, page);
model.setViewName(page.getTemplatePath());
return model;
}
} | 0 (true)
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_web_controller_BroadleafPageController.java |
1,157 | public class OSQLMethodRight extends OAbstractSQLMethod {
public static final String NAME = "right";
public OSQLMethodRight() {
super(NAME, 1);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
final int offset = Integer.parseInt(iMethodParams[0].toString());
ioResult = ioResult != null ? ioResult.toString().substring(
offset < ioResult.toString().length() ? ioResult.toString().length() - offset : 0) : null;
return ioResult;
}
} | 0 (true)
| core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodRight.java |
703 | constructors[TXN_LIST_SIZE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnListSizeRequest();
}
}; | 0 (true)
| hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java |
1,450 | public class Timestamp implements DataSerializable {
private Object key;
private long timestamp;
public Timestamp() {
}
public Timestamp(final Object key, final long timestamp) {
this.key = key;
this.timestamp = timestamp;
}
public Object getKey() {
return key;
}
public long getTimestamp() {
return timestamp;
}
public void writeData(final ObjectDataOutput out) throws IOException {
out.writeObject(key);
out.writeLong(timestamp);
}
public void readData(final ObjectDataInput in) throws IOException {
key = in.readObject();
timestamp = in.readLong();
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Timestamp");
sb.append("{key=").append(key);
sb.append(", timestamp=").append(timestamp);
sb.append('}');
return sb.toString();
}
} | 0 (true)
| hazelcast-hibernate_hazelcast-hibernate4_src_main_java_com_hazelcast_hibernate_local_Timestamp.java |
316 | public class ConfigMapUpgradeConfigurationTestIT
{
@Test
public void shouldNotAllowAutomaticUpgradeIfConfigParameterIsMissing()
{
Config config = defaultConfig();
assertFalse( config.get( GraphDatabaseSettings.allow_store_upgrade ) );
try
{
new ConfigMapUpgradeConfiguration( config ).checkConfigurationAllowsAutomaticUpgrade();
fail( "Should throw exception" );
} catch ( UpgradeNotAllowedByConfigurationException e )
{
// expected
}
}
@Test
public void shouldNotAllowAutomaticUpgradeIfConfigParameterIsFalse()
{
Config config = defaultConfig( stringMap( GraphDatabaseSettings.allow_store_upgrade.name(), "false" ) );
try
{
new ConfigMapUpgradeConfiguration( config ).checkConfigurationAllowsAutomaticUpgrade();
fail( "Should throw exception" );
} catch ( UpgradeNotAllowedByConfigurationException e )
{
// expected
}
}
@Test
public void shouldAllowAutomaticUpgradeIfConfigParameterIsTrue()
{
Config config = defaultConfig( stringMap( GraphDatabaseSettings.allow_store_upgrade.name(), "true" ) );
// no exception expected: the configuration explicitly allows the upgrade
new ConfigMapUpgradeConfiguration( config ).checkConfigurationAllowsAutomaticUpgrade();
}
} | 0 (true)
| community_kernel_src_test_java_org_neo4j_kernel_impl_storemigration_ConfigMapUpgradeConfigurationTestIT.java |
485 | executor.execute(new Runnable() {
public void run() {
try {
callback.onResponse(serializationService.toObject(resolveResponse()));
} catch (Throwable t) {
callback.onFailure(t);
}
}
}); | 1 (no label)
| hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientCallFuture.java |
1,227 | @Repository("blShippingRatesDao")
@Deprecated
public class ShippingRateDaoImpl implements ShippingRateDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public ShippingRate save(ShippingRate shippingRate) {
return em.merge(shippingRate);
}
@Override
public ShippingRate readShippingRateById(Long id) {
return em.find(ShippingRateImpl.class, id);
}
@Override
@SuppressWarnings("unchecked")
public ShippingRate readShippingRateByFeeTypesUnityQty(String feeType, String feeSubType, BigDecimal unitQuantity) {
Query query = em.createNamedQuery("BC_READ_FIRST_SHIPPING_RATE_BY_FEE_TYPES");
query.setParameter("feeType", feeType);
query.setParameter("feeSubType", feeSubType);
query.setParameter("bandUnitQuantity", unitQuantity);
List<ShippingRate> returnedRates = query.getResultList();
if (returnedRates.size() > 0) {
return returnedRates.get(0);
} else {
return null;
}
}
@Override
public ShippingRate create() {
return (ShippingRate) entityConfiguration.createEntityInstance(ShippingRate.class.getName());
}
} | 0 (true)
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_dao_ShippingRateDaoImpl.java |
642 | public static class IndexShardStatusRequest extends BroadcastShardOperationRequest {
boolean recovery;
boolean snapshot;
IndexShardStatusRequest() {
}
IndexShardStatusRequest(String index, int shardId, IndicesStatusRequest request) {
super(index, shardId, request);
recovery = request.recovery();
snapshot = request.snapshot();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
recovery = in.readBoolean();
snapshot = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(recovery);
out.writeBoolean(snapshot);
}
} | 0 (true)
| src_main_java_org_elasticsearch_action_admin_indices_status_TransportIndicesStatusAction.java |
928 | public interface OfferTimeZoneProcessor {
public TimeZone getTimeZone(Offer offer);
} | 0 (true)
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_processor_OfferTimeZoneProcessor.java |
6,302 | public class MockDirectoryHelper {
public static final String RANDOM_IO_EXCEPTION_RATE = "index.store.mock.random.io_exception_rate";
public static final String RANDOM_IO_EXCEPTION_RATE_ON_OPEN = "index.store.mock.random.io_exception_rate_on_open";
public static final String RANDOM_THROTTLE = "index.store.mock.random.throttle";
public static final String CHECK_INDEX_ON_CLOSE = "index.store.mock.check_index_on_close";
public static final String RANDOM_PREVENT_DOUBLE_WRITE = "index.store.mock.random.prevent_double_write";
public static final String RANDOM_NO_DELETE_OPEN_FILE = "index.store.mock.random.no_delete_open_file";
public static final String RANDOM_FAIL_ON_CLOSE= "index.store.mock.random.fail_on_close";
public static final Set<ElasticsearchMockDirectoryWrapper> wrappers = ConcurrentCollections.newConcurrentSet();
private final Random random;
private final double randomIOExceptionRate;
private final double randomIOExceptionRateOnOpen;
private final Throttling throttle;
private final boolean checkIndexOnClose;
private final Settings indexSettings;
private final ShardId shardId;
private final boolean preventDoubleWrite;
private final boolean noDeleteOpenFile;
private final ESLogger logger;
private final boolean failOnClose;
public MockDirectoryHelper(ShardId shardId, Settings indexSettings, ESLogger logger) {
final long seed = indexSettings.getAsLong(ElasticsearchIntegrationTest.INDEX_SEED_SETTING, 0l);
random = new Random(seed);
randomIOExceptionRate = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE, 0.0d);
randomIOExceptionRateOnOpen = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0.0d);
preventDoubleWrite = indexSettings.getAsBoolean(RANDOM_PREVENT_DOUBLE_WRITE, true); // true is default in MDW
noDeleteOpenFile = indexSettings.getAsBoolean(RANDOM_NO_DELETE_OPEN_FILE, random.nextBoolean()); // true is default in MDW
random.nextInt(shardId.getId() + 1); // some randomness per shard
throttle = Throttling.valueOf(indexSettings.get(RANDOM_THROTTLE, random.nextDouble() < 0.1 ? "SOMETIMES" : "NEVER"));
checkIndexOnClose = indexSettings.getAsBoolean(CHECK_INDEX_ON_CLOSE, false);// we can't do this by default since it might close the index input that we still read from in a pending fetch phase.
failOnClose = indexSettings.getAsBoolean(RANDOM_FAIL_ON_CLOSE, false);
if (logger.isDebugEnabled()) {
logger.debug("Using MockDirWrapper with seed [{}] throttle: [{}] checkIndexOnClose: [{}]", SeedUtils.formatSeed(seed),
throttle, checkIndexOnClose);
}
this.indexSettings = indexSettings;
this.shardId = shardId;
this.logger = logger;
}
public Directory wrap(Directory dir) {
final ElasticsearchMockDirectoryWrapper w = new ElasticsearchMockDirectoryWrapper(random, dir, logger, failOnClose);
w.setRandomIOExceptionRate(randomIOExceptionRate);
w.setRandomIOExceptionRateOnOpen(randomIOExceptionRateOnOpen);
w.setThrottling(throttle);
w.setCheckIndexOnClose(checkIndexOnClose);
w.setPreventDoubleWrite(preventDoubleWrite);
w.setNoDeleteOpenFile(noDeleteOpenFile);
wrappers.add(w);
return w;
}
public Directory[] wrapAllInplace(Directory[] dirs) {
for (int i = 0; i < dirs.length; i++) {
dirs[i] = wrap(dirs[i]);
}
return dirs;
}
public FsDirectoryService randomDirectorService(IndexStore indexStore) {
if ((Constants.WINDOWS || Constants.SUN_OS) && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) {
return new MmapFsDirectoryService(shardId, indexSettings, indexStore);
} else if (Constants.WINDOWS) {
return new SimpleFsDirectoryService(shardId, indexSettings, indexStore);
}
switch (random.nextInt(3)) {
case 1:
return new MmapFsDirectoryService(shardId, indexSettings, indexStore);
case 0:
return new SimpleFsDirectoryService(shardId, indexSettings, indexStore);
default:
return new NioFsDirectoryService(shardId, indexSettings, indexStore);
}
}
public DirectoryService randomRamDirectoryService() {
return new RamDirectoryService(shardId, indexSettings);
}
public static final class ElasticsearchMockDirectoryWrapper extends MockDirectoryWrapper {
private final ESLogger logger;
private final boolean failOnClose;
public ElasticsearchMockDirectoryWrapper(Random random, Directory delegate, ESLogger logger, boolean failOnClose) {
super(random, delegate);
this.logger = logger;
this.failOnClose = failOnClose;
}
@Override
public void close() throws IOException {
try {
super.close();
} catch (RuntimeException ex) {
if (failOnClose) {
throw ex;
}
// we catch the exception on close to properly close shards even if there are open files
// the test framework will call closeWithRuntimeException after the test exits to fail
// on unclosed files.
logger.debug("MockDirectoryWrapper#close() threw exception", ex);
}
}
public void closeWithRuntimeException() throws IOException {
super.close(); // force fail if open files etc. called in tear down of ElasticsearchIntegrationTest
}
}
} | 1 (no label)
| src_test_java_org_elasticsearch_test_store_MockDirectoryHelper.java |
1,619 | public final class ScriptEngineManagerContext {
private static volatile ScriptEngineManager scriptEngineManager = new ScriptEngineManager();
//we don't want instances.
private ScriptEngineManagerContext() {
}
public static ScriptEngineManager getScriptEngineManager() {
return scriptEngineManager;
}
public static void setScriptEngineManager(ScriptEngineManager scriptEngineManager) {
if (scriptEngineManager == null) {
throw new NullPointerException("ScriptEngineManager is required!");
}
ScriptEngineManagerContext.scriptEngineManager = scriptEngineManager;
}
} | 0 (true)
| hazelcast_src_main_java_com_hazelcast_management_ScriptEngineManagerContext.java |
807 | public class GetAndAddRequest extends AtomicLongRequest {
public GetAndAddRequest() {
}
public GetAndAddRequest(String name, long delta) {
super(name, delta);
}
@Override
protected Operation prepareOperation() {
return new GetAndAddOperation(name, delta);
}
@Override
public int getClassId() {
return AtomicLongPortableHook.GET_AND_ADD;
}
@Override
public Permission getRequiredPermission() {
if (name.startsWith(IdGeneratorService.ATOMIC_LONG_NAME)) {
return null;
}
if (delta == 0) {
return new AtomicLongPermission(name, ActionConstants.ACTION_READ);
}
return new AtomicLongPermission(name, ActionConstants.ACTION_MODIFY);
}
} | 0 (true)
| hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_client_GetAndAddRequest.java |
3,134 | awaitBusy(new Predicate<Object>() {
public boolean apply(Object o) {
IndicesStatsResponse stats = client().admin().indices().prepareStats().setSegments(true).get();
long segmentsMemoryWithBloom = stats.getTotal().getSegments().getMemoryInBytes();
logger.info("trying segments without bloom: {}", segmentsMemoryWithoutBloom);
return segmentsMemoryWithoutBloom == (segmentsMemoryWithBloom - BloomFilter.Factory.DEFAULT.createFilter(1).getSizeInBytes());
}
}); | 0 (true)
| src_test_java_org_elasticsearch_index_engine_internal_InternalEngineIntegrationTest.java |
1,928 | public interface ConstantBindingBuilder {
/**
* Binds constant to the given value.
*/
void to(String value);
/**
* Binds constant to the given value.
*/
void to(int value);
/**
* Binds constant to the given value.
*/
void to(long value);
/**
* Binds constant to the given value.
*/
void to(boolean value);
/**
* Binds constant to the given value.
*/
void to(double value);
/**
* Binds constant to the given value.
*/
void to(float value);
/**
* Binds constant to the given value.
*/
void to(short value);
/**
* Binds constant to the given value.
*/
void to(char value);
/**
* Binds constant to the given value.
*/
void to(Class<?> value);
/**
* Binds constant to the given value.
*/
<E extends Enum<E>> void to(E value);
} | 0 (true)
| src_main_java_org_elasticsearch_common_inject_binder_ConstantBindingBuilder.java |
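In Guice-style modules this builder is reached through bindConstant(), and the binding is completed with an annotation plus one of the to(...) overloads above. A minimal module sketch; the import paths assume Elasticsearch's vendored Guice packages, and the @Named key is made up:

```java
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.name.Names;

// Module sketch: the constant becomes injectable as @Named("pool.size") int.
public class PoolModule extends AbstractModule {
    @Override
    protected void configure() {
        bindConstant().annotatedWith(Names.named("pool.size")).to(8);
    }
}
```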
872 | public class CandidatePromotionItems {
protected HashMap<OfferItemCriteria, List<PromotableOrderItem>> candidateQualifiersMap = new HashMap<OfferItemCriteria, List<PromotableOrderItem>>();
protected HashMap<OfferItemCriteria, List<PromotableOrderItem>> candidateTargetsMap = new HashMap<OfferItemCriteria, List<PromotableOrderItem>>();
protected boolean isMatchedQualifier = false;
protected boolean isMatchedTarget = false;
public void addQualifier(OfferItemCriteria criteria, PromotableOrderItem item) {
List<PromotableOrderItem> itemList = candidateQualifiersMap.get(criteria);
if (itemList == null) {
itemList = new ArrayList<PromotableOrderItem>();
candidateQualifiersMap.put(criteria, itemList);
}
itemList.add(item);
}
public void addTarget(OfferItemCriteria criteria, PromotableOrderItem item) {
List<PromotableOrderItem> itemList = candidateTargetsMap.get(criteria);
if (itemList == null) {
itemList = new ArrayList<PromotableOrderItem>();
candidateTargetsMap.put(criteria, itemList);
}
itemList.add(item);
}
public boolean isMatchedQualifier() {
return isMatchedQualifier;
}
public void setMatchedQualifier(boolean isMatchedCandidate) {
this.isMatchedQualifier = isMatchedCandidate;
}
public HashMap<OfferItemCriteria, List<PromotableOrderItem>> getCandidateQualifiersMap() {
return candidateQualifiersMap;
}
public HashMap<OfferItemCriteria, List<PromotableOrderItem>> getCandidateTargetsMap() {
return candidateTargetsMap;
}
public boolean isMatchedTarget() {
return isMatchedTarget;
}
public void setMatchedTarget(boolean isMatchedCandidate) {
this.isMatchedTarget = isMatchedCandidate;
}
public Set<PromotableOrderItem> getAllCandidateTargets() {
Set<PromotableOrderItem> promotableOrderItemSet = new HashSet<PromotableOrderItem>();
for (List<PromotableOrderItem> orderItems : getCandidateTargetsMap().values()) {
promotableOrderItemSet.addAll(orderItems);
}
return promotableOrderItemSet;
}
} | 0 (true)
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_discount_CandidatePromotionItems.java |
296 | public class OTraverseContext extends OBasicCommandContext {
private Set<ORID> history = new HashSet<ORID>();
private List<OTraverseAbstractProcess<?>> stack = new ArrayList<OTraverseAbstractProcess<?>>();
private int depth = -1;
public void push(final OTraverseAbstractProcess<?> iProcess) {
stack.add(iProcess);
}
public Map<String, Object> getVariables() {
final HashMap<String, Object> map = new HashMap<String, Object>();
map.put("depth", depth);
map.put("path", getPath());
map.put("stack", stack);
// DELEGATE
map.putAll(super.getVariables());
return map;
}
public Object getVariable(final String iName) {
final String name = iName.trim().toUpperCase();
if ("DEPTH".startsWith(name))
return depth;
else if (name.startsWith("PATH"))
return ODocumentHelper.getFieldValue(getPath(), iName.substring("PATH".length()));
else if (name.startsWith("STACK"))
return ODocumentHelper.getFieldValue(stack, iName.substring("STACK".length()));
else if (name.startsWith("HISTORY"))
return ODocumentHelper.getFieldValue(history, iName.substring("HISTORY".length()));
else
// DELEGATE
return super.getVariable(iName);
}
public OTraverseAbstractProcess<?> pop() {
if (stack.isEmpty())
throw new IllegalStateException("Traverse stack is empty");
return stack.remove(stack.size() - 1);
}
public OTraverseAbstractProcess<?> peek() {
return stack.isEmpty() ? null : stack.get(stack.size() - 1);
}
public OTraverseAbstractProcess<?> peek(final int iFromLast) {
return stack.size() + iFromLast < 0 ? null : stack.get(stack.size() + iFromLast);
}
public void reset() {
stack.clear();
}
public boolean isAlreadyTraversed(final OIdentifiable identity) {
return history.contains(identity.getIdentity());
}
public void addTraversed(final OIdentifiable identity) {
history.add(identity.getIdentity());
}
public int incrementDepth() {
return ++depth;
}
public int decrementDepth() {
return --depth;
}
public String getPath() {
final StringBuilder buffer = new StringBuilder();
for (OTraverseAbstractProcess<?> process : stack) {
final String status = process.getStatus();
if (status != null) {
if (buffer.length() > 0 && !status.startsWith("["))
buffer.append('.');
buffer.append(status);
}
}
return buffer.toString();
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_command_traverse_OTraverseContext.java |
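A hedged sketch of driving the context above, assuming `myProcess` is some concrete OTraverseAbstractProcess<?> and `record` any OIdentifiable; it only exercises the stack, depth, and history bookkeeping shown in the record:
OTraverseContext ctx = new OTraverseContext();
ctx.incrementDepth();
ctx.push(myProcess);
if (!ctx.isAlreadyTraversed(record)) {
    ctx.addTraversed(record);
}
Object depth = ctx.getVariable("D"); // prefix lookup resolves to the depth counter
ctx.pop();
ctx.decrementDepth();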
97 | public static final class Point {
private final float longitude;
private final float latitude;
/**
* Constructs a point with the given latitude and longitude
* @param latitude Between -90 and 90 degrees
* @param longitude Between -180 and 180 degrees
*/
Point(float latitude, float longitude) {
this.longitude = longitude;
this.latitude = latitude;
}
/**
* Longitude of this point
* @return the longitude, in degrees
*/
public float getLongitude() {
return longitude;
}
/**
* Latitude of this point
* @return the latitude, in degrees
*/
public float getLatitude() {
return latitude;
}
private com.spatial4j.core.shape.Point getSpatial4jPoint() {
return CTX.makePoint(longitude,latitude);
}
/**
* Returns the distance to another point in kilometers
*
* @param other Point
* @return the great-circle distance, in kilometers
*/
public double distance(Point other) {
return DistanceUtils.degrees2Dist(CTX.getDistCalc().distance(getSpatial4jPoint(),other.getSpatial4jPoint()),DistanceUtils.EARTH_MEAN_RADIUS_KM);
}
@Override
public String toString() {
return "["+latitude+","+longitude+"]";
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(latitude).append(longitude).toHashCode();
}
@Override
public boolean equals(Object other) {
if (this==other) return true;
else if (other==null) return false;
else if (!getClass().isInstance(other)) return false;
Point oth = (Point)other;
return latitude==oth.latitude && longitude==oth.longitude;
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Geoshape.java |
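A small sketch of the distance computation above; the coordinates are arbitrary sample values, and since the constructor is package-private this would normally run from within Geoshape itself:
Geoshape.Point berlin = new Geoshape.Point(52.52f, 13.40f);
Geoshape.Point paris = new Geoshape.Point(48.86f, 2.35f);
double km = berlin.distance(paris); // great-circle distance in kilometers
System.out.println(berlin + " -> " + paris + ": " + km + " km");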
71 | public class AuthenticationException extends HazelcastException {
public AuthenticationException() {
super("Wrong group name and password.");
}
public AuthenticationException(String message) {
super(message);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_AuthenticationException.java |
393 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientMultiMapTest {
static HazelcastInstance server;
static HazelcastInstance client;
@BeforeClass
public static void init() {
server = Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
}
@AfterClass
public static void destroy() {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testPut() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
assertTrue(mm.put(key, 1));
}
@Test(expected = HazelcastSerializationException.class)
public void testPut_withNullValue() {
Object key ="key";
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.put(key, null));
}
@Test(expected = NullPointerException.class)
public void testPut_withNullKey() {
Object value ="value";
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.put(null, value));
}
@Test
public void testPutMultiValuesToKey() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
mm.put(key, 1);
assertTrue(mm.put(key, 2));
}
@Test
public void testPut_WithExistingKeyValue() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
assertTrue(mm.put(key, 1));
assertFalse(mm.put(key, 1));
}
@Test
public void testValueCount() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
mm.put(key, 1);
mm.put(key, 2);
assertEquals(2, mm.valueCount(key));
}
@Test
public void testValueCount_whenKeyNotThere() {
final Object key = "key1";
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(0, mm.valueCount("NOT_THERE"));
}
@Test
public void testSizeCount() {
final Object key1 = "key1";
final Object key2 = "key2";
final MultiMap mm = client.getMultiMap(randomString());
mm.put(key1, 1);
mm.put(key1, 2);
mm.put(key2, 1);
mm.put(key2, 2);
mm.put(key2, 2);
assertEquals(4, mm.size());
}
@Test
public void testEmptySizeCount() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(0, mm.size());
}
@Test
public void testGet_whenNotExist() {
final MultiMap mm = client.getMultiMap(randomString());
Collection coll = mm.get("NOT_THERE");
assertEquals(Collections.EMPTY_LIST, coll);
}
@Test
public void testGet() {
final Object key = "key";
final int maxItemsPerKey = 33;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
expected.add(i);
}
Collection resultSet = new TreeSet( mm.get(key) );
assertEquals(expected, resultSet);
}
@Test
public void testRemove_whenKeyNotExist() {
final MultiMap mm = client.getMultiMap(randomString());
Collection coll = mm.remove("NOT_THERE");
assertEquals(Collections.EMPTY_LIST, coll);
}
@Test
public void testRemoveKey() {
final Object key = "key";
final int maxItemsPerKey = 44;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
expected.add(i);
}
Set resultSet = new TreeSet( mm.remove(key) );
assertEquals(expected, resultSet);
assertEquals(0, mm.size());
}
@Test
public void testRemoveValue_whenValueNotExists() {
final Object key = "key";
final int maxItemsPerKey = 4;
final MultiMap mm = client.getMultiMap(randomString());
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
}
boolean result = mm.remove(key, "NOT_THERE");
assertFalse(result);
}
@Test
public void testRemoveKeyValue() {
final Object key = "key";
final int maxItemsPerKey = 4;
final MultiMap mm = client.getMultiMap(randomString());
for ( int i=0; i< maxItemsPerKey; i++ ){
mm.put(key, i);
}
for ( int i=0; i< maxItemsPerKey; i++ ){
boolean result = mm.remove(key, i);
assertTrue(result);
}
}
@Test(expected = UnsupportedOperationException.class)
public void testLocalKeySet() {
final MultiMap mm = client.getMultiMap(randomString());
mm.localKeySet();
}
@Test
public void testEmptyKeySet() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(Collections.EMPTY_SET, mm.keySet());
}
@Test
public void testKeySet() {
final int maxKeys = 23;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int key=0; key< maxKeys; key++ ){
mm.put(key, 1);
expected.add(key);
}
assertEquals(expected, mm.keySet());
}
@Test
public void testValues_whenEmptyCollection() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(Collections.EMPTY_LIST, mm.values());
}
@Test
public void testKeyValues() {
final int maxKeys = 31;
final int maxValues = 3;
final MultiMap mm = client.getMultiMap(randomString());
Set expected = new TreeSet();
for ( int key=0; key< maxKeys; key++ ){
for ( int val=0; val< maxValues; val++ ){
mm.put(key, val);
expected.add(val);
}
}
Set resultSet = new TreeSet( mm.values() );
assertEquals(expected, resultSet);
}
@Test
public void testEntrySet_whenEmpty() {
final MultiMap mm = client.getMultiMap(randomString());
assertEquals(Collections.EMPTY_SET, mm.entrySet());
}
@Test
public void testEntrySet() {
final int maxKeys = 14;
final int maxValues = 3;
final MultiMap mm = client.getMultiMap(randomString());
for ( int key=0; key< maxKeys; key++ ){
for ( int val=0; val< maxValues; val++ ){
mm.put(key, val);
}
}
assertEquals(maxKeys * maxValues, mm.entrySet().size());
}
@Test
public void testContainsKey_whenKeyExists() {
final MultiMap mm = client.getMultiMap(randomString());
mm.put("key1", "value1");
assertTrue(mm.containsKey("key1"));
}
@Test
public void testContainsKey_whenKeyNotExists() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsKey("NOT_THERE"));
}
@Test(expected = NullPointerException.class)
public void testContainsKey_whenKeyNull() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsKey(null));
}
@Test
public void testContainsValue_whenExists() {
final MultiMap mm = client.getMultiMap(randomString());
mm.put("key1", "value1");
assertTrue(mm.containsValue("value1"));
assertFalse(mm.containsValue("NOT_THERE"));
}
@Test
public void testContainsValue_whenNotExists() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsValue("NOT_THERE"));
}
@Test
public void testContainsValue_whenSearchValueNull() {
final MultiMap mm = client.getMultiMap(randomString());
assertFalse(mm.containsValue(null));
}
@Test
public void testContainsEntry() {
final MultiMap mm = client.getMultiMap(randomString());
mm.put("key1", "value1");
assertTrue(mm.containsEntry("key1", "value1"));
assertFalse(mm.containsEntry("key1", "NOT_THERE"));
assertFalse(mm.containsEntry("NOT_THERE", "NOT_THERE"));
assertFalse(mm.containsEntry("NOT_THERE", "value1"));
}
@Test(expected = UnsupportedOperationException.class)
public void testGetLocalMultiMapStats() {
final MultiMap mm = client.getMultiMap(randomString());
mm.getLocalMultiMapStats();
}
@Test
public void testClear() {
final MultiMap mm = client.getMultiMap(randomString());
final int maxKeys = 9;
final int maxValues = 3;
for ( int key=0; key< maxKeys; key++ ){
for ( int val=0; val< maxValues; val++ ){
mm.put(key, val);
}
}
mm.clear();
assertEquals(0, mm.size());
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapTest.java |
13 | result.add(new CompletionProposal(offset, "", null, funtext, funtext) {
@Override
public Point getSelection(IDocument document) {
return new Point(offset + text.indexOf("nothing"), 7);
}
@Override
public Image getImage() {
return CeylonResources.MINOR_CHANGE;
}
}); | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_CeylonCompletionProcessor.java |
2,804 | public static class TokenFiltersBindings {
private final Map<String, Class<? extends TokenFilterFactory>> tokenFilters = Maps.newHashMap();
public TokenFiltersBindings() {
}
public void processTokenFilter(String name, Class<? extends TokenFilterFactory> tokenFilterFactory) {
tokenFilters.put(name, tokenFilterFactory);
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_AnalysisModule.java |
2,793 | new AbstractModule() {
@Override
protected void configure() {
bind(ClusterService.class).toProvider(Providers.of((ClusterService) null));
bind(CircuitBreakerService.class).to(DummyCircuitBreakerService.class);
}
} | 0true
| src_test_java_org_elasticsearch_index_aliases_IndexAliasesServiceTests.java |
804 | private class ASyncAction {
final ActionListener<MultiPercolateResponse> finalListener;
final Map<ShardId, TransportShardMultiPercolateAction.Request> requestsByShard;
final List<Object> percolateRequests;
final Map<ShardId, IntArrayList> shardToSlots;
final AtomicInteger expectedOperations;
final AtomicArray<Object> reducedResponses;
final AtomicReferenceArray<AtomicInteger> expectedOperationsPerItem;
final AtomicReferenceArray<AtomicReferenceArray> responsesByItemAndShard;
ASyncAction(List<Object> percolateRequests, ActionListener<MultiPercolateResponse> finalListener, ClusterState clusterState) {
this.finalListener = finalListener;
this.percolateRequests = percolateRequests;
responsesByItemAndShard = new AtomicReferenceArray<AtomicReferenceArray>(percolateRequests.size());
expectedOperationsPerItem = new AtomicReferenceArray<AtomicInteger>(percolateRequests.size());
reducedResponses = new AtomicArray<Object>(percolateRequests.size());
// Resolving concrete indices and routing and grouping the requests by shard
requestsByShard = new HashMap<ShardId, TransportShardMultiPercolateAction.Request>();
// Keep track what slots belong to what shard, in case a request to a shard fails on all copies
shardToSlots = new HashMap<ShardId, IntArrayList>();
int expectedResults = 0;
for (int slot = 0; slot < percolateRequests.size(); slot++) {
Object element = percolateRequests.get(slot);
assert element != null;
if (element instanceof PercolateRequest) {
PercolateRequest percolateRequest = (PercolateRequest) element;
String[] concreteIndices;
try {
concreteIndices = clusterState.metaData().concreteIndices(percolateRequest.indices(), percolateRequest.indicesOptions());
} catch (IndexMissingException e) {
reducedResponses.set(slot, e);
responsesByItemAndShard.set(slot, new AtomicReferenceArray(0));
expectedOperationsPerItem.set(slot, new AtomicInteger(0));
continue;
}
Map<String, Set<String>> routing = clusterState.metaData().resolveSearchRouting(percolateRequest.routing(), percolateRequest.indices());
// TODO: I only need shardIds, ShardIterator(ShardRouting) is only needed in TransportShardMultiPercolateAction
GroupShardsIterator shards = clusterService.operationRouting().searchShards(
clusterState, percolateRequest.indices(), concreteIndices, routing, percolateRequest.preference()
);
if (shards.size() == 0) {
reducedResponses.set(slot, new UnavailableShardsException(null, "No shards available"));
responsesByItemAndShard.set(slot, new AtomicReferenceArray(0));
expectedOperationsPerItem.set(slot, new AtomicInteger(0));
continue;
}
responsesByItemAndShard.set(slot, new AtomicReferenceArray(shards.size()));
expectedOperationsPerItem.set(slot, new AtomicInteger(shards.size()));
for (ShardIterator shard : shards) {
ShardId shardId = shard.shardId();
TransportShardMultiPercolateAction.Request requests = requestsByShard.get(shardId);
if (requests == null) {
requestsByShard.put(shardId, requests = new TransportShardMultiPercolateAction.Request(shard.shardId().getIndex(), shardId.id(), percolateRequest.preference()));
}
logger.trace("Adding shard[{}] percolate request for item[{}]", shardId, slot);
requests.add(new TransportShardMultiPercolateAction.Request.Item(slot, new PercolateShardRequest(shardId, percolateRequest)));
IntArrayList items = shardToSlots.get(shardId);
if (items == null) {
shardToSlots.put(shardId, items = new IntArrayList());
}
items.add(slot);
}
expectedResults++;
} else if (element instanceof Throwable || element instanceof MultiGetResponse.Failure) {
logger.trace("item[{}] won't be executed, reason: {}", slot, element);
reducedResponses.set(slot, element);
responsesByItemAndShard.set(slot, new AtomicReferenceArray(0));
expectedOperationsPerItem.set(slot, new AtomicInteger(0));
}
}
expectedOperations = new AtomicInteger(expectedResults);
}
void run() {
if (expectedOperations.get() == 0) {
finish();
return;
}
logger.trace("mpercolate executing for shards {}", requestsByShard.keySet());
for (Map.Entry<ShardId, TransportShardMultiPercolateAction.Request> entry : requestsByShard.entrySet()) {
final ShardId shardId = entry.getKey();
TransportShardMultiPercolateAction.Request shardRequest = entry.getValue();
shardMultiPercolateAction.execute(shardRequest, new ActionListener<TransportShardMultiPercolateAction.Response>() {
@Override
public void onResponse(TransportShardMultiPercolateAction.Response response) {
onShardResponse(shardId, response);
}
@Override
public void onFailure(Throwable e) {
onShardFailure(shardId, e);
}
});
}
}
@SuppressWarnings("unchecked")
void onShardResponse(ShardId shardId, TransportShardMultiPercolateAction.Response response) {
logger.debug("{} Percolate shard response", shardId);
try {
for (TransportShardMultiPercolateAction.Response.Item item : response.items()) {
AtomicReferenceArray shardResults = responsesByItemAndShard.get(item.slot());
if (shardResults == null) {
assert false : "shardResults can't be null";
continue;
}
if (item.failed()) {
shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, item.error().string()));
} else {
shardResults.set(shardId.id(), item.response());
}
assert expectedOperationsPerItem.get(item.slot()).get() >= 1 : "slot[" + item.slot() + "] can't be lower than one";
if (expectedOperationsPerItem.get(item.slot()).decrementAndGet() == 0) {
// Failure won't bubble up, since we fail the whole request now via the catch clause below,
// so expectedOperationsPerItem will not be decremented twice.
reduce(item.slot());
}
}
} catch (Throwable e) {
logger.error("{} Percolate original reduce error", e, shardId);
finalListener.onFailure(e);
}
}
@SuppressWarnings("unchecked")
void onShardFailure(ShardId shardId, Throwable e) {
logger.debug("{} Shard multi percolate failure", e, shardId);
try {
IntArrayList slots = shardToSlots.get(shardId);
for (int i = 0; i < slots.size(); i++) {
int slot = slots.get(i);
AtomicReferenceArray shardResults = responsesByItemAndShard.get(slot);
if (shardResults == null) {
continue;
}
shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, e));
assert expectedOperationsPerItem.get(slot).get() >= 1 : "slot[" + slot + "] can't be lower than one. Caused by: " + e.getMessage();
if (expectedOperationsPerItem.get(slot).decrementAndGet() == 0) {
reduce(slot);
}
}
} catch (Throwable t) {
logger.error("{} Percolate original reduce error, original error {}", t, shardId, e);
finalListener.onFailure(t);
}
}
void reduce(int slot) {
AtomicReferenceArray shardResponses = responsesByItemAndShard.get(slot);
PercolateResponse reducedResponse = TransportPercolateAction.reduce((PercolateRequest) percolateRequests.get(slot), shardResponses, percolatorService);
reducedResponses.set(slot, reducedResponse);
assert expectedOperations.get() >= 1 : "slot[" + slot + "] expected options should be >= 1 but is " + expectedOperations.get();
if (expectedOperations.decrementAndGet() == 0) {
finish();
}
}
void finish() {
MultiPercolateResponse.Item[] finalResponse = new MultiPercolateResponse.Item[reducedResponses.length()];
for (int slot = 0; slot < reducedResponses.length(); slot++) {
Object element = reducedResponses.get(slot);
assert element != null : "Element[" + slot + "] shouldn't be null";
if (element instanceof PercolateResponse) {
finalResponse[slot] = new MultiPercolateResponse.Item((PercolateResponse) element);
} else if (element instanceof Throwable) {
finalResponse[slot] = new MultiPercolateResponse.Item(ExceptionsHelper.detailedMessage((Throwable) element));
} else if (element instanceof MultiGetResponse.Failure) {
finalResponse[slot] = new MultiPercolateResponse.Item(((MultiGetResponse.Failure)element).getMessage());
}
}
finalListener.onResponse(new MultiPercolateResponse(finalResponse));
}
} | 0true
| src_main_java_org_elasticsearch_action_percolate_TransportMultiPercolateAction.java |
1,786 | protected static class CoordinateNode implements ToXContent {
protected final Coordinate coordinate;
protected final List<CoordinateNode> children;
/**
* Creates a new leaf CoordinateNode
*
* @param coordinate
* Coordinate for the Node
*/
protected CoordinateNode(Coordinate coordinate) {
this.coordinate = coordinate;
this.children = null;
}
/**
* Creates a new parent CoordinateNode
*
* @param children
* Children of the Node
*/
protected CoordinateNode(List<CoordinateNode> children) {
this.children = children;
this.coordinate = null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (children == null) {
builder.startArray().value(coordinate.x).value(coordinate.y).endArray();
} else {
builder.startArray();
for (CoordinateNode child : children) {
child.toXContent(builder, params);
}
builder.endArray();
}
return builder;
}
} | 0true
| src_main_java_org_elasticsearch_common_geo_builders_ShapeBuilder.java |
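A sketch serializing the node type above; the protected constructors mean this would run from a ShapeBuilder subclass, and it assumes JTS Coordinate plus Elasticsearch's XContentFactory as used elsewhere in that file:
CoordinateNode leaf1 = new CoordinateNode(new Coordinate(1.0, 2.0));
CoordinateNode leaf2 = new CoordinateNode(new Coordinate(3.0, 4.0));
CoordinateNode line = new CoordinateNode(Arrays.asList(leaf1, leaf2));
XContentBuilder builder = XContentFactory.jsonBuilder();
line.toXContent(builder, ToXContent.EMPTY_PARAMS); // renders [[1.0,2.0],[3.0,4.0]]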
1,393 | public class DimensionWrapper extends BaseWrapper implements APIWrapper<Dimension>{
@XmlElement
protected BigDecimal width;
@XmlElement
protected BigDecimal height;
@XmlElement
protected BigDecimal depth;
@XmlElement
protected BigDecimal girth;
@XmlElement
protected String container;
@XmlElement
protected String size;
@XmlElement
protected String dimensionUnitOfMeasure;
@Override
public void wrapDetails(Dimension model, HttpServletRequest request) {
this.width = model.getWidth();
this.depth = model.getDepth();
this.height = model.getHeight();
this.girth = model.getGirth();
if (model.getDimensionUnitOfMeasure() != null) {
this.dimensionUnitOfMeasure = model.getDimensionUnitOfMeasure().getType();
}
if (model.getSize() != null) {
this.size = model.getSize().getType();
}
if (model.getContainer() != null) {
this.container = model.getContainer().getType();
}
}
@Override
public void wrapSummary(Dimension model, HttpServletRequest request) {
wrapDetails(model, request);
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_DimensionWrapper.java |
86 | nodeEngine.getExecutionService().schedule(new Runnable() {
public void run() {
if (connection.live()) {
try {
connection.close();
} catch (Throwable e) {
logger.warning("While closing client connection: " + e.toString());
}
}
}
}, DESTROY_ENDPOINT_DELAY_MS, TimeUnit.MILLISECONDS); | 1no label
| hazelcast_src_main_java_com_hazelcast_client_ClientEngineImpl.java |
6,132 | public class MockRepository extends FsRepository {
private final AtomicLong failureCounter = new AtomicLong();
public void resetFailureCount() {
failureCounter.set(0);
}
public long getFailureCount() {
return failureCounter.get();
}
private final double randomControlIOExceptionRate;
private final double randomDataFileIOExceptionRate;
private final long waitAfterUnblock;
private final MockBlobStore mockBlobStore;
private final String randomPrefix;
private volatile boolean blockOnControlFiles;
private volatile boolean blockOnDataFiles;
private volatile boolean blocked = false;
@Inject
public MockRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
super(name, repositorySettings, indexShardRepository);
randomControlIOExceptionRate = repositorySettings.settings().getAsDouble("random_control_io_exception_rate", 0.0);
randomDataFileIOExceptionRate = repositorySettings.settings().getAsDouble("random_data_file_io_exception_rate", 0.0);
blockOnControlFiles = repositorySettings.settings().getAsBoolean("block_on_control", false);
blockOnDataFiles = repositorySettings.settings().getAsBoolean("block_on_data", false);
randomPrefix = repositorySettings.settings().get("random");
waitAfterUnblock = repositorySettings.settings().getAsLong("wait_after_unblock", 0L);
logger.info("starting mock repository with random prefix " + randomPrefix);
mockBlobStore = new MockBlobStore(super.blobStore());
}
private void addFailure() {
failureCounter.incrementAndGet();
}
@Override
protected void doStop() throws ElasticsearchException {
unblock();
super.doStop();
}
@Override
protected BlobStore blobStore() {
return mockBlobStore;
}
public boolean blocked() {
return mockBlobStore.blocked();
}
public void unblock() {
mockBlobStore.unblockExecution();
}
public void blockOnDataFiles(boolean blocked) {
blockOnDataFiles = blocked;
}
public void blockOnControlFiles(boolean blocked) {
blockOnControlFiles = blocked;
}
public class MockBlobStore extends BlobStoreWrapper {
ConcurrentMap<String, AtomicLong> accessCounts = new ConcurrentHashMap<String, AtomicLong>();
private long incrementAndGet(String path) {
AtomicLong value = accessCounts.get(path);
if (value == null) {
value = accessCounts.putIfAbsent(path, new AtomicLong(1));
}
if (value != null) {
return value.incrementAndGet();
}
return 1;
}
public MockBlobStore(BlobStore delegate) {
super(delegate);
}
@Override
public ImmutableBlobContainer immutableBlobContainer(BlobPath path) {
return new MockImmutableBlobContainer(super.immutableBlobContainer(path));
}
public synchronized void unblockExecution() {
if (blocked) {
blocked = false;
// Clean blocking flags, so we wouldn't try to block again
blockOnDataFiles = false;
blockOnControlFiles = false;
this.notifyAll();
}
}
public boolean blocked() {
return blocked;
}
private synchronized boolean blockExecution() {
boolean wasBlocked = false;
try {
while (blockOnDataFiles || blockOnControlFiles) {
blocked = true;
this.wait();
wasBlocked = true;
}
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
return wasBlocked;
}
private class MockImmutableBlobContainer extends ImmutableBlobContainerWrapper {
private MessageDigest digest;
private boolean shouldFail(String blobName, double probability) {
if (probability > 0.0) {
String path = path().add(blobName).buildAsString("/") + "/" + randomPrefix;
path += "/" + incrementAndGet(path);
logger.info("checking [{}] [{}]", path, Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability);
return Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability;
} else {
return false;
}
}
private int hashCode(String path) {
try {
digest = MessageDigest.getInstance("MD5");
byte[] bytes = digest.digest(path.getBytes("UTF-8"));
int i = 0;
return ((bytes[i++] & 0xFF) << 24) | ((bytes[i++] & 0xFF) << 16)
| ((bytes[i++] & 0xFF) << 8) | (bytes[i++] & 0xFF);
} catch (NoSuchAlgorithmException ex) {
throw new ElasticsearchException("cannot calculate hashcode", ex);
} catch (UnsupportedEncodingException ex) {
throw new ElasticsearchException("cannot calculate hashcode", ex);
}
}
private void maybeIOExceptionOrBlock(String blobName) throws IOException {
if (blobName.startsWith("__")) {
if (shouldFail(blobName, randomDataFileIOExceptionRate)) {
logger.info("throwing random IOException for file [{}] at path [{}]", blobName, path());
addFailure();
throw new IOException("Random IOException");
} else if (blockOnDataFiles) {
logger.info("blocking I/O operation for file [{}] at path [{}]", blobName, path());
if (blockExecution() && waitAfterUnblock > 0) {
try {
// Delay operation after unblocking
// So, we can start node shutdown while this operation is still running.
Thread.sleep(waitAfterUnblock);
} catch (InterruptedException ex) {
//
}
}
}
} else {
if (shouldFail(blobName, randomControlIOExceptionRate)) {
logger.info("throwing random IOException for file [{}] at path [{}]", blobName, path());
addFailure();
throw new IOException("Random IOException");
} else if (blockOnControlFiles) {
logger.info("blocking I/O operation for file [{}] at path [{}]", blobName, path());
if (blockExecution() && waitAfterUnblock > 0) {
try {
// Delay operation after unblocking
// So, we can start node shutdown while this operation is still running.
Thread.sleep(waitAfterUnblock);
} catch (InterruptedException ex) {
//
}
}
}
}
}
private void maybeIOExceptionOrBlock(String blobName, ImmutableBlobContainer.WriterListener listener) {
try {
maybeIOExceptionOrBlock(blobName);
} catch (IOException ex) {
listener.onFailure(ex);
}
}
private void maybeIOExceptionOrBlock(String blobName, ImmutableBlobContainer.ReadBlobListener listener) {
try {
maybeIOExceptionOrBlock(blobName);
} catch (IOException ex) {
listener.onFailure(ex);
}
}
public MockImmutableBlobContainer(ImmutableBlobContainer delegate) {
super(delegate);
}
@Override
public void writeBlob(String blobName, InputStream is, long sizeInBytes, WriterListener listener) {
maybeIOExceptionOrBlock(blobName, listener);
super.writeBlob(blobName, is, sizeInBytes, listener);
}
@Override
public void writeBlob(String blobName, InputStream is, long sizeInBytes) throws IOException {
maybeIOExceptionOrBlock(blobName);
super.writeBlob(blobName, is, sizeInBytes);
}
@Override
public boolean blobExists(String blobName) {
return super.blobExists(blobName);
}
@Override
public void readBlob(String blobName, ReadBlobListener listener) {
maybeIOExceptionOrBlock(blobName, listener);
super.readBlob(blobName, listener);
}
@Override
public byte[] readBlobFully(String blobName) throws IOException {
maybeIOExceptionOrBlock(blobName);
return super.readBlobFully(blobName);
}
@Override
public boolean deleteBlob(String blobName) throws IOException {
maybeIOExceptionOrBlock(blobName);
return super.deleteBlob(blobName);
}
@Override
public void deleteBlobsByPrefix(String blobNamePrefix) throws IOException {
maybeIOExceptionOrBlock(blobNamePrefix);
super.deleteBlobsByPrefix(blobNamePrefix);
}
@Override
public void deleteBlobsByFilter(BlobNameFilter filter) throws IOException {
maybeIOExceptionOrBlock("");
super.deleteBlobsByFilter(filter);
}
@Override
public ImmutableMap<String, BlobMetaData> listBlobs() throws IOException {
maybeIOExceptionOrBlock("");
return super.listBlobs();
}
@Override
public ImmutableMap<String, BlobMetaData> listBlobsByPrefix(String blobNamePrefix) throws IOException {
maybeIOExceptionOrBlock(blobNamePrefix);
return super.listBlobsByPrefix(blobNamePrefix);
}
}
}
} | 1no label
| src_test_java_org_elasticsearch_snapshots_mockstore_MockRepository.java |
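A hedged sketch of registering the mock repository above from a test through the standard admin client; the repository name, location, and rates are arbitrary examples:
client.admin().cluster().preparePutRepository("test-repo")
        .setType("mock")
        .setSettings(ImmutableSettings.settingsBuilder()
                .put("location", "/tmp/test-repo") // consumed by the FsRepository base class
                .put("random", "seed-prefix")
                .put("random_data_file_io_exception_rate", 0.1)
                .put("block_on_data", false)
                .build())
        .get();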
362 | public enum DIRECTION {
BOTH, IN, OUT
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_graph_OGraphDatabase.java |
408 | public class ClientAtomicLongProxy extends ClientProxy implements IAtomicLong {
private final String name;
private volatile Data key;
public ClientAtomicLongProxy(String instanceName, String serviceName, String objectId) {
super(instanceName, serviceName, objectId);
this.name = objectId;
}
@Override
public <R> R apply(IFunction<Long, R> function) {
isNotNull(function, "function");
return invoke(new ApplyRequest(name, toData(function)));
}
@Override
public void alter(IFunction<Long, Long> function) {
isNotNull(function, "function");
invoke(new AlterRequest(name, toData(function)));
}
@Override
public long alterAndGet(IFunction<Long, Long> function) {
isNotNull(function, "function");
return (Long) invoke(new AlterAndGetRequest(name, toData(function)));
}
@Override
public long getAndAlter(IFunction<Long, Long> function) {
isNotNull(function, "function");
return (Long) invoke(new GetAndAlterRequest(name, toData(function)));
}
@Override
public long addAndGet(long delta) {
AddAndGetRequest request = new AddAndGetRequest(name, delta);
Long result = invoke(request);
return result;
}
@Override
public boolean compareAndSet(long expect, long update) {
CompareAndSetRequest request = new CompareAndSetRequest(name, expect, update);
Boolean result = invoke(request);
return result;
}
@Override
public long decrementAndGet() {
return addAndGet(-1);
}
@Override
public long get() {
return getAndAdd(0);
}
@Override
public long getAndAdd(long delta) {
GetAndAddRequest request = new GetAndAddRequest(name, delta);
Long result = invoke(request);
return result;
}
@Override
public long getAndSet(long newValue) {
GetAndSetRequest request = new GetAndSetRequest(name, newValue);
Long result = invoke(request);
return result;
}
@Override
public long incrementAndGet() {
return addAndGet(1);
}
@Override
public long getAndIncrement() {
return getAndAdd(1);
}
@Override
public void set(long newValue) {
SetRequest request = new SetRequest(name, newValue);
invoke(request);
}
@Override
protected void onDestroy() {
}
protected <T> T invoke(ClientRequest req) {
return super.invoke(req, getKey());
}
private Data getKey() {
if (key == null) {
key = toData(name);
}
return key;
}
@Override
public String toString() {
return "IAtomicLong{" + "name='" + name + '\'' + '}';
}
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientAtomicLongProxy.java |
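A usage sketch for the proxy above, going through the public IAtomicLong interface exactly as a client application would; the instance name is illustrative:
HazelcastInstance client = HazelcastClient.newHazelcastClient();
IAtomicLong counter = client.getAtomicLong("orders-processed");
counter.set(10);
long value = counter.addAndGet(5); // 15
boolean reset = counter.compareAndSet(15, 0); // true if untouched meanwhile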
533 | public class OFetchHelper {
public static final String ROOT_FETCH = "*";
private static final boolean debug = false;
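// Parses a plan such as "*:-1 orders:1" into {"*"=-1, "orders"=1}; the root
// level ("*") defaults to 0 unless the plan string overrides it.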
public static Map<String, Integer> buildFetchPlan(final String iFetchPlan) {
final Map<String, Integer> fetchPlan = new HashMap<String, Integer>();
fetchPlan.put(ROOT_FETCH, 0);
if (iFetchPlan != null) {
// CHECK IF THERE IS SOME FETCH-DEPTH
final List<String> planParts = OStringSerializerHelper.split(iFetchPlan, ' ');
if (!planParts.isEmpty()) {
List<String> parts;
for (String planPart : planParts) {
parts = OStringSerializerHelper.split(planPart, ':');
if (parts.size() != 2)
throw new IllegalArgumentException("Wrong fetch plan: " + planPart);
fetchPlan.put(parts.get(0), Integer.parseInt(parts.get(1)));
}
}
}
return fetchPlan;
}
public static void fetch(final ORecordInternal<?> iRootRecord, final Object iUserObject, final Map<String, Integer> iFetchPlan,
final OFetchListener iListener, final OFetchContext iContext, final String iFormat) {
try {
if (iRootRecord instanceof ODocument) {
// SCHEMA AWARE
final ODocument record = (ODocument) iRootRecord;
final Map<ORID, Integer> parsedRecords = new HashMap<ORID, Integer>();
final boolean isEmbedded = record.isEmbedded() || !record.getIdentity().isPersistent();
if (!isEmbedded)
parsedRecords.put(iRootRecord.getIdentity(), 0);
if (!iFormat.contains("shallow"))
processRecordRidMap(record, iFetchPlan, 0, 0, -1, parsedRecords, "", iContext);
processRecord(record, iUserObject, iFetchPlan, 0, 0, -1, parsedRecords, "", iListener, iContext, iFormat);
}
} catch (Exception e) {
e.printStackTrace();
OLogManager.instance().error(null, "Fetching error on record %s", e, iRootRecord.getIdentity());
}
}
public static void checkFetchPlanValid(final String iFetchPlan) {
if (iFetchPlan != null && !iFetchPlan.isEmpty()) {
// CHECK IF THERE IS SOME FETCH-DEPTH
final List<String> planParts = OStringSerializerHelper.split(iFetchPlan, ' ');
if (!planParts.isEmpty()) {
for (String planPart : planParts) {
final List<String> parts = OStringSerializerHelper.split(planPart, ':');
if (parts.size() != 2) {
throw new IllegalArgumentException("Fetch plan '" + iFetchPlan + "' is invalid");
}
}
} else {
throw new IllegalArgumentException("Fetch plan '" + iFetchPlan + "' is invalid");
}
}
}
public static boolean isFetchPlanValid(final String iFetchPlan) {
if (iFetchPlan != null && !iFetchPlan.isEmpty()) {
// CHECK IF THERE IS SOME FETCH-DEPTH
final List<String> planParts = OStringSerializerHelper.split(iFetchPlan, ' ');
if (!planParts.isEmpty()) {
for (String planPart : planParts) {
final List<String> parts = OStringSerializerHelper.split(planPart, ':');
if (parts.size() != 2) {
return false;
}
}
} else {
return false;
}
}
return true;
}
private static int getDepthLevel(final Map<String, Integer> iFetchPlan, final String iFieldPath) {
if (debug) {
System.out.println(" ++++ getDepthLevel start");
System.out.println(" +++++ iFetchPlan: " + iFetchPlan);
System.out.println(" +++++ iFieldPath: " + iFieldPath);
}
Integer depthLevel = iFetchPlan.get(OFetchHelper.ROOT_FETCH);
if (debug)
System.out.println(" +++++ depthLevel (root_fetch): " + depthLevel);
for (String fieldFetchDefinition : iFetchPlan.keySet()) {
if (debug)
System.out.println(" .......... fieldFetchDefinition: " + fieldFetchDefinition);
if (iFieldPath.equals(fieldFetchDefinition)) {
// GET THE FETCH PLAN FOR THE GENERIC FIELD IF SPECIFIED
depthLevel = iFetchPlan.get(fieldFetchDefinition);
break;
} else if (fieldFetchDefinition.startsWith(iFieldPath)) {
// SETS THE FETCH LEVEL TO 1 (LOADS ALL DOCUMENT FIELDS)
depthLevel = 1;
break;
}
}
if (debug)
System.out.println(" ..... depthLevel: " + depthLevel);
return depthLevel.intValue();
}
public static void processRecordRidMap(final ORecordSchemaAware<?> record, Map<String, Integer> iFetchPlan,
final int iCurrentLevel, final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchContext iContext) throws IOException {
if (iFetchPlan == null)
return;
Object fieldValue;
for (String fieldName : record.fieldNames()) {
int depthLevel;
final String fieldPath = !iFieldPathFromRoot.isEmpty() ? iFieldPathFromRoot + "." + fieldName : fieldName;
depthLevel = getDepthLevel(iFetchPlan, fieldPath);
if (depthLevel == -2)
continue;
if (iFieldDepthLevel > -1)
depthLevel = iFieldDepthLevel;
fieldValue = record.field(fieldName);
if (fieldValue == null
|| !(fieldValue instanceof OIdentifiable)
&& (!(fieldValue instanceof ORecordLazyMultiValue) || !((ORecordLazyMultiValue) fieldValue).rawIterator().hasNext() || !(((ORecordLazyMultiValue) fieldValue)
.rawIterator().next() instanceof OIdentifiable))
&& (!(fieldValue instanceof Collection<?>) || ((Collection<?>) fieldValue).size() == 0 || !(((Collection<?>) fieldValue)
.iterator().next() instanceof OIdentifiable))
&& (!(fieldValue instanceof OMultiCollectionIterator<?>))
&& (!(fieldValue instanceof Map<?, ?>) || ((Map<?, ?>) fieldValue).size() == 0 || !(((Map<?, ?>) fieldValue).values()
.iterator().next() instanceof OIdentifiable))) {
continue;
} else {
try {
final boolean isEmbedded = isEmbedded(fieldValue);
if (!(isEmbedded && iContext.fetchEmbeddedDocuments()) && !iFetchPlan.containsKey(fieldPath)
&& depthLevel > -1 && iCurrentLevel >= depthLevel)
// MAX DEPTH REACHED: STOP TO FETCH THIS FIELD
continue;
final int nextLevel = isEmbedded ? iLevelFromRoot : iLevelFromRoot + 1;
fetchRidMap(record, iFetchPlan, fieldValue, fieldName, iCurrentLevel, nextLevel, iFieldDepthLevel, parsedRecords,
fieldPath, iContext);
} catch (Exception e) {
e.printStackTrace();
OLogManager.instance().error(null, "Fetching error on record %s", e, record.getIdentity());
}
}
}
}
private static void fetchRidMap(final ORecordSchemaAware<?> iRootRecord, final Map<String, Integer> iFetchPlan,
final Object fieldValue, final String fieldName, final int iCurrentLevel, final int iLevelFromRoot,
final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords, final String iFieldPathFromRoot,
final OFetchContext iContext) throws IOException {
if (fieldValue == null) {
return;
} else if (fieldValue instanceof ODocument) {
fetchDocumentRidMap(iFetchPlan, fieldValue, fieldName, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iContext);
} else if (fieldValue instanceof Iterable<?>) {
fetchCollectionRidMap(iFetchPlan, fieldValue, fieldName, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iContext);
} else if (fieldValue.getClass().isArray()) {
fetchArrayRidMap(iFetchPlan, fieldValue, fieldName, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iContext);
} else if (fieldValue instanceof Map<?, ?>) {
fetchMapRidMap(iFetchPlan, fieldValue, fieldName, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iContext);
}
}
private static void fetchDocumentRidMap(Map<String, Integer> iFetchPlan, Object fieldValue, String fieldName,
final int iCurrentLevel, final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchContext iContext) throws IOException {
updateRidMap(iFetchPlan, (ODocument) fieldValue, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iContext);
}
@SuppressWarnings("unchecked")
private static void fetchCollectionRidMap(final Map<String, Integer> iFetchPlan, final Object fieldValue, final String fieldName,
final int iCurrentLevel, final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchContext iContext) throws IOException {
final Iterable<OIdentifiable> linked = (Iterable<OIdentifiable>) fieldValue;
for (OIdentifiable d : linked) {
// GO RECURSIVELY
d = d.getRecord();
updateRidMap(iFetchPlan, (ODocument) d, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords, iFieldPathFromRoot,
iContext);
}
}
private static void fetchArrayRidMap(final Map<String, Integer> iFetchPlan, final Object fieldValue, final String fieldName,
final int iCurrentLevel, final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchContext iContext) throws IOException {
if (fieldValue instanceof ODocument[]) {
final ODocument[] linked = (ODocument[]) fieldValue;
for (ODocument d : linked)
// GO RECURSIVELY
updateRidMap(iFetchPlan, (ODocument) d, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords, iFieldPathFromRoot,
iContext);
}
}
@SuppressWarnings("unchecked")
private static void fetchMapRidMap(Map<String, Integer> iFetchPlan, Object fieldValue, String fieldName, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchContext iContext) throws IOException {
final Map<String, ODocument> linked = (Map<String, ODocument>) fieldValue;
for (ODocument d : (linked).values())
// GO RECURSIVELY
updateRidMap(iFetchPlan, (ODocument) d, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords, iFieldPathFromRoot,
iContext);
}
private static void updateRidMap(final Map<String, Integer> iFetchPlan, final ODocument fieldValue, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchContext iContext) throws IOException {
final Integer fetchedLevel = parsedRecords.get(fieldValue.getIdentity());
int currentLevel = iCurrentLevel + 1;
int fieldDepthLevel = iFieldDepthLevel;
if (iFetchPlan.containsKey(iFieldPathFromRoot)) {
currentLevel = 1;
fieldDepthLevel = iFetchPlan.get(iFieldPathFromRoot);
}
final boolean isEmbedded = isEmbedded(fieldValue);
if (isEmbedded || fetchedLevel == null) {
if (!isEmbedded)
parsedRecords.put(fieldValue.getIdentity(), iLevelFromRoot);
processRecordRidMap(fieldValue, iFetchPlan, currentLevel, iLevelFromRoot, fieldDepthLevel, parsedRecords, iFieldPathFromRoot,
iContext);
}
}
private static void processRecord(final ORecordSchemaAware<?> record, final Object iUserObject,
final Map<String, Integer> iFetchPlan, final int iCurrentLevel, final int iLevelFromRoot, final int iFieldDepthLevel,
final Map<ORID, Integer> parsedRecords, final String iFieldPathFromRoot, final OFetchListener iListener,
final OFetchContext iContext, final String iFormat) throws IOException {
Object fieldValue;
iContext.onBeforeFetch(record);
if (debug) {
System.out.println("processRecord start");
System.out.println("iFieldDepthLevel: " + iFieldDepthLevel);
System.out.println("record: " + record.toString());
System.out.println("iFetchPlan: " + iFetchPlan);
System.out.println("iCurrentLevel: " + iCurrentLevel);
System.out.println("iLevelFromRoot: " + iLevelFromRoot);
System.out.println("iCurrentLevel: " + iCurrentLevel);
System.out.println("parsedRecords: " + parsedRecords);
System.out.println("iFieldPathFromRoot: " + iFieldPathFromRoot);
}
for (String fieldName : record.fieldNames()) {
String fieldPath = !iFieldPathFromRoot.isEmpty() ? iFieldPathFromRoot + "." + fieldName : fieldName;
if (debug) {
System.out.println(" fieldName: " + fieldName);
System.out.println(" fieldPath: " + fieldPath);
}
int depthLevel;
depthLevel = getDepthLevel(iFetchPlan, fieldPath);
if (depthLevel == -2)
continue;
if (iFieldDepthLevel > -1)
depthLevel = iFieldDepthLevel;
if (debug)
System.out.println(" depthLevel: " + depthLevel);
fieldValue = record.field(fieldName);
boolean fetch = !iFormat.contains("shallow")
&& (!(fieldValue instanceof ODocument) || depthLevel == -1 || iCurrentLevel <= depthLevel || iFetchPlan
.containsKey(fieldPath));
final boolean isEmbedded = isEmbedded(fieldValue);
if (!fetch && isEmbedded && iContext.fetchEmbeddedDocuments())
// EMBEDDED, GO DEEPER
fetch = true;
if (iFormat.contains("shallow")
|| fieldValue == null
|| (!fetch && fieldValue instanceof OIdentifiable)
|| !(fieldValue instanceof OIdentifiable)
&& (!(fieldValue instanceof ORecordLazyMultiValue) || !((ORecordLazyMultiValue) fieldValue).rawIterator().hasNext() || !(((ORecordLazyMultiValue) fieldValue)
.rawIterator().next() instanceof OIdentifiable))
&& (!(OMultiValue.getFirstValue(fieldValue) instanceof OIdentifiable
|| OMultiValue.getFirstValue(OMultiValue.getFirstValue(fieldValue)) instanceof OIdentifiable || OMultiValue
.getFirstValue(OMultiValue.getFirstValue(OMultiValue.getFirstValue(fieldValue))) instanceof OIdentifiable))) {
iContext.onBeforeStandardField(fieldValue, fieldName, iUserObject);
iListener.processStandardField(record, fieldValue, fieldName, iContext, iUserObject, iFormat);
iContext.onAfterStandardField(fieldValue, fieldName, iUserObject);
} else {
try {
if (fetch) {
final int nextLevel = isEmbedded ? iLevelFromRoot : iLevelFromRoot + 1;
fetch(record, iUserObject, iFetchPlan, fieldValue, fieldName, iCurrentLevel, nextLevel, iFieldDepthLevel,
parsedRecords, depthLevel, fieldPath, iListener, iContext);
}
} catch (Exception e) {
e.printStackTrace();
OLogManager.instance().error(null, "Fetching error on record %s", e, record.getIdentity());
}
}
}
iContext.onAfterFetch(record);
}
public static boolean isEmbedded(Object fieldValue) {
boolean isEmbedded = fieldValue instanceof ODocument
&& (((ODocument) fieldValue).isEmbedded() || !((ODocument) fieldValue).getIdentity().isPersistent());
if (!isEmbedded) {
try {
final Object f = OMultiValue.getFirstValue(fieldValue);
isEmbedded = f != null
&& (f instanceof ODocument && (((ODocument) f).isEmbedded() || !((ODocument) f).getIdentity().isPersistent()));
} catch (Exception e) {
// IGNORE IT
}
}
return isEmbedded;
}
private static void fetch(final ORecordSchemaAware<?> iRootRecord, final Object iUserObject,
final Map<String, Integer> iFetchPlan, final Object fieldValue, final String fieldName, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords, final int depthLevel,
final String iFieldPathFromRoot, final OFetchListener iListener, final OFetchContext iContext) throws IOException {
int currentLevel = iCurrentLevel + 1;
int fieldDepthLevel = iFieldDepthLevel;
if (iFetchPlan.containsKey(iFieldPathFromRoot)) {
currentLevel = 0;
fieldDepthLevel = iFetchPlan.get(iFieldPathFromRoot);
}
if (fieldValue == null) {
iListener.processStandardField(iRootRecord, null, fieldName, iContext, iUserObject, "");
} else if (fieldValue instanceof OIdentifiable) {
if (fieldValue instanceof ODocument && ((ODocument) fieldValue).getClassName() != null
&& ((ODocument) fieldValue).getClassName().equals(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME)) {
fetchCollection(iRootRecord, iUserObject, iFetchPlan, fieldValue, fieldName, currentLevel, iLevelFromRoot, fieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
} else {
fetchDocument(iRootRecord, iUserObject, iFetchPlan, (OIdentifiable) fieldValue, fieldName, currentLevel, iLevelFromRoot,
fieldDepthLevel, parsedRecords, iFieldPathFromRoot, iListener, iContext);
}
} else if (fieldValue instanceof Collection<?>) {
fetchCollection(iRootRecord, iUserObject, iFetchPlan, fieldValue, fieldName, currentLevel, iLevelFromRoot, fieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
} else if (fieldValue.getClass().isArray()) {
fetchArray(iRootRecord, iUserObject, iFetchPlan, fieldValue, fieldName, currentLevel, iLevelFromRoot, fieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
} else if (fieldValue instanceof Map<?, ?>) {
fetchMap(iRootRecord, iUserObject, iFetchPlan, fieldValue, fieldName, currentLevel, iLevelFromRoot, fieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
}
}
@SuppressWarnings("unchecked")
private static void fetchMap(final ORecordSchemaAware<?> iRootRecord, final Object iUserObject,
final Map<String, Integer> iFetchPlan, Object fieldValue, String fieldName, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchListener iListener, final OFetchContext iContext) throws IOException {
final Map<String, ODocument> linked = (Map<String, ODocument>) fieldValue;
iContext.onBeforeMap(iRootRecord, fieldName, iUserObject);
for (Object key : linked.keySet()) {
final Object o = linked.get(key);
if (o instanceof OIdentifiable) {
final ORecordInternal<?> r = ((OIdentifiable) o).getRecord();
if (r != null) {
if (r instanceof ODocument) {
// GO RECURSIVELY
final ODocument d = (ODocument) r;
final Integer fieldDepthLevel = parsedRecords.get(d.getIdentity());
if (!d.getIdentity().isValid() || (fieldDepthLevel != null && fieldDepthLevel.intValue() == iLevelFromRoot)) {
removeParsedFromMap(parsedRecords, d);
iContext.onBeforeDocument(iRootRecord, d, key.toString(), iUserObject);
final Object userObject = iListener.fetchLinkedMapEntry(iRootRecord, iUserObject, fieldName, key.toString(), d,
iContext);
processRecord(d, userObject, iFetchPlan, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iListener, iContext, "");
iContext.onAfterDocument(iRootRecord, d, key.toString(), iUserObject);
} else {
iListener.parseLinked(iRootRecord, d, iUserObject, key.toString(), iContext);
}
} else
iListener.parseLinked(iRootRecord, r, iUserObject, key.toString(), iContext);
}
} else if (o instanceof Map) {
fetchMap(iRootRecord, iUserObject, iFetchPlan, o, key.toString(), iCurrentLevel + 1, iLevelFromRoot, iFieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
} else if (OMultiValue.isMultiValue(o)) {
fetchCollection(iRootRecord, iUserObject, iFetchPlan, o, key.toString(), iCurrentLevel + 1, iLevelFromRoot,
iFieldDepthLevel, parsedRecords, iFieldPathFromRoot, iListener, iContext);
} else
iListener.processStandardField(iRootRecord, o, key.toString(), iContext, iUserObject, "");
}
iContext.onAfterMap(iRootRecord, fieldName, iUserObject);
}
private static void fetchArray(final ORecordSchemaAware<?> iRootRecord, final Object iUserObject,
final Map<String, Integer> iFetchPlan, Object fieldValue, String fieldName, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchListener iListener, final OFetchContext iContext) throws IOException {
if (fieldValue instanceof ODocument[]) {
final ODocument[] linked = (ODocument[]) fieldValue;
iContext.onBeforeArray(iRootRecord, fieldName, iUserObject, linked);
for (ODocument d : linked) {
// GO RECURSIVELY
final Integer fieldDepthLevel = parsedRecords.get(d.getIdentity());
if (!d.getIdentity().isValid() || (fieldDepthLevel != null && fieldDepthLevel.intValue() == iLevelFromRoot)) {
removeParsedFromMap(parsedRecords, d);
iContext.onBeforeDocument(iRootRecord, d, fieldName, iUserObject);
final Object userObject = iListener.fetchLinked(iRootRecord, iUserObject, fieldName, d, iContext);
processRecord(d, userObject, iFetchPlan, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iListener, iContext, "");
iContext.onAfterDocument(iRootRecord, d, fieldName, iUserObject);
} else {
iListener.parseLinkedCollectionValue(iRootRecord, d, iUserObject, fieldName, iContext);
}
}
iContext.onAfterArray(iRootRecord, fieldName, iUserObject);
} else {
iListener.processStandardField(iRootRecord, fieldValue, fieldName, iContext, iUserObject, "");
}
}
@SuppressWarnings("unchecked")
private static void fetchCollection(final ORecordSchemaAware<?> iRootRecord, final Object iUserObject,
final Map<String, Integer> iFetchPlan, final Object fieldValue, final String fieldName, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchListener iListener, final OFetchContext iContext) throws IOException {
final Collection<?> linked;
if (fieldValue instanceof ODocument)
linked = new OMVRBTreeRIDSet().fromDocument((ODocument) fieldValue);
else if (fieldValue instanceof Collection<?>) {
linked = (Collection<OIdentifiable>) fieldValue;
iContext.onBeforeCollection(iRootRecord, fieldName, iUserObject, linked);
} else if (fieldValue instanceof Map<?, ?>) {
linked = (Collection<?>) ((Map<?, ?>) fieldValue).values();
iContext.onBeforeMap(iRootRecord, fieldName, iUserObject);
} else
throw new IllegalStateException("Unrecognized type: " + fieldValue.getClass());
final Iterator<?> iter;
if (linked instanceof ORecordLazyMultiValue)
iter = ((ORecordLazyMultiValue) linked).rawIterator();
else
iter = linked.iterator();
try {
while (iter.hasNext()) {
final Object o = iter.next();
if (o == null)
continue;
if (o instanceof OIdentifiable) {
OIdentifiable d = (OIdentifiable) o;
// GO RECURSIVELY
final Integer fieldDepthLevel = parsedRecords.get(d.getIdentity());
if (!d.getIdentity().isPersistent() || (fieldDepthLevel != null && fieldDepthLevel.intValue() == iLevelFromRoot)) {
removeParsedFromMap(parsedRecords, d);
d = d.getRecord();
if (!(d instanceof ODocument)) {
iListener.processStandardField(null, d, fieldName, iContext, iUserObject, "");
} else {
iContext.onBeforeDocument(iRootRecord, (ODocument) d, fieldName, iUserObject);
final Object userObject = iListener.fetchLinkedCollectionValue(iRootRecord, iUserObject, fieldName, (ODocument) d,
iContext);
processRecord((ODocument) d, userObject, iFetchPlan, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iListener, iContext, "");
iContext.onAfterDocument(iRootRecord, (ODocument) d, fieldName, iUserObject);
}
} else {
iListener.parseLinkedCollectionValue(iRootRecord, d, iUserObject, fieldName, iContext);
}
} else if (o instanceof Map<?, ?>) {
fetchMap(iRootRecord, iUserObject, iFetchPlan, o, null, iCurrentLevel + 1, iLevelFromRoot, iFieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
} else if (OMultiValue.isMultiValue(o)) {
fetchCollection(iRootRecord, iUserObject, iFetchPlan, o, null, iCurrentLevel + 1, iLevelFromRoot, iFieldDepthLevel,
parsedRecords, iFieldPathFromRoot, iListener, iContext);
}
}
} finally {
if (fieldValue instanceof Collection<?>)
iContext.onAfterCollection(iRootRecord, fieldName, iUserObject);
else if (fieldValue instanceof Map<?, ?>)
iContext.onAfterMap(iRootRecord, fieldName, iUserObject);
}
}
private static void fetchDocument(final ORecordSchemaAware<?> iRootRecord, final Object iUserObject,
final Map<String, Integer> iFetchPlan, final OIdentifiable fieldValue, final String fieldName, final int iCurrentLevel,
final int iLevelFromRoot, final int iFieldDepthLevel, final Map<ORID, Integer> parsedRecords,
final String iFieldPathFromRoot, final OFetchListener iListener, final OFetchContext iContext) throws IOException {
final Integer fieldDepthLevel = parsedRecords.get(fieldValue.getIdentity());
if (!fieldValue.getIdentity().isValid() || (fieldDepthLevel != null && fieldDepthLevel.intValue() == iLevelFromRoot)) {
removeParsedFromMap(parsedRecords, fieldValue);
final ODocument linked = (ODocument) fieldValue;
iContext.onBeforeDocument(iRootRecord, linked, fieldName, iUserObject);
Object userObject = iListener.fetchLinked(iRootRecord, iUserObject, fieldName, linked, iContext);
processRecord(linked, userObject, iFetchPlan, iCurrentLevel, iLevelFromRoot, iFieldDepthLevel, parsedRecords,
iFieldPathFromRoot, iListener, iContext, "");
iContext.onAfterDocument(iRootRecord, linked, fieldName, iUserObject);
} else {
iListener.parseLinked(iRootRecord, fieldValue, iUserObject, fieldName, iContext);
}
}
protected static void removeParsedFromMap(final Map<ORID, Integer> parsedRecords, OIdentifiable d) {
parsedRecords.remove(d.getIdentity());
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_fetch_OFetchHelper.java |
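A minimal sketch exercising the plan-parsing entry points of the record above; the plan string is an example only:
OFetchHelper.checkFetchPlanValid("*:-1 orders:2"); // throws IllegalArgumentException on malformed input
Map<String, Integer> plan = OFetchHelper.buildFetchPlan("*:-1 orders:2");
assert plan.get("*") == -1 && plan.get("orders") == 2;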
1,230 | public abstract class OAbstractFile implements OFile {
private FileLock fileLock;
protected File osFile;
protected RandomAccessFile accessFile;
protected FileChannel channel;
protected volatile boolean dirty = false;
protected volatile boolean headerDirty = false;
protected int version;
protected int incrementSize = DEFAULT_INCREMENT_SIZE;
protected long maxSize;
protected byte[] securityCode = new byte[32]; // PART OF HEADER (32 bytes)
protected String mode;
protected boolean failCheck = true;
protected volatile long size; // PART OF HEADER (8 bytes in the current header version)
public static final int HEADER_SIZE = 1024;
protected static final int HEADER_DATA_OFFSET = 128;
protected static final int DEFAULT_SIZE = 1024000;
protected static final int DEFAULT_INCREMENT_SIZE = -50; // NEGATIVE NUMBER MEANS AS PERCENT OF CURRENT SIZE
private static final int OPEN_RETRY_MAX = 10;
private static final int OPEN_DELAY_RETRY = 100;
private static final long LOCK_WAIT_TIME = 300;
private static final int LOCK_MAX_RETRIES = 10;
protected static final int SIZE_OFFSET_V_0 = 0;
protected static final int FILLEDUPTO_OFFSET_V_0 = 4;
protected static final int SOFTLY_CLOSED_OFFSET_V_0 = 8;
protected static final int SIZE_OFFSET = 0;
protected static final int FILLEDUPTO_OFFSET = 8;
protected static final int SOFTLY_CLOSED_OFFSET = 16;
protected static final int VERSION_OFFSET = 48;
protected static final int CURRENT_VERSION = 1;
private final ReadWriteLock lock = new ReentrantReadWriteLock();
private boolean wasSoftlyClosed = true;
public abstract long getFileSize();
public abstract long getFilledUpTo();
public abstract void setSize(long iSize) throws IOException;
public abstract void writeHeaderLong(int iPosition, long iValue) throws IOException;
public abstract long readHeaderLong(int iPosition) throws IOException;
public abstract boolean synch() throws IOException;
public abstract void read(long iOffset, byte[] iDestBuffer, int iLenght) throws IOException;
public abstract short readShort(long iLogicalPosition) throws IOException;
public abstract int readInt(long iLogicalPosition) throws IOException;
public abstract long readLong(long iOffset) throws IOException;
public abstract byte readByte(long iOffset) throws IOException;
public abstract void writeInt(long iOffset, int iValue) throws IOException;
public abstract void writeLong(long iOffset, long iValue) throws IOException;
public abstract void writeShort(long iOffset, short iValue) throws IOException;
public abstract void writeByte(long iOffset, byte iValue) throws IOException;
public abstract void write(long iOffset, byte[] iSourceBuffer) throws IOException;
protected abstract void init() throws IOException;
protected abstract void setFilledUpTo(long iHow) throws IOException;
protected abstract void flushHeader() throws IOException;
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#open()
*/
public boolean open() throws IOException {
acquireWriteLock();
try {
if (!osFile.exists())
throw new FileNotFoundException("File: " + osFile.getAbsolutePath());
openChannel(osFile.length());
OLogManager.instance().debug(this, "Checking file integrity of " + osFile.getName() + "...");
init();
long filledUpTo = getFilledUpTo();
long fileSize = getFileSize();
if (fileSize == 0) {
// CORRUPTED? GET THE OS FILE SIZE
final long newFileSize = osFile.length() - HEADER_SIZE;
if (newFileSize != fileSize) {
OLogManager
.instance()
.error(
this,
"Invalid fileSize=%d for file %s. Resetting it to the os file size: %d. Probably the file was not closed correctly last time. The number of records has been set to the maximum value. It's strongly suggested to export and reimport the database before using it",
fileSize, getOsFile().getAbsolutePath(), newFileSize);
setFilledUpTo(newFileSize, true);
setSize(newFileSize, true);
fileSize = newFileSize;
}
}
if (filledUpTo > 0 && filledUpTo > fileSize) {
OLogManager
.instance()
.error(
this,
"Invalid filledUp=%d for file %s. Resetting it to the os file size: %d. Probably the file was not closed correctly last time. The number of records has been set to the maximum value. It's strongly suggested to export and reimport the database before using it",
filledUpTo, getOsFile().getAbsolutePath(), fileSize);
setSize(fileSize);
setFilledUpTo(fileSize);
filledUpTo = getFilledUpTo();
}
if (filledUpTo > fileSize || filledUpTo < 0)
OLogManager.instance().error(this, "Invalid filledUp size (=" + filledUpTo + "). The file could be corrupted", null,
OStorageException.class);
if (failCheck) {
wasSoftlyClosed = isSoftlyClosed();
if (wasSoftlyClosed)
setSoftlyClosed(false);
}
if (version < CURRENT_VERSION) {
setSize(fileSize, true);
setFilledUpTo(filledUpTo, true);
setVersion(CURRENT_VERSION);
version = CURRENT_VERSION;
setSoftlyClosed(!failCheck);
}
if (failCheck)
return wasSoftlyClosed;
return true;
} finally {
releaseWriteLock();
}
}
public boolean wasSoftlyClosed() {
acquireReadLock();
try {
return wasSoftlyClosed;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#create(int)
*/
public void create(int iStartSize) throws IOException {
acquireWriteLock();
try {
if (iStartSize == -1)
iStartSize = DEFAULT_SIZE;
openChannel(iStartSize);
setFilledUpTo(0, true);
setSize(maxSize > 0 && iStartSize > maxSize ? maxSize : iStartSize, true);
setVersion(CURRENT_VERSION);
version = CURRENT_VERSION;
setSoftlyClosed(!failCheck);
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#close()
*/
public void close() throws IOException {
acquireWriteLock();
try {
try {
setSoftlyClosed(true);
if (OGlobalConfiguration.FILE_LOCK.getValueAsBoolean())
unlock();
if (channel != null && channel.isOpen()) {
channel.close();
channel = null;
}
if (accessFile != null) {
accessFile.close();
accessFile = null;
}
} catch (Exception e) {
OLogManager.instance().error(this, "Error on closing file " + osFile.getAbsolutePath(), e, OIOException.class);
}
} finally {
releaseWriteLock();
}
}
public void close(boolean softlyClosed) throws IOException {
acquireWriteLock();
try {
try {
setSoftlyClosed(softlyClosed);
if (OGlobalConfiguration.FILE_LOCK.getValueAsBoolean())
unlock();
if (channel != null && channel.isOpen()) {
channel.close();
channel = null;
}
if (accessFile != null) {
accessFile.close();
accessFile = null;
}
} catch (Exception e) {
OLogManager.instance().error(this, "Error on closing file " + osFile.getAbsolutePath(), e, OIOException.class);
}
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#delete()
*/
public void delete() throws IOException {
acquireWriteLock();
try {
close();
if (osFile != null) {
boolean deleted = osFile.delete();
while (!deleted) {
OMemoryWatchDog.freeMemoryForResourceCleanup(100);
deleted = !osFile.exists() || osFile.delete();
}
}
} finally {
releaseWriteLock();
}
}
/*
* Locks a portion of file.
*/
public FileLock lock(final long iRangeFrom, final long iRangeSize, final boolean iShared) throws IOException {
acquireWriteLock();
try {
return channel.lock(iRangeFrom, iRangeSize, iShared);
} finally {
releaseWriteLock();
}
}
/*
* Unlocks a portion of file.
*/
public OFile unlock(final FileLock iLock) throws IOException {
acquireWriteLock();
try {
if (iLock != null) {
try {
iLock.release();
} catch (ClosedChannelException e) {
}
}
return this;
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#lock()
*/
public void lock() throws IOException {
if (channel == null)
return;
acquireWriteLock();
try {
for (int i = 0; i < LOCK_MAX_RETRIES; ++i) {
try {
fileLock = channel.tryLock(0, 1, true);
if (fileLock != null)
break;
} catch (OverlappingFileLockException e) {
OLogManager.instance().debug(this,
"Cannot open file '" + osFile.getAbsolutePath() + "' because it is locked. Waiting %d ms and retrying %d/%d...",
LOCK_WAIT_TIME, i, LOCK_MAX_RETRIES);
// FORCE FINALIZATION TO COLLECT ALL THE PENDING BUFFERS
OMemoryWatchDog.freeMemoryForResourceCleanup(LOCK_WAIT_TIME);
}
}
// FAIL ONLY AFTER ALL THE RETRY ATTEMPTS HAVE BEEN EXHAUSTED
if (fileLock == null)
throw new OLockException(
"File '"
+ osFile.getPath()
+ "' is locked by another process, maybe the database is in use by another process. Use the remote mode with a OrientDB server to allow multiple access to the same database.");
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#unlock()
*/
public void unlock() throws IOException {
acquireWriteLock();
try {
if (fileLock != null) {
try {
fileLock.release();
} catch (ClosedChannelException e) {
}
fileLock = null;
}
} finally {
releaseWriteLock();
}
}
protected void checkSize(final long iSize) throws IOException {
acquireReadLock();
try {
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Changing file size to " + iSize + " bytes. " + toString());
final long filledUpTo = getFilledUpTo();
if (iSize < filledUpTo)
OLogManager.instance().error(
this,
"You cannot resize down the file to " + iSize + " bytes, since it is less than current space used: " + filledUpTo
+ " bytes", OIOException.class);
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#removeTail(int)
*/
public void removeTail(long iSizeToShrink) throws IOException {
acquireWriteLock();
try {
final long filledUpTo = getFilledUpTo();
if (filledUpTo < iSizeToShrink)
iSizeToShrink = 0;
setFilledUpTo(filledUpTo - iSizeToShrink);
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#shrink(int)
*/
public void shrink(final long iSize) throws IOException {
acquireWriteLock();
try {
final long filledUpTo = getFilledUpTo();
if (iSize >= filledUpTo)
return;
OLogManager.instance().debug(this, "Shrinking filled file from " + filledUpTo + " to " + iSize + " bytes. " + toString());
setFilledUpTo(iSize);
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#allocateSpace(int)
*/
public long allocateSpace(final long iSize) throws IOException {
acquireWriteLock();
try {
final long offset = getFilledUpTo();
final long size = getFileSize();
if (getFreeSpace() < iSize) {
if (maxSize > 0 && maxSize - size < iSize)
throw new IllegalArgumentException("Cannot enlarge file since the configured max size ("
+ OFileUtils.getSizeAsString(maxSize) + ") was reached! " + toString());
// MAKE ROOM
long newFileSize = size;
if (newFileSize == 0)
// PROBABLY HAS BEEN LOST WITH HARD KILLS
newFileSize = DEFAULT_SIZE;
// GET THE STEP SIZE IN BYTES
long stepSizeInBytes = incrementSize > 0 ? incrementSize : -1 * size / 100 * incrementSize;
// FIND THE BEST SIZE TO ALLOCATE (BASED ON INCREMENT-SIZE)
while (newFileSize - offset <= iSize) {
newFileSize += stepSizeInBytes;
if (newFileSize == 0)
// EMPTY FILE: ALLOCATE REQUESTED SIZE ONLY
newFileSize = iSize;
if (newFileSize > maxSize && maxSize > 0)
// TOO BIG: ROUND TO THE MAXIMUM FILE SIZE
newFileSize = maxSize;
}
setSize(newFileSize);
}
// THERE IS SPACE IN FILE: RETURN THE UPPER BOUND OFFSET AND UPDATE THE FILLED THRESHOLD
setFilledUpTo(offset + iSize);
return offset;
} finally {
releaseWriteLock();
}
}
protected long checkRegions(final long iOffset, final long iLength) {
acquireReadLock();
try {
if (iOffset < 0 || iOffset + iLength > getFilledUpTo())
throw new OIOException("You cannot access outside the file size (" + getFilledUpTo()
+ " bytes). You have requested portion " + iOffset + "-" + (iOffset + iLength) + " bytes. File: " + toString());
return iOffset;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#getFreeSpace()
*/
public long getFreeSpace() {
acquireReadLock();
try {
return getFileSize() - getFilledUpTo();
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#canOversize(int)
*/
public boolean canOversize(final int iRecordSize) {
acquireReadLock();
try {
return maxSize - getFileSize() > iRecordSize;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#toString()
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("File: ");
builder.append(osFile.getName());
if (accessFile != null) {
builder.append(" os-size=");
try {
builder.append(accessFile.length());
} catch (IOException e) {
builder.append("?");
}
}
builder.append(", stored=");
builder.append(getFileSize());
builder.append(", filled=");
builder.append(getFilledUpTo());
builder.append(", max=");
builder.append(maxSize);
builder.append("");
return builder.toString();
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#getOsFile()
*/
public File getOsFile() {
acquireReadLock();
try {
return osFile;
} finally {
releaseReadLock();
}
}
public OAbstractFile init(final String iFileName, final String iMode) {
acquireWriteLock();
try {
mode = iMode;
osFile = new File(iFileName);
return this;
} finally {
releaseWriteLock();
}
}
protected void openChannel(final long newSize) throws IOException {
acquireWriteLock();
try {
OLogManager.instance().debug(this, "[OFile.openChannel] opening channel for file '%s' of size: %d", osFile, osFile.length());
for (int i = 0; i < OPEN_RETRY_MAX; ++i)
try {
accessFile = new RandomAccessFile(osFile, mode);
break;
} catch (FileNotFoundException e) {
// RE-THROW ONLY WHEN THE LAST RETRY ATTEMPT FAILS
if (i == OPEN_RETRY_MAX - 1)
throw e;
// TRY TO RE-CREATE THE DIRECTORY (THIS HAPPENS ON WINDOWS AFTER A DELETE IS PENDING, USUALLY WHEN REOPEN THE DB VERY
// FREQUENTLY)
osFile.getParentFile().mkdirs();
try {
Thread.sleep(OPEN_DELAY_RETRY);
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
}
}
if (accessFile == null)
throw new FileNotFoundException(osFile.getAbsolutePath());
if (accessFile.length() != newSize)
accessFile.setLength(newSize);
accessFile.seek(VERSION_OFFSET);
version = accessFile.read();
accessFile.seek(0);
channel = accessFile.getChannel();
if (OGlobalConfiguration.FILE_LOCK.getValueAsBoolean())
lock();
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#getMaxSize()
*/
public long getMaxSize() {
acquireReadLock();
try {
return maxSize;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#setMaxSize(int)
*/
public void setMaxSize(int maxSize) {
acquireWriteLock();
try {
this.maxSize = maxSize;
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#getIncrementSize()
*/
public int getIncrementSize() {
acquireReadLock();
try {
return incrementSize;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#setIncrementSize(int)
*/
public void setIncrementSize(int incrementSize) {
acquireWriteLock();
try {
this.incrementSize = incrementSize;
} finally {
releaseWriteLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#isOpen()
*/
public boolean isOpen() {
acquireReadLock();
try {
return accessFile != null;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#exists()
*/
public boolean exists() {
acquireReadLock();
try {
return osFile != null && osFile.exists();
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#isFailCheck()
*/
public boolean isFailCheck() {
acquireReadLock();
try {
return failCheck;
} finally {
releaseReadLock();
}
}
/*
* (non-Javadoc)
*
* @see com.orientechnologies.orient.core.storage.fs.OFileAAA#setFailCheck(boolean)
*/
public void setFailCheck(boolean failCheck) {
acquireWriteLock();
try {
this.failCheck = failCheck;
} finally {
releaseWriteLock();
}
}
protected void setDirty() {
acquireWriteLock();
try {
if (!dirty)
dirty = true;
} finally {
releaseWriteLock();
}
}
protected void setHeaderDirty() {
acquireWriteLock();
try {
if (!headerDirty)
headerDirty = true;
} finally {
releaseWriteLock();
}
}
public String getName() {
acquireReadLock();
try {
return osFile.getName();
} finally {
releaseReadLock();
}
}
public String getPath() {
acquireReadLock();
try {
return osFile.getPath();
} finally {
releaseReadLock();
}
}
public String getAbsolutePath() {
acquireReadLock();
try {
return osFile.getAbsolutePath();
} finally {
releaseReadLock();
}
}
public boolean renameTo(final File newFile) {
acquireWriteLock();
try {
return osFile.renameTo(newFile);
} finally {
releaseWriteLock();
}
}
protected void acquireWriteLock() {
lock.writeLock().lock();
}
protected void releaseWriteLock() {
lock.writeLock().unlock();
}
protected void acquireReadLock() {
lock.readLock().lock();
}
protected void releaseReadLock() {
lock.readLock().unlock();
}
protected abstract void setVersion(int version) throws IOException;
protected abstract void setFilledUpTo(final long iHow, boolean force);
protected abstract void setSize(final long size, final boolean force) throws IOException;
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_storage_fs_OAbstractFile.java |
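allocateSpace() in the record above grows the file by a configurable step: a positive incrementSize is an absolute step in bytes, while a negative one (the default is -50) is read as a percentage of the current file size. A hedged, standalone sketch of that growth rule, not the OrientDB implementation itself:

public class FileGrowthSketch {
    static final long DEFAULT_SIZE = 1_024_000; // mirrors the empty-file fallback above

    static long nextSize(long currentSize, long filled, long needed, int incrementSize) {
        long newSize = currentSize == 0 ? DEFAULT_SIZE : currentSize;
        // Percent-of-size step when incrementSize is negative.
        long step = incrementSize > 0 ? incrementSize : -1 * newSize / 100 * incrementSize;
        while (newSize - filled <= needed) {
            newSize += step;
        }
        return newSize;
    }

    public static void main(String[] args) {
        // 1 MB file, fully used, 4 KB requested, default -50 (=+50% steps):
        System.out.println(nextSize(1_024_000, 1_024_000, 4_096, -50)); // 1536000
    }
}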
377 | public static class TestReducerFactory
implements ReducerFactory<String, Integer, Integer> {
@Override
public Reducer<String, Integer, Integer> newReducer(String key) {
return new TestReducer();
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java |
1,783 | public static final class Pair implements Serializable {
private final String significant;
private final String insignificant;
public Pair(String significant, String insignificant) {
this.significant = significant;
this.insignificant = insignificant;
}
@Override
public boolean equals(Object thatObj) {
if (this == thatObj) {
return true;
}
if (thatObj == null || getClass() != thatObj.getClass()) {
return false;
}
Pair that = (Pair) thatObj;
return this.significant.equals(that.significant);
}
@Override
public int hashCode() {
return significant.hashCode();
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_InMemoryFormatTest.java |
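The Pair class above implements equals()/hashCode() over the significant field only, so hash-based collections treat two pairs with the same significant value as duplicates no matter what the insignificant field holds. A small self-contained demonstration (the Pair copy below is trimmed from the record):

import java.util.HashSet;
import java.util.Set;

public class PartialEqualityDemo {
    public static void main(String[] args) {
        Set<Pair> set = new HashSet<>();
        set.add(new Pair("key", "first"));
        set.add(new Pair("key", "second")); // considered a duplicate, not added
        System.out.println(set.size()); // 1
    }

    static final class Pair {
        private final String significant;
        private final String insignificant;
        Pair(String significant, String insignificant) {
            this.significant = significant;
            this.insignificant = insignificant;
        }
        @Override public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            return significant.equals(((Pair) o).significant);
        }
        @Override public int hashCode() {
            return significant.hashCode();
        }
    }
}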
312 | {
@Override
public Iterator<PropertyBlock> iterator()
{
return new PropertyBlockIterator( node );
}
}; | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_NeoStoreIndexStoreView.java |
2,171 | public static class NotBits implements Bits {
private final Bits bits;
public NotBits(Bits bits) {
this.bits = bits;
}
@Override
public boolean get(int index) {
return !bits.get(index);
}
@Override
public int length() {
return bits.length();
}
} | 0true
| src_main_java_org_elasticsearch_common_lucene_docset_NotDocIdSet.java |
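NotBits above is a plain decorator: it wraps another Bits instance and inverts every lookup. The runnable sketch below reduces Lucene's org.apache.lucene.util.Bits to a local interface so the example works without the library on the classpath:

public class NotBitsDemo {
    interface Bits {
        boolean get(int index);
        int length();
    }

    // Inverts every bit of the wrapped instance, exactly like NotBits above.
    static final class NotBits implements Bits {
        private final Bits bits;
        NotBits(Bits bits) { this.bits = bits; }
        @Override public boolean get(int index) { return !bits.get(index); }
        @Override public int length() { return bits.length(); }
    }

    public static void main(String[] args) {
        Bits even = new Bits() {
            @Override public boolean get(int i) { return i % 2 == 0; }
            @Override public int length() { return 8; }
        };
        Bits odd = new NotBits(even);
        System.out.println(odd.get(0)); // false
        System.out.println(odd.get(1)); // true
    }
}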
131 | class InitializerProposal extends CorrectionProposal {
private final class InitializerValueProposal
implements ICompletionProposal, ICompletionProposalExtension2 {
private final String text;
private final Image image;
private final int offset;
private InitializerValueProposal(int offset, String text, Image image) {
this.offset = offset;
this.text = text;
this.image = image;
}
protected IRegion getCurrentRegion(IDocument document)
throws BadLocationException {
int start = offset;
int length = 0;
for (int i=offset;
i<document.getLength();
i++) {
char ch = document.getChar(i);
if (Character.isWhitespace(ch) ||
ch==';'||ch==','||ch==')') {
break;
}
length++;
}
return new Region(start, length);
}
@Override
public Image getImage() {
return image;
}
@Override
public Point getSelection(IDocument document) {
return new Point(offset + text.length(), 0);
}
public void apply(IDocument document) {
try {
IRegion region = getCurrentRegion(document);
document.replace(region.getOffset(),
region.getLength(), text);
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
public String getDisplayString() {
return text;
}
public String getAdditionalProposalInfo() {
return null;
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public void apply(ITextViewer viewer, char trigger,
int stateMask, int offset) {
apply(viewer.getDocument());
}
@Override
public void selected(ITextViewer viewer, boolean smartToggle) {}
@Override
public void unselected(ITextViewer viewer) {}
@Override
public boolean validate(IDocument document, int offset,
DocumentEvent event) {
try {
IRegion region = getCurrentRegion(document);
String prefix = document.get(region.getOffset(),
offset-region.getOffset());
return text.startsWith(prefix);
}
catch (BadLocationException e) {
return false;
}
}
}
private CeylonEditor editor;
private final ProducedType type;
private final Scope scope;
private final Unit unit;
private final int exitPos;
InitializerProposal(String name, Change change,
Declaration declaration, ProducedType type,
Region selection, Image image, int exitPos,
CeylonEditor editor) {
super(name, change, selection, image);
this.exitPos = exitPos;
this.editor = editor;
this.scope = declaration.getScope();
this.unit = declaration.getUnit();
this.type = type;
}
InitializerProposal(String name, Change change,
Scope scope, Unit unit, ProducedType type,
Region selection, Image image, int exitPos,
CeylonEditor editor) {
super(name, change, selection, image);
this.exitPos = exitPos;
this.editor = editor;
this.scope = scope;
this.unit = unit;
this.type = type;
}
@Override
public void apply(IDocument document) {
int lenBefore = document.getLength();
super.apply(document);
int lenAfter = document.getLength();
if (editor==null) {
IEditorPart ed = EditorUtil.getCurrentEditor();
if (ed instanceof CeylonEditor) {
editor = (CeylonEditor) ed;
}
}
if (editor!=null) {
Point point = getSelection(document);
if (point.y>0) {
LinkedModeModel linkedModeModel = new LinkedModeModel();
ICompletionProposal[] proposals = getProposals(document, point);
if (proposals.length>1) {
ProposalPosition linkedPosition =
new ProposalPosition(document, point.x, point.y, 0,
proposals);
try {
LinkedMode.addLinkedPosition(linkedModeModel, linkedPosition);
int adjustedExitPos = exitPos;
if (exitPos>=0 && exitPos>point.x) {
adjustedExitPos += lenAfter-lenBefore;
}
int exitSeq = exitPos>=0 ? 1 : NO_STOP;
LinkedMode.installLinkedMode(editor, document, linkedModeModel,
this, new DeleteBlockingExitPolicy(document),
exitSeq, adjustedExitPos);
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
}
}
}
private ICompletionProposal[] getProposals(IDocument document,
Point point) {
List<ICompletionProposal> proposals =
new ArrayList<ICompletionProposal>();
try {
proposals.add(new InitializerValueProposal(point.x,
document.get(point.x, point.y), null));
}
catch (BadLocationException e1) {
e1.printStackTrace();
}
addValueArgumentProposals(point.x, proposals);
return proposals.toArray(new ICompletionProposal[0]);
}
private void addValueArgumentProposals(int loc,
List<ICompletionProposal> props) {
TypeDeclaration td = type.getDeclaration();
for (DeclarationWithProximity dwp:
getSortedProposedValues(scope, unit)) {
if (dwp.isUnimported()) {
//don't propose unimported stuff b/c adding
//imports drops us out of linked mode and
//because it results in a pause
continue;
}
Declaration d = dwp.getDeclaration();
if (d instanceof Value) {
Value value = (Value) d;
if (d.getUnit().getPackage().getNameAsString()
.equals(Module.LANGUAGE_MODULE_NAME)) {
if (isIgnoredLanguageModuleValue(value)) {
continue;
}
}
ProducedType vt = value.getType();
if (vt!=null && !vt.isNothing() &&
((td instanceof TypeParameter) &&
isInBounds(((TypeParameter)td).getSatisfiedTypes(), vt) ||
vt.isSubtypeOf(type))) {
props.add(new InitializerValueProposal(loc, d.getName(),
getImageForDeclaration(d)));
}
}
if (d instanceof Method) {
if (!d.isAnnotation()) {
Method method = (Method) d;
if (d.getUnit().getPackage().getNameAsString()
.equals(Module.LANGUAGE_MODULE_NAME)) {
if (isIgnoredLanguageModuleMethod(method)) {
continue;
}
}
ProducedType mt = method.getType();
if (mt!=null && !mt.isNothing() &&
((td instanceof TypeParameter) &&
isInBounds(((TypeParameter)td).getSatisfiedTypes(), mt) ||
mt.isSubtypeOf(type))) {
StringBuilder sb = new StringBuilder();
sb.append(d.getName());
appendPositionalArgs(d, unit, sb, false, false);
props.add(new InitializerValueProposal(loc, sb.toString(),
getImageForDeclaration(d)));
}
}
}
if (d instanceof Class) {
Class clazz = (Class) d;
if (!clazz.isAbstract() && !d.isAnnotation()) {
if (d.getUnit().getPackage().getNameAsString()
.equals(Module.LANGUAGE_MODULE_NAME)) {
if (isIgnoredLanguageModuleClass(clazz)) {
continue;
}
}
ProducedType ct = clazz.getType();
if (ct!=null && !ct.isNothing() &&
((td instanceof TypeParameter) &&
isInBounds(((TypeParameter)td).getSatisfiedTypes(), ct) ||
ct.getDeclaration().equals(type.getDeclaration()) ||
ct.isSubtypeOf(type))) {
StringBuilder sb = new StringBuilder();
sb.append(d.getName());
appendPositionalArgs(d, unit, sb, false, false);
props.add(new InitializerValueProposal(loc, sb.toString(),
getImageForDeclaration(d)));
}
}
}
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_InitializerProposal.java |
1,487 | @SuppressWarnings("unchecked")
public class OObjectDatabaseTx extends ODatabasePojoAbstract<Object> implements ODatabaseObject, OUserObject2RecordHandler {
public static final String TYPE = "object";
protected ODictionary<Object> dictionary;
protected OEntityManager entityManager;
protected boolean saveOnlyDirty;
protected boolean lazyLoading;
protected boolean automaticSchemaGeneration;
protected OMetadataObject metadata;
public OObjectDatabaseTx(final String iURL) {
super(new ODatabaseDocumentTx(iURL));
underlying.setDatabaseOwner(this);
init();
}
public <T> T newInstance(final Class<T> iType) {
return (T) newInstance(iType.getSimpleName(), null, new Object[0]);
}
public <T> T newInstance(final Class<T> iType, Object... iArgs) {
return (T) newInstance(iType.getSimpleName(), null, iArgs);
}
public <RET> RET newInstance(String iClassName) {
return (RET) newInstance(iClassName, null, new Object[0]);
}
@Override
public <THISDB extends ODatabase> THISDB open(String iUserName, String iUserPassword) {
super.open(iUserName, iUserPassword);
entityManager.registerEntityClass(OUser.class);
entityManager.registerEntityClass(ORole.class);
metadata = new OMetadataObject(underlying.getMetadata());
return (THISDB) this;
}
@Override
public OMetadataObject getMetadata() {
checkOpeness();
if (metadata == null)
metadata = new OMetadataObject(underlying.getMetadata());
return metadata;
}
/**
* Create a new POJO by its class name. Be sure to have called registerEntityClasses() first, declaring the packages that
* contain the entity classes.
*
* @see OEntityManager#registerEntityClasses(String)
*/
public <RET extends Object> RET newInstance(final String iClassName, final Object iEnclosingClass, Object... iArgs) {
checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, iClassName);
try {
Class<?> entityClass = entityManager.getEntityClass(iClassName);
if (entityClass != null) {
RET enhanced = (RET) OObjectEntityEnhancer.getInstance().getProxiedInstance(entityManager.getEntityClass(iClassName),
iEnclosingClass, underlying.newInstance(iClassName), null, iArgs);
return (RET) enhanced;
} else {
throw new OSerializationException("Type " + iClassName
+ " cannot be serialized because is not part of registered entities. To fix this error register this class");
}
} catch (Exception e) {
OLogManager.instance().error(this, "Error on creating object of class " + iClassName, e, ODatabaseException.class);
}
return null;
}
/**
* Create a new POJO by its class name. Be sure to have called registerEntityClasses() first, declaring the packages that
* contain the entity classes.
*
* @see OEntityManager#registerEntityClasses(String)
*/
public <RET extends Object> RET newInstance(final String iClassName, final Object iEnclosingClass, ODocument iDocument,
Object... iArgs) {
checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, iClassName);
try {
Class<?> entityClass = entityManager.getEntityClass(iClassName);
if (entityClass != null) {
RET enhanced = (RET) OObjectEntityEnhancer.getInstance().getProxiedInstance(entityManager.getEntityClass(iClassName),
iEnclosingClass, iDocument, null, iArgs);
return (RET) enhanced;
} else {
throw new OSerializationException("Type " + iClassName
+ " cannot be serialized because is not part of registered entities. To fix this error register this class");
}
} catch (Exception e) {
OLogManager.instance().error(this, "Error on creating object of class " + iClassName, e, ODatabaseException.class);
}
return null;
}
public <RET> OObjectIteratorClass<RET> browseClass(final Class<RET> iClusterClass) {
return browseClass(iClusterClass, true);
}
public <RET> OObjectIteratorClass<RET> browseClass(final Class<RET> iClusterClass, final boolean iPolymorphic) {
if (iClusterClass == null)
return null;
return browseClass(iClusterClass.getSimpleName(), iPolymorphic);
}
public <RET> OObjectIteratorClass<RET> browseClass(final String iClassName) {
return browseClass(iClassName, true);
}
public <RET> OObjectIteratorClass<RET> browseClass(final String iClassName, final boolean iPolymorphic) {
checkOpeness();
checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_READ, iClassName);
return new OObjectIteratorClass<RET>(this, (ODatabaseRecordAbstract) getUnderlying().getUnderlying(), iClassName, iPolymorphic);
}
public <RET> OObjectIteratorCluster<RET> browseCluster(final String iClusterName) {
checkOpeness();
checkSecurity(ODatabaseSecurityResources.CLUSTER, ORole.PERMISSION_READ, iClusterName);
return (OObjectIteratorCluster<RET>) new OObjectIteratorCluster<Object>(this, (ODatabaseRecordAbstract) getUnderlying()
.getUnderlying(), getClusterIdByName(iClusterName));
}
public <RET> RET load(final Object iPojo) {
return (RET) load(iPojo, null);
}
public <RET> RET reload(final Object iPojo) {
return (RET) reload(iPojo, null, true);
}
public <RET> RET reload(final Object iPojo, final boolean iIgnoreCache) {
return (RET) reload(iPojo, null, iIgnoreCache);
}
public <RET> RET reload(Object iPojo, final String iFetchPlan, final boolean iIgnoreCache) {
checkOpeness();
if (iPojo == null)
return null;
// GET THE ASSOCIATED DOCUMENT
final ODocument record = getRecordByUserObject(iPojo, true);
underlying.reload(record, iFetchPlan, iIgnoreCache);
iPojo = stream2pojo(record, iPojo, iFetchPlan, true);
return (RET) iPojo;
}
public <RET> RET load(final Object iPojo, final String iFetchPlan) {
return (RET) load(iPojo, iFetchPlan, false);
}
@Override
public void attach(final Object iPojo) {
OObjectEntitySerializer.attach(iPojo, this);
}
public <RET> RET attachAndSave(final Object iPojo) {
attach(iPojo);
return (RET) save(iPojo);
}
@Override
/**
* Method that detaches all fields contained in the document to the given object. It returns by default a proxied instance. To get
* a detached non-proxied instance, @see {@link OObjectEntitySerializer.detach(T o, ODatabaseObject db, boolean
* returnNonProxiedInstance)}
*
* @param <T>
* @param o
* :- the object to detach
* @return the detached object
*/
public <RET> RET detach(final Object iPojo) {
return (RET) OObjectEntitySerializer.detach(iPojo, this);
}
/**
* Method that detaches all fields contained in the document to the given object.
*
* @param <RET>
* @param iPojo
* :- the object to detach
* @param returnNonProxiedInstance
* :- defines if the returned object will be a proxied instance or not. If set to TRUE and the object does not contain @Id
* and @Version fields, it could produce data replication
* @return the object serialized or with detached data
*/
public <RET> RET detach(final Object iPojo, boolean returnNonProxiedInstance) {
return (RET) OObjectEntitySerializer.detach(iPojo, this, returnNonProxiedInstance);
}
/**
* Method that detaches all fields contained in the document to the given object, and recursively the whole object tree. This
* may throw a {@link StackOverflowError} with very deep object trees. To avoid it, raise the stack size with the -Xss java option
*
* @param <RET>
* @param iPojo
* :- the object to detach
* @param returnNonProxiedInstance
* :- defines if the returned object will be a proxied instance or not. If set to TRUE and the object does not contain @Id
* and @Version fields, it could produce data replication
* @return the object serialized or with detached data
*/
public <RET> RET detachAll(final Object iPojo, boolean returnNonProxiedInstance) {
return (RET) OObjectEntitySerializer.detachAll(iPojo, this, returnNonProxiedInstance);
}
public <RET> RET load(final Object iPojo, final String iFetchPlan, final boolean iIgnoreCache) {
return (RET) load(iPojo, iFetchPlan, iIgnoreCache, false);
}
@Override
public <RET> RET load(Object iPojo, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone) {
checkOpeness();
if (iPojo == null)
return null;
// GET THE ASSOCIATED DOCUMENT
ODocument record = getRecordByUserObject(iPojo, true);
try {
record.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
record = underlying.load(record, iFetchPlan, iIgnoreCache, loadTombstone);
return (RET) stream2pojo(record, iPojo, iFetchPlan);
} finally {
record.setInternalStatus(ORecordElement.STATUS.LOADED);
}
}
public <RET> RET load(final ORID iRecordId) {
return (RET) load(iRecordId, null);
}
public <RET> RET load(final ORID iRecordId, final String iFetchPlan) {
return (RET) load(iRecordId, iFetchPlan, false);
}
public <RET> RET load(final ORID iRecordId, final String iFetchPlan, final boolean iIgnoreCache) {
return (RET) load(iRecordId, iFetchPlan, iIgnoreCache, false);
}
@Override
public <RET> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache, boolean loadTombstone) {
checkOpeness();
if (iRecordId == null)
return null;
// GET THE ASSOCIATED DOCUMENT
final ODocument record = (ODocument) underlying.load(iRecordId, iFetchPlan, iIgnoreCache, loadTombstone);
if (record == null)
return null;
return (RET) OObjectEntityEnhancer.getInstance().getProxiedInstance(record.getClassName(), entityManager, record, null);
}
/**
* Saves an object to the database in synchronous mode. First checks if the object is new or not. In case it's new a new ODocument
* is created and bound to the object, otherwise the ODocument is retrieved and updated. The object is introspected using the Java
* Reflection to extract the field values. <br/>
* If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
*/
public <RET> RET save(final Object iContent) {
return (RET) save(iContent, (String) null, OPERATION_MODE.SYNCHRONOUS, false, null, null);
}
/**
* Saves an object to the database specifying the mode. First checks if the object is new or not. In case it's new a new ODocument
* is created and bound to the object, otherwise the ODocument is retrieved and updated. The object is introspected using the Java
* Reflection to extract the field values. <br/>
* If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
*/
public <RET> RET save(final Object iContent, OPERATION_MODE iMode, boolean iForceCreate,
final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
return (RET) save(iContent, null, iMode, false, iRecordCreatedCallback, iRecordUpdatedCallback);
}
/**
* Saves an object in synchronous mode to the database forcing a record cluster where to store it. First checks if the object is
* new or not. In case it's new a new ODocument is created and bound to the object, otherwise the ODocument is retrieved and
* updated. The object is introspected using the Java Reflection to extract the field values. <br/>
* If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
*
* Before using the specified cluster, a check is made to verify that it is allowed and figures among the configured ones, and
* that the record is valid according to the constraints declared in the schema.
*
* @see ORecordSchemaAware#validate()
*/
public <RET> RET save(final Object iPojo, final String iClusterName) {
return (RET) save(iPojo, iClusterName, OPERATION_MODE.SYNCHRONOUS, false, null, null);
}
@Override
public boolean updatedReplica(Object iPojo) {
OSerializationThreadLocal.INSTANCE.get().clear();
// GET THE ASSOCIATED DOCUMENT
final Object proxiedObject = OObjectEntitySerializer.serializeObject(iPojo, this);
final ODocument record = getRecordByUserObject(proxiedObject, true);
boolean result;
try {
record.setInternalStatus(com.orientechnologies.orient.core.db.record.ORecordElement.STATUS.MARSHALLING);
result = underlying.updatedReplica(record);
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).updateLoadedFieldMap(proxiedObject);
// RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
registerUserObject(proxiedObject, record);
} finally {
record.setInternalStatus(com.orientechnologies.orient.core.db.record.ORecordElement.STATUS.LOADED);
}
return result;
}
/**
* Saves an object to the database forcing a record cluster where to store it. First checks if the object is new or not. In case
* it's new a new ODocument is created and bound to the object, otherwise the ODocument is retrieved and updated. The object is
* introspected using the Java Reflection to extract the field values. <br/>
* If a multi value (array, collection or map of objects) is passed, then each single object is stored separately.
*
* Before using the specified cluster, a check is made to verify that it is allowed and figures among the configured ones, and
* that the record is valid according to the constraints declared in the schema.
*
* @see ORecordSchemaAware#validate()
*/
public <RET> RET save(final Object iPojo, final String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
checkOpeness();
if (iPojo == null)
return (RET) iPojo;
else if (OMultiValue.isMultiValue(iPojo)) {
// MULTI VALUE OBJECT: STORE SINGLE POJOS
for (Object pojo : OMultiValue.getMultiValueIterable(iPojo)) {
save(pojo, iClusterName);
}
return (RET) iPojo;
} else {
OSerializationThreadLocal.INSTANCE.get().clear();
// GET THE ASSOCIATED DOCUMENT
final Object proxiedObject = OObjectEntitySerializer.serializeObject(iPojo, this);
final ODocument record = getRecordByUserObject(proxiedObject, true);
try {
record.setInternalStatus(ORecordElement.STATUS.MARSHALLING);
if (!saveOnlyDirty || record.isDirty()) {
// REGISTER BEFORE TO SERIALIZE TO AVOID PROBLEMS WITH CIRCULAR DEPENDENCY
// registerUserObject(iPojo, record);
deleteOrphans((((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler())));
ODocument savedRecord = underlying.save(record, iClusterName, iMode, iForceCreate, iRecordCreatedCallback,
iRecordUpdatedCallback);
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).setDoc(savedRecord);
((OObjectProxyMethodHandler) ((ProxyObject) proxiedObject).getHandler()).updateLoadedFieldMap(proxiedObject);
// RE-REGISTER FOR NEW RECORDS SINCE THE ID HAS CHANGED
registerUserObject(proxiedObject, record);
}
} finally {
record.setInternalStatus(ORecordElement.STATUS.LOADED);
}
return (RET) proxiedObject;
}
}
public ODatabaseObject delete(final Object iPojo) {
checkOpeness();
if (iPojo == null)
return this;
ODocument record = getRecordByUserObject(iPojo, false);
if (record == null) {
final ORecordId rid = OObjectSerializerHelper.getObjectID(this, iPojo);
if (rid == null)
throw new OObjectNotDetachedException("Cannot retrieve the object's ID for '" + iPojo + "' because it has not been detached");
record = (ODocument) underlying.load(rid);
}
deleteCascade(record);
underlying.delete(record);
if (getTransaction() instanceof OTransactionNoTx)
unregisterPojo(iPojo, record);
return this;
}
@Override
public ODatabaseObject delete(final ORID iRID) {
checkOpeness();
if (iRID == null)
return this;
final ORecordInternal<?> record = iRID.getRecord();
if (record instanceof ODocument) {
Object iPojo = getUserObjectByRecord(record, null);
deleteCascade((ODocument) record);
underlying.delete(record);
if (getTransaction() instanceof OTransactionNoTx)
unregisterPojo(iPojo, (ODocument) record);
}
return this;
}
@Override
public ODatabaseObject delete(final ORID iRID, final ORecordVersion iVersion) {
deleteRecord(iRID, iVersion, false);
return this;
}
@Override
public ODatabaseComplex<Object> cleanOutRecord(ORID iRID, ORecordVersion iVersion) {
deleteRecord(iRID, iVersion, true);
return this;
}
private boolean deleteRecord(ORID iRID, ORecordVersion iVersion, boolean prohibitTombstones) {
checkOpeness();
if (iRID == null)
return true;
ODocument record = iRID.getRecord();
if (record != null) {
Object iPojo = getUserObjectByRecord(record, null);
deleteCascade(record);
if (prohibitTombstones)
underlying.cleanOutRecord(iRID, iVersion);
else
underlying.delete(iRID, iVersion);
if (getTransaction() instanceof OTransactionNoTx)
unregisterPojo(iPojo, record);
}
return false;
}
protected void deleteCascade(final ODocument record) {
if (record == null)
return;
List<String> toDeleteCascade = OObjectEntitySerializer.getCascadeDeleteFields(record.getClassName());
if (toDeleteCascade != null) {
for (String field : toDeleteCascade) {
Object toDelete = record.field(field);
if (toDelete instanceof OIdentifiable) {
if (toDelete != null)
delete(((OIdentifiable) toDelete).getIdentity());
} else if (toDelete instanceof Collection) {
for (OIdentifiable cascadeRecord : ((Collection<OIdentifiable>) toDelete)) {
if (cascadeRecord != null)
delete(((OIdentifiable) cascadeRecord).getIdentity());
}
} else if (toDelete instanceof Map) {
for (OIdentifiable cascadeRecord : ((Map<Object, OIdentifiable>) toDelete).values()) {
if (cascadeRecord != null)
delete(((OIdentifiable) cascadeRecord).getIdentity());
}
}
}
}
}
public long countClass(final String iClassName) {
checkOpeness();
return underlying.countClass(iClassName);
}
public long countClass(final Class<?> iClass) {
checkOpeness();
return underlying.countClass(iClass.getSimpleName());
}
public ODictionary<Object> getDictionary() {
checkOpeness();
if (dictionary == null)
dictionary = new ODictionaryWrapper(this, underlying.getDictionary().getIndex());
return dictionary;
}
@Override
public ODatabasePojoAbstract<Object> commit() {
try {
// BY PASS DOCUMENT DB
((ODatabaseRecordTx) underlying.getUnderlying()).commit();
if (getTransaction().getAllRecordEntries() != null) {
// UPDATE ID & VERSION FOR ALL THE RECORDS
Object pojo = null;
for (ORecordOperation entry : getTransaction().getAllRecordEntries()) {
switch (entry.type) {
case ORecordOperation.CREATED:
case ORecordOperation.UPDATED:
break;
case ORecordOperation.DELETED:
final ORecordInternal<?> rec = entry.getRecord();
if (rec instanceof ODocument)
unregisterPojo(pojo, (ODocument) rec);
break;
}
}
}
} finally {
getTransaction().close();
}
return this;
}
@Override
public ODatabasePojoAbstract<Object> rollback() {
try {
// COPY ALL TX ENTRIES
final List<ORecordOperation> newEntries;
if (getTransaction().getCurrentRecordEntries() != null) {
newEntries = new ArrayList<ORecordOperation>();
for (ORecordOperation entry : getTransaction().getCurrentRecordEntries())
if (entry.type == ORecordOperation.CREATED)
newEntries.add(entry);
} else
newEntries = null;
// BY PASS DOCUMENT DB
((ODatabaseRecordTx) underlying.getUnderlying()).rollback();
} finally {
getTransaction().close();
}
return this;
}
public OEntityManager getEntityManager() {
return entityManager;
}
@Override
public ODatabaseDocument getUnderlying() {
return underlying;
}
/**
* Returns the version number of the object. Version starts from 0 assigned on creation.
*
* @param iPojo
* User object
*/
@Override
public ORecordVersion getVersion(final Object iPojo) {
checkOpeness();
final ODocument record = getRecordByUserObject(iPojo, false);
if (record != null)
return record.getRecordVersion();
return OObjectSerializerHelper.getObjectVersion(iPojo);
}
/**
* Returns the object unique identity.
*
* @param iPojo
* User object
*/
@Override
public ORID getIdentity(final Object iPojo) {
checkOpeness();
if (iPojo instanceof OIdentifiable)
return ((OIdentifiable) iPojo).getIdentity();
final ODocument record = getRecordByUserObject(iPojo, false);
if (record != null)
return record.getIdentity();
return OObjectSerializerHelper.getObjectID(this, iPojo);
}
public boolean isSaveOnlyDirty() {
return saveOnlyDirty;
}
public void setSaveOnlyDirty(boolean saveOnlyDirty) {
this.saveOnlyDirty = saveOnlyDirty;
}
public boolean isAutomaticSchemaGeneration() {
return automaticSchemaGeneration;
}
public void setAutomaticSchemaGeneration(boolean automaticSchemaGeneration) {
this.automaticSchemaGeneration = automaticSchemaGeneration;
}
public Object newInstance() {
checkOpeness();
return new ODocument();
}
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final byte iOperation) {
return (DBTYPE) underlying.checkSecurity(iResource, iOperation);
}
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final int iOperation, Object iResourceSpecific) {
return (DBTYPE) underlying.checkSecurity(iResource, iOperation, iResourceSpecific);
}
public <DBTYPE extends ODatabase> DBTYPE checkSecurity(final String iResource, final int iOperation, Object... iResourcesSpecific) {
return (DBTYPE) underlying.checkSecurity(iResource, iOperation, iResourcesSpecific);
}
@Override
public ODocument pojo2Stream(final Object iPojo, final ODocument iRecord) {
if (iPojo instanceof ProxyObject) {
return ((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).getDoc();
}
return OObjectSerializerHelper.toStream(iPojo, iRecord, getEntityManager(),
getMetadata().getSchema().getClass(iPojo.getClass().getSimpleName()), this, this, saveOnlyDirty);
}
@Override
public Object stream2pojo(ODocument iRecord, final Object iPojo, final String iFetchPlan) {
return stream2pojo(iRecord, iPojo, iFetchPlan, false);
}
public Object stream2pojo(ODocument iRecord, final Object iPojo, final String iFetchPlan, boolean iReload) {
if (iRecord.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED)
iRecord = (ODocument) iRecord.load();
if (iReload) {
if (iPojo != null) {
if (iPojo instanceof Proxy) {
((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).setDoc(iRecord);
((OObjectProxyMethodHandler) ((ProxyObject) iPojo).getHandler()).updateLoadedFieldMap(iPojo);
return iPojo;
} else
return OObjectEntityEnhancer.getInstance().getProxiedInstance(iPojo.getClass(), iRecord);
} else
return OObjectEntityEnhancer.getInstance().getProxiedInstance(iRecord.getClassName(), entityManager, iRecord, null);
} else if (!(iPojo instanceof Proxy))
return OObjectEntityEnhancer.getInstance().getProxiedInstance(iPojo.getClass(), iRecord);
else
return iPojo;
}
public boolean isLazyLoading() {
return lazyLoading;
}
public void setLazyLoading(final boolean lazyLoading) {
this.lazyLoading = lazyLoading;
}
public String getType() {
return TYPE;
}
@Override
public ODocument getRecordByUserObject(Object iPojo, boolean iCreateIfNotAvailable) {
if (iPojo instanceof Proxy)
return OObjectEntitySerializer.getDocument((Proxy) iPojo);
return OObjectEntitySerializer.getDocument((Proxy) OObjectEntitySerializer.serializeObject(iPojo, this));
}
@Override
public Object getUserObjectByRecord(final OIdentifiable iRecord, final String iFetchPlan, final boolean iCreate) {
final ODocument document = iRecord.getRecord();
return OObjectEntityEnhancer.getInstance().getProxiedInstance(document.getClassName(), getEntityManager(), document, null);
}
@Override
public void registerUserObject(final Object iObject, final ORecordInternal<?> iRecord) {
}
public void registerUserObjectAfterLinkSave(ORecordInternal<?> iRecord) {
}
@Override
public void unregisterPojo(final Object iObject, final ODocument iRecord) {
}
public void registerClassMethodFilter(Class<?> iClass, OObjectMethodFilter iMethodFilter) {
OObjectEntityEnhancer.getInstance().registerClassMethodFilter(iClass, iMethodFilter);
}
public void deregisterClassMethodFilter(final Class<?> iClass) {
OObjectEntityEnhancer.getInstance().deregisterClassMethodFilter(iClass);
}
protected void init() {
entityManager = OEntityManager.getEntityManagerByDatabaseURL(getURL());
entityManager.setClassHandler(OObjectEntityClassHandler.getInstance());
saveOnlyDirty = OGlobalConfiguration.OBJECT_SAVE_ONLY_DIRTY.getValueAsBoolean();
OObjectSerializerHelper.register();
lazyLoading = true;
if (!isClosed() && entityManager.getEntityClass(OUser.class.getSimpleName()) == null) {
entityManager.registerEntityClass(OUser.class);
entityManager.registerEntityClass(ORole.class);
}
}
protected void deleteOrphans(final OObjectProxyMethodHandler handler) {
for (ORID orphan : handler.getOrphans()) {
final ODocument doc = orphan.getRecord();
deleteCascade(doc);
underlying.delete(doc);
}
handler.getOrphans().clear();
}
} | 1no label
| object_src_main_java_com_orientechnologies_orient_object_db_OObjectDatabaseTx.java |
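A hedged usage sketch for the object database in the record above; the Person entity and the memory: URL are made up for illustration, and error handling is omitted. It follows the class's own contract: register the entity, create a proxied instance, save, count.

import com.orientechnologies.orient.object.db.OObjectDatabaseTx;

public class ObjectDbUsageSketch {
    public static class Person {
        private String name;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        OObjectDatabaseTx db = new OObjectDatabaseTx("memory:demo").create();
        try {
            // Entities must be registered before newInstance() can proxy them.
            db.getEntityManager().registerEntityClass(Person.class);
            Person p = db.newInstance(Person.class);
            p.setName("Jay");
            db.save(p);
            System.out.println(db.countClass(Person.class)); // 1
        } finally {
            db.close();
        }
    }
}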
328 | public class LockException extends RuntimeException
{
public LockException()
{
super();
}
public LockException( String message )
{
super( message );
}
public LockException( String message, Throwable cause )
{
super( message, cause );
}
public LockException( Throwable cause )
{
super( cause );
}
} | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_LockException.java |
98 | class ConcatenationVisitor extends Visitor {
Tree.SumOp result;
@Override
public void visit(Tree.SumOp that) {
if (that.getStartIndex()<=node.getStartIndex() &&
that.getStopIndex()>=node.getStopIndex()) {
Tree.Term lt = that.getLeftTerm();
Tree.Term rt = that.getRightTerm();
if ((lt instanceof Tree.StringLiteral ||
lt instanceof Tree.StringTemplate) &&
rt instanceof Tree.SumOp &&
(((Tree.SumOp) rt).getRightTerm()
instanceof Tree.StringLiteral ||
((Tree.SumOp) rt).getRightTerm()
instanceof Tree.StringTemplate)) {
result = that;
}
if ((rt instanceof Tree.StringLiteral ||
rt instanceof Tree.StringTemplate) &&
lt instanceof Tree.SumOp &&
(((Tree.SumOp) lt).getLeftTerm()
instanceof Tree.StringLiteral ||
((Tree.SumOp) lt).getLeftTerm()
instanceof Tree.StringTemplate)) {
result = that;
}
}
super.visit(that);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ConvertToInterpolationProposal.java |
3,738 | public static class TypeParser extends ObjectMapper.TypeParser {
@Override
protected ObjectMapper.Builder createBuilder(String name) {
return new Builder(name);
}
@Override
protected void processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode) {
if (fieldName.equals("date_formats") || fieldName.equals("dynamic_date_formats")) {
List<FormatDateTimeFormatter> dateTimeFormatters = newArrayList();
if (fieldNode instanceof List) {
for (Object node1 : (List) fieldNode) {
dateTimeFormatters.add(parseDateTimeFormatter(fieldName, node1));
}
} else if ("none".equals(fieldNode.toString())) {
dateTimeFormatters = null;
} else {
dateTimeFormatters.add(parseDateTimeFormatter(fieldName, fieldNode));
}
if (dateTimeFormatters == null) {
((Builder) builder).noDynamicDateTimeFormatter();
} else {
((Builder) builder).dynamicDateTimeFormatter(dateTimeFormatters);
}
} else if (fieldName.equals("dynamic_templates")) {
// "dynamic_templates" : [
// {
// "template_1" : {
// "match" : "*_test",
// "match_mapping_type" : "string",
// "mapping" : { "type" : "string", "store" : "yes" }
// }
// }
// ]
List tmplNodes = (List) fieldNode;
for (Object tmplNode : tmplNodes) {
Map<String, Object> tmpl = (Map<String, Object>) tmplNode;
if (tmpl.size() != 1) {
throw new MapperParsingException("A dynamic template must be defined with a name");
}
Map.Entry<String, Object> entry = tmpl.entrySet().iterator().next();
((Builder) builder).add(DynamicTemplate.parse(entry.getKey(), (Map<String, Object>) entry.getValue()));
}
} else if (fieldName.equals("date_detection")) {
((Builder) builder).dateDetection = nodeBooleanValue(fieldNode);
} else if (fieldName.equals("numeric_detection")) {
((Builder) builder).numericDetection = nodeBooleanValue(fieldNode);
}
}
} | 1no label
| src_main_java_org_elasticsearch_index_mapper_object_RootObjectMapper.java |
1,615 | class NotifyTimeout implements Runnable {
final TimeoutClusterStateListener listener;
final TimeValue timeout;
ScheduledFuture future;
NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) {
this.listener = listener;
this.timeout = timeout;
}
public void cancel() {
future.cancel(false);
}
@Override
public void run() {
if (future.isCancelled()) {
return;
}
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
} else {
listener.onTimeout(this.timeout);
}
// note, we rely on the listener to remove itself in case of timeout if needed
}
} | 0true
| src_main_java_org_elasticsearch_cluster_service_InternalClusterService.java |
280 | public abstract class MessageCreator {
private JavaMailSender mailSender;
public MessageCreator(JavaMailSender mailSender) {
this.mailSender = mailSender;
}
public void sendMessage(final HashMap<String,Object> props) throws MailException {
MimeMessagePreparator preparator = buildMimeMessagePreparator(props);
this.mailSender.send(preparator);
}
public abstract String buildMessageBody(EmailInfo info, HashMap<String,Object> props);
public MimeMessagePreparator buildMimeMessagePreparator(final HashMap<String,Object> props) {
MimeMessagePreparator preparator = new MimeMessagePreparator() {
public void prepare(MimeMessage mimeMessage) throws Exception {
EmailTarget emailUser = (EmailTarget) props.get(EmailPropertyType.USER.getType());
EmailInfo info = (EmailInfo) props.get(EmailPropertyType.INFO.getType());
MimeMessageHelper message = new MimeMessageHelper(mimeMessage, (info.getAttachments() != null && info.getAttachments().size() > 0));
message.setTo(emailUser.getEmailAddress());
message.setFrom(info.getFromAddress());
message.setSubject(info.getSubject());
if (emailUser.getBCCAddresses() != null && emailUser.getBCCAddresses().length > 0) {
message.setBcc(emailUser.getBCCAddresses());
}
if (emailUser.getCCAddresses() != null && emailUser.getCCAddresses().length > 0) {
message.setCc(emailUser.getCCAddresses());
}
String messageBody = info.getMessageBody();
if (messageBody == null) {
messageBody = buildMessageBody(info, props);
}
message.setText(messageBody, true);
if (info.getAttachments() != null) {
for (Attachment attachment : info.getAttachments()) {
ByteArrayDataSource dataSource = new ByteArrayDataSource(attachment.getData(), attachment.getMimeType());
message.addAttachment(attachment.getFilename(), dataSource);
}
}
}
};
return preparator;
}
public JavaMailSender getMailSender() {
return mailSender;
}
public void setMailSender(JavaMailSender mailSender) {
this.mailSender = mailSender;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_email_service_message_MessageCreator.java |
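The MessageCreator above delegates all message assembly to a Spring MimeMessagePreparator. Below is a minimal Spring-only sketch of that pattern outside the Broadleaf types; the addresses, subject, and sender configuration are placeholders:

import javax.mail.internet.MimeMessage;
import org.springframework.mail.javamail.JavaMailSender;
import org.springframework.mail.javamail.JavaMailSenderImpl;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.mail.javamail.MimeMessagePreparator;

public class PreparatorSketch {
    public static void main(String[] args) {
        JavaMailSender sender = new JavaMailSenderImpl(); // configure host/port for real use
        MimeMessagePreparator preparator = new MimeMessagePreparator() {
            public void prepare(MimeMessage mimeMessage) throws Exception {
                // false = no multipart, since this sketch attaches nothing
                MimeMessageHelper helper = new MimeMessageHelper(mimeMessage, false);
                helper.setTo("user@example.com");
                helper.setFrom("noreply@example.com");
                helper.setSubject("Hello");
                helper.setText("<p>Hi there</p>", true); // true = HTML body
            }
        };
        sender.send(preparator); // throws MailException on failure
    }
}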
1,086 | executor.submit(new Runnable() {
@Override
public void run() {
assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName));
client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"))).actionGet();
}
}); | 0true
| src_test_java_org_elasticsearch_aliases_IndexAliasesTests.java |
1,772 | assertTrueEventually(new AssertTask() {
public void run() {
assertEquals(0, map.size());
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_map_EvictionTest.java |
1,136 | public class QueryFilterAggregationSearchBenchmark {
static final long COUNT = SizeValue.parseSizeValue("5m").singles();
static final int BATCH = 1000;
static final int QUERY_COUNT = 200;
static final int NUMBER_OF_TERMS = 200;
static Client client;
public static void main(String[] args) throws Exception {
Settings settings = settingsBuilder()
.put("index.refresh_interval", "-1")
.put("gateway.type", "local")
.put(SETTING_NUMBER_OF_SHARDS, 2)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.build();
String clusterName = QueryFilterAggregationSearchBenchmark.class.getSimpleName();
Node node1 = nodeBuilder()
.clusterName(clusterName)
.settings(settingsBuilder().put(settings).put("name", "node1")).node();
client = node1.client();
long[] lValues = new long[NUMBER_OF_TERMS];
for (int i = 0; i < NUMBER_OF_TERMS; i++) {
lValues[i] = ThreadLocalRandom.current().nextLong();
}
Thread.sleep(10000);
try {
client.admin().indices().create(createIndexRequest("test")).actionGet();
StopWatch stopWatch = new StopWatch().start();
System.out.println("--> Indexing [" + COUNT + "] ...");
long ITERS = COUNT / BATCH;
long i = 1;
int counter = 0;
for (; i <= ITERS; i++) {
BulkRequestBuilder request = client.prepareBulk();
for (int j = 0; j < BATCH; j++) {
counter++;
XContentBuilder builder = jsonBuilder().startObject();
builder.field("id", Integer.toString(counter));
builder.field("l_value", lValues[ThreadLocalRandom.current().nextInt(NUMBER_OF_TERMS)]);
builder.endObject();
request.add(Requests.indexRequest("test").type("type1").id(Integer.toString(counter))
.source(builder));
}
BulkResponse response = request.execute().actionGet();
if (response.hasFailures()) {
System.err.println("--> failures...");
}
if (((i * BATCH) % 100000) == 0) {
System.out.println("--> Indexed " + (i * BATCH) + " took " + stopWatch.stop().lastTaskTime());
stopWatch.start();
}
}
System.out.println("--> Indexing took " + stopWatch.totalTime() + ", TPS " + (((double) (COUNT)) / stopWatch.totalTime().secondsFrac()));
} catch (Exception e) {
System.out.println("--> Index already exists, ignoring indexing phase, waiting for green");
ClusterHealthResponse clusterHealthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("10m").execute().actionGet();
if (clusterHealthResponse.isTimedOut()) {
System.err.println("--> Timed out waiting for cluster health");
}
}
client.admin().indices().prepareRefresh().execute().actionGet();
if (client.prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount() != COUNT) {
throw new Error("Mismatching number of documents in the index, expected " + COUNT);
}
System.out.println("--> Number of docs in index: " + COUNT);
final long anyValue = ((Number) client.prepareSearch().execute().actionGet().getHits().hits()[0].sourceAsMap().get("l_value")).longValue();
long totalQueryTime = 0;
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setSearchType(SearchType.COUNT)
.setQuery(termQuery("l_value", anyValue))
.execute().actionGet();
totalQueryTime += searchResponse.getTookInMillis();
}
System.out.println("--> Simple Query on first l_value " + totalQueryTime + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setSearchType(SearchType.COUNT)
.setQuery(termQuery("l_value", anyValue))
.addFacet(FacetBuilders.queryFacet("query").query(termQuery("l_value", anyValue)))
.execute().actionGet();
totalQueryTime += searchResponse.getTookInMillis();
}
System.out.println("--> Query facet first l_value " + totalQueryTime + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setSearchType(SearchType.COUNT)
.setQuery(termQuery("l_value", anyValue))
.addAggregation(AggregationBuilders.filter("filter").filter(FilterBuilders.termFilter("l_value", anyValue)))
.execute().actionGet();
totalQueryTime += searchResponse.getTookInMillis();
}
System.out.println("--> Filter agg first l_value " + totalQueryTime + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setSearchType(SearchType.COUNT)
.setQuery(termQuery("l_value", anyValue))
.addFacet(FacetBuilders.queryFacet("query").query(termQuery("l_value", anyValue)).global(true).mode(FacetBuilder.Mode.COLLECTOR))
.execute().actionGet();
totalQueryTime += searchResponse.getTookInMillis();
}
System.out.println("--> Query facet first l_value (global) (mode/collector) " + totalQueryTime + "ms");
totalQueryTime = 0;
for (int j = 0; j < QUERY_COUNT; j++) {
SearchResponse searchResponse = client.prepareSearch()
.setSearchType(SearchType.COUNT)
.setQuery(termQuery("l_value", anyValue))
.addFacet(FacetBuilders.queryFacet("query").query(termQuery("l_value", anyValue)).global(true).mode(FacetBuilder.Mode.POST))
.execute().actionGet();
totalQueryTime += searchResponse.getTookInMillis();
}
System.out.println("--> Query facet first l_value (global) (mode/post) " + totalQueryTime + "ms");
}
} | 0true
| src_test_java_org_elasticsearch_benchmark_search_aggregations_QueryFilterAggregationSearchBenchmark.java |
1,643 | @edu.umd.cs.findbugs.annotations.SuppressWarnings("DM_GC")
public class RunGcRequest implements ConsoleRequest {
public RunGcRequest() {
}
@Override
public int getType() {
return ConsoleRequestConstants.REQUEST_TYPE_RUN_GC;
}
@Override
public Object readResponse(ObjectDataInput in) throws IOException {
return "Successfully garbage collected.";
}
@Override
public void writeResponse(ManagementCenterService mcs, ObjectDataOutput dos) throws Exception {
System.gc();
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_management_request_RunGcRequest.java |
54 | public class HttpGetCommand extends HttpCommand {
boolean nextLine;
public HttpGetCommand(String uri) {
super(TextCommandType.HTTP_GET, uri);
}
public boolean readFrom(ByteBuffer cb) {
while (cb.hasRemaining()) {
char c = (char) cb.get();
if (c == '\n') {
if (nextLine) {
return true;
}
nextLine = true;
} else if (c != '\r') {
nextLine = false;
}
}
return false;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpGetCommand.java |
1,690 | public class URLImmutableBlobContainer extends AbstractURLBlobContainer implements ImmutableBlobContainer {
/**
* Constructs a new URLImmutableBlobContainer
*
* @param blobStore blob store
* @param blobPath blob path to this container
* @param path URL of this container
*/
public URLImmutableBlobContainer(URLBlobStore blobStore, BlobPath blobPath, URL path) {
super(blobStore, blobPath, path);
}
/**
* This operation is not supported by URL Blob Container
*/
@Override
public void writeBlob(final String blobName, final InputStream is, final long sizeInBytes, final WriterListener listener) {
throw new UnsupportedOperationException("URL repository is read only");
}
/**
* This operation is not supported by URL Blob Container
*/
@Override
public void writeBlob(String blobName, InputStream is, long sizeInBytes) throws IOException {
throw new UnsupportedOperationException("URL repository is read only");
}
} | 0true
| src_main_java_org_elasticsearch_common_blobstore_url_URLImmutableBlobContainer.java |
996 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class SemaphoreTest extends HazelcastTestSupport {
private HazelcastInstance hz;
@Before
public void setUp() {
hz = createHazelcastInstance();
}
@Test(timeout = 30000)
public void testAcquire() throws InterruptedException {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
assertTrue(semaphore.init(numberOfPermits));
for (int i = 0; i < numberOfPermits; i++) {
assertEquals(numberOfPermits - i, semaphore.availablePermits());
semaphore.acquire();
}
assertEquals(semaphore.availablePermits(), 0);
}
@Test(timeout = 30000)
public void testRelease() {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
for (int i = 0; i < numberOfPermits; i++) {
assertEquals(i, semaphore.availablePermits());
semaphore.release();
}
assertEquals(semaphore.availablePermits(), numberOfPermits);
}
@Test(timeout = 30000)
public void testMultipleAcquire() throws InterruptedException {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
assertTrue(semaphore.init(numberOfPermits));
for (int i = 0; i < numberOfPermits; i += 5) {
assertEquals(numberOfPermits - i, semaphore.availablePermits());
semaphore.acquire(5);
}
assertEquals(semaphore.availablePermits(), 0);
}
@Test(timeout = 30000)
public void testMultipleRelease() {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
for (int i = 0; i < numberOfPermits; i += 5) {
assertEquals(i, semaphore.availablePermits());
semaphore.release(5);
}
assertEquals(semaphore.availablePermits(), numberOfPermits);
}
@Test(timeout = 30000)
public void testDrain() throws InterruptedException {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
assertTrue(semaphore.init(numberOfPermits));
semaphore.acquire(5);
int drainedPermits = semaphore.drainPermits();
assertEquals(drainedPermits, numberOfPermits - 5);
assertEquals(semaphore.availablePermits(), 0);
}
@Test(timeout = 30000)
public void testReduce() {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
assertTrue(semaphore.init(numberOfPermits));
for (int i = 0; i < numberOfPermits; i += 5) {
assertEquals(numberOfPermits - i, semaphore.availablePermits());
semaphore.reducePermits(5);
}
assertEquals(semaphore.availablePermits(), 0);
}
@Test(timeout = 30000)
public void testTryAcquire() {
final ISemaphore semaphore = hz.getSemaphore(randomMapName());
int numberOfPermits = 20;
assertTrue(semaphore.init(numberOfPermits));
for (int i = 0; i < numberOfPermits; i++) {
assertEquals(numberOfPermits - i, semaphore.availablePermits());
assertEquals(semaphore.tryAcquire(), true);
}
assertFalse(semaphore.tryAcquire());
assertEquals(semaphore.availablePermits(), 0);
}
@Test(timeout = 30000)
public void testTryAcquireMultiple() {
final ISemaphore semaphore = hz.getSemaphore(randomString());
int numberOfPermits = 20;
assertTrue(semaphore.init(numberOfPermits));
for (int i = 0; i < numberOfPermits; i += 5) {
assertEquals(numberOfPermits - i, semaphore.availablePermits());
assertEquals(semaphore.tryAcquire(5), true);
}
assertEquals(semaphore.availablePermits(), 0);
}
@Test(timeout = 30000)
    public void testInit_whenNotInitialized() {
ISemaphore semaphore = hz.getSemaphore(randomString());
boolean result = semaphore.init(2);
assertTrue(result);
assertEquals(2, semaphore.availablePermits());
}
@Test(timeout = 30000)
    public void testInit_whenAlreadyInitialized() {
ISemaphore semaphore = hz.getSemaphore(randomString());
semaphore.init(2);
boolean result = semaphore.init(4);
assertFalse(result);
assertEquals(2, semaphore.availablePermits());
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_concurrent_semaphore_SemaphoreTest.java |
767 | public enum ExtensionResultStatusType {
HANDLED, // Extension handled the result but leaves it up to the manager to decide what to do next
    HANDLED_CONTINUE, // Extension handled and recommends that the manager continue
    HANDLED_STOP, // Extension handled and recommends that the manager stop
NOT_HANDLED // Extension did not handle the request
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_extension_ExtensionResultStatusType.java |
1,580 | ClusterInfoService cis = new ClusterInfoService() {
@Override
public ClusterInfo getClusterInfo() {
logger.info("--> calling fake getClusterInfo");
return clusterInfo;
}
}; | 0true
| src_test_java_org_elasticsearch_cluster_routing_allocation_decider_DiskThresholdDeciderTests.java |
2,512 | YAML(2) {
@Override
public String restContentType() {
return "application/yaml";
}
@Override
public String shortName() {
return "yaml";
}
@Override
public XContent xContent() {
return YamlXContent.yamlXContent;
}
}; | 0true
| src_main_java_org_elasticsearch_common_xcontent_XContentType.java |
3,357 | static class GeoPointValuesSingleFixedSet extends GeoPointValues {
private final BigDoubleArrayList lon;
private final BigDoubleArrayList lat;
private final FixedBitSet set;
private final GeoPoint scratch = new GeoPoint();
GeoPointValuesSingleFixedSet(BigDoubleArrayList lon, BigDoubleArrayList lat, FixedBitSet set) {
super(false);
this.lon = lon;
this.lat = lat;
this.set = set;
}
@Override
public int setDocument(int docId) {
this.docId = docId;
return set.get(docId) ? 1 : 0;
}
@Override
public GeoPoint nextValue() {
return scratch.reset(lat.get(docId), lon.get(docId));
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_GeoPointDoubleArrayAtomicFieldData.java |
3,741 | public class PathMapperTests extends ElasticsearchTestCase {
@Test
public void testPathMapping() throws IOException {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/path/test-mapping.json");
DocumentMapper docMapper = MapperTestUtils.newParser().parse(mapping);
assertThat(docMapper.mappers().indexName("first1"), notNullValue());
assertThat(docMapper.mappers().indexName("name1.first1"), nullValue());
assertThat(docMapper.mappers().indexName("last1"), nullValue());
assertThat(docMapper.mappers().indexName("i_last_1"), notNullValue());
assertThat(docMapper.mappers().indexName("name1.last1"), nullValue());
assertThat(docMapper.mappers().indexName("name1.i_last_1"), nullValue());
assertThat(docMapper.mappers().indexName("first2"), nullValue());
assertThat(docMapper.mappers().indexName("name2.first2"), notNullValue());
assertThat(docMapper.mappers().indexName("last2"), nullValue());
assertThat(docMapper.mappers().indexName("i_last_2"), nullValue());
assertThat(docMapper.mappers().indexName("name2.i_last_2"), notNullValue());
assertThat(docMapper.mappers().indexName("name2.last2"), nullValue());
// test full name
assertThat(docMapper.mappers().fullName("first1"), nullValue());
assertThat(docMapper.mappers().fullName("name1.first1"), notNullValue());
assertThat(docMapper.mappers().fullName("last1"), nullValue());
assertThat(docMapper.mappers().fullName("i_last_1"), nullValue());
assertThat(docMapper.mappers().fullName("name1.last1"), notNullValue());
assertThat(docMapper.mappers().fullName("name1.i_last_1"), nullValue());
assertThat(docMapper.mappers().fullName("first2"), nullValue());
assertThat(docMapper.mappers().fullName("name2.first2"), notNullValue());
assertThat(docMapper.mappers().fullName("last2"), nullValue());
assertThat(docMapper.mappers().fullName("i_last_2"), nullValue());
assertThat(docMapper.mappers().fullName("name2.i_last_2"), nullValue());
assertThat(docMapper.mappers().fullName("name2.last2"), notNullValue());
}
} | 0true
| src_test_java_org_elasticsearch_index_mapper_path_PathMapperTests.java |
328 | @SuppressWarnings("serial")
public class OStorageSegmentConfiguration implements Serializable {
public transient OStorageConfiguration root;
public int id;
public String name;
public String maxSize = "0";
public String fileType = "mmap";
public String fileStartSize = "500Kb";
public String fileMaxSize = "500Mb";
public String fileIncrementSize = "50%";
public String defrag = "auto";
public OStorageFileConfiguration[] infoFiles;
String location;
public OStorageSegmentConfiguration() {
infoFiles = new OStorageFileConfiguration[0];
}
public OStorageSegmentConfiguration(final OStorageConfiguration iRoot, final String iSegmentName, final int iId) {
root = iRoot;
name = iSegmentName;
id = iId;
infoFiles = new OStorageFileConfiguration[0];
}
public OStorageSegmentConfiguration(final OStorageConfiguration iRoot, final String iSegmentName, final int iId,
final String iDirectory) {
root = iRoot;
name = iSegmentName;
id = iId;
location = iDirectory;
infoFiles = new OStorageFileConfiguration[0];
}
public void setRoot(OStorageConfiguration iRoot) {
this.root = iRoot;
for (OStorageFileConfiguration f : infoFiles)
f.parent = this;
}
public String getLocation() {
if (location != null)
return location;
return root != null ? root.getDirectory() : null;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_config_OStorageSegmentConfiguration.java |
754 | public interface CheckoutResponse {
public Map<PaymentInfo, Referenced> getInfos();
public Order getOrder();
public PaymentResponse getPaymentResponse();
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_checkout_service_workflow_CheckoutResponse.java |
5,312 | public static class Bucket extends InternalTerms.Bucket {
final BytesRef termBytes;
public Bucket(BytesRef term, long docCount, InternalAggregations aggregations) {
super(docCount, aggregations);
this.termBytes = term;
}
@Override
public String getKey() {
return termBytes.utf8ToString();
}
@Override
public Text getKeyAsText() {
return new BytesText(new BytesArray(termBytes));
}
@Override
public Number getKeyAsNumber() {
// this method is needed for scripted numeric faceting
return Double.parseDouble(termBytes.utf8ToString());
}
@Override
int compareTerm(Terms.Bucket other) {
return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_terms_StringTerms.java |
2,914 | public class PatternTokenizerFactory extends AbstractTokenizerFactory {
private final Pattern pattern;
private final int group;
@Inject
public PatternTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/);
if (sPattern == null) {
throw new ElasticsearchIllegalArgumentException("pattern is missing for [" + name + "] tokenizer of type 'pattern'");
}
this.pattern = Regex.compile(sPattern, settings.get("flags"));
this.group = settings.getAsInt("group", -1);
}
@Override
public Tokenizer create(Reader reader) {
return new PatternTokenizer(reader, pattern, group);
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_PatternTokenizerFactory.java |
181 | (new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}}); | 0true
| src_main_java_jsr166y_LinkedTransferQueue.java |
333 | public static enum OPTIONS {
SECURITY
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_ODatabase.java |
1,228 | private final class Prohibiter implements Callable<Void> {
public Void call() throws Exception {
countDownLatch.await();
for (int n = 0; n < CYCLES_COUNT; n++) {
modificationLock.prohibitModifications();
long beforeModification = counter.get();
Thread.sleep(50);
if (n % 10 == 0)
System.out
.println("After prohibit modifications " + beforeModification + " before allow modifications " + counter.get());
Assert.assertEquals(counter.get(), beforeModification);
modificationLock.allowModifications();
Thread.sleep(50);
}
return null;
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_storage_StorageModificationLockTest.java |
1,765 | assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
assertNull(map.get(2));
assertEquals(2, map.get(1));
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_map_EvictionTest.java |
692 | public class CollectionPortableHook implements PortableHook {
public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.COLLECTION_PORTABLE_FACTORY, -20);
public static final int COLLECTION_SIZE = 1;
public static final int COLLECTION_CONTAINS = 2;
public static final int COLLECTION_ADD = 3;
public static final int COLLECTION_REMOVE = 4;
public static final int COLLECTION_ADD_ALL = 5;
public static final int COLLECTION_COMPARE_AND_REMOVE = 6;
public static final int COLLECTION_CLEAR = 7;
public static final int COLLECTION_GET_ALL = 8;
public static final int COLLECTION_ADD_LISTENER = 9;
public static final int LIST_ADD_ALL = 10;
public static final int LIST_GET = 11;
public static final int LIST_SET = 12;
public static final int LIST_ADD = 13;
public static final int LIST_REMOVE = 14;
public static final int LIST_INDEX_OF = 15;
public static final int LIST_SUB = 16;
public static final int TXN_LIST_ADD = 17;
public static final int TXN_LIST_REMOVE = 18;
public static final int TXN_LIST_SIZE = 19;
public static final int TXN_SET_ADD = 20;
public static final int TXN_SET_REMOVE = 21;
public static final int TXN_SET_SIZE = 22;
public static final int COLLECTION_REMOVE_LISTENER = 23;
public int getFactoryId() {
return F_ID;
}
@Override
public PortableFactory createFactory() {
ConstructorFunction<Integer, Portable>[] constructors = new ConstructorFunction[COLLECTION_REMOVE_LISTENER + 1];
constructors[COLLECTION_SIZE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionSizeRequest();
}
};
constructors[COLLECTION_CONTAINS] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionContainsRequest();
}
};
constructors[COLLECTION_ADD] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionAddRequest();
}
};
constructors[COLLECTION_REMOVE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionRemoveRequest();
}
};
constructors[COLLECTION_ADD_ALL] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionAddAllRequest();
}
};
constructors[COLLECTION_COMPARE_AND_REMOVE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionCompareAndRemoveRequest();
}
};
constructors[COLLECTION_CLEAR] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionClearRequest();
}
};
constructors[COLLECTION_GET_ALL] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionGetAllRequest();
}
};
constructors[COLLECTION_ADD_LISTENER] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionAddListenerRequest();
}
};
constructors[LIST_ADD_ALL] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListAddAllRequest();
}
};
constructors[LIST_GET] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListGetRequest();
}
};
constructors[LIST_SET] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListSetRequest();
}
};
constructors[LIST_ADD] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListAddRequest();
}
};
constructors[LIST_REMOVE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListRemoveRequest();
}
};
constructors[LIST_INDEX_OF] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListIndexOfRequest();
}
};
constructors[LIST_SUB] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListSubRequest();
}
};
constructors[TXN_LIST_ADD] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnListAddRequest();
}
};
constructors[TXN_LIST_REMOVE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnListRemoveRequest();
}
};
constructors[TXN_LIST_SIZE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnListSizeRequest();
}
};
constructors[TXN_SET_ADD] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnSetAddRequest();
}
};
constructors[TXN_SET_REMOVE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnSetRemoveRequest();
}
};
constructors[TXN_SET_SIZE] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnSetSizeRequest();
}
};
constructors[COLLECTION_REMOVE_LISTENER] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionRemoveListenerRequest();
}
};
return new ArrayPortableFactory(constructors);
}
@Override
public Collection<ClassDefinition> getBuiltinDefinitions() {
return null;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java |
1,710 | runnable = new Runnable() { public void run() { map.forceUnlock(null); } }; | 0true
| hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java |
458 | el[6] = StaticArrayEntryList.of(Iterables.transform(entries.entrySet(),new Function<Map.Entry<Integer, Long>, Entry>() {
@Nullable
@Override
public Entry apply(@Nullable Map.Entry<Integer, Long> entry) {
return StaticArrayEntry.ofBytes(entry, ByteEntryGetter.INSTANCE);
}
})); | 0true
| titan-test_src_test_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_StaticArrayEntryTest.java |
291 | public class OSQLScriptFormatter implements OScriptFormatter {
public String getFunctionDefinition(final OFunction f) {
return null;
}
@Override
public String getFunctionInvoke(final OFunction iFunction, final Object[] iArgs) {
// TODO: BIND ARGS
return iFunction.getCode();
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_command_script_formatter_OSQLScriptFormatter.java |
896 | public class PromotableOrderAdjustmentImpl implements PromotableOrderAdjustment {
private static final long serialVersionUID = 1L;
protected PromotableCandidateOrderOffer promotableCandidateOrderOffer;
protected PromotableOrder promotableOrder;
protected Money adjustmentValue;
protected Offer offer;
protected boolean roundOfferValues = true;
protected int roundingScale = 2;
protected RoundingMode roundingMode = RoundingMode.HALF_EVEN;
public PromotableOrderAdjustmentImpl(PromotableCandidateOrderOffer promotableCandidateOrderOffer, PromotableOrder promotableOrder) {
assert (promotableOrder != null);
assert (promotableCandidateOrderOffer != null);
this.promotableCandidateOrderOffer = promotableCandidateOrderOffer;
this.promotableOrder = promotableOrder;
this.offer = promotableCandidateOrderOffer.getOffer();
computeAdjustmentValue();
}
public PromotableOrderAdjustmentImpl(PromotableCandidateOrderOffer promotableCandidateOrderOffer,
PromotableOrder promotableOrder, Money adjustmentValue) {
this(promotableCandidateOrderOffer, promotableOrder);
if (promotableOrder.isIncludeOrderAndItemAdjustments()) {
this.adjustmentValue = adjustmentValue;
}
}
@Override
public PromotableOrder getPromotableOrder() {
return promotableOrder;
}
@Override
public Offer getOffer() {
return offer;
}
/*
* Calculates the value of the adjustment by first getting the current value of the order and then
* calculating the value of this adjustment.
*
     * If this adjustment value is greater than the currentOrderValue (i.e. it would make the order go negative),
     * then the adjustment value is set to the value of the order.
*/
protected void computeAdjustmentValue() {
adjustmentValue = new Money(promotableOrder.getOrderCurrency());
Money currentOrderValue = promotableOrder.calculateSubtotalWithAdjustments();
// Note: FIXED_PRICE not calculated as this is not a valid option for offers.
if (offer.getDiscountType().equals(OfferDiscountType.AMOUNT_OFF)) {
adjustmentValue = new Money(offer.getValue(), promotableOrder.getOrderCurrency());
} else if (offer.getDiscountType().equals(OfferDiscountType.PERCENT_OFF)) {
BigDecimal offerValue = currentOrderValue.getAmount().multiply(offer.getValue().divide(new BigDecimal("100"), 5, RoundingMode.HALF_EVEN));
if (isRoundOfferValues()) {
offerValue = offerValue.setScale(roundingScale, roundingMode);
}
adjustmentValue = new Money(offerValue, promotableOrder.getOrderCurrency(), 5);
}
if (currentOrderValue.lessThan(adjustmentValue)) {
adjustmentValue = currentOrderValue;
}
}
@Override
public Money getAdjustmentValue() {
return adjustmentValue;
}
/**
* It is sometimes problematic to offer percentage-off offers with regards to rounding. For example,
* consider an item that costs 9.99 and has a 50% promotion. To be precise, the offer value is 4.995,
* but this may be a strange value to display to the user depending on the currency being used.
*/
public boolean isRoundOfferValues() {
return roundOfferValues;
}
/**
* @see #isRoundOfferValues()
*
* @param roundingScale
*/
public void setRoundingScale(int roundingScale) {
this.roundingScale = roundingScale;
}
public int getRoundingScale() {
return roundingScale;
}
/**
* @see #isRoundOfferValues()
*
* @param roundingMode
*/
public void setRoundingMode(RoundingMode roundingMode) {
this.roundingMode = roundingMode;
}
public RoundingMode getRoundingMode() {
return roundingMode;
}
@Override
public boolean isCombinable() {
Boolean combinable = offer.isCombinableWithOtherOffers();
return (combinable != null && combinable);
}
@Override
public boolean isTotalitarian() {
Boolean totalitarian = offer.isTotalitarianOffer();
return (totalitarian != null && totalitarian.booleanValue());
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_discount_domain_PromotableOrderAdjustmentImpl.java |
5,244 | public class NestedAggregator extends SingleBucketAggregator implements ReaderContextAware {
private final Filter parentFilter;
private final Filter childFilter;
private Bits childDocs;
private FixedBitSet parentDocs;
public NestedAggregator(String name, AggregatorFactories factories, String nestedPath, AggregationContext aggregationContext, Aggregator parent) {
super(name, factories, aggregationContext, parent);
MapperService.SmartNameObjectMapper mapper = aggregationContext.searchContext().smartNameObjectMapper(nestedPath);
if (mapper == null) {
throw new AggregationExecutionException("facet nested path [" + nestedPath + "] not found");
}
ObjectMapper objectMapper = mapper.mapper();
if (objectMapper == null) {
throw new AggregationExecutionException("facet nested path [" + nestedPath + "] not found");
}
if (!objectMapper.nested().isNested()) {
throw new AggregationExecutionException("facet nested path [" + nestedPath + "] is not nested");
}
parentFilter = aggregationContext.searchContext().filterCache().cache(NonNestedDocsFilter.INSTANCE);
childFilter = aggregationContext.searchContext().filterCache().cache(objectMapper.nestedTypeFilter());
}
@Override
public void setNextReader(AtomicReaderContext reader) {
try {
DocIdSet docIdSet = parentFilter.getDocIdSet(reader, null);
            // In ES if the parent is deleted, then the children are also deleted. Therefore acceptedDocs can also be null here.
childDocs = DocIdSets.toSafeBits(reader.reader(), childFilter.getDocIdSet(reader, null));
if (DocIdSets.isEmpty(docIdSet)) {
parentDocs = null;
} else {
parentDocs = (FixedBitSet) docIdSet;
}
} catch (IOException ioe) {
throw new AggregationExecutionException("Failed to aggregate [" + name + "]", ioe);
}
}
@Override
public void collect(int parentDoc, long bucketOrd) throws IOException {
        // here we translate the parent doc to a list of its nested docs, and then call super.collect for every one of them
// so they'll be collected
if (parentDoc == 0 || parentDocs == null) {
return;
}
int prevParentDoc = parentDocs.prevSetBit(parentDoc - 1);
int numChildren = 0;
for (int i = (parentDoc - 1); i > prevParentDoc; i--) {
if (childDocs.get(i)) {
++numChildren;
collectBucketNoCounts(i, bucketOrd);
}
}
incrementBucketDocCount(numChildren, bucketOrd);
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
return new InternalNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal));
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalNested(name, 0, buildEmptySubAggregations());
}
public static class Factory extends AggregatorFactory {
private final String path;
public Factory(String name, String path) {
super(name, InternalNested.TYPE.name());
this.path = path;
}
@Override
public Aggregator create(AggregationContext context, Aggregator parent, long expectedBucketsCount) {
return new NestedAggregator(name, factories, path, context, parent);
}
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_nested_NestedAggregator.java |
2,491 | public class XContentBuilderString {
private final XContentString underscore;
private final XContentString camelCase;
public XContentBuilderString(String value) {
underscore = new XContentString(Strings.toUnderscoreCase(value));
camelCase = new XContentString(Strings.toCamelCase(value));
}
public XContentString underscore() {
return underscore;
}
public XContentString camelCase() {
return camelCase;
}
} | 0true
| src_main_java_org_elasticsearch_common_xcontent_XContentBuilderString.java |
1,974 | public final class ProviderMethodsModule implements Module {
private final Object delegate;
private final TypeLiteral<?> typeLiteral;
private ProviderMethodsModule(Object delegate) {
this.delegate = checkNotNull(delegate, "delegate");
this.typeLiteral = TypeLiteral.get(this.delegate.getClass());
}
/**
* Returns a module which creates bindings for provider methods from the given module.
*/
public static Module forModule(Module module) {
return forObject(module);
}
/**
* Returns a module which creates bindings for provider methods from the given object.
* This is useful notably for <a href="http://code.google.com/p/google-gin/">GIN</a>
*/
public static Module forObject(Object object) {
// avoid infinite recursion, since installing a module always installs itself
if (object instanceof ProviderMethodsModule) {
return Modules.EMPTY_MODULE;
}
return new ProviderMethodsModule(object);
}
public synchronized void configure(Binder binder) {
for (ProviderMethod<?> providerMethod : getProviderMethods(binder)) {
providerMethod.configure(binder);
}
}
public List<ProviderMethod<?>> getProviderMethods(Binder binder) {
List<ProviderMethod<?>> result = Lists.newArrayList();
for (Class<?> c = delegate.getClass(); c != Object.class; c = c.getSuperclass()) {
for (Method method : c.getDeclaredMethods()) {
if (method.getAnnotation(Provides.class) != null) {
result.add(createProviderMethod(binder, method));
}
}
}
return result;
}
<T> ProviderMethod<T> createProviderMethod(Binder binder, final Method method) {
binder = binder.withSource(method);
Errors errors = new Errors(method);
// prepare the parameter providers
List<Dependency<?>> dependencies = Lists.newArrayList();
List<Provider<?>> parameterProviders = Lists.newArrayList();
List<TypeLiteral<?>> parameterTypes = typeLiteral.getParameterTypes(method);
Annotation[][] parameterAnnotations = method.getParameterAnnotations();
for (int i = 0; i < parameterTypes.size(); i++) {
Key<?> key = getKey(errors, parameterTypes.get(i), method, parameterAnnotations[i]);
dependencies.add(Dependency.get(key));
parameterProviders.add(binder.getProvider(key));
}
@SuppressWarnings("unchecked") // Define T as the method's return type.
TypeLiteral<T> returnType = (TypeLiteral<T>) typeLiteral.getReturnType(method);
Key<T> key = getKey(errors, returnType, method, method.getAnnotations());
Class<? extends Annotation> scopeAnnotation
= Annotations.findScopeAnnotation(errors, method.getAnnotations());
for (Message message : errors.getMessages()) {
binder.addError(message);
}
return new ProviderMethod<T>(key, method, delegate, ImmutableSet.copyOf(dependencies),
parameterProviders, scopeAnnotation);
}
<T> Key<T> getKey(Errors errors, TypeLiteral<T> type, Member member, Annotation[] annotations) {
Annotation bindingAnnotation = Annotations.findBindingAnnotation(errors, member, annotations);
return bindingAnnotation == null ? Key.get(type) : Key.get(type, bindingAnnotation);
}
@Override
public boolean equals(Object o) {
return o instanceof ProviderMethodsModule
&& ((ProviderMethodsModule) o).delegate == delegate;
}
@Override
public int hashCode() {
return delegate.hashCode();
}
} | 0true
| src_main_java_org_elasticsearch_common_inject_internal_ProviderMethodsModule.java |
1,044 | public interface OrderItemQualifier extends Serializable {
/**
* Unique id of the item qualifier.
* @return
*/
Long getId();
/**
* Sets the id for this OrderItemQualifier
* @param id
*/
void setId(Long id);
/**
* The related order item.
* @return
*/
OrderItem getOrderItem();
/**
* Sets the related order item.
* @param orderItem
*/
void setOrderItem(OrderItem orderItem);
/**
* Sets the related offer.
* @param offer
*/
void setOffer(Offer offer);
/**
* Returns the related offer
* @return
*/
Offer getOffer();
/**
* Sets the quantity of the associated OrderItem that was used as a qualifier.
* @param quantity
*/
void setQuantity(Long quantity);
/**
* Returns the quantity of the associated OrderItem that was used as a qualifier.
* @return
*/
Long getQuantity();
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderItemQualifier.java |
106 | static final class ValueSpliterator<K,V> extends Traverser<K,V>
implements ConcurrentHashMapSpliterator<V> {
long est; // size estimate
ValueSpliterator(Node<K,V>[] tab, int size, int index, int limit,
long est) {
super(tab, size, index, limit);
this.est = est;
}
public ConcurrentHashMapSpliterator<V> trySplit() {
int i, f, h;
return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
f, est >>>= 1);
}
public void forEachRemaining(Action<? super V> action) {
if (action == null) throw new NullPointerException();
for (Node<K,V> p; (p = advance()) != null;)
action.apply(p.val);
}
public boolean tryAdvance(Action<? super V> action) {
if (action == null) throw new NullPointerException();
Node<K,V> p;
if ((p = advance()) == null)
return false;
action.apply(p.val);
return true;
}
public long estimateSize() { return est; }
} | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
4,523 | private class PurgerThread extends Thread {
volatile boolean running = true;
public PurgerThread(String name) {
super(name);
setDaemon(true);
}
public void doStop() {
running = false;
}
public void run() {
while (running) {
try {
List<IndexShard> shardsToPurge = getShardsToPurge();
purgeShards(shardsToPurge);
} catch (Throwable e) {
if (running) {
logger.warn("failed to execute ttl purge", e);
}
}
try {
Thread.sleep(interval.millis());
} catch (InterruptedException e) {
// ignore, if we are interrupted because we are shutting down, running will be false
}
}
}
/**
     * Returns the shards to purge, i.e. the local started primary shards that have ttl enabled and disable_purge set to false
*/
private List<IndexShard> getShardsToPurge() {
List<IndexShard> shardsToPurge = new ArrayList<IndexShard>();
MetaData metaData = clusterService.state().metaData();
for (IndexService indexService : indicesService) {
// check the value of disable_purge for this index
IndexMetaData indexMetaData = metaData.index(indexService.index().name());
if (indexMetaData == null) {
continue;
}
boolean disablePurge = indexMetaData.settings().getAsBoolean(INDEX_TTL_DISABLE_PURGE, false);
if (disablePurge) {
continue;
}
// should be optimized with the hasTTL flag
FieldMappers ttlFieldMappers = indexService.mapperService().name(TTLFieldMapper.NAME);
if (ttlFieldMappers == null) {
continue;
}
// check if ttl is enabled for at least one type of this index
boolean hasTTLEnabled = false;
for (FieldMapper ttlFieldMapper : ttlFieldMappers) {
if (((TTLFieldMapper) ttlFieldMapper).enabled()) {
hasTTLEnabled = true;
break;
}
}
if (hasTTLEnabled) {
for (IndexShard indexShard : indexService) {
if (indexShard.state() == IndexShardState.STARTED && indexShard.routingEntry().primary() && indexShard.routingEntry().started()) {
shardsToPurge.add(indexShard);
}
}
}
}
return shardsToPurge;
}
} | 1no label
| src_main_java_org_elasticsearch_indices_ttl_IndicesTTLService.java |
1,471 | public class BroadleafRegisterController extends BroadleafAbstractController {
protected boolean useEmailForLogin = true;
protected static String registerSuccessView = "ajaxredirect:/";
protected static String registerView = "authentication/register";
@Resource(name="blCustomerService")
protected CustomerService customerService;
@Resource(name="blRegisterCustomerValidator")
protected RegisterCustomerValidator registerCustomerValidator;
@Resource(name="blLoginService")
protected LoginService loginService;
@Resource(name = "blOrderService")
protected OrderService orderService;
public String register(RegisterCustomerForm registerCustomerForm, HttpServletRequest request,
HttpServletResponse response, Model model) {
String redirectUrl = request.getParameter("successUrl");
if (StringUtils.isNotBlank(redirectUrl)) {
registerCustomerForm.setRedirectUrl(redirectUrl);
}
return getRegisterView();
}
public String processRegister(RegisterCustomerForm registerCustomerForm, BindingResult errors,
HttpServletRequest request, HttpServletResponse response, Model model)
throws ServiceException, PricingException {
if (useEmailForLogin) {
Customer customer = registerCustomerForm.getCustomer();
customer.setUsername(customer.getEmailAddress());
}
registerCustomerValidator.validate(registerCustomerForm, errors, useEmailForLogin);
if (!errors.hasErrors()) {
Customer newCustomer = customerService.registerCustomer(registerCustomerForm.getCustomer(),
registerCustomerForm.getPassword(), registerCustomerForm.getPasswordConfirm());
assert(newCustomer != null);
// The next line needs to use the customer from the input form and not the customer returned after registration
// so that we still have the unencoded password for use by the authentication mechanism.
loginService.loginCustomer(registerCustomerForm.getCustomer());
// Need to ensure that the Cart on CartState is owned by the newly registered customer.
Order cart = CartState.getCart();
if (cart != null && !(cart instanceof NullOrderImpl) && cart.getEmailAddress() == null) {
cart.setEmailAddress(newCustomer.getEmailAddress());
orderService.save(cart, false);
}
String redirectUrl = registerCustomerForm.getRedirectUrl();
if (StringUtils.isNotBlank(redirectUrl) && redirectUrl.contains(":")) {
redirectUrl = null;
}
return StringUtils.isBlank(redirectUrl) ? getRegisterSuccessView() : "redirect:" + redirectUrl;
} else {
return getRegisterView();
}
}
public RegisterCustomerForm initCustomerRegistrationForm() {
Customer customer = CustomerState.getCustomer();
if (customer == null || ! customer.isAnonymous()) {
customer = customerService.createCustomerFromId(null);
}
RegisterCustomerForm customerRegistrationForm = new RegisterCustomerForm();
customerRegistrationForm.setCustomer(customer);
return customerRegistrationForm;
}
public boolean isUseEmailForLogin() {
return useEmailForLogin;
}
public void setUseEmailForLogin(boolean useEmailForLogin) {
this.useEmailForLogin = useEmailForLogin;
}
/**
* Returns the view that will be returned from this controller when the
     * registration is successful. The success view should be a redirect (e.g. start with "redirect:"), since
     * this will cause the entire Spring Security pipeline to be fulfilled.
*
* By default, returns "redirect:/"
*
* @return the register success view
*/
public String getRegisterSuccessView() {
return registerSuccessView;
}
/**
* Returns the view that will be used to display the registration page.
*
* By default, returns "/register"
*
* @return the register view
*/
public String getRegisterView() {
return registerView;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_controller_account_BroadleafRegisterController.java |
1,322 | awaitBusy(new Predicate<Object>() {
public boolean apply(Object obj) {
ClusterState state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
return state.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK);
}
}); | 0true
| src_test_java_org_elasticsearch_cluster_MinimumMasterNodesTests.java |
1,059 | public class OCommandExecutorSQLTruncateCluster extends OCommandExecutorSQLAbstract implements OCommandDistributedReplicateRequest {
public static final String KEYWORD_TRUNCATE = "TRUNCATE";
public static final String KEYWORD_CLUSTER = "CLUSTER";
private String clusterName;
@SuppressWarnings("unchecked")
public OCommandExecutorSQLTruncateCluster parse(final OCommandRequest iRequest) {
init((OCommandRequestText) iRequest);
StringBuilder word = new StringBuilder();
int oldPos = 0;
int pos = nextWord(parserText, parserTextUpperCase, oldPos, word, true);
if (pos == -1 || !word.toString().equals(KEYWORD_TRUNCATE))
throw new OCommandSQLParsingException("Keyword " + KEYWORD_TRUNCATE + " not found. Use " + getSyntax(), parserText, oldPos);
oldPos = pos;
pos = nextWord(parserText, parserTextUpperCase, oldPos, word, true);
if (pos == -1 || !word.toString().equals(KEYWORD_CLUSTER))
throw new OCommandSQLParsingException("Keyword " + KEYWORD_CLUSTER + " not found. Use " + getSyntax(), parserText, oldPos);
oldPos = pos;
pos = nextWord(parserText, parserText, oldPos, word, true);
if (pos == -1)
throw new OCommandSQLParsingException("Expected cluster name. Use " + getSyntax(), parserText, oldPos);
clusterName = word.toString();
final ODatabaseRecord database = getDatabase();
if (database.getClusterIdByName(clusterName) == -1)
throw new OCommandSQLParsingException("Cluster '" + clusterName + "' not found", parserText, oldPos);
return this;
}
/**
* Execute the command.
*/
public Object execute(final Map<Object, Object> iArgs) {
if (clusterName == null)
throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
final OCluster cluster = ((OStorageEmbedded) getDatabase().getStorage()).getClusterByName(clusterName);
final long recs = cluster.getEntries();
try {
cluster.truncate();
} catch (IOException e) {
throw new OCommandExecutionException("Error on executing command", e);
}
return recs;
}
@Override
public String getSyntax() {
return "TRUNCATE CLUSTER <cluster-name>";
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLTruncateCluster.java |
2,118 | public abstract class ESLoggerFactory {
private static volatile ESLoggerFactory defaultFactory = new JdkESLoggerFactory();
static {
try {
Class<?> loggerClazz = Class.forName("org.apache.log4j.Logger");
            // below will throw a NoSuchMethod failure when using the slf4j log4j bridge
loggerClazz.getMethod("setLevel", Class.forName("org.apache.log4j.Level"));
defaultFactory = new Log4jESLoggerFactory();
} catch (Throwable e) {
// no log4j
try {
Class.forName("org.slf4j.Logger");
defaultFactory = new Slf4jESLoggerFactory();
} catch (Throwable e1) {
// no slf4j
}
}
}
/**
* Changes the default factory.
*/
public static void setDefaultFactory(ESLoggerFactory defaultFactory) {
if (defaultFactory == null) {
throw new NullPointerException("defaultFactory");
}
ESLoggerFactory.defaultFactory = defaultFactory;
}
public static ESLogger getLogger(String prefix, String name) {
return defaultFactory.newInstance(prefix == null ? null : prefix.intern(), name.intern());
}
public static ESLogger getLogger(String name) {
return defaultFactory.newInstance(name.intern());
}
public static ESLogger getRootLogger() {
return defaultFactory.rootLogger();
}
public ESLogger newInstance(String name) {
return newInstance(null, name);
}
protected abstract ESLogger rootLogger();
protected abstract ESLogger newInstance(String prefix, String name);
} | 0true
| src_main_java_org_elasticsearch_common_logging_ESLoggerFactory.java |
502 | @SuppressWarnings("unchecked")
public class ODictionary<T extends Object> {
private OIndex<OIdentifiable> index;
public ODictionary(final OIndex<OIdentifiable> iIndex) {
index = iIndex;
}
public <RET extends T> RET get(final String iKey) {
final OIdentifiable value = index.get(iKey);
if (value == null)
return null;
return (RET) value.getRecord();
}
public <RET extends T> RET get(final String iKey, final String iFetchPlan) {
final OIdentifiable value = index.get(iKey);
if (value == null)
return null;
if (value instanceof ORID)
return (RET) ODatabaseRecordThreadLocal.INSTANCE.get().load(((ORID) value), iFetchPlan);
return (RET) ((ODocument) value).load(iFetchPlan);
}
public void put(final String iKey, final Object iValue) {
index.put(iKey, (OIdentifiable) iValue);
}
public boolean containsKey(final String iKey) {
return index.contains(iKey);
}
public boolean remove(final String iKey) {
return index.remove(iKey);
}
public long size() {
return index.getSize();
}
public Iterable<Object> keys() {
return index.keys();
}
public OIndex<OIdentifiable> getIndex() {
return index;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_dictionary_ODictionary.java |
2,054 | public final class GetOperation extends KeyBasedMapOperation
implements IdentifiedDataSerializable, ReadonlyOperation {
private Data result;
public GetOperation() {
}
public GetOperation(String name, Data dataKey) {
super(name, dataKey);
}
public void run() {
result = mapService.toData(recordStore.get(dataKey));
}
public void afterRun() {
mapService.interceptAfterGet(name, result);
}
@Override
public Object getResponse() {
return result;
}
@Override
public String toString() {
return "GetOperation{}";
}
public int getFactoryId() {
return MapDataSerializerHook.F_ID;
}
public int getId() {
return MapDataSerializerHook.GET;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_GetOperation.java |
99 | @SuppressWarnings("restriction")
public class OUnsafeMemory implements ODirectMemory {
public static final OUnsafeMemory INSTANCE;
protected static final Unsafe unsafe;
private static final boolean unaligned;
private static final long UNSAFE_COPY_THRESHOLD = 1024L * 1024L;
static {
OUnsafeMemory futureInstance;
unsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
public Object run() {
try {
Field f = Unsafe.class.getDeclaredField("theUnsafe");
f.setAccessible(true);
return f.get(null);
} catch (NoSuchFieldException e) {
throw new Error();
} catch (IllegalAccessException e) {
throw new Error();
}
}
});
try {
unsafe.getClass().getDeclaredMethod("copyMemory", Object.class, long.class, Object.class, long.class, long.class);
Class<?> unsafeMemoryJava7 = OUnsafeMemory.class.getClassLoader().loadClass(
"com.orientechnologies.common.directmemory.OUnsafeMemoryJava7");
futureInstance = (OUnsafeMemory) unsafeMemoryJava7.newInstance();
} catch (Exception e) {
futureInstance = new OUnsafeMemory();
}
INSTANCE = futureInstance;
String arch = System.getProperty("os.arch");
unaligned = arch.equals("i386") || arch.equals("x86") || arch.equals("amd64") || arch.equals("x86_64");
}
@Override
public long allocate(byte[] bytes) {
final long pointer = unsafe.allocateMemory(bytes.length);
set(pointer, bytes, 0, bytes.length);
return pointer;
}
@Override
public long allocate(long size) {
return unsafe.allocateMemory(size);
}
@Override
public void free(long pointer) {
unsafe.freeMemory(pointer);
}
@Override
public byte[] get(long pointer, final int length) {
final byte[] result = new byte[length];
for (int i = 0; i < length; i++)
result[i] = unsafe.getByte(pointer++);
return result;
}
@Override
public void get(long pointer, byte[] array, int arrayOffset, int length) {
pointer += arrayOffset;
for (int i = arrayOffset; i < length + arrayOffset; i++)
array[i] = unsafe.getByte(pointer++);
}
@Override
public void set(long pointer, byte[] content, int arrayOffset, int length) {
for (int i = arrayOffset; i < length + arrayOffset; i++)
unsafe.putByte(pointer++, content[i]);
}
@Override
public int getInt(long pointer) {
if (unaligned)
return unsafe.getInt(pointer);
return (0xFF & unsafe.getByte(pointer++)) << 24 | (0xFF & unsafe.getByte(pointer++)) << 16
| (0xFF & unsafe.getByte(pointer++)) << 8 | (0xFF & unsafe.getByte(pointer));
}
@Override
public void setInt(long pointer, int value) {
if (unaligned)
unsafe.putInt(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 24));
unsafe.putByte(pointer++, (byte) (value >>> 16));
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) (value));
}
}
@Override
public void setShort(long pointer, short value) {
if (unaligned)
unsafe.putShort(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) value);
}
}
@Override
public short getShort(long pointer) {
if (unaligned)
return unsafe.getShort(pointer);
return (short) (unsafe.getByte(pointer++) << 8 | (unsafe.getByte(pointer) & 0xff));
}
@Override
public void setChar(long pointer, char value) {
if (unaligned)
unsafe.putChar(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) (value));
}
}
@Override
public char getChar(long pointer) {
if (unaligned)
return unsafe.getChar(pointer);
return (char) ((unsafe.getByte(pointer++) << 8) | (unsafe.getByte(pointer) & 0xff));
}
@Override
public long getLong(long pointer) {
if (unaligned)
return unsafe.getLong(pointer);
return (0xFFL & unsafe.getByte(pointer++)) << 56 | (0xFFL & unsafe.getByte(pointer++)) << 48
| (0xFFL & unsafe.getByte(pointer++)) << 40 | (0xFFL & unsafe.getByte(pointer++)) << 32
| (0xFFL & unsafe.getByte(pointer++)) << 24 | (0xFFL & unsafe.getByte(pointer++)) << 16
| (0xFFL & unsafe.getByte(pointer++)) << 8 | (0xFFL & unsafe.getByte(pointer));
}
@Override
public void setLong(long pointer, long value) {
if (unaligned)
unsafe.putLong(pointer, value);
else {
unsafe.putByte(pointer++, (byte) (value >>> 56));
unsafe.putByte(pointer++, (byte) (value >>> 48));
unsafe.putByte(pointer++, (byte) (value >>> 40));
unsafe.putByte(pointer++, (byte) (value >>> 32));
unsafe.putByte(pointer++, (byte) (value >>> 24));
unsafe.putByte(pointer++, (byte) (value >>> 16));
unsafe.putByte(pointer++, (byte) (value >>> 8));
unsafe.putByte(pointer, (byte) (value));
}
}
@Override
public byte getByte(long pointer) {
return unsafe.getByte(pointer);
}
@Override
public void setByte(long pointer, byte value) {
unsafe.putByte(pointer, value);
}
@Override
public void moveData(long srcPointer, long destPointer, long len) {
while (len > 0) {
long size = (len > UNSAFE_COPY_THRESHOLD) ? UNSAFE_COPY_THRESHOLD : len;
unsafe.copyMemory(srcPointer, destPointer, size);
len -= size;
srcPointer += size;
destPointer += size;
}
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_directmemory_OUnsafeMemory.java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.