Unnamed: 0 (int64, 0–6.45k) | func (string, 29–253k chars) | target (class label, 2 classes) | project (string, 36–167 chars)
---|---|---|---|
18 | public final class DiskBufferEnv implements DataBufferEnv, Cloneable {
private static final Logger LOGGER = LoggerFactory.getLogger(DiskBufferEnv.class);
private static final String META_DATABASE_PATH = "metaBuffer";
private static final String META_DATABASE_NAME = "meta";
private static enum STATE {
unInitialized, initializing, initialized;
}
private Environment dbufferEnv;
private STATE state = STATE.unInitialized;
private final Properties prop;
private volatile long bufferTimeMills;
private long evictorRecurrMills;
private File envHome;
private final int concurrency;
private final int bufferWriteThreadPoolSize;
private final int numOfBufferPartitions;
private final int currentBufferPartition;
private final long partitionOverlapMillis;
private final long metaRefreshMillis;
private TransactionConfig txnConfig;
private CursorConfig cursorConfig;
private DiskQuotaHelper diskQuotaHelper;
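/**
* Loads properties/feed.properties from the classpath. A missing or unreadable file is only
* logged, so this returns an empty Properties object and the constructors fail later with a
* NumberFormatException on the first numeric key they parse.
*/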
private static Properties loadDefaultPropertyFile() {
Properties prop = new Properties();
InputStream is = null;
try {
is = ClassLoader.getSystemResourceAsStream("properties/feed.properties");
prop.load(is);
} catch (Exception e) {
LOGGER.error("Cannot initialized DataBufferEnv properties", e);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException ioe) {
// ignore exception
}
}
}
return prop;
}
public String getErrorMsg() {
return diskQuotaHelper.getErrorMsg();
}
public DiskBufferEnv(Properties prop) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = 0;
File bufferHome = new File(FilepathReplacer.substitute(getPropertyWithPrecedence(prop, "buffer.disk.loc")));
if (!bufferHome.exists()) {
bufferHome.mkdirs();
}
envHome = new File(bufferHome, META_DATABASE_PATH);
if (!envHome.exists()) {
envHome.mkdirs();
}
concurrency = Integer.parseInt(prop.getProperty("buffer.concurrency"));
evictorRecurrMills = Long.parseLong(prop.getProperty("buffer.evictor.recurrMills"));
bufferWriteThreadPoolSize = Integer.parseInt(prop.getProperty("buffer.write.threadPool.size"));
numOfBufferPartitions = Integer.parseInt(prop.getProperty("buffer.partitions"));
bufferTimeMills = Long.parseLong(prop.getProperty("buffer.time.millis"));
metaRefreshMillis = Long.parseLong(prop.getProperty("meta.buffer.refresh.millis"));
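// Spread the total buffer time across the partitions. Note the guard compares a duration in
// milliseconds to the partition count, so the division is skipped only for very small values.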
if (bufferTimeMills > numOfBufferPartitions) {
bufferTimeMills = bufferTimeMills / numOfBufferPartitions;
}
partitionOverlapMillis = Long.parseLong(prop.getProperty("buffer.partition.overlap.millis"));
diskQuotaHelper = new DiskQuotaHelper(prop, bufferHome);
this.state = STATE.initializing;
setup(false);
}
public DiskBufferEnv(Properties prop, int currentBufferPartition) {
if (prop == null) {
prop = loadDefaultPropertyFile();
}
this.prop = prop;
this.currentBufferPartition = currentBufferPartition;
File bufferHome = new File(FilepathReplacer.substitute(getPropertyWithPrecedence(prop, "buffer.disk.loc")));
if (!bufferHome.exists()) {
bufferHome.mkdirs();
}
envHome = new File(bufferHome, String.valueOf(currentBufferPartition));
if (!envHome.exists()) {
envHome.mkdirs();
}
concurrency = Integer.parseInt(prop.getProperty("buffer.concurrency"));
evictorRecurrMills = Long.parseLong(prop.getProperty("buffer.evictor.recurrMills"));
bufferWriteThreadPoolSize = Integer.parseInt(prop.getProperty("buffer.write.threadPool.size"));
numOfBufferPartitions = Integer.parseInt(prop.getProperty("buffer.partitions"));
bufferTimeMills = Long.parseLong(prop.getProperty("buffer.time.millis"));
bufferTimeMills = bufferTimeMills / numOfBufferPartitions;
partitionOverlapMillis = Long.parseLong(prop.getProperty("buffer.partition.overlap.millis"));
metaRefreshMillis = Long.parseLong(prop.getProperty("meta.buffer.refresh.millis"));
diskQuotaHelper = new DiskQuotaHelper(prop, bufferHome);
this.state = STATE.initializing;
setup(false);
}
private void setup(boolean readOnly) {
assertState(STATE.initializing);
// Instantiate an environment configuration object
EnvironmentConfig envConfig = new EnvironmentConfig();
envConfig.setSharedCache(true);
String cachePercent = prop.getProperty("bdb.cache.percent");
if (cachePercent != null) {
envConfig.setCachePercent(Integer.parseInt(cachePercent));
}
// Configure the environment for the read-only state as identified by
// the readOnly parameter on this method call.
envConfig.setReadOnly(readOnly);
// If the environment is opened for write, then we want to be able to
// create the environment if it does not exist.
envConfig.setAllowCreate(true);
envConfig.setConfigParam(EnvironmentConfig.CHECKPOINTER_BYTES_INTERVAL, "40000000");
envConfig.setTransactional(false);
envConfig.setDurability(Durability.COMMIT_NO_SYNC);
envConfig.setConfigParam(EnvironmentConfig.ENV_RUN_CLEANER, Boolean.FALSE.toString());
envConfig.setConfigParam(EnvironmentConfig.ENV_IS_LOCKING, Boolean.FALSE.toString());
setupConfig();
// Instantiate the Environment. This opens it and also possibly
// creates it.
try {
dbufferEnv = new Environment(envHome, envConfig);
state = STATE.initialized;
} catch (DatabaseException de) {
LOGGER.error("DatabaseException in setup", de);
state = STATE.unInitialized;
}
}
private void setupConfig() {
txnConfig = new TransactionConfig();
txnConfig.setReadUncommitted(true);
txnConfig.setDurability(Durability.COMMIT_NO_SYNC);
cursorConfig = new CursorConfig();
cursorConfig.setReadUncommitted(true);
}
private String getPropertyWithPrecedence(Properties localProps, String key) {
String systemProp = System.getProperty(key);
return systemProp != null ? systemProp.trim() : localProps.getProperty(key, "unset").trim();
}
public Database openMetaDiskStore() throws DatabaseException {
assertState(STATE.initialized);
DatabaseConfig dbConfig = new DatabaseConfig();
dbConfig.setAllowCreate(true);
dbConfig.setSortedDuplicates(false);
dbConfig.setTransactional(false);
Database diskStore = dbufferEnv.openDatabase(null, META_DATABASE_NAME, dbConfig);
return diskStore;
}
public Database openDiskStore(String dbName, SecondaryKeyCreator... keyCreators) throws DatabaseException {
assertState(STATE.initialized);
DatabaseConfig dbConfig = new DatabaseConfig();
dbConfig.setAllowCreate(true);
dbConfig.setSortedDuplicates(false);
dbConfig.setTransactional(false);
Database diskStore = dbufferEnv.openDatabase(null, dbName, dbConfig);
int i=0;
for (SecondaryKeyCreator keyCreator : keyCreators) {
SecondaryConfig secDbConfig = new SecondaryConfig();
secDbConfig.setKeyCreator(keyCreator);
secDbConfig.setAllowCreate(true);
secDbConfig.setSortedDuplicates(true);
secDbConfig.setTransactional(false);
// Perform the actual open
String secDbName = dbName + i;
dbufferEnv.openSecondaryDatabase(null, secDbName, diskStore, secDbConfig);
i++;
}
return diskStore;
}
public boolean isDiskBufferFull() {
return diskQuotaHelper.isDiskBufferFull();
}
public Transaction beginTransaction() throws DatabaseException {
assertState(STATE.initialized);
TransactionConfig txnConfig = new TransactionConfig();
txnConfig.setReadUncommitted(true);
return dbufferEnv.beginTransaction(null, txnConfig);
}
public SecondaryCursor openSecondaryCursor(Transaction txn, Database database, int index) throws DatabaseException {
List<SecondaryDatabase> secDbs = database.getSecondaryDatabases();
assert secDbs.size() == 2;
SecondaryDatabase secDb = secDbs.get(index);
SecondaryCursor mySecCursor = secDb.openCursor(txn, cursorConfig);
return mySecCursor;
}
public void removeEnvironment() throws DatabaseException {
dbufferEnv.cleanLog();
dbufferEnv.close();
deleteDatabaseFile(currentBufferPartition);
this.state = STATE.unInitialized;
}
public void closeEnvironment() throws DatabaseException {
dbufferEnv.cleanLog();
dbufferEnv.close();
this.state = STATE.unInitialized;
}
public void removeAndCloseAllDiskStores() throws DatabaseException {
List<String> dbNames = dbufferEnv.getDatabaseNames();
for (String dbName : dbNames) {
try {
dbufferEnv.removeDatabase(null, dbName);
} catch (DatabaseException de) {
continue;
}
}
closeEnvironment();
}
public void closeDatabase(Database database) throws DatabaseException {
if (database == null) { return; }
List<SecondaryDatabase> secDbs = database.getSecondaryDatabases();
for (Database secDb : secDbs) {
secDb.close();
}
database.close();
}
public void closeAndRestartEnvironment() throws DatabaseException {
boolean isReadOnly = dbufferEnv.getConfig().getReadOnly();
removeAndCloseAllDiskStores();
restartEnvironment(isReadOnly);
}
public void restartEnvironment(boolean isReadOnly) throws DatabaseException {
state = STATE.initializing;
setup(isReadOnly);
}
public int getConcurrencyDegree() {
return concurrency;
}
public int getBufferWriteThreadPoolSize() {
return bufferWriteThreadPoolSize;
}
public long getBufferTime() {
return bufferTimeMills;
}
public long getEvictorRecurr() {
return evictorRecurrMills;
}
public int getNumOfBufferPartitions() {
return numOfBufferPartitions;
}
public void setBufferTime(long bufferTimeMills) {
this.bufferTimeMills = bufferTimeMills;
}
public long getBufferPartitionOverlap() {
return partitionOverlapMillis;
}
public int getCurrentBufferPartition() {
return currentBufferPartition;
}
public DataBufferEnv advanceBufferPartition() {
int nextBufferPartition = nextBufferPartition();
deleteDatabaseFile(nextBufferPartition);
DiskBufferEnv newBufferEnv = new DiskBufferEnv(prop, (this.currentBufferPartition + 1) % numOfBufferPartitions);
return newBufferEnv;
}
private void deleteDatabaseFile(int partitionNo) {
File parentDir = this.envHome.getParentFile();
File nextBufferPartitionDir = new File(parentDir, String.valueOf(partitionNo));
if (nextBufferPartitionDir.exists()) {
if (nextBufferPartitionDir.isDirectory()) {
File[] files = nextBufferPartitionDir.listFiles();
for (File f: files) {
f.delete();
}
}
nextBufferPartitionDir.delete();
}
}
public int nextBufferPartition() {
return (this.currentBufferPartition+1)%numOfBufferPartitions;
}
public int previousBufferPartition(int currentPartition) {
int i = currentPartition;
if (i == 0) {
i = this.numOfBufferPartitions-1;
} else {
i--;
}
return i;
}
public long getMetaRefresh() {
return this.metaRefreshMillis;
}
@Override
public Object clone() {
return new DiskBufferEnv(prop, 0);
}
@Override
public Object cloneMetaBuffer() {
return new DiskBufferEnv(prop);
}
private void assertState(STATE expectedState) {
assert this.state == expectedState;
}
@Override
public Properties getConfigProperties() {
return this.prop;
}
public void flush() {
this.dbufferEnv.sync();
}
@Override
public LOS getLOS() {
return LOS.medium;
}
} | 0true
| timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_config_DiskBufferEnv.java |
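A minimal usage sketch for the DiskBufferEnv row above, assuming the Berkeley DB JE library on the classpath and a hand-built Properties object supplying every key the constructor parses; all property values and the store name below are illustrative, not project defaults.

import java.util.Properties;
import com.sleepycat.je.Database;

public class DiskBufferEnvSketch {
    public static void main(String[] args) throws Exception {
        Properties prop = new Properties();
        prop.setProperty("buffer.disk.loc", "/tmp/mctBuffer");      // illustrative values
        prop.setProperty("buffer.concurrency", "4");
        prop.setProperty("buffer.evictor.recurrMills", "60000");
        prop.setProperty("buffer.write.threadPool.size", "2");
        prop.setProperty("buffer.partitions", "4");
        prop.setProperty("buffer.time.millis", "86400000");
        prop.setProperty("meta.buffer.refresh.millis", "5000");
        prop.setProperty("buffer.partition.overlap.millis", "1000");

        DiskBufferEnv env = new DiskBufferEnv(prop, 0);             // partition 0 of the ring
        Database store = env.openDiskStore("feedData");             // hypothetical store name
        // ... read/write through the Berkeley DB JE Database API ...
        env.closeDatabase(store);
        // Rotating to the next partition deletes that partition's directory first.
        DiskBufferEnv next = (DiskBufferEnv) env.advanceBufferPartition();
        env.closeEnvironment();
        next.closeEnvironment();
    }
}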
1,051 | @SuppressWarnings("unchecked")
public class OCommandExecutorSQLSelect extends OCommandExecutorSQLResultsetAbstract {
private static final String KEYWORD_AS = " AS ";
public static final String KEYWORD_SELECT = "SELECT";
public static final String KEYWORD_ASC = "ASC";
public static final String KEYWORD_DESC = "DESC";
public static final String KEYWORD_ORDER = "ORDER";
public static final String KEYWORD_BY = "BY";
public static final String KEYWORD_GROUP = "GROUP";
public static final String KEYWORD_FETCHPLAN = "FETCHPLAN";
private static final int MIN_THRESHOLD_USE_INDEX_AS_TARGET = 100;
private Map<String, String> projectionDefinition = null;
private Map<String, Object> projections = null; // THIS HAS BEEN KEPT FOR COMPATIBILITY; BUT
// IT'S
// USED THE
// PROJECTIONS IN GROUPED-RESULTS
private List<OPair<String, String>> orderedFields;
private List<String> groupByFields;
private Map<Object, ORuntimeResult> groupedResult;
private Object expandTarget;
private int fetchLimit = -1;
private OIdentifiable lastRecord;
private Iterator<OIdentifiable> subIterator;
private String fetchPlan;
/**
* Compile the filter conditions only the first time.
*/
public OCommandExecutorSQLSelect parse(final OCommandRequest iRequest) {
super.parse(iRequest);
if (context == null)
context = new OBasicCommandContext();
final int pos = parseProjections();
if (pos == -1)
return this;
final int endPosition = parserText.length();
parserNextWord(true);
if (parserGetLastWord().equalsIgnoreCase(KEYWORD_FROM)) {
// FROM
parsedTarget = OSQLEngine.getInstance().parseTarget(parserText.substring(parserGetCurrentPosition(), endPosition),
getContext(), KEYWORD_WHERE);
parserSetCurrentPosition(parsedTarget.parserIsEnded() ? endPosition : parsedTarget.parserGetCurrentPosition()
+ parserGetCurrentPosition());
} else
parserGoBack();
if (!parserIsEnded()) {
parserSkipWhiteSpaces();
while (!parserIsEnded()) {
parserNextWord(true);
if (!parserIsEnded()) {
final String w = parserGetLastWord();
if (w.equals(KEYWORD_WHERE)) {
compiledFilter = OSQLEngine.getInstance().parseCondition(parserText.substring(parserGetCurrentPosition(), endPosition),
getContext(), KEYWORD_WHERE);
optimize();
parserSetCurrentPosition(compiledFilter.parserIsEnded() ? endPosition : compiledFilter.parserGetCurrentPosition()
+ parserGetCurrentPosition());
} else if (w.equals(KEYWORD_LET))
parseLet();
else if (w.equals(KEYWORD_GROUP))
parseGroupBy(w);
else if (w.equals(KEYWORD_ORDER))
parseOrderBy(w);
else if (w.equals(KEYWORD_LIMIT))
parseLimit(w);
else if (w.equals(KEYWORD_SKIP))
parseSkip(w);
else if (w.equals(KEYWORD_FETCHPLAN))
parseFetchplan(w);
else if (w.equals(KEYWORD_TIMEOUT))
parseTimeout(w);
else
throwParsingException("Invalid keyword '" + w + "'");
}
}
}
if (limit == 0 || limit < -1) {
throw new IllegalArgumentException("Limit must be > 0 or = -1 (no limit)");
}
return this;
}
/**
* Determines the clusters that are used in the select operation
*
* @return set of involved clusters
*/
public Set<Integer> getInvolvedClusters() {
final Set<Integer> clusters = new HashSet<Integer>();
if (parsedTarget.getTargetRecords() != null) {
for (OIdentifiable identifiable : parsedTarget.getTargetRecords()) {
clusters.add(identifiable.getIdentity().getClusterId());
}
}
if (parsedTarget.getTargetClasses() != null) {
final OStorage storage = getDatabase().getStorage();
for (String clazz : parsedTarget.getTargetClasses().values()) {
clusters.add(storage.getClusterIdByName(clazz));
}
}
if (parsedTarget.getTargetClusters() != null) {
final OStorage storage = getDatabase().getStorage();
for (String clazz : parsedTarget.getTargetClusters().values()) {
clusters.add(storage.getClusterIdByName(clazz));
}
}
if (parsedTarget.getTargetIndex() != null) {
// TODO indexes??
}
return clusters;
}
/**
* Adds a condition so that the query is executed only on the given id range. This is used to ensure that the query runs
* on a single node
*
* @param fromId
* @param toId
* @return this
*/
public OCommandExecutorSQLSelect boundToLocalNode(long fromId, long toId) {
if (fromId == toId) {
// single node in dht
return this;
}
final OSQLFilterCondition nodeCondition;
if (fromId < toId) {
nodeCondition = getConditionForRidPosRange(fromId, toId);
} else {
nodeCondition = new OSQLFilterCondition(getConditionForRidPosRange(fromId, Long.MAX_VALUE), new OQueryOperatorOr(),
getConditionForRidPosRange(-1L, toId));
}
if (compiledFilter == null) {
compiledFilter = OSQLEngine.getInstance().parseCondition("", getContext(), KEYWORD_WHERE);
}
final OSQLFilterCondition rootCondition = compiledFilter.getRootCondition();
if (rootCondition != null) {
compiledFilter.setRootCondition(new OSQLFilterCondition(nodeCondition, new OQueryOperatorAnd(), rootCondition));
} else {
compiledFilter.setRootCondition(nodeCondition);
}
return this;
}
protected static OSQLFilterCondition getConditionForRidPosRange(long fromId, long toId) {
final OSQLFilterCondition fromCondition = new OSQLFilterCondition(new OSQLFilterItemField(null,
ODocumentHelper.ATTRIBUTE_RID_POS), new OQueryOperatorMajor(), fromId);
final OSQLFilterCondition toCondition = new OSQLFilterCondition(
new OSQLFilterItemField(null, ODocumentHelper.ATTRIBUTE_RID_POS), new OQueryOperatorMinorEquals(), toId);
return new OSQLFilterCondition(fromCondition, new OQueryOperatorAnd(), toCondition);
}
/**
* @return {@code true} if any of the SQL functions performs aggregation, {@code false} otherwise
*/
public boolean isAnyFunctionAggregates() {
if (projections != null) {
for (Entry<String, Object> p : projections.entrySet()) {
if (p.getValue() instanceof OSQLFunctionRuntime && ((OSQLFunctionRuntime) p.getValue()).aggregateResults())
return true;
}
}
return false;
}
public boolean hasNext() {
if (lastRecord == null)
// GET THE NEXT
lastRecord = next();
// BROWSE ALL THE RECORDS
return lastRecord != null;
}
public OIdentifiable next() {
if (lastRecord != null) {
// RETURN LATEST AND RESET IT
final OIdentifiable result = lastRecord;
lastRecord = null;
return result;
}
if (subIterator == null) {
if (target == null) {
// GET THE RESULT
executeSearch(null);
applyExpand();
handleNoTarget();
handleGroupBy();
applyOrderBy();
subIterator = new ArrayList<OIdentifiable>((List<OIdentifiable>) getResult()).iterator();
lastRecord = null;
tempResult = null;
groupedResult = null;
} else
subIterator = (Iterator<OIdentifiable>) target;
}
// RESUME THE LAST POSITION
if (lastRecord == null && subIterator != null)
while (subIterator.hasNext()) {
lastRecord = subIterator.next();
if (lastRecord != null)
return lastRecord;
}
return lastRecord;
}
public void remove() {
throw new UnsupportedOperationException("remove()");
}
public Iterator<OIdentifiable> iterator() {
return this;
}
public Object execute(final Map<Object, Object> iArgs) {
if (iArgs != null)
// BIND ARGUMENTS INTO CONTEXT TO ACCESS FROM ANY POINT (EVEN FUNCTIONS)
for (Entry<Object, Object> arg : iArgs.entrySet())
context.setVariable(arg.getKey().toString(), arg.getValue());
if (timeoutMs > 0)
getContext().beginExecution(timeoutMs, timeoutStrategy);
if (!optimizeExecution()) {
fetchLimit = getQueryFetchLimit();
executeSearch(iArgs);
applyExpand();
handleNoTarget();
handleGroupBy();
applyOrderBy();
applyLimitAndSkip();
}
return getResult();
}
protected void executeSearch(final Map<Object, Object> iArgs) {
assignTarget(iArgs);
if (target == null) {
if (let != null)
// EXECUTE ONCE TO ASSIGN THE LET
assignLetClauses(lastRecord != null ? lastRecord.getRecord() : null);
// SEARCH WITHOUT USING TARGET (USUALLY WHEN LET/INDEXES ARE INVOLVED)
return;
}
final long startFetching = System.currentTimeMillis();
try {
// BROWSE ALL THE RECORDS
while (target.hasNext())
if (!executeSearchRecord(target.next()))
break;
} finally {
context.setVariable("fetchingFromTargetElapsed", (System.currentTimeMillis() - startFetching));
}
if (request.getResultListener() != null)
request.getResultListener().end();
}
@Override
protected boolean assignTarget(Map<Object, Object> iArgs) {
if (!super.assignTarget(iArgs)) {
if (parsedTarget.getTargetIndex() != null)
searchInIndex();
else
throw new OQueryParsingException("No source found in query: specify class, cluster(s), index or single record(s). Use "
+ getSyntax());
}
return true;
}
protected boolean executeSearchRecord(final OIdentifiable id) {
if (Thread.interrupted())
throw new OCommandExecutionException("The select execution has been interrupted");
if (!context.checkTimeout())
return false;
final ORecordInternal<?> record = id.getRecord();
context.updateMetric("recordReads", +1);
if (record == null || record.getRecordType() != ODocument.RECORD_TYPE)
// SKIP IT
return true;
context.updateMetric("documentReads", +1);
if (filter(record))
if (!handleResult(record, true))
// END OF EXECUTION
return false;
return true;
}
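/**
* Applies SKIP (only when no ORDER BY is pending), optionally clones the record, counts it and
* forwards it to addResult(). Returns false once the fetch limit is reached so the caller stops
* scanning.
*/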
protected boolean handleResult(final OIdentifiable iRecord, final boolean iCloneIt) {
lastRecord = null;
if (orderedFields == null && skip > 0) {
skip--;
return true;
}
if (iCloneIt)
lastRecord = iRecord instanceof ORecord<?> ? ((ORecord<?>) iRecord).copy() : iRecord.getIdentity().copy();
else
lastRecord = iRecord;
resultCount++;
addResult(lastRecord);
if (orderedFields == null && !isAnyFunctionAggregates() && fetchLimit > -1 && resultCount >= fetchLimit)
// BREAK THE EXECUTION
return false;
return true;
}
protected void addResult(OIdentifiable iRecord) {
if (iRecord == null)
return;
if (projections != null || groupByFields != null && !groupByFields.isEmpty()) {
if (groupedResult == null) {
// APPLY PROJECTIONS IN LINE
iRecord = ORuntimeResult.getProjectionResult(resultCount, projections, context, iRecord);
if (iRecord == null)
return;
} else {
// AGGREGATION/GROUP BY
final ODocument doc = (ODocument) iRecord.getRecord();
Object fieldValue = null;
if (groupByFields != null && !groupByFields.isEmpty()) {
if (groupByFields.size() > 1) {
// MULTI-FIELD GROUP BY
final Object[] fields = new Object[groupByFields.size()];
for (int i = 0; i < groupByFields.size(); ++i) {
final String field = groupByFields.get(i);
if (field.startsWith("$"))
fields[i] = context.getVariable(field);
else
fields[i] = doc.field(field);
}
fieldValue = fields;
} else {
final String field = groupByFields.get(0);
if (field != null) {
if (field.startsWith("$"))
fieldValue = context.getVariable(field);
else
fieldValue = doc.field(field);
}
}
}
getProjectionGroup(fieldValue).applyRecord(iRecord);
return;
}
}
if (orderedFields == null && expandTarget == null) {
// SEND THE RESULT INLINE
if (request.getResultListener() != null)
request.getResultListener().result(iRecord);
} else {
// COLLECT ALL THE RECORDS AND ORDER THEM AT THE END
if (tempResult == null)
tempResult = new ArrayList<OIdentifiable>();
((Collection<OIdentifiable>) tempResult).add(iRecord);
}
}
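/**
* Finds or lazily creates the GROUP BY bucket for the given key. Multi-field keys arrive as an
* Object[] and are flattened into a comma-separated string so they can be hashed cheaply instead
* of being compared element by element.
*/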
protected ORuntimeResult getProjectionGroup(final Object fieldValue) {
ORuntimeResult group = null;
final long projectionElapsed = (Long) context.getVariable("projectionElapsed", 0l);
final long begin = System.currentTimeMillis();
try {
Object key = null;
if (groupedResult == null)
groupedResult = new LinkedHashMap<Object, ORuntimeResult>();
if (fieldValue != null) {
if (fieldValue.getClass().isArray()) {
// LOOK IT BY HASH (FASTER THAN COMPARE EACH SINGLE VALUE)
final Object[] array = (Object[]) fieldValue;
final StringBuilder keyArray = new StringBuilder();
for (Object o : array) {
if (keyArray.length() > 0)
keyArray.append(",");
if (o != null)
keyArray.append(o instanceof OIdentifiable ? ((OIdentifiable) o).getIdentity().toString() : o.toString());
else
keyArray.append("null");
}
key = keyArray.toString();
} else
// LOOK UP THE FIELD
key = fieldValue;
}
group = groupedResult.get(key);
if (group == null) {
group = new ORuntimeResult(fieldValue, createProjectionFromDefinition(), resultCount, context);
groupedResult.put(key, group);
}
return group;
} finally {
context.setVariable("projectionElapsed", projectionElapsed + (System.currentTimeMillis() - begin));
}
}
private int getQueryFetchLimit() {
if (orderedFields != null) {
return -1;
}
final int sqlLimit;
final int requestLimit;
if (limit > -1)
sqlLimit = limit;
else
sqlLimit = -1;
if (request.getLimit() > -1)
requestLimit = request.getLimit();
else
requestLimit = -1;
if (sqlLimit == -1)
return requestLimit;
if (requestLimit == -1)
return sqlLimit;
return Math.min(sqlLimit, requestLimit);
}
public Map<String, Object> getProjections() {
return projections;
}
public List<OPair<String, String>> getOrderedFields() {
return orderedFields;
}
protected void parseGroupBy(final String w) {
parserRequiredKeyword(KEYWORD_BY);
groupByFields = new ArrayList<String>();
while (!parserIsEnded() && (groupByFields.size() == 0 || parserGetLastSeparator() == ',' || parserGetCurrentChar() == ',')) {
final String fieldName = parserRequiredWord(false, "Field name expected");
groupByFields.add(fieldName);
parserSkipWhiteSpaces();
}
if (groupByFields.size() == 0)
throwParsingException("Group by field set was missed. Example: GROUP BY name, salary");
// AGGREGATE IT
getProjectionGroup(null);
}
protected void parseOrderBy(final String w) {
parserRequiredKeyword(KEYWORD_BY);
String fieldOrdering = null;
orderedFields = new ArrayList<OPair<String, String>>();
while (!parserIsEnded() && (orderedFields.size() == 0 || parserGetLastSeparator() == ',' || parserGetCurrentChar() == ',')) {
final String fieldName = parserRequiredWord(false, "Field name expected");
parserOptionalWord(true);
final String word = parserGetLastWord();
if (word.length() == 0)
// END CLAUSE: SET AS ASC BY DEFAULT
fieldOrdering = KEYWORD_ASC;
else if (word.equals(KEYWORD_LIMIT) || word.equals(KEYWORD_SKIP)) {
// NEXT CLAUSE: SET AS ASC BY DEFAULT
fieldOrdering = KEYWORD_ASC;
parserGoBack();
} else {
if (word.equals(KEYWORD_ASC))
fieldOrdering = KEYWORD_ASC;
else if (word.equals(KEYWORD_DESC))
fieldOrdering = KEYWORD_DESC;
else
throwParsingException("Ordering mode '" + word + "' not supported. Valid is 'ASC', 'DESC' or nothing ('ASC' by default)");
}
orderedFields.add(new OPair<String, String>(fieldName, fieldOrdering));
parserSkipWhiteSpaces();
}
if (orderedFields.size() == 0)
throwParsingException("Order by field set was missed. Example: ORDER BY name ASC, salary DESC");
}
@Override
protected void searchInClasses() {
final OClass cls = parsedTarget.getTargetClasses().keySet().iterator().next();
if (searchForIndexes(cls)) {
} else
super.searchInClasses();
}
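/**
* Tries to answer the query through an index instead of a full cluster scan: candidate field
* sets are collected from the WHERE tree, sorted most-specific-first, and the first compatible
* index is queried. Returns false when no index applies, so the caller falls back to scanning.
*/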
@SuppressWarnings("rawtypes")
private boolean searchForIndexes(final OClass iSchemaClass) {
final ODatabaseRecord database = getDatabase();
database.checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_READ, iSchemaClass.getName().toLowerCase());
// Create set that is sorted by amount of fields in OIndexSearchResult items
// so the most specific restrictions will be processed first.
final List<OIndexSearchResult> indexSearchResults = new ArrayList<OIndexSearchResult>();
// fetch all possible variants of subqueries that can be used in indexes.
if (compiledFilter == null)
return false;
analyzeQueryBranch(iSchemaClass, compiledFilter.getRootCondition(), indexSearchResults);
// most specific will be processed first
Collections.sort(indexSearchResults, new Comparator<OIndexSearchResult>() {
public int compare(final OIndexSearchResult searchResultOne, final OIndexSearchResult searchResultTwo) {
return searchResultTwo.getFieldCount() - searchResultOne.getFieldCount();
}
});
// go through all variants to choose which one can be used for index search.
for (final OIndexSearchResult searchResult : indexSearchResults) {
final List<OIndex<?>> involvedIndexes = getInvolvedIndexes(iSchemaClass, searchResult);
Collections.sort(involvedIndexes, IndexComparator.INSTANCE);
// go through all possible index for given set of fields.
for (final OIndex index : involvedIndexes) {
if (index.isRebuiding())
continue;
final OIndexDefinition indexDefinition = index.getDefinition();
final OQueryOperator operator = searchResult.lastOperator;
// we need to test that last field in query subset and field in index that has the same position
// are equals.
if (!OIndexSearchResult.isIndexEqualityOperator(operator)) {
final String lastField = searchResult.lastField.getItemName(searchResult.lastField.getItemCount() - 1);
final String relatedIndexField = indexDefinition.getFields().get(searchResult.fieldValuePairs.size());
if (!lastField.equals(relatedIndexField))
continue;
}
final int searchResultFieldsCount = searchResult.fields().size();
final List<Object> keyParams = new ArrayList<Object>(searchResultFieldsCount);
// We get only subset contained in processed sub query.
for (final String fieldName : indexDefinition.getFields().subList(0, searchResultFieldsCount)) {
final Object fieldValue = searchResult.fieldValuePairs.get(fieldName);
if (fieldValue instanceof OSQLQuery<?>)
return false;
if (fieldValue != null)
keyParams.add(fieldValue);
else {
if (searchResult.lastValue instanceof OSQLQuery<?>)
return false;
keyParams.add(searchResult.lastValue);
}
}
INDEX_OPERATION_TYPE opType = null;
if (context.isRecordingMetrics()) {
Set<String> idxNames = (Set<String>) context.getVariable("involvedIndexes");
if (idxNames == null) {
idxNames = new HashSet<String>();
context.setVariable("involvedIndexes", idxNames);
}
if (index instanceof OChainedIndexProxy) {
idxNames.addAll(((OChainedIndexProxy) index).getIndexNames());
} else
idxNames.add(index.getName());
}
if (projections != null && projections.size() == 1) {
final Object v = projections.values().iterator().next();
if (v instanceof OSQLFunctionRuntime && ((OSQLFunctionRuntime) v).getFunction() instanceof OSQLFunctionCount) {
if (!(compiledFilter.getRootCondition().getLeft() instanceof OSQLFilterCondition || compiledFilter.getRootCondition()
.getRight() instanceof OSQLFilterCondition))
// OPTIMIZATION: JUST COUNT IT
opType = INDEX_OPERATION_TYPE.COUNT;
}
}
if (opType == null)
opType = INDEX_OPERATION_TYPE.GET;
OQueryOperator.IndexResultListener resultListener;
if (fetchLimit < 0 || opType == INDEX_OPERATION_TYPE.COUNT)
resultListener = null;
else
resultListener = new IndexResultListener();
Object result;
try {
result = operator.executeIndexQuery(context, index, opType, keyParams, resultListener, fetchLimit);
} catch (Exception e) {
OLogManager
.instance()
.error(
this,
"Error on using index %s in query '%s'. Probably you need to rebuild indexes. Now executing query using cluster scan",
e, index.getName(), request != null && request.getText() != null ? request.getText() : "");
return false;
}
if (result == null)
continue;
if (opType == INDEX_OPERATION_TYPE.COUNT) {
// OPTIMIZATION: EMBED THE RESULT IN A DOCUMENT AND AVOID THE CLASSIC PATH
final String projName = projectionDefinition.keySet().iterator().next();
projectionDefinition.clear();
getProjectionGroup(null).applyValue(projName, result);
} else
fillSearchIndexResultSet(result);
return true;
}
}
return false;
}
private static List<OIndex<?>> getInvolvedIndexes(OClass iSchemaClass, OIndexSearchResult searchResultFields) {
final Set<OIndex<?>> involvedIndexes = iSchemaClass.getInvolvedIndexes(searchResultFields.fields());
final List<OIndex<?>> result = new ArrayList<OIndex<?>>(involvedIndexes.size());
for (OIndex<?> involvedIndex : involvedIndexes) {
if (searchResultFields.lastField.isLong()) {
result.addAll(OChainedIndexProxy.createdProxy(involvedIndex, searchResultFields.lastField, getDatabase()));
} else {
result.add(involvedIndex);
}
}
return result;
}
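/**
* Recursively analyzes the WHERE tree: AND branches (INDEX_INTERSECTION) are merged when both
* sides produce index candidates over commonly indexed fields, while leaf conditions
* (INDEX_METHOD) become single candidates. Matches are accumulated in iIndexSearchResults.
*/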
private static OIndexSearchResult analyzeQueryBranch(final OClass iSchemaClass, OSQLFilterCondition iCondition,
final List<OIndexSearchResult> iIndexSearchResults) {
if (iCondition == null)
return null;
OQueryOperator operator = iCondition.getOperator();
while (operator == null) {
if (iCondition.getRight() == null && iCondition.getLeft() instanceof OSQLFilterCondition) {
iCondition = (OSQLFilterCondition) iCondition.getLeft();
operator = iCondition.getOperator();
} else {
return null;
}
}
final OIndexReuseType indexReuseType = operator.getIndexReuseType(iCondition.getLeft(), iCondition.getRight());
if (indexReuseType.equals(OIndexReuseType.INDEX_INTERSECTION)) {
final OIndexSearchResult leftResult = analyzeQueryBranch(iSchemaClass, (OSQLFilterCondition) iCondition.getLeft(),
iIndexSearchResults);
final OIndexSearchResult rightResult = analyzeQueryBranch(iSchemaClass, (OSQLFilterCondition) iCondition.getRight(),
iIndexSearchResults);
if (leftResult != null && rightResult != null) {
if (leftResult.canBeMerged(rightResult)) {
final OIndexSearchResult mergeResult = leftResult.merge(rightResult);
if (iSchemaClass.areIndexed(mergeResult.fields()))
iIndexSearchResults.add(mergeResult);
return leftResult.merge(rightResult);
}
}
return null;
} else if (indexReuseType.equals(OIndexReuseType.INDEX_METHOD)) {
OIndexSearchResult result = createIndexedProperty(iCondition, iCondition.getLeft());
if (result == null)
result = createIndexedProperty(iCondition, iCondition.getRight());
if (result == null)
return null;
if (checkIndexExistence(iSchemaClass, result))
iIndexSearchResults.add(result);
return result;
}
return null;
}
/**
* Add SQL filter field to the search candidate list.
*
* @param iCondition Condition item
* @param iItem Value to search
* @return true if the property was indexed and found, otherwise false
*/
private static OIndexSearchResult createIndexedProperty(final OSQLFilterCondition iCondition, final Object iItem) {
if (iItem == null || !(iItem instanceof OSQLFilterItemField))
return null;
if (iCondition.getLeft() instanceof OSQLFilterItemField && iCondition.getRight() instanceof OSQLFilterItemField)
return null;
final OSQLFilterItemField item = (OSQLFilterItemField) iItem;
if (item.hasChainOperators() && !item.isFieldChain())
return null;
final Object origValue = iCondition.getLeft() == iItem ? iCondition.getRight() : iCondition.getLeft();
if (iCondition.getOperator() instanceof OQueryOperatorBetween || iCondition.getOperator() instanceof OQueryOperatorIn) {
return new OIndexSearchResult(iCondition.getOperator(), item.getFieldChain(), origValue);
}
final Object value = OSQLHelper.getValue(origValue);
if (value == null)
return null;
return new OIndexSearchResult(iCondition.getOperator(), item.getFieldChain(), value);
}
private void fillSearchIndexResultSet(final Object indexResult) {
if (indexResult != null) {
if (indexResult instanceof Collection<?>) {
Collection<OIdentifiable> indexResultSet = (Collection<OIdentifiable>) indexResult;
context.updateMetric("indexReads", indexResultSet.size());
for (OIdentifiable identifiable : indexResultSet) {
ORecord<?> record = identifiable.getRecord();
// Don't throw an exception if the record is null, as indexed queries may fail when using record-level security
if ((record != null) && filter((ORecordInternal<?>) record)) {
final boolean continueResultParsing = handleResult(record, false);
if (!continueResultParsing)
break;
}
}
} else {
final ORecord<?> record = ((OIdentifiable) indexResult).getRecord();
if (filter((ORecordInternal<?>) record))
handleResult(record, true);
}
}
}
protected int parseProjections() {
if (!parserOptionalKeyword(KEYWORD_SELECT))
return -1;
int upperBound = OStringSerializerHelper.getLowerIndexOf(parserTextUpperCase, parserGetCurrentPosition(), KEYWORD_FROM_2FIND,
KEYWORD_LET_2FIND);
if (upperBound == -1)
// UP TO THE END
upperBound = parserText.length();
final String projectionString = parserText.substring(parserGetCurrentPosition(), upperBound).trim();
if (projectionString.length() > 0) {
// EXTRACT PROJECTIONS
projections = new LinkedHashMap<String, Object>();
projectionDefinition = new LinkedHashMap<String, String>();
final List<String> items = OStringSerializerHelper.smartSplit(projectionString, ',');
String fieldName;
int beginPos;
int endPos;
for (String projection : items) {
projection = projection.trim();
if (projectionDefinition == null)
throw new OCommandSQLParsingException("Projection not allowed with FLATTEN() and EXPAND() operators");
fieldName = null;
endPos = projection.toUpperCase(Locale.ENGLISH).indexOf(KEYWORD_AS);
if (endPos > -1) {
// EXTRACT ALIAS
fieldName = projection.substring(endPos + KEYWORD_AS.length()).trim();
projection = projection.substring(0, endPos).trim();
if (projectionDefinition.containsKey(fieldName))
throw new OCommandSQLParsingException("Field '" + fieldName
+ "' is duplicated in current SELECT, choose a different name");
} else {
// EXTRACT THE FIELD NAME WITHOUT FUNCTIONS AND/OR LINKS
beginPos = projection.charAt(0) == '@' ? 1 : 0;
endPos = extractProjectionNameSubstringEndPosition(projection);
fieldName = endPos > -1 ? projection.substring(beginPos, endPos) : projection.substring(beginPos);
fieldName = OStringSerializerHelper.getStringContent(fieldName);
// FIND A UNIQUE NAME BY ADDING A COUNTER
for (int fieldIndex = 2; projectionDefinition.containsKey(fieldName); ++fieldIndex)
fieldName += fieldIndex;
}
String p = projection.toUpperCase(Locale.ENGLISH);
if (p.startsWith("FLATTEN(") || p.startsWith("EXPAND(")) {
if (p.startsWith("FLATTEN("))
OLogManager.instance().debug(this, "FLATTEN() operator has been replaced by EXPAND()");
List<String> pars = OStringSerializerHelper.getParameters(projection);
if (pars.size() != 1) {
throw new OCommandSQLParsingException(
"EXPAND/FLATTEN operators expects the field name as parameter. Example EXPAND( out )");
}
expandTarget = OSQLHelper.parseValue(this, pars.get(0).trim(), context);
// BYPASS THIS AS A PROJECTION BUT TREAT IT AS SPECIAL
projectionDefinition = null;
projections = null;
if (groupedResult == null && expandTarget instanceof OSQLFunctionRuntime
&& ((OSQLFunctionRuntime) expandTarget).aggregateResults())
getProjectionGroup(null);
continue;
}
projectionDefinition.put(fieldName, projection);
}
if (projectionDefinition != null
&& (projectionDefinition.size() > 1 || !projectionDefinition.values().iterator().next().equals("*"))) {
projections = createProjectionFromDefinition();
for (Object p : projections.values()) {
if (groupedResult == null && p instanceof OSQLFunctionRuntime && ((OSQLFunctionRuntime) p).aggregateResults()) {
// AGGREGATE IT
getProjectionGroup(null);
break;
}
}
} else {
// TREATS SELECT * AS NO PROJECTION
projectionDefinition = null;
projections = null;
}
}
if (upperBound < parserText.length() - 1)
parserSetCurrentPosition(upperBound);
else
parserSetEndOfText();
return parserGetCurrentPosition();
}
protected Map<String, Object> createProjectionFromDefinition() {
if (projectionDefinition == null)
return new LinkedHashMap<String, Object>();
final Map<String, Object> projections = new LinkedHashMap<String, Object>(projectionDefinition.size());
for (Entry<String, String> p : projectionDefinition.entrySet()) {
final Object projectionValue = OSQLHelper.parseValue(this, p.getValue(), context);
projections.put(p.getKey(), projectionValue);
}
return projections;
}
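/**
* Returns the position of the first '.', '(' or '[' in the projection, whichever occurs first,
* or -1 when none is present; used to cut the bare field name out of chains, functions and
* collection accessors.
*/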
protected int extractProjectionNameSubstringEndPosition(final String projection) {
int endPos;
final int pos1 = projection.indexOf('.');
final int pos2 = projection.indexOf('(');
final int pos3 = projection.indexOf('[');
if (pos1 > -1 && pos2 == -1 && pos3 == -1)
endPos = pos1;
else if (pos2 > -1 && pos1 == -1 && pos3 == -1)
endPos = pos2;
else if (pos3 > -1 && pos1 == -1 && pos2 == -1)
endPos = pos3;
else if (pos1 > -1 && pos2 > -1 && pos3 == -1)
endPos = Math.min(pos1, pos2);
else if (pos2 > -1 && pos3 > -1 && pos1 == -1)
endPos = Math.min(pos2, pos3);
else if (pos1 > -1 && pos3 > -1 && pos2 == -1)
endPos = Math.min(pos1, pos3);
else if (pos1 > -1 && pos2 > -1 && pos3 > -1) {
endPos = Math.min(pos1, pos2);
endPos = Math.min(endPos, pos3);
} else
endPos = -1;
return endPos;
}
private void applyOrderBy() {
if (orderedFields == null)
return;
final long startOrderBy = System.currentTimeMillis();
try {
if (tempResult instanceof OMultiCollectionIterator) {
final List<OIdentifiable> list = new ArrayList<OIdentifiable>();
for (OIdentifiable o : tempResult)
list.add(o);
tempResult = list;
}
ODocumentHelper.sort((List<? extends OIdentifiable>) tempResult, orderedFields);
orderedFields.clear();
} finally {
context.setVariable("orderByElapsed", (System.currentTimeMillis() - startOrderBy));
}
}
/**
* Extracts the content of collections and/or links and puts it in the result
*/
private void applyExpand() {
if (expandTarget == null)
return;
Object fieldValue;
final long startExpand = System.currentTimeMillis();
try {
if (tempResult == null) {
tempResult = new ArrayList<OIdentifiable>();
if (expandTarget instanceof OSQLFilterItemVariable) {
Object r = ((OSQLFilterItemVariable) expandTarget).getValue(null, context);
if (r != null) {
if (r instanceof OIdentifiable)
((Collection<OIdentifiable>) tempResult).add((OIdentifiable) r);
else if (OMultiValue.isMultiValue(r)) {
for (Object o : OMultiValue.getMultiValueIterable(r))
((Collection<OIdentifiable>) tempResult).add((OIdentifiable) o);
}
}
}
} else {
final OMultiCollectionIterator<OIdentifiable> finalResult = new OMultiCollectionIterator<OIdentifiable>();
finalResult.setLimit(limit);
for (OIdentifiable id : tempResult) {
if (expandTarget instanceof OSQLFilterItem)
fieldValue = ((OSQLFilterItem) expandTarget).getValue(id.getRecord(), context);
else if (expandTarget instanceof OSQLFunctionRuntime)
fieldValue = ((OSQLFunctionRuntime) expandTarget).getResult();
else
fieldValue = expandTarget.toString();
if (fieldValue != null)
if (fieldValue instanceof Collection<?>) {
finalResult.add((Collection<OIdentifiable>) fieldValue);
} else if (fieldValue instanceof Map<?, ?>) {
finalResult.add(((Map<?, OIdentifiable>) fieldValue).values());
} else if (fieldValue instanceof OMultiCollectionIterator) {
finalResult.add((OMultiCollectionIterator<OIdentifiable>) fieldValue);
} else if (fieldValue instanceof OIdentifiable)
finalResult.add((OIdentifiable) fieldValue);
}
tempResult = finalResult;
}
} finally {
context.setVariable("expandElapsed", (System.currentTimeMillis() - startExpand));
}
}
private void searchInIndex() {
final OIndex<Object> index = (OIndex<Object>) getDatabase().getMetadata().getIndexManager()
.getIndex(parsedTarget.getTargetIndex());
if (index == null)
throw new OCommandExecutionException("Target index '" + parsedTarget.getTargetIndex() + "' not found");
// nothing was added yet, so the index definition for a manual index has not been calculated
if (index.getDefinition() == null)
return;
if (compiledFilter != null && compiledFilter.getRootCondition() != null) {
if (!"KEY".equalsIgnoreCase(compiledFilter.getRootCondition().getLeft().toString()))
throw new OCommandExecutionException("'Key' field is required for queries against indexes");
final OQueryOperator indexOperator = compiledFilter.getRootCondition().getOperator();
if (indexOperator instanceof OQueryOperatorBetween) {
final Object[] values = (Object[]) compiledFilter.getRootCondition().getRight();
final Collection<ODocument> entries = index.getEntriesBetween(getIndexKey(index.getDefinition(), values[0]),
getIndexKey(index.getDefinition(), values[2]));
for (final OIdentifiable r : entries) {
final boolean continueResultParsing = handleResult(r, false);
if (!continueResultParsing)
break;
}
} else if (indexOperator instanceof OQueryOperatorMajor) {
final Object value = compiledFilter.getRootCondition().getRight();
final Collection<ODocument> entries = index.getEntriesMajor(getIndexKey(index.getDefinition(), value), false);
parseIndexSearchResult(entries);
} else if (indexOperator instanceof OQueryOperatorMajorEquals) {
final Object value = compiledFilter.getRootCondition().getRight();
final Collection<ODocument> entries = index.getEntriesMajor(getIndexKey(index.getDefinition(), value), true);
parseIndexSearchResult(entries);
} else if (indexOperator instanceof OQueryOperatorMinor) {
final Object value = compiledFilter.getRootCondition().getRight();
final Collection<ODocument> entries = index.getEntriesMinor(getIndexKey(index.getDefinition(), value), false);
parseIndexSearchResult(entries);
} else if (indexOperator instanceof OQueryOperatorMinorEquals) {
final Object value = compiledFilter.getRootCondition().getRight();
final Collection<ODocument> entries = index.getEntriesMinor(getIndexKey(index.getDefinition(), value), true);
parseIndexSearchResult(entries);
} else if (indexOperator instanceof OQueryOperatorIn) {
final List<Object> origValues = (List<Object>) compiledFilter.getRootCondition().getRight();
final List<Object> values = new ArrayList<Object>(origValues.size());
for (Object val : origValues) {
if (index.getDefinition() instanceof OCompositeIndexDefinition) {
throw new OCommandExecutionException("Operator IN not supported yet.");
}
val = getIndexKey(index.getDefinition(), val);
values.add(val);
}
final Collection<ODocument> entries = index.getEntries(values);
parseIndexSearchResult(entries);
} else {
final Object right = compiledFilter.getRootCondition().getRight();
Object keyValue = getIndexKey(index.getDefinition(), right);
final Object res;
if (index.getDefinition().getParamCount() == 1) {
// CONVERT BEFORE SEARCH IF NEEDED
final OType type = index.getDefinition().getTypes()[0];
keyValue = OType.convert(keyValue, type.getDefaultJavaType());
res = index.get(keyValue);
} else {
final Object secondKey = getIndexKey(index.getDefinition(), right);
if (keyValue instanceof OCompositeKey && secondKey instanceof OCompositeKey
&& ((OCompositeKey) keyValue).getKeys().size() == index.getDefinition().getParamCount()
&& ((OCompositeKey) secondKey).getKeys().size() == index.getDefinition().getParamCount())
res = index.get(keyValue);
else
res = index.getValuesBetween(keyValue, secondKey);
}
if (res != null)
if (res instanceof Collection<?>)
// MULTI VALUES INDEX
for (final OIdentifiable r : (Collection<OIdentifiable>) res)
handleResult(createIndexEntryAsDocument(keyValue, r.getIdentity()), true);
else
// SINGLE VALUE INDEX
handleResult(createIndexEntryAsDocument(keyValue, ((OIdentifiable) res).getIdentity()), true);
}
} else {
if (isIndexSizeQuery()) {
getProjectionGroup(null).applyValue(projections.keySet().iterator().next(), index.getSize());
return;
}
if (isIndexKeySizeQuery()) {
getProjectionGroup(null).applyValue(projections.keySet().iterator().next(), index.getKeySize());
return;
}
final OIndexInternal<?> indexInternal = index.getInternal();
if (indexInternal instanceof OSharedResource)
((OSharedResource) indexInternal).acquireExclusiveLock();
try {
// ADD ALL THE ITEMS AS RESULT
for (Iterator<Entry<Object, Object>> it = index.iterator(); it.hasNext(); ) {
final Entry<Object, Object> current = it.next();
if (current.getValue() instanceof Collection<?>) {
for (OIdentifiable identifiable : ((Set<OIdentifiable>) current.getValue()))
if (!handleResult(createIndexEntryAsDocument(current.getKey(), identifiable.getIdentity()), true))
break;
} else if (!handleResult(createIndexEntryAsDocument(current.getKey(), (OIdentifiable) current.getValue()), true))
break;
}
} finally {
if (indexInternal instanceof OSharedResource)
((OSharedResource) indexInternal).releaseExclusiveLock();
}
}
}
private boolean isIndexSizeQuery() {
if (!(groupedResult != null && projections.entrySet().size() == 1))
return false;
final Object projection = projections.values().iterator().next();
if (!(projection instanceof OSQLFunctionRuntime))
return false;
final OSQLFunctionRuntime f = (OSQLFunctionRuntime) projection;
if (!f.getRoot().equals(OSQLFunctionCount.NAME))
return false;
if (!((f.configuredParameters == null || f.configuredParameters.length == 0) || (f.configuredParameters != null
&& f.configuredParameters.length == 1 && f.configuredParameters[0].equals("*"))))
return false;
return true;
}
private boolean isIndexKeySizeQuery() {
if (!(groupedResult != null && projections.entrySet().size() == 1))
return false;
final Object projection = projections.values().iterator().next();
if (!(projection instanceof OSQLFunctionRuntime))
return false;
final OSQLFunctionRuntime f = (OSQLFunctionRuntime) projection;
if (!f.getRoot().equals(OSQLFunctionCount.NAME))
return false;
if (!(f.configuredParameters != null && f.configuredParameters.length == 1 && f.configuredParameters[0] instanceof OSQLFunctionRuntime))
return false;
final OSQLFunctionRuntime fConfigured = (OSQLFunctionRuntime) f.configuredParameters[0];
if (!fConfigured.getRoot().equals(OSQLFunctionDistinct.NAME))
return false;
if (!(fConfigured.configuredParameters != null && fConfigured.configuredParameters.length == 1 && fConfigured.configuredParameters[0] instanceof OSQLFilterItemField))
return false;
final OSQLFilterItemField field = (OSQLFilterItemField) fConfigured.configuredParameters[0];
if (!field.getRoot().equals("key"))
return false;
return true;
}
private static Object getIndexKey(final OIndexDefinition indexDefinition, Object value) {
if (indexDefinition instanceof OCompositeIndexDefinition) {
if (value instanceof List) {
final List<?> values = (List<?>) value;
List<Object> keyParams = new ArrayList<Object>(values.size());
for (Object o : values) {
keyParams.add(OSQLHelper.getValue(o));
}
return indexDefinition.createValue(keyParams);
} else {
value = OSQLHelper.getValue(value);
if (value instanceof OCompositeKey) {
return value;
} else {
return indexDefinition.createValue(value);
}
}
} else {
return OSQLHelper.getValue(value);
}
}
protected void parseIndexSearchResult(final Collection<ODocument> entries) {
for (final ODocument document : entries) {
final boolean continueResultParsing = handleResult(document, false);
if (!continueResultParsing)
break;
}
}
private static ODocument createIndexEntryAsDocument(final Object iKey, final OIdentifiable iValue) {
final ODocument doc = new ODocument().setOrdered(true);
doc.field("key", iKey);
doc.field("rid", iValue);
doc.unsetDirty();
return doc;
}
private void handleNoTarget() {
if (parsedTarget == null)
// ONLY LET CLAUSES WERE USED: APPLY THE PROJECTIONS TO THEM
addResult(ORuntimeResult.createProjectionDocument(resultCount));
}
private void handleGroupBy() {
if (groupedResult != null && tempResult == null) {
final long startGroupBy = System.currentTimeMillis();
try {
tempResult = new ArrayList<OIdentifiable>();
for (Entry<Object, ORuntimeResult> g : groupedResult.entrySet()) {
if (g.getKey() != null || (groupedResult.size() == 1 && groupByFields == null)) {
final ODocument doc = g.getValue().getResult();
if (doc != null && !doc.isEmpty())
((List<OIdentifiable>) tempResult).add(doc);
}
}
} finally {
context.setVariable("groupByElapsed", (System.currentTimeMillis() - startGroupBy));
}
}
}
private static boolean checkIndexExistence(final OClass iSchemaClass, final OIndexSearchResult result) {
if (!iSchemaClass.areIndexed(result.fields()))
return false;
if (result.lastField.isLong()) {
final int fieldCount = result.lastField.getItemCount();
OClass cls = iSchemaClass.getProperty(result.lastField.getItemName(0)).getLinkedClass();
for (int i = 1; i < fieldCount; i++) {
if (cls == null || !cls.areIndexed(result.lastField.getItemName(i))) {
return false;
}
cls = cls.getProperty(result.lastField.getItemName(i)).getLinkedClass();
}
}
return true;
}
@Override
public String getSyntax() {
return "SELECT [<Projections>] FROM <Target> [LET <Assignment>*] [WHERE <Condition>*] [ORDER BY <Fields>* [ASC|DESC]*] [LIMIT <MaxRecords>] TIMEOUT <TimeoutInMs>";
}
/**
* Parses the fetchplan keyword if found.
*/
protected boolean parseFetchplan(final String w) throws OCommandSQLParsingException {
if (!w.equals(KEYWORD_FETCHPLAN))
return false;
parserSkipWhiteSpaces();
int start = parserGetCurrentPosition();
parserNextWord(true);
int end = parserGetCurrentPosition();
parserSkipWhiteSpaces();
int position = parserGetCurrentPosition();
while (!parserIsEnded()) {
parserNextWord(true);
final String word = OStringSerializerHelper.getStringContent(parserGetLastWord());
if (!word.matches(".*:-?\\d+"))
break;
end = parserGetCurrentPosition();
parserSkipWhiteSpaces();
position = parserGetCurrentPosition();
}
parserSetCurrentPosition(position);
fetchPlan = OStringSerializerHelper.getStringContent(parserText.substring(start, end));
request.setFetchPlan(fetchPlan);
return true;
}
public String getFetchPlan() {
return fetchPlan != null ? fetchPlan : request.getFetchPlan();
}
protected boolean optimizeExecution() {
if ((compiledFilter == null || (compiledFilter != null && compiledFilter.getRootCondition() == null)) && groupByFields == null
&& projections != null && projections.size() == 1) {
final long startOptimization = System.currentTimeMillis();
try {
final Map.Entry<String, Object> entry = projections.entrySet().iterator().next();
if (entry.getValue() instanceof OSQLFunctionRuntime) {
final OSQLFunctionRuntime rf = (OSQLFunctionRuntime) entry.getValue();
if (rf.function instanceof OSQLFunctionCount && rf.configuredParameters.length == 1
&& "*".equals(rf.configuredParameters[0])) {
long count = 0;
if (parsedTarget.getTargetClasses() != null) {
final OClass cls = parsedTarget.getTargetClasses().keySet().iterator().next();
count = cls.count();
} else if (parsedTarget.getTargetClusters() != null) {
for (String cluster : parsedTarget.getTargetClusters().keySet()) {
count += getDatabase().countClusterElements(cluster);
}
} else if (parsedTarget.getTargetIndex() != null) {
count += getDatabase().getMetadata().getIndexManager().getIndex(parsedTarget.getTargetIndex()).getSize();
} else {
final Iterable<? extends OIdentifiable> recs = parsedTarget.getTargetRecords();
if (recs != null) {
if (recs instanceof Collection<?>)
count += ((Collection<?>) recs).size();
else {
for (Object o : recs)
count++;
}
}
}
if (tempResult == null)
tempResult = new ArrayList<OIdentifiable>();
((Collection<OIdentifiable>) tempResult).add(new ODocument().field(entry.getKey(), count));
return true;
}
}
} finally {
context.setVariable("optimizationElapsed", (System.currentTimeMillis() - startOptimization));
}
}
if (orderedFields != null && !orderedFields.isEmpty()) {
if (parsedTarget.getTargetClasses() != null) {
final OClass cls = parsedTarget.getTargetClasses().keySet().iterator().next();
final OPair<String, String> orderByFirstField = orderedFields.iterator().next();
final OProperty p = cls.getProperty(orderByFirstField.getKey());
if (p != null) {
final Set<OIndex<?>> involvedIndexes = cls.getInvolvedIndexes(orderByFirstField.getKey());
if (involvedIndexes != null && !involvedIndexes.isEmpty()) {
for (OIndex<?> idx : involvedIndexes) {
if (idx.getKeyTypes().length == 1 && idx.supportsOrderedIterations()) {
if (idx.getType().startsWith("UNIQUE") && idx.getKeySize() < MIN_THRESHOLD_USE_INDEX_AS_TARGET
|| compiledFilter == null) {
if (orderByFirstField.getValue().equalsIgnoreCase("asc"))
target = (Iterator<? extends OIdentifiable>) idx.valuesIterator();
else
target = (Iterator<? extends OIdentifiable>) idx.valuesInverseIterator();
if (context.isRecordingMetrics()) {
Set<String> idxNames = (Set<String>) context.getVariable("involvedIndexes");
if (idxNames == null) {
idxNames = new HashSet<String>();
context.setVariable("involvedIndexes", idxNames);
}
idxNames.add(idx.getName());
}
orderedFields = null;
fetchLimit = getQueryFetchLimit();
break;
}
}
}
}
}
}
}
return false;
}
private static class IndexComparator implements Comparator<OIndex<?>> {
private static final IndexComparator INSTANCE = new IndexComparator();
public int compare(final OIndex<?> indexOne, final OIndex<?> indexTwo) {
return indexOne.getDefinition().getParamCount() - indexTwo.getDefinition().getParamCount();
}
}
private final class IndexResultListener implements OQueryOperator.IndexResultListener {
private final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
@Override
public Object getResult() {
return result;
}
@Override
public boolean addResult(OIdentifiable value) {
if (compiledFilter == null || Boolean.TRUE.equals(compiledFilter.evaluate(value.getRecord(), null, context)))
result.add(value);
return fetchLimit < 0 || fetchLimit >= result.size();
}
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLSelect.java |
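A minimal sketch of how the executor above is normally reached, assuming the OrientDB 1.x document API; the in-memory URL, class and field names are illustrative. The engine compiles the text through parse() and streams matches through the executeSearchRecord()/handleResult() path.

import java.util.List;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;

public class SelectSketch {
    public static void main(String[] args) {
        ODatabaseDocumentTx db = new ODatabaseDocumentTx("memory:demo").create();
        try {
            // GROUP BY / ORDER BY below exercise parseGroupBy() and parseOrderBy().
            List<ODocument> rows = db.query(new OSQLSynchQuery<ODocument>(
                "select city, count(*) as n from Person group by city order by n desc limit 10"));
            for (ODocument row : rows)
                System.out.println(row);
        } finally {
            db.close();
        }
    }
}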
1,640 | @Component("blCollectionFieldMetadataProvider")
@Scope("prototype")
public class CollectionFieldMetadataProvider extends AdvancedCollectionFieldMetadataProvider {
private static final Log LOG = LogFactory.getLog(CollectionFieldMetadataProvider.class);
protected boolean canHandleFieldForConfiguredMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
AdminPresentationCollection annot = addMetadataRequest.getRequestedField().getAnnotation(AdminPresentationCollection.class);
return annot != null;
}
protected boolean canHandleAnnotationOverride(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) {
AdminPresentationOverrides myOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationOverrides.class);
AdminPresentationMergeOverrides myMergeOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationMergeOverrides.class);
return (myOverrides != null && !ArrayUtils.isEmpty(myOverrides.collections()) || myMergeOverrides != null);
}
@Override
public FieldProviderResponse addMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
if (!canHandleFieldForConfiguredMetadata(addMetadataRequest, metadata)) {
return FieldProviderResponse.NOT_HANDLED;
}
AdminPresentationCollection annot = addMetadataRequest.getRequestedField().getAnnotation(AdminPresentationCollection
.class);
FieldInfo info = buildFieldInfo(addMetadataRequest.getRequestedField());
FieldMetadataOverride override = constructBasicCollectionMetadataOverride(annot);
buildCollectionMetadata(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(),
metadata, info, override);
setClassOwnership(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, info);
return FieldProviderResponse.HANDLED;
}
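/**
* Merges @AdminPresentationOverrides / @AdminPresentationMergeOverrides declared on the entity
* into collection metadata that was already built: each matching entry is rebuilt through
* buildCollectionMetadata() with the override applied on top of the server-side metadata.
*/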
@Override
public FieldProviderResponse overrideViaAnnotation(OverrideViaAnnotationRequest overrideViaAnnotationRequest, Map<String, FieldMetadata> metadata) {
if (!canHandleAnnotationOverride(overrideViaAnnotationRequest, metadata)) {
return FieldProviderResponse.NOT_HANDLED;
}
Map<String, AdminPresentationCollectionOverride> presentationCollectionOverrides = new HashMap<String, AdminPresentationCollectionOverride>();
AdminPresentationOverrides myOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationOverrides.class);
if (myOverrides != null) {
for (AdminPresentationCollectionOverride myOverride : myOverrides.collections()) {
presentationCollectionOverrides.put(myOverride.name(), myOverride);
}
}
for (String propertyName : presentationCollectionOverrides.keySet()) {
for (String key : metadata.keySet()) {
if (key.startsWith(propertyName)) {
buildAdminPresentationCollectionOverride(overrideViaAnnotationRequest.getPrefix(), overrideViaAnnotationRequest.getParentExcluded(), metadata, presentationCollectionOverrides, propertyName, key, overrideViaAnnotationRequest.getDynamicEntityDao());
}
}
}
AdminPresentationMergeOverrides myMergeOverrides = overrideViaAnnotationRequest.getRequestedEntity().getAnnotation(AdminPresentationMergeOverrides.class);
if (myMergeOverrides != null) {
for (AdminPresentationMergeOverride override : myMergeOverrides.value()) {
String propertyName = override.name();
Map<String, FieldMetadata> loopMap = new HashMap<String, FieldMetadata>();
loopMap.putAll(metadata);
for (Map.Entry<String, FieldMetadata> entry : loopMap.entrySet()) {
if (entry.getKey().startsWith(propertyName) || StringUtils.isEmpty(propertyName)) {
FieldMetadata targetMetadata = entry.getValue();
if (targetMetadata instanceof BasicCollectionMetadata) {
BasicCollectionMetadata serverMetadata = (BasicCollectionMetadata) targetMetadata;
if (serverMetadata.getTargetClass() != null) {
try {
Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
Class<?> parentClass = null;
if (serverMetadata.getOwningClass() != null) {
parentClass = Class.forName(serverMetadata.getOwningClass());
}
String fieldName = serverMetadata.getFieldName();
Field field = overrideViaAnnotationRequest.getDynamicEntityDao().getFieldManager()
.getField(targetClass, fieldName);
Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
temp.put(field.getName(), serverMetadata);
FieldInfo info = buildFieldInfo(field);
FieldMetadataOverride fieldMetadataOverride = overrideCollectionMergeMetadata(override);
if (serverMetadata.getExcluded() != null && serverMetadata.getExcluded() &&
(fieldMetadataOverride.getExcluded() == null || fieldMetadataOverride.getExcluded())) {
continue;
}
buildCollectionMetadata(parentClass, targetClass, temp, info, fieldMetadataOverride);
serverMetadata = (BasicCollectionMetadata) temp.get(field.getName());
metadata.put(entry.getKey(), serverMetadata);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
}
}
return FieldProviderResponse.HANDLED;
}
@Override
public FieldProviderResponse overrideViaXml(OverrideViaXmlRequest overrideViaXmlRequest, Map<String, FieldMetadata> metadata) {
Map<String, FieldMetadataOverride> overrides = getTargetedOverride(overrideViaXmlRequest.getDynamicEntityDao(), overrideViaXmlRequest.getRequestedConfigKey(), overrideViaXmlRequest.getRequestedCeilingEntity());
if (overrides != null) {
for (String propertyName : overrides.keySet()) {
final FieldMetadataOverride localMetadata = overrides.get(propertyName);
for (String key : metadata.keySet()) {
if (key.equals(propertyName)) {
try {
if (metadata.get(key) instanceof BasicCollectionMetadata) {
BasicCollectionMetadata serverMetadata = (BasicCollectionMetadata) metadata.get(key);
if (serverMetadata.getTargetClass() != null) {
Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
Class<?> parentClass = null;
if (serverMetadata.getOwningClass() != null) {
parentClass = Class.forName(serverMetadata.getOwningClass());
}
String fieldName = serverMetadata.getFieldName();
Field field = overrideViaXmlRequest.getDynamicEntityDao().getFieldManager().getField(targetClass, fieldName);
Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
temp.put(field.getName(), serverMetadata);
FieldInfo info = buildFieldInfo(field);
buildCollectionMetadata(parentClass, targetClass, temp, info, localMetadata);
serverMetadata = (BasicCollectionMetadata) temp.get(field.getName());
metadata.put(key, serverMetadata);
if (overrideViaXmlRequest.getParentExcluded()) {
if (LOG.isDebugEnabled()) {
LOG.debug("applyCollectionMetadataOverrides:Excluding " + key + "because parent is marked as excluded.");
}
serverMetadata.setExcluded(true);
}
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
return FieldProviderResponse.HANDLED;
}
protected void buildAdminPresentationCollectionOverride(String prefix, Boolean isParentExcluded, Map<String, FieldMetadata> mergedProperties, Map<String, AdminPresentationCollectionOverride> presentationCollectionOverrides, String propertyName, String key, DynamicEntityDao dynamicEntityDao) {
AdminPresentationCollectionOverride override = presentationCollectionOverrides.get(propertyName);
if (override != null) {
AdminPresentationCollection annot = override.value();
if (annot != null) {
String testKey = prefix + key;
if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && annot.excluded()) {
FieldMetadata metadata = mergedProperties.get(key);
if (LOG.isDebugEnabled()) {
LOG.debug("buildAdminPresentationCollectionOverride:Excluding " + key + "because an override annotation declared " + testKey + "to be excluded");
}
metadata.setExcluded(true);
return;
}
if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && !annot.excluded()) {
FieldMetadata metadata = mergedProperties.get(key);
if (!isParentExcluded) {
if (LOG.isDebugEnabled()) {
LOG.debug("buildAdminPresentationCollectionOverride:Showing " + key + "because an override annotation declared " + testKey + " to not be excluded");
}
metadata.setExcluded(false);
}
}
if (!(mergedProperties.get(key) instanceof BasicCollectionMetadata)) {
return;
}
BasicCollectionMetadata serverMetadata = (BasicCollectionMetadata) mergedProperties.get(key);
if (serverMetadata.getTargetClass() != null) {
try {
Class<?> targetClass = Class.forName(serverMetadata.getTargetClass());
Class<?> parentClass = null;
if (serverMetadata.getOwningClass() != null) {
parentClass = Class.forName(serverMetadata.getOwningClass());
}
String fieldName = serverMetadata.getFieldName();
Field field = dynamicEntityDao.getFieldManager().getField(targetClass, fieldName);
FieldMetadataOverride localMetadata = constructBasicCollectionMetadataOverride(annot);
//do not include the previous metadata - we want to construct a fresh metadata from the override annotation
Map<String, FieldMetadata> temp = new HashMap<String, FieldMetadata>(1);
FieldInfo info = buildFieldInfo(field);
buildCollectionMetadata(parentClass, targetClass, temp, info, localMetadata);
BasicCollectionMetadata result = (BasicCollectionMetadata) temp.get(field.getName());
result.setInheritedFromType(serverMetadata.getInheritedFromType());
result.setAvailableToTypes(serverMetadata.getAvailableToTypes());
mergedProperties.put(key, result);
if (isParentExcluded) {
if (LOG.isDebugEnabled()) {
LOG.debug("buildAdminPresentationCollectionOverride:Excluding " + key + "because the parent was excluded");
}
result.setExcluded(true);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
}
protected FieldMetadataOverride overrideCollectionMergeMetadata(AdminPresentationMergeOverride merge) {
FieldMetadataOverride fieldMetadataOverride = new FieldMetadataOverride();
Map<String, AdminPresentationMergeEntry> overrideValues = getAdminPresentationEntries(merge.mergeEntries());
for (Map.Entry<String, AdminPresentationMergeEntry> entry : overrideValues.entrySet()) {
String stringValue = entry.getValue().overrideValue();
if (entry.getKey().equals(PropertyType.AdminPresentationCollection.ADDTYPE)) {
fieldMetadataOverride.setAddType(OperationType.valueOf(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.CURRENCYCODEFIELD)) {
fieldMetadataOverride.setCurrencyCodeField(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.CUSTOMCRITERIA)) {
fieldMetadataOverride.setCustomCriteria(entry.getValue().stringArrayOverrideValue());
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.EXCLUDED)) {
fieldMetadataOverride.setExcluded(StringUtils.isEmpty(stringValue) ? entry.getValue()
.booleanOverrideValue() :
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.FRIENDLYNAME)) {
fieldMetadataOverride.setFriendlyName(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.MANYTOFIELD)) {
fieldMetadataOverride.setManyToField(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.OPERATIONTYPES)) {
AdminPresentationOperationTypes operationType = entry.getValue().operationTypes();
fieldMetadataOverride.setAddType(operationType.addType());
fieldMetadataOverride.setRemoveType(operationType.removeType());
fieldMetadataOverride.setUpdateType(operationType.updateType());
fieldMetadataOverride.setFetchType(operationType.fetchType());
fieldMetadataOverride.setInspectType(operationType.inspectType());
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.ORDER)) {
fieldMetadataOverride.setOrder(StringUtils.isEmpty(stringValue) ? entry.getValue().intOverrideValue() :
Integer.parseInt(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.READONLY)) {
fieldMetadataOverride.setReadOnly(StringUtils.isEmpty(stringValue) ? entry.getValue()
.booleanOverrideValue() :
Boolean.parseBoolean(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.SECURITYLEVEL)) {
fieldMetadataOverride.setSecurityLevel(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.SHOWIFPROPERTY)) {
fieldMetadataOverride.setShowIfProperty(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.TAB)) {
fieldMetadataOverride.setTab(stringValue);
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.TABORDER)) {
fieldMetadataOverride.setTabOrder(StringUtils.isEmpty(stringValue) ? entry.getValue()
.intOverrideValue() :
Integer.parseInt(stringValue));
} else if (entry.getKey().equals(PropertyType.AdminPresentationCollection.USESERVERSIDEINSPECTIONCACHE)) {
fieldMetadataOverride.setUseServerSideInspectionCache(StringUtils.isEmpty(stringValue) ? entry
.getValue().booleanOverrideValue() :
Boolean.parseBoolean(stringValue));
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Unrecognized type: " + entry.getKey() + ". Not setting on collection field.");
}
}
}
return fieldMetadataOverride;
}
protected FieldMetadataOverride constructBasicCollectionMetadataOverride(AdminPresentationCollection annotColl) {
if (annotColl != null) {
FieldMetadataOverride override = new FieldMetadataOverride();
override.setAddMethodType(annotColl.addType());
override.setManyToField(annotColl.manyToField());
override.setCustomCriteria(annotColl.customCriteria());
override.setUseServerSideInspectionCache(annotColl.useServerSideInspectionCache());
override.setExcluded(annotColl.excluded());
override.setFriendlyName(annotColl.friendlyName());
override.setReadOnly(annotColl.readOnly());
override.setOrder(annotColl.order());
override.setTab(annotColl.tab());
override.setTabOrder(annotColl.tabOrder());
override.setSecurityLevel(annotColl.securityLevel());
override.setAddType(annotColl.operationTypes().addType());
override.setFetchType(annotColl.operationTypes().fetchType());
override.setRemoveType(annotColl.operationTypes().removeType());
override.setUpdateType(annotColl.operationTypes().updateType());
override.setInspectType(annotColl.operationTypes().inspectType());
override.setShowIfProperty(annotColl.showIfProperty());
override.setCurrencyCodeField(annotColl.currencyCodeField());
return override;
}
throw new IllegalArgumentException("AdminPresentationCollection annotation not found on Field");
}
protected void buildCollectionMetadata(Class<?> parentClass, Class<?> targetClass, Map<String, FieldMetadata> attributes, FieldInfo field, FieldMetadataOverride collectionMetadata) {
BasicCollectionMetadata serverMetadata = (BasicCollectionMetadata) attributes.get(field.getName());
Class<?> resolvedClass = parentClass == null ? targetClass : parentClass;
BasicCollectionMetadata metadata;
if (serverMetadata != null) {
metadata = serverMetadata;
} else {
metadata = new BasicCollectionMetadata();
}
metadata.setTargetClass(targetClass.getName());
metadata.setFieldName(field.getName());
if (collectionMetadata.getReadOnly() != null) {
metadata.setMutable(!collectionMetadata.getReadOnly());
}
if (collectionMetadata.getAddMethodType() != null) {
metadata.setAddMethodType(collectionMetadata.getAddMethodType());
}
if (collectionMetadata.getShowIfProperty()!=null) {
metadata.setShowIfProperty(collectionMetadata.getShowIfProperty());
}
org.broadleafcommerce.openadmin.dto.OperationTypes dtoOperationTypes = new org.broadleafcommerce.openadmin.dto.OperationTypes(OperationType.BASIC, OperationType.BASIC, OperationType.BASIC, OperationType.BASIC, OperationType.BASIC);
if (collectionMetadata.getAddType() != null) {
dtoOperationTypes.setAddType(collectionMetadata.getAddType());
}
if (collectionMetadata.getRemoveType() != null) {
dtoOperationTypes.setRemoveType(collectionMetadata.getRemoveType());
}
if (collectionMetadata.getFetchType() != null) {
dtoOperationTypes.setFetchType(collectionMetadata.getFetchType());
}
if (collectionMetadata.getInspectType() != null) {
dtoOperationTypes.setInspectType(collectionMetadata.getInspectType());
}
if (collectionMetadata.getUpdateType() != null) {
dtoOperationTypes.setUpdateType(collectionMetadata.getUpdateType());
}
if (AddMethodType.LOOKUP == metadata.getAddMethodType()) {
dtoOperationTypes.setRemoveType(OperationType.NONDESTRUCTIVEREMOVE);
}
//don't allow additional non-persistent properties or additional foreign keys for an advanced collection datasource - they don't make sense in this context
PersistencePerspective persistencePerspective;
if (serverMetadata != null) {
persistencePerspective = metadata.getPersistencePerspective();
persistencePerspective.setOperationTypes(dtoOperationTypes);
} else {
persistencePerspective = new PersistencePerspective(dtoOperationTypes, new String[]{}, new ForeignKey[]{});
metadata.setPersistencePerspective(persistencePerspective);
}
String foreignKeyName = null;
if (serverMetadata != null) {
foreignKeyName = ((ForeignKey) serverMetadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY)).getManyToField();
}
if (!StringUtils.isEmpty(collectionMetadata.getManyToField())) {
foreignKeyName = collectionMetadata.getManyToField();
}
if (foreignKeyName == null && !StringUtils.isEmpty(field.getOneToManyMappedBy())) {
foreignKeyName = field.getOneToManyMappedBy();
}
if (foreignKeyName == null && !StringUtils.isEmpty(field.getManyToManyMappedBy())) {
foreignKeyName = field.getManyToManyMappedBy();
}
if (StringUtils.isEmpty(foreignKeyName)) {
throw new IllegalArgumentException("Unable to infer a ManyToOne field name for the @AdminPresentationCollection annotated field("+field.getName()+"). If not using the mappedBy property of @OneToMany or @ManyToMany, please make sure to explicitly define the manyToField property");
}
if (serverMetadata != null) {
ForeignKey foreignKey = (ForeignKey) metadata.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
foreignKey.setManyToField(foreignKeyName);
foreignKey.setForeignKeyClass(resolvedClass.getName());
foreignKey.setMutable(metadata.isMutable());
foreignKey.setOriginatingField(field.getName());
} else {
ForeignKey foreignKey = new ForeignKey(foreignKeyName, resolvedClass.getName(), null, ForeignKeyRestrictionType.ID_EQ);
persistencePerspective.addPersistencePerspectiveItem(PersistencePerspectiveItemType.FOREIGNKEY, foreignKey);
foreignKey.setMutable(metadata.isMutable());
foreignKey.setOriginatingField(field.getName());
}
String ceiling = null;
checkCeiling: {
if (field.getGenericType() instanceof ParameterizedType) {
try {
ParameterizedType pt = (ParameterizedType) field.getGenericType();
java.lang.reflect.Type collectionType = pt.getActualTypeArguments()[0];
String ceilingEntityName = ((Class<?>) collectionType).getName();
ceiling = entityConfiguration.lookupEntityClass(ceilingEntityName).getName();
break checkCeiling;
} catch (NoSuchBeanDefinitionException e) {
// We weren't successful at looking at entity configuration to find the type of this collection.
// We will continue and attempt to find it via the Hibernate annotations
}
}
if (!StringUtils.isEmpty(field.getOneToManyTargetEntity()) && !void.class.getName().equals(field.getOneToManyTargetEntity())) {
ceiling = field.getOneToManyTargetEntity();
break checkCeiling;
}
if (!StringUtils.isEmpty(field.getManyToManyTargetEntity()) && !void.class.getName().equals(field.getManyToManyTargetEntity())) {
ceiling = field.getManyToManyTargetEntity();
break checkCeiling;
}
}
if (!StringUtils.isEmpty(ceiling)) {
metadata.setCollectionCeilingEntity(ceiling);
}
if (collectionMetadata.getExcluded() != null) {
if (LOG.isDebugEnabled()) {
if (collectionMetadata.getExcluded()) {
LOG.debug("buildCollectionMetadata:Excluding " + field.getName() + " because it was explicitly declared in config");
} else {
LOG.debug("buildCollectionMetadata:Showing " + field.getName() + " because it was explicitly declared in config");
}
}
metadata.setExcluded(collectionMetadata.getExcluded());
}
if (collectionMetadata.getFriendlyName() != null) {
metadata.setFriendlyName(collectionMetadata.getFriendlyName());
}
if (collectionMetadata.getSecurityLevel() != null) {
metadata.setSecurityLevel(collectionMetadata.getSecurityLevel());
}
if (collectionMetadata.getOrder() != null) {
metadata.setOrder(collectionMetadata.getOrder());
}
if (collectionMetadata.getTab() != null) {
metadata.setTab(collectionMetadata.getTab());
}
if (collectionMetadata.getTabOrder() != null) {
metadata.setTabOrder(collectionMetadata.getTabOrder());
}
if (collectionMetadata.getCustomCriteria() != null) {
metadata.setCustomCriteria(collectionMetadata.getCustomCriteria());
}
if (collectionMetadata.getUseServerSideInspectionCache() != null) {
persistencePerspective.setUseServerSideInspectionCache(collectionMetadata.getUseServerSideInspectionCache());
}
if (collectionMetadata.getCurrencyCodeField()!=null) {
metadata.setCurrencyCodeField(collectionMetadata.getCurrencyCodeField());
}
attributes.put(field.getName(), metadata);
}
@Override
public int getOrder() {
return FieldMetadataProvider.COLLECTION;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_provider_metadata_CollectionFieldMetadataProvider.java |
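overrideViaAnnotation above iterates a copy of the metadata map and rewrites entries whose key matches a configured property name. A minimal sketch of that copy-then-mutate idiom, which avoids ConcurrentModificationException while the original map is updated (types simplified for illustration):

import java.util.HashMap;
import java.util.Map;

class PrefixOverride {
    // Rewrites every entry whose key starts with the given property name.
    static void overrideByPrefix(Map<String, String> metadata, String propertyName, String newValue) {
        Map<String, String> loopMap = new HashMap<String, String>(metadata); // iterate a copy
        for (Map.Entry<String, String> entry : loopMap.entrySet()) {
            if (entry.getKey().startsWith(propertyName)) {
                metadata.put(entry.getKey(), newValue); // safe: mutating the original, not loopMap
            }
        }
    }
}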
1,217 | public class DefaultPageCacheRecyclerModule extends AbstractModule {
private final Settings settings;
public DefaultPageCacheRecyclerModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
bind(PageCacheRecycler.class).asEagerSingleton();
}
} | 0true
| src_main_java_org_elasticsearch_cache_recycler_DefaultPageCacheRecyclerModule.java |
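bind(...).asEagerSingleton() makes Guice construct the PageCacheRecycler as soon as the injector is built rather than on first lookup. A minimal sketch of the same idiom with a hypothetical stand-in class (CacheHolder is not a real Elasticsearch type):

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

class CacheHolder {} // stand-in for PageCacheRecycler

class CacheModule extends AbstractModule {
    @Override
    protected void configure() {
        // Constructed eagerly when the injector is created.
        bind(CacheHolder.class).asEagerSingleton();
    }
}

class ModuleDemo {
    public static void main(String[] args) {
        Injector injector = Guice.createInjector(new CacheModule());
        CacheHolder cache = injector.getInstance(CacheHolder.class); // same instance every time
        System.out.println(cache != null);
    }
}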
4,195 | private class SnapshotContext extends Context {
private final Store store;
private final IndexShardSnapshotStatus snapshotStatus;
/**
* Constructs new context
*
* @param snapshotId snapshot id
* @param shardId shard to be snapshotted
* @param snapshotStatus snapshot status to report progress
*/
public SnapshotContext(SnapshotId snapshotId, ShardId shardId, IndexShardSnapshotStatus snapshotStatus) {
super(snapshotId, shardId);
store = indicesService.indexServiceSafe(shardId.getIndex()).shardInjectorSafe(shardId.id()).getInstance(Store.class);
this.snapshotStatus = snapshotStatus;
}
/**
* Create snapshot from index commit point
*
* @param snapshotIndexCommit
*/
public void snapshot(SnapshotIndexCommit snapshotIndexCommit) {
logger.debug("[{}] [{}] snapshot to [{}] ...", shardId, snapshotId, repositoryName);
final ImmutableMap<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e);
}
long generation = findLatestFileNameGeneration(blobs);
BlobStoreIndexShardSnapshots snapshots = buildBlobStoreIndexShardSnapshots(blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.STARTED);
final CountDownLatch indexLatch = new CountDownLatch(snapshotIndexCommit.getFiles().length);
final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = newArrayList();
int indexNumberOfFiles = 0;
long indexTotalFilesSize = 0;
for (String fileName : snapshotIndexCommit.getFiles()) {
if (snapshotStatus.aborted()) {
logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileName);
throw new IndexShardSnapshotFailedException(shardId, "Aborted");
}
logger.trace("[{}] [{}] Processing [{}]", shardId, snapshotId, fileName);
final StoreFileMetaData md;
try {
md = store.metaData(fileName);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to get store file metadata", e);
}
boolean snapshotRequired = false;
// TODO: For now segment files are copied on each commit because segment files don't have checksum
// if (snapshot.indexChanged() && fileName.equals(snapshotIndexCommit.getSegmentsFileName())) {
// snapshotRequired = true; // we want to always snapshot the segment file if the index changed
// }
BlobStoreIndexShardSnapshot.FileInfo fileInfo = snapshots.findPhysicalIndexFile(fileName);
if (fileInfo == null || !fileInfo.isSame(md) || !snapshotFileExistsInBlobs(fileInfo, blobs)) {
// commit point file does not exist in any commit point, or has a different length, or does not fully exist in the listed blobs
snapshotRequired = true;
}
if (snapshotRequired) {
indexNumberOfFiles++;
indexTotalFilesSize += md.length();
// create a new FileInfo
try {
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), fileName, md.length(), chunkSize, md.checksum());
indexCommitPointFiles.add(snapshotFileInfo);
snapshotFile(snapshotFileInfo, indexLatch, failures);
} catch (IOException e) {
failures.add(e);
}
} else {
indexCommitPointFiles.add(fileInfo);
indexLatch.countDown();
}
}
snapshotStatus.files(indexNumberOfFiles, indexTotalFilesSize);
snapshotStatus.indexVersion(snapshotIndexCommit.getGeneration());
try {
indexLatch.await();
} catch (InterruptedException e) {
failures.add(e);
Thread.currentThread().interrupt();
}
if (!failures.isEmpty()) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to perform snapshot (index files)", failures.get(0));
}
// now create and write the commit point
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.FINALIZE);
String commitPointName = snapshotBlobName(snapshotId);
BlobStoreIndexShardSnapshot snapshot = new BlobStoreIndexShardSnapshot(snapshotId.getSnapshot(), snapshotIndexCommit.getGeneration(), indexCommitPointFiles);
try {
byte[] snapshotData = writeSnapshot(snapshot);
logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId);
blobContainer.writeBlob(commitPointName, new BytesStreamInput(snapshotData, false), snapshotData.length);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to write commit point", e);
}
// delete all files that are not referenced by any commit point
// build a new BlobStoreIndexShardSnapshot, that includes this one and all the saved ones
List<BlobStoreIndexShardSnapshot> newSnapshotsList = Lists.newArrayList();
newSnapshotsList.add(snapshot);
for (BlobStoreIndexShardSnapshot point : snapshots) {
newSnapshotsList.add(point);
}
cleanup(newSnapshotsList, blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.DONE);
}
/**
* Snapshot individual file
* <p/>
* This is an asynchronous method. Upon completion of the operation the latch is counted down and any failures are
* added to the {@code failures} list
*
* @param fileInfo file to be snapshotted
* @param latch latch that should be counted down once the file is snapshotted
* @param failures thread-safe list of failures
* @throws IOException
*/
private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo, final CountDownLatch latch, final List<Throwable> failures) throws IOException {
final AtomicLong counter = new AtomicLong(fileInfo.numberOfParts());
for (long i = 0; i < fileInfo.numberOfParts(); i++) {
IndexInput indexInput = null;
try {
indexInput = store.openInputRaw(fileInfo.physicalName(), IOContext.READONCE);
indexInput.seek(i * fileInfo.partBytes());
InputStreamIndexInput inputStreamIndexInput = new ThreadSafeInputStreamIndexInput(indexInput, fileInfo.partBytes());
final IndexInput fIndexInput = indexInput;
long size = inputStreamIndexInput.actualSizeToRead();
InputStream inputStream;
if (snapshotRateLimiter != null) {
inputStream = new RateLimitingInputStream(inputStreamIndexInput, snapshotRateLimiter, snapshotThrottleListener);
} else {
inputStream = inputStreamIndexInput;
}
blobContainer.writeBlob(fileInfo.partName(i), inputStream, size, new ImmutableBlobContainer.WriterListener() {
@Override
public void onCompleted() {
IOUtils.closeWhileHandlingException(fIndexInput);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
@Override
public void onFailure(Throwable t) {
IOUtils.closeWhileHandlingException(fIndexInput);
failures.add(t);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
});
} catch (Throwable e) {
IOUtils.closeWhileHandlingException(indexInput);
failures.add(e);
latch.countDown();
}
}
}
/**
* Checks if snapshot file already exists in the list of blobs
*
* @param fileInfo file to check
* @param blobs list of blobs
* @return true if file exists in the list of blobs
*/
private boolean snapshotFileExistsInBlobs(BlobStoreIndexShardSnapshot.FileInfo fileInfo, ImmutableMap<String, BlobMetaData> blobs) {
BlobMetaData blobMetaData = blobs.get(fileInfo.name());
if (blobMetaData != null) {
return blobMetaData.length() == fileInfo.length();
} else if (blobs.containsKey(fileInfo.partName(0))) {
// multi-part file: sum up the part sizes and check the total
int part = 0;
long totalSize = 0;
while (true) {
blobMetaData = blobs.get(fileInfo.partName(part++));
if (blobMetaData == null) {
break;
}
totalSize += blobMetaData.length();
}
return totalSize == fileInfo.length();
}
// no blob found: neither an exact match nor a multi-part file
return false;
}
} | 1no label
| src_main_java_org_elasticsearch_index_snapshots_blobstore_BlobStoreIndexShardRepository.java |
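snapshot() and snapshotFile() coordinate fan-out writes with a CountDownLatch plus a thread-safe failure list: every part counts the latch down whether it succeeds or fails, and the first collected failure is rethrown after await(). A self-contained sketch of that coordination pattern (the work body is a placeholder):

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class FanOutDemo {
    public static void main(String[] args) throws InterruptedException {
        final int tasks = 4;
        final CountDownLatch latch = new CountDownLatch(tasks);
        final List<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
        ExecutorService pool = Executors.newFixedThreadPool(tasks);
        for (int i = 0; i < tasks; i++) {
            pool.execute(new Runnable() {
                public void run() {
                    try {
                        // ... write one snapshot part here ...
                    } catch (Throwable t) {
                        failures.add(t);
                    } finally {
                        latch.countDown(); // count down on success and on failure
                    }
                }
            });
        }
        latch.await();
        pool.shutdown();
        if (!failures.isEmpty()) {
            throw new RuntimeException("failed to perform snapshot", failures.get(0));
        }
    }
}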
509 | public class DefaultTimeSource implements TimeSource {
public long timeInMillis() {
return System.currentTimeMillis();
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_time_DefaultTimeSource.java |
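The one-method TimeSource interface exists as a seam so tests can replace the system clock. A sketch of a deterministic implementation, assuming only the interface shown above:

// Deterministic clock for tests: always reports the same instant.
public class FixedTimeSource implements TimeSource {
    private final long fixedMillis;

    public FixedTimeSource(long fixedMillis) {
        this.fixedMillis = fixedMillis;
    }

    public long timeInMillis() {
        return fixedMillis;
    }
}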
1,048 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ORDER_MULTISHIP_OPTION")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
public class OrderMultishipOptionImpl implements OrderMultishipOption {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "OrderMultishipOptionId")
@GenericGenerator(
name="OrderMultishipOptionId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="OrderMultishipOptionImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.OrderMultishipOptionImpl")
}
)
@Column(name = "ORDER_MULTISHIP_OPTION_ID")
protected Long id;
@ManyToOne(targetEntity = OrderImpl.class)
@JoinColumn(name = "ORDER_ID")
@Index(name="MULTISHIP_OPTION_ORDER_INDEX", columnNames={"ORDER_ID"})
protected Order order;
@ManyToOne(targetEntity = OrderItemImpl.class)
@JoinColumn(name = "ORDER_ITEM_ID")
protected OrderItem orderItem;
@ManyToOne(targetEntity = AddressImpl.class)
@JoinColumn(name = "ADDRESS_ID")
protected Address address;
@ManyToOne(targetEntity = FulfillmentOptionImpl.class)
@JoinColumn(name = "FULFILLMENT_OPTION_ID")
protected FulfillmentOption fulfillmentOption;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Order getOrder() {
return order;
}
@Override
public void setOrder(Order order) {
this.order = order;
}
@Override
public OrderItem getOrderItem() {
return orderItem;
}
@Override
public void setOrderItem(OrderItem orderItem) {
this.orderItem = orderItem;
}
@Override
public Address getAddress() {
return address;
}
@Override
public void setAddress(Address address) {
this.address = address;
}
@Override
public FulfillmentOption getFulfillmentOption() {
return fulfillmentOption;
}
@Override
public void setFulfillmentOption(FulfillmentOption fulfillmentOption) {
this.fulfillmentOption = fulfillmentOption;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderMultishipOptionImpl.java |
923 | while (makeDbCall(iMyDb, new ODbRelatedCall<Boolean>() {
public Boolean call() {
return myIterator.hasNext();
}
})) { | 0true
| core_src_main_java_com_orientechnologies_orient_core_record_impl_ODocumentHelper.java |
7 | @RunWith(HazelcastSerialClassRunner.class)
@Category(SlowTest.class)
public class RestTest {
final static Config config = new XmlConfigBuilder().build();
@Before
@After
public void killAllHazelcastInstances() throws IOException {
Hazelcast.shutdownAll();
}
@Test
public void testTtl_issue1783() throws IOException, InterruptedException {
final Config conf = new Config();
String name = "map";
final CountDownLatch latch = new CountDownLatch(1);
final MapConfig mapConfig = conf.getMapConfig(name);
mapConfig.setTimeToLiveSeconds(3);
mapConfig.addEntryListenerConfig(new EntryListenerConfig()
.setImplementation(new EntryAdapter() {
@Override
public void entryEvicted(EntryEvent event) {
latch.countDown();
}
}));
final HazelcastInstance instance = Hazelcast.newHazelcastInstance(conf);
final HTTPCommunicator communicator = new HTTPCommunicator(instance);
communicator.put(name, "key", "value");
String value = communicator.get(name, "key");
assertNotNull(value);
assertEquals("value", value);
assertTrue(latch.await(30, TimeUnit.SECONDS));
value = communicator.get(name, "key");
assertTrue(value.isEmpty());
}
@Test
public void testRestSimple() throws IOException {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
final HTTPCommunicator communicator = new HTTPCommunicator(instance);
final String name = "testRestSimple";
for (int i = 0; i < 100; i++) {
assertEquals(HttpURLConnection.HTTP_OK, communicator.put(name, String.valueOf(i), String.valueOf(i * 10)));
}
for (int i = 0; i < 100; i++) {
String actual = communicator.get(name, String.valueOf(i));
assertEquals(String.valueOf(i * 10), actual);
}
communicator.deleteAll(name);
for (int i = 0; i < 100; i++) {
String actual = communicator.get(name, String.valueOf(i));
assertEquals("", actual);
}
for (int i = 0; i < 100; i++) {
assertEquals(HttpURLConnection.HTTP_OK, communicator.put(name, String.valueOf(i), String.valueOf(i * 10)));
}
for (int i = 0; i < 100; i++) {
assertEquals(String.valueOf(i * 10), communicator.get(name, String.valueOf(i)));
}
for (int i = 0; i < 100; i++) {
assertEquals(HttpURLConnection.HTTP_OK, communicator.delete(name, String.valueOf(i)));
}
for (int i = 0; i < 100; i++) {
assertEquals("", communicator.get(name, String.valueOf(i)));
}
for (int i = 0; i < 100; i++) {
assertEquals(HttpURLConnection.HTTP_OK, communicator.offer(name, String.valueOf(i)));
}
for (int i = 0; i < 100; i++) {
assertEquals(String.valueOf(i), communicator.poll(name, 2));
}
}
@Test
public void testQueueSizeEmpty() throws IOException {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
final HTTPCommunicator communicator = new HTTPCommunicator(instance);
final String name = "testQueueSizeEmpty";
IQueue queue = instance.getQueue(name);
Assert.assertEquals(queue.size(), communicator.size(name));
}
@Test
public void testQueueSizeNonEmpty() throws IOException {
final HazelcastInstance instance = Hazelcast.newHazelcastInstance(config);
final HTTPCommunicator communicator = new HTTPCommunicator(instance);
final String name = "testQueueSizeNotEmpty";
final int num_items = 100;
IQueue queue = instance.getQueue(name);
for (int i = 0; i < num_items; i++) {
queue.add(i);
}
Assert.assertEquals(queue.size(), communicator.size(name));
}
private class HTTPCommunicator {
final HazelcastInstance instance;
final String address;
HTTPCommunicator(HazelcastInstance instance) {
this.instance = instance;
this.address = "http:/" + instance.getCluster().getLocalMember().getInetSocketAddress().toString() + "/hazelcast/rest/";
}
public String poll(String queueName, long timeout) {
String url = address + "queues/" + queueName + "/" + String.valueOf(timeout);
String result = doGet(url);
return result;
}
public int size(String queueName) {
String url = address + "queues/" + queueName + "/size";
Integer result = Integer.parseInt(doGet(url));
return result;
}
public int offer(String queueName, String data) throws IOException {
String url = address + "queues/" + queueName;
/** set up the http connection parameters */
HttpURLConnection urlConnection = (HttpURLConnection) (new URL(url)).openConnection();
urlConnection.setRequestMethod("POST");
urlConnection.setDoOutput(true);
urlConnection.setDoInput(true);
urlConnection.setUseCaches(false);
urlConnection.setAllowUserInteraction(false);
urlConnection.setRequestProperty("Content-type", "text/xml; charset=" + "UTF-8");
/** post the data */
OutputStream out = urlConnection.getOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
writer.write(data);
writer.close();
out.close();
return urlConnection.getResponseCode();
}
public String get(String mapName, String key) {
String url = address + "maps/" + mapName + "/" + key;
String result = doGet(url);
return result;
}
public int put(String mapName, String key, String value) throws IOException {
String url = address + "maps/" + mapName + "/" + key;
/** set up the http connection parameters */
HttpURLConnection urlConnection = (HttpURLConnection) (new URL(url)).openConnection();
urlConnection.setRequestMethod("POST");
urlConnection.setDoOutput(true);
urlConnection.setDoInput(true);
urlConnection.setUseCaches(false);
urlConnection.setAllowUserInteraction(false);
urlConnection.setRequestProperty("Content-type", "text/xml; charset=" + "UTF-8");
/** post the data */
OutputStream out = urlConnection.getOutputStream();
Writer writer = new OutputStreamWriter(out, "UTF-8");
writer.write(value);
writer.close();
out.close();
return urlConnection.getResponseCode();
}
public int deleteAll(String mapName) throws IOException {
String url = address + "maps/" + mapName;
/** set up the http connection parameters */
HttpURLConnection urlConnection = (HttpURLConnection) (new URL(url)).openConnection();
urlConnection.setRequestMethod("DELETE");
urlConnection.setDoOutput(true);
urlConnection.setDoInput(true);
urlConnection.setUseCaches(false);
urlConnection.setAllowUserInteraction(false);
urlConnection.setRequestProperty("Content-type", "text/xml; charset=" + "UTF-8");
return urlConnection.getResponseCode();
}
public int delete(String mapName, String key) throws IOException {
String url = address + "maps/" + mapName + "/" + key;
/** set up the http connection parameters */
HttpURLConnection urlConnection = (HttpURLConnection) (new URL(url)).openConnection();
urlConnection.setRequestMethod("DELETE");
urlConnection.setDoOutput(true);
urlConnection.setDoInput(true);
urlConnection.setUseCaches(false);
urlConnection.setAllowUserInteraction(false);
urlConnection.setRequestProperty("Content-type", "text/xml; charset=" + "UTF-8");
return urlConnection.getResponseCode();
}
private String doGet(final String url) {
String result = null;
try {
HttpURLConnection httpUrlConnection = (HttpURLConnection) (new URL(url)).openConnection();
BufferedReader rd = new BufferedReader(new InputStreamReader(httpUrlConnection.getInputStream()));
StringBuilder data = new StringBuilder(150);
String line;
while ((line = rd.readLine()) != null) data.append(line);
rd.close();
result = data.toString();
httpUrlConnection.disconnect();
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_ascii_RestTest.java |
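The HTTPCommunicator helpers close their streams manually, so an exception from write() would leave the connection's output stream open. A hedged variant of put() using try-with-resources (Java 7+), mirroring the URL layout used in the test; this is a sketch, not Hazelcast code:

import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

class RestPut {
    // Same POST as HTTPCommunicator.put, but the writer is closed even if write() throws.
    static int put(String address, String mapName, String key, String value) throws IOException {
        URL url = new URL(address + "maps/" + mapName + "/" + key);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-type", "text/xml; charset=UTF-8");
        try (Writer writer = new OutputStreamWriter(conn.getOutputStream(), StandardCharsets.UTF_8)) {
            writer.write(value);
        }
        return conn.getResponseCode();
    }
}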
750 | public class GetResponse extends ActionResponse implements Iterable<GetField>, ToXContent {
private GetResult getResult;
GetResponse() {
}
GetResponse(GetResult getResult) {
this.getResult = getResult;
}
/**
* Does the document exist?
*/
public boolean isExists() {
return getResult.isExists();
}
/**
* The index the document was fetched from.
*/
public String getIndex() {
return getResult.getIndex();
}
/**
* The type of the document.
*/
public String getType() {
return getResult.getType();
}
/**
* The id of the document.
*/
public String getId() {
return getResult.getId();
}
/**
* The version of the doc.
*/
public long getVersion() {
return getResult.getVersion();
}
/**
* The source of the document if exists.
*/
public byte[] getSourceAsBytes() {
return getResult.source();
}
/**
* Returns the internal source bytes, as they are returned without munging (for example,
* might still be compressed).
*/
public BytesReference getSourceInternal() {
return getResult.internalSourceRef();
}
/**
* Returns a bytes reference, also uncompressing the source if needed.
*/
public BytesReference getSourceAsBytesRef() {
return getResult.sourceRef();
}
/**
* Is the source empty (not available) or not.
*/
public boolean isSourceEmpty() {
return getResult.isSourceEmpty();
}
/**
* The source of the document (as a string).
*/
public String getSourceAsString() {
return getResult.sourceAsString();
}
/**
* The source of the document (As a map).
*/
@SuppressWarnings({"unchecked"})
public Map<String, Object> getSourceAsMap() throws ElasticsearchParseException {
return getResult.sourceAsMap();
}
public Map<String, Object> getSource() {
return getResult.getSource();
}
public Map<String, GetField> getFields() {
return getResult.getFields();
}
public GetField getField(String name) {
return getResult.field(name);
}
@Override
public Iterator<GetField> iterator() {
return getResult.iterator();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return getResult.toXContent(builder, params);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
getResult = GetResult.readGetResult(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
getResult.writeTo(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_get_GetResponse.java |
69 | @SuppressWarnings("serial")
static final class MapReduceEntriesTask<K,V,U>
extends BulkTask<K,V,U> {
final Fun<Map.Entry<K,V>, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
U result;
MapReduceEntriesTask<K,V,U> rights, nextRight;
MapReduceEntriesTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceEntriesTask<K,V,U> nextRight,
Fun<Map.Entry<K,V>, ? extends U> transformer,
BiFun<? super U, ? super U, ? extends U> reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.reducer = reducer;
}
public final U getRawResult() { return result; }
public final void compute() {
final Fun<Map.Entry<K,V>, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceEntriesTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, reducer)).fork();
}
U r = null;
for (Node<K,V> p; (p = advance()) != null; ) {
U u;
if ((u = transformer.apply(p)) != null)
r = (r == null) ? u : reducer.apply(r, u);
}
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceEntriesTask<K,V,U>
t = (MapReduceEntriesTask<K,V,U>)c,
s = t.rights;
while (s != null) {
U tr, sr;
if ((sr = s.result) != null)
t.result = (((tr = t.result) == null) ? sr :
reducer.apply(tr, sr));
s = t.rights = s.nextRight;
}
}
}
}
} | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
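MapReduceEntriesTask forks the right half of each range, transforms the left half in place, and merges sibling results through the CountedCompleter chain. The same divide-transform-reduce shape, reduced to a plain RecursiveTask sketch:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

class SumOfSquares extends RecursiveTask<Long> {
    private static final int CUTOFF = 1024;
    private final int[] a;
    private final int lo, hi;

    SumOfSquares(int[] a, int lo, int hi) {
        this.a = a; this.lo = lo; this.hi = hi;
    }

    @Override
    protected Long compute() {
        if (hi - lo <= CUTOFF) { // small range: transform and reduce sequentially
            long sum = 0;
            for (int i = lo; i < hi; i++) sum += (long) a[i] * a[i];
            return sum;
        }
        int mid = (lo + hi) >>> 1;
        SumOfSquares right = new SumOfSquares(a, mid, hi);
        right.fork();                               // run the right half asynchronously
        long left = new SumOfSquares(a, lo, mid).compute();
        return left + right.join();                 // reducer step
    }
}
// Usage: long total = new ForkJoinPool().invoke(new SumOfSquares(data, 0, data.length));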
1,326 | public abstract class OAbstractPageWALRecord extends OOperationUnitRecord {
private OLogSequenceNumber lsn;
private long pageIndex;
private long fileId;
protected OAbstractPageWALRecord() {
}
protected OAbstractPageWALRecord(long pageIndex, long fileId, OOperationUnitId operationUnitId) {
super(operationUnitId);
this.pageIndex = pageIndex;
this.fileId = fileId;
}
@Override
public int toStream(byte[] content, int offset) {
offset = super.toStream(content, offset);
OLongSerializer.INSTANCE.serializeNative(pageIndex, content, offset);
offset += OLongSerializer.LONG_SIZE;
OLongSerializer.INSTANCE.serializeNative(fileId, content, offset);
offset += OLongSerializer.LONG_SIZE;
return offset;
}
@Override
public int fromStream(byte[] content, int offset) {
offset = super.fromStream(content, offset);
pageIndex = OLongSerializer.INSTANCE.deserializeNative(content, offset);
offset += OLongSerializer.LONG_SIZE;
fileId = OLongSerializer.INSTANCE.deserializeNative(content, offset);
offset += OLongSerializer.LONG_SIZE;
return offset;
}
@Override
public int serializedSize() {
return super.serializedSize() + 2 * OLongSerializer.LONG_SIZE;
}
public long getPageIndex() {
return pageIndex;
}
public long getFileId() {
return fileId;
}
@Override
public OLogSequenceNumber getLsn() {
return lsn;
}
@Override
public void setLsn(OLogSequenceNumber lsn) {
this.lsn = lsn;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
if (!super.equals(o))
return false;
OAbstractPageWALRecord that = (OAbstractPageWALRecord) o;
if (fileId != that.fileId)
return false;
if (pageIndex != that.pageIndex)
return false;
if (lsn != null ? !lsn.equals(that.lsn) : that.lsn != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (lsn != null ? lsn.hashCode() : 0);
result = 31 * result + (int) (pageIndex ^ (pageIndex >>> 32));
result = 31 * result + (int) (fileId ^ (fileId >>> 32));
return result;
}
@Override
public String toString() {
return "OAbstractPageWALRecord{" + "lsn=" + lsn + ", pageIndex=" + pageIndex + ", fileId=" + fileId + '}';
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_OAbstractPageWALRecord.java |
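toStream/fromStream above thread a running offset through a shared byte[], each call returning the advanced offset so the caller can continue from there. A standalone sketch of the same contract using java.nio.ByteBuffer in place of OrientDB's OLongSerializer:

import java.nio.ByteBuffer;

class PagePointer {
    long pageIndex;
    long fileId;

    int toStream(byte[] content, int offset) {
        ByteBuffer.wrap(content, offset, 2 * Long.BYTES).putLong(pageIndex).putLong(fileId);
        return offset + 2 * Long.BYTES; // caller continues writing from here
    }

    int fromStream(byte[] content, int offset) {
        ByteBuffer buffer = ByteBuffer.wrap(content, offset, 2 * Long.BYTES);
        pageIndex = buffer.getLong();
        fileId = buffer.getLong();
        return offset + 2 * Long.BYTES;
    }

    int serializedSize() {
        return 2 * Long.BYTES;
    }
}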
793 | new Callable<OFunctionLibrary>() {
public OFunctionLibrary call() {
final OFunctionLibraryImpl instance = new OFunctionLibraryImpl();
if (iLoad)
instance.load();
return instance;
}
}), database); | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_OMetadataDefault.java |
1,051 | public final class TermVectorFields extends Fields {
private final ObjectLongOpenHashMap<String> fieldMap;
private final BytesReference termVectors;
final boolean hasTermStatistic;
final boolean hasFieldStatistic;
/**
* @param headerRef Stores offsets per field in the {@code termVectors} and some
* header information as {@link BytesRef}.
* @param termVectors Stores the actual term vectors as a {@link BytesRef}.
*/
public TermVectorFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
BytesStreamInput header = new BytesStreamInput(headerRef);
fieldMap = new ObjectLongOpenHashMap<String>();
// here we read the header to fill the field offset map
String headerString = header.readString();
assert headerString.equals("TV");
int version = header.readInt();
assert version == -1;
hasTermStatistic = header.readBoolean();
hasFieldStatistic = header.readBoolean();
final int numFields = header.readVInt();
for (int i = 0; i < numFields; i++) {
fieldMap.put((header.readString()), header.readVLong());
}
header.close();
// reference to the term vector data
this.termVectors = termVectors;
}
@Override
public Iterator<String> iterator() {
final Iterator<ObjectLongCursor<String>> iterator = fieldMap.iterator();
return new Iterator<String>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public String next() {
return iterator.next().key;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public Terms terms(String field) throws IOException {
// first, find where in the termVectors bytes the actual term vector for
// this field is stored
if (!fieldMap.containsKey(field)) {
return null; // we don't have it.
}
long offset = fieldMap.lget();
final BytesStreamInput perFieldTermVectorInput = new BytesStreamInput(this.termVectors);
perFieldTermVectorInput.reset();
perFieldTermVectorInput.skip(offset);
// read how many terms....
final long numTerms = perFieldTermVectorInput.readVLong();
// ...if positions etc. were stored....
final boolean hasPositions = perFieldTermVectorInput.readBoolean();
final boolean hasOffsets = perFieldTermVectorInput.readBoolean();
final boolean hasPayloads = perFieldTermVectorInput.readBoolean();
// read the field statistics
final long sumTotalTermFreq = hasFieldStatistic ? readPotentiallyNegativeVLong(perFieldTermVectorInput) : -1;
final long sumDocFreq = hasFieldStatistic ? readPotentiallyNegativeVLong(perFieldTermVectorInput) : -1;
final int docCount = hasFieldStatistic ? readPotentiallyNegativeVInt(perFieldTermVectorInput) : -1;
return new Terms() {
@Override
public TermsEnum iterator(TermsEnum reuse) throws IOException {
// convert bytes ref for the terms to actual data
return new TermsEnum() {
int currentTerm = 0;
int freq = 0;
int docFreq = -1;
long totalTermFrequency = -1;
int[] positions = new int[1];
int[] startOffsets = new int[1];
int[] endOffsets = new int[1];
BytesRef[] payloads = new BytesRef[1];
final BytesRef spare = new BytesRef();
@Override
public BytesRef next() throws IOException {
if (currentTerm++ < numTerms) {
// term string. first the size...
int termVectorSize = perFieldTermVectorInput.readVInt();
spare.grow(termVectorSize);
// ...then the value.
perFieldTermVectorInput.readBytes(spare.bytes, 0, termVectorSize);
spare.length = termVectorSize;
if (hasTermStatistic) {
docFreq = readPotentiallyNegativeVInt(perFieldTermVectorInput);
totalTermFrequency = readPotentiallyNegativeVLong(perFieldTermVectorInput);
}
freq = readPotentiallyNegativeVInt(perFieldTermVectorInput);
// grow the arrays to read the values. this is just
// for performance reasons. Re-use memory instead of
// realloc.
growBuffers();
// finally, read the values into the arrays
// currentPosition etc. so that we can just iterate
// later
writeInfos(perFieldTermVectorInput);
return spare;
} else {
return null;
}
}
private void writeInfos(final BytesStreamInput input) throws IOException {
for (int i = 0; i < freq; i++) {
if (hasPositions) {
positions[i] = input.readVInt();
}
if (hasOffsets) {
startOffsets[i] = input.readVInt();
endOffsets[i] = input.readVInt();
}
if (hasPayloads) {
int payloadLength = input.readVInt();
if (payloads[i] == null) {
payloads[i] = new BytesRef(payloadLength);
} else {
payloads[i].grow(payloadLength);
}
input.readBytes(payloads[i].bytes, 0, payloadLength);
payloads[i].length = payloadLength;
payloads[i].offset = 0;
}
}
}
private void growBuffers() {
if (hasPositions) {
positions = grow(positions, freq);
}
if (hasOffsets) {
startOffsets = grow(startOffsets, freq);
endOffsets = grow(endOffsets, freq);
}
if (hasPayloads) {
if (payloads.length < freq) {
final BytesRef[] newArray = new BytesRef[ArrayUtil.oversize(freq, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
System.arraycopy(payloads, 0, newArray, 0, payloads.length);
payloads = newArray;
}
}
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public SeekStatus seekCeil(BytesRef text) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void seekExact(long ord) throws IOException {
throw new UnsupportedOperationException("Seek is not supported");
}
@Override
public BytesRef term() throws IOException {
return spare;
}
@Override
public long ord() throws IOException {
throw new UnsupportedOperationException("ordinals are not supported");
}
@Override
public int docFreq() throws IOException {
return docFreq;
}
@Override
public long totalTermFreq() throws IOException {
return totalTermFrequency;
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
return docsAndPositions(liveDocs, reuse instanceof DocsAndPositionsEnum ? (DocsAndPositionsEnum) reuse : null, 0);
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
final TermVectorsDocsAndPosEnum retVal = (reuse instanceof TermVectorsDocsAndPosEnum ? (TermVectorsDocsAndPosEnum) reuse
: new TermVectorsDocsAndPosEnum());
return retVal.reset(hasPositions ? positions : null, hasOffsets ? startOffsets : null, hasOffsets ? endOffsets
: null, hasPayloads ? payloads : null, freq);
}
};
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public long size() throws IOException {
return numTerms;
}
@Override
public long getSumTotalTermFreq() throws IOException {
return sumTotalTermFreq;
}
@Override
public long getSumDocFreq() throws IOException {
return sumDocFreq;
}
@Override
public int getDocCount() throws IOException {
return docCount;
}
@Override
public boolean hasFreqs() {
return true;
}
@Override
public boolean hasOffsets() {
return hasOffsets;
}
@Override
public boolean hasPositions() {
return hasPositions;
}
@Override
public boolean hasPayloads() {
return hasPayloads;
}
};
}
@Override
public int size() {
return fieldMap.size();
}
private final class TermVectorsDocsAndPosEnum extends DocsAndPositionsEnum {
private boolean hasPositions;
private boolean hasOffsets;
private boolean hasPayloads;
int curPos = -1;
int doc = -1;
private int freq;
private int[] startOffsets;
private int[] positions;
private BytesRef[] payloads;
private int[] endOffsets;
private DocsAndPositionsEnum reset(int[] positions, int[] startOffsets, int[] endOffsets, BytesRef[] payloads, int freq) {
curPos = -1;
doc = -1;
this.hasPositions = positions != null;
this.hasOffsets = startOffsets != null;
this.hasPayloads = payloads != null;
this.freq = freq;
this.startOffsets = startOffsets;
this.endOffsets = endOffsets;
this.payloads = payloads;
this.positions = positions;
return this;
}
@Override
public int nextDoc() throws IOException {
return doc = (doc == -1 ? 0 : NO_MORE_DOCS);
}
@Override
public int docID() {
return doc;
}
@Override
public int advance(int target) throws IOException {
while (nextDoc() < target && doc != NO_MORE_DOCS) {
}
return doc;
}
@Override
public int freq() throws IOException {
return freq;
}
// call nextPosition once before calling this one,
// otherwise the counter is not advanced
@Override
public int startOffset() throws IOException {
assert curPos < freq && curPos >= 0;
return hasOffsets ? startOffsets[curPos] : -1;
}
@Override
// can return -1 if positions were not requested or
// stored but offsets were stored and requested
public int nextPosition() throws IOException {
assert curPos + 1 < freq;
++curPos;
// this is kind of cheating but if you don't need positions
// we save lots of space on the wire
return hasPositions ? positions[curPos] : -1;
}
@Override
public BytesRef getPayload() throws IOException {
assert curPos < freq && curPos >= 0;
return hasPayloads ? payloads[curPos] : null;
}
@Override
public int endOffset() throws IOException {
assert curPos < freq && curPos >= 0;
return hasOffsets ? endOffsets[curPos] : -1;
}
@Override
public long cost() {
return 1;
}
}
// read a vInt. this is used if the integer might be negative. In this case,
// the writer writes 0 for -1, or value + 1 otherwise, so we have to
// subtract 1 again
// (the writer adds one so that a missing term freq can be encoded)
int readPotentiallyNegativeVInt(BytesStreamInput stream) throws IOException {
return stream.readVInt() - 1;
}
// read a vLong. this is used if the integer might be negative. In this
// case, the writer writes 0 for -1, or value + 1 otherwise, so we have to
// subtract 1 again
// (the writer adds one so that a missing term freq can be encoded)
long readPotentiallyNegativeVLong(BytesStreamInput stream) throws IOException {
return stream.readVLong() - 1;
}
} | 0true
| src_main_java_org_elasticsearch_action_termvector_TermVectorFields.java |
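The readPotentiallyNegativeV* helpers decode values the writer stored shifted by +1, so that -1 (meaning "statistic not present") survives an unsigned variable-length encoding. The writer side below is an assumption inferred from the comments above, not code from this file:

class ShiftedVInt {
    // Assumed writer side: -1 is stored as 0, any other value as value + 1.
    static int encode(int value) {
        assert value >= -1;
        return value + 1; // what would be written as an unsigned vInt
    }

    // Matches readPotentiallyNegativeVInt: undo the shift.
    static int decode(int stored) {
        return stored - 1;
    }

    public static void main(String[] args) {
        System.out.println(decode(encode(-1))); // -1: a missing statistic round-trips
        System.out.println(decode(encode(42))); // 42
    }
}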
70 | class AssignToIfIsProposal extends LocalProposal {
protected DocumentChange createChange(IDocument document, Node expanse,
Integer stopIndex) {
DocumentChange change =
new DocumentChange("Assign to If Is", document);
change.setEdit(new MultiTextEdit());
change.addEdit(new InsertEdit(offset, "if (is Nothing " + initialName + " = "));
String terminal = expanse.getEndToken().getText();
if (!terminal.equals(";")) {
change.addEdit(new InsertEdit(stopIndex+1, ") {}"));
exitPos = stopIndex+13;
}
else {
change.addEdit(new ReplaceEdit(stopIndex, 1, ") {}"));
exitPos = stopIndex+12;
}
return change;
}
public AssignToIfIsProposal(Tree.CompilationUnit cu,
Node node, int currentOffset) {
super(cu, node, currentOffset);
}
protected void addLinkedPositions(IDocument document, Unit unit)
throws BadLocationException {
ProposalPosition typePosition =
new ProposalPosition(document, offset+7, 7, 1,
getCaseTypeProposals(offset+7, unit, type));
ProposalPosition namePosition =
new ProposalPosition(document, offset+15, initialName.length(), 0,
getNameProposals(offset+15, 1, nameProposals));
LinkedMode.addLinkedPosition(linkedModeModel, typePosition);
LinkedMode.addLinkedPosition(linkedModeModel, namePosition);
}
@Override
String[] computeNameProposals(Node expression) {
return super.computeNameProposals(expression);
}
@Override
public String getDisplayString() {
return "Assign expression to 'if (is)' condition";
}
@Override
boolean isEnabled(ProducedType resultType) {
return true;
}
static void addAssignToIfIsProposal(Tree.CompilationUnit cu,
Collection<ICompletionProposal> proposals,
Node node, int currentOffset) {
AssignToIfIsProposal prop =
new AssignToIfIsProposal(cu, node, currentOffset);
if (prop.isEnabled()) {
proposals.add(prop);
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AssignToIfIsProposal.java |
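createChange above composes InsertEdit/ReplaceEdit children under a MultiTextEdit; child offsets are always relative to the unmodified document. A small sketch applying such a composite edit directly, outside the DocumentChange wrapper (offsets chosen for this toy input):

import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.IDocument;
import org.eclipse.text.edits.InsertEdit;
import org.eclipse.text.edits.MultiTextEdit;
import org.eclipse.text.edits.ReplaceEdit;

class TextEditDemo {
    public static void main(String[] args) throws Exception {
        IDocument doc = new Document("value x = expr;");
        MultiTextEdit edit = new MultiTextEdit();
        edit.addChild(new InsertEdit(0, "if (is Nothing "));   // prepend the condition opener
        edit.addChild(new ReplaceEdit(14, 1, ") {}"));         // replace the trailing ';'
        edit.apply(doc);
        System.out.println(doc.get()); // if (is Nothing value x = expr) {}
    }
}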
1,600 | public interface HashFunction {
/**
* Calculate a hash value for routing
* @param routing String to calculate the hash value from
* @return hash value of the given routing string
*/
int hash(String routing);
/**
* Calculate a hash value for routing and its type
* @param type the type name
* @param id the id to calculate the hash value from
* @return hash value of the given type and id
*/
int hash(String type, String id);
} | 0true
| src_main_java_org_elasticsearch_cluster_routing_operation_hash_HashFunction.java |
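A minimal implementation sketch of the HashFunction contract; it simply reuses String.hashCode and is illustrative only, not necessarily the hash Elasticsearch ships by default:

public class SimpleHash implements HashFunction {
    @Override
    public int hash(String routing) {
        return routing.hashCode();
    }

    @Override
    public int hash(String type, String id) {
        // combine both components so equal ids under different types diverge
        return type.hashCode() + 31 * id.hashCode();
    }
}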
1,647 | public class AddMetadataFromFieldTypeRequest {
private final Field requestedField;
private final Class<?> targetClass;
private final ForeignKey foreignField;
private final ForeignKey[] additionalForeignFields;
private final MergedPropertyType mergedPropertyType;
private final List<Property> componentProperties;
private final String idProperty;
private final String prefix;
private final String requestedPropertyName;
private final Type type;
private final boolean propertyForeignKey;
private final int additionalForeignKeyIndexPosition;
private final Map<String, FieldMetadata> presentationAttributes;
private final FieldMetadata presentationAttribute;
private final SupportedFieldType explicitType;
private final Class<?> returnedClass;
private final DynamicEntityDao dynamicEntityDao;
public AddMetadataFromFieldTypeRequest(Field requestedField, Class<?> targetClass, ForeignKey foreignField,
ForeignKey[] additionalForeignFields,
MergedPropertyType mergedPropertyType, List<Property> componentProperties,
String idProperty,
String prefix, String requestedPropertyName, Type type,
boolean propertyForeignKey, int additionalForeignKeyIndexPosition,
Map<String, FieldMetadata> presentationAttributes,
FieldMetadata presentationAttribute, SupportedFieldType explicitType,
Class<?> returnedClass, DynamicEntityDao dynamicEntityDao) {
this.requestedField = requestedField;
this.targetClass = targetClass;
this.foreignField = foreignField;
this.additionalForeignFields = additionalForeignFields;
this.mergedPropertyType = mergedPropertyType;
this.componentProperties = componentProperties;
this.idProperty = idProperty;
this.prefix = prefix;
this.requestedPropertyName = requestedPropertyName;
this.type = type;
this.propertyForeignKey = propertyForeignKey;
this.additionalForeignKeyIndexPosition = additionalForeignKeyIndexPosition;
this.presentationAttributes = presentationAttributes;
this.presentationAttribute = presentationAttribute;
this.explicitType = explicitType;
this.returnedClass = returnedClass;
this.dynamicEntityDao = dynamicEntityDao;
}
public Field getRequestedField() {
return requestedField;
}
public Class<?> getTargetClass() {
return targetClass;
}
public ForeignKey getForeignField() {
return foreignField;
}
public ForeignKey[] getAdditionalForeignFields() {
return additionalForeignFields;
}
public MergedPropertyType getMergedPropertyType() {
return mergedPropertyType;
}
public List<Property> getComponentProperties() {
return componentProperties;
}
public String getIdProperty() {
return idProperty;
}
public String getPrefix() {
return prefix;
}
public String getRequestedPropertyName() {
return requestedPropertyName;
}
public Type getType() {
return type;
}
public boolean isPropertyForeignKey() {
return propertyForeignKey;
}
public int getAdditionalForeignKeyIndexPosition() {
return additionalForeignKeyIndexPosition;
}
public Map<String, FieldMetadata> getPresentationAttributes() {
return presentationAttributes;
}
public FieldMetadata getPresentationAttribute() {
return presentationAttribute;
}
public SupportedFieldType getExplicitType() {
return explicitType;
}
public Class<?> getReturnedClass() {
return returnedClass;
}
public DynamicEntityDao getDynamicEntityDao() {
return dynamicEntityDao;
}
} | 0true
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_provider_metadata_request_AddMetadataFromFieldTypeRequest.java |
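The class is a classic parameter object: it bundles seventeen constructor arguments so downstream metadata providers depend only on the getters they need. A hypothetical consumer (names invented for illustration) might look like:

```java
public class ExampleMetadataConsumer {
    public String resolvePropertyName(AddMetadataFromFieldTypeRequest request) {
        // prefix + requested name is how the flattened admin property is addressed
        String property = request.getPrefix() + request.getRequestedPropertyName();
        if (request.isPropertyForeignKey()) {
            // foreign-key properties carry their join configuration along
            ForeignKey fk = request.getForeignField();
            // ... derive metadata from fk, request.getTargetClass(), etc. ...
        }
        return property;
    }
}
```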
2,837 | SYMBOL {
@Override
public boolean isTokenChar(int c) {
switch (Character.getType(c)) {
case Character.CURRENCY_SYMBOL:
case Character.MATH_SYMBOL:
case Character.OTHER_SYMBOL:
return true;
default:
return false;
}
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_CharMatcher.java |
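The matcher delegates entirely to Character.getType, so its behavior is easy to check in isolation:

```java
public class SymbolMatcherDemo {
    public static void main(String[] args) {
        // these are the three general categories the switch above accepts
        System.out.println(Character.getType('$') == Character.CURRENCY_SYMBOL); // true
        System.out.println(Character.getType('+') == Character.MATH_SYMBOL);     // true
        System.out.println(Character.getType('©') == Character.OTHER_SYMBOL);    // true
        // letters fall through to the default branch and are rejected
        System.out.println(Character.getType('a') == Character.LOWERCASE_LETTER); // true
    }
}
```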
269 | public class Navigation {
public static void gotoDeclaration(Declaration d, IProject project) {
gotoDeclaration(d, project, getCurrentEditor());
}
public static void gotoDeclaration(Declaration d, IProject project,
IEditorPart editor) {
// if (project!=null) {
if (project!=null && editor instanceof CeylonEditor) {
CeylonEditor ce = (CeylonEditor) editor;
CeylonParseController cpc = ce.getParseController();
IProject ep = cpc.getProject();
if (ep != null && ep.equals(project)) {
Node node = getReferencedNode(d, getCompilationUnit(d, cpc));
if (node != null) {
gotoNode(node, project);
return;
}
}
}
if (d.getUnit() instanceof CeylonUnit) {
CeylonUnit ceylonUnit = (CeylonUnit) d.getUnit();
Node node = getReferencedNode(d, ceylonUnit.getCompilationUnit());
if (node != null) {
gotoNode(node, project);
}
else if (ceylonUnit instanceof CeylonBinaryUnit) {
CeylonBinaryUnit binaryUnit = (CeylonBinaryUnit) ceylonUnit;
if (isJavaLikeFileName(binaryUnit.getSourceRelativePath())) {
gotoJavaNode(d);
}
}
}
else {
gotoJavaNode(d);
}
// }
// else {
// //it's coming from the "unversioned" JDK module, which
// //we don't display multiple choices for, so just pick
// //the first available project
// gotoJavaNode(d);
// }
}
public static void gotoDeclaration(IProject project, PhasedUnit pu,
Declaration declaration) {
IEditorInput editorInput = getEditorInput(pu.getUnit());
Node node = getReferencedNode(declaration,
pu.getCompilationUnit());
try {
CeylonEditor editor = (CeylonEditor)
getActivePage().openEditor(editorInput, EDITOR_ID);
editor.selectAndReveal(getIdentifyingNode(node).getStartIndex(),
declaration.getName().length());
}
catch (PartInitException e) {
e.printStackTrace();
}
}
public static void gotoNode(Node node, IProject project) {
Unit unit = node.getUnit();
int length = Nodes.getLength(node);
int startOffset = Nodes.getStartOffset(node);
if (unit instanceof IResourceAware) {
IFile file = ((IResourceAware) unit).getFileResource();
if (file != null) {
gotoFile(file, startOffset, length);
return;
}
}
gotoLocation(getNodePath(node, project),
startOffset,
length);
}
public static void gotoLocation(Unit unit, int startOffset, int length) {
if (unit instanceof IResourceAware) {
IFile file = ((IResourceAware) unit).getFileResource();
if (file != null) {
gotoFile(file, startOffset, length);
return;
}
}
IPath path;
if (unit instanceof IProjectAware) {
path = getUnitPath(((IProjectAware) unit).getProject(), unit);
} else {
path = getUnitPath(null, unit);
}
gotoLocation(path, startOffset, length);
}
public static void gotoLocation(IPath path, int offset) {
gotoLocation(path, offset, 0);
}
public static void gotoLocation(IPath path, int offset, int length) {
if (path==null || path.isEmpty()) return;
IEditorInput editorInput = getEditorInput(path);
try {
CeylonEditor editor = (CeylonEditor) getActivePage()
.openEditor(editorInput, EDITOR_ID);
editor.selectAndReveal(offset, length);
}
catch (PartInitException pie) {
pie.printStackTrace();
}
}
public static void gotoFile(IFile file, int offset, int length) {
IWorkbenchPage page = getActivePage();
IEditorInput input = new FileEditorInput(file);
if (input!=null) {
IEditorPart part = page.findEditor(input);
ITextEditor editor = null;
if (part instanceof ITextEditor) {
editor = (ITextEditor) part;
}
else {
try {
editor = (ITextEditor)
page.openEditor(input, EDITOR_ID);
}
catch (PartInitException e) {
e.printStackTrace();
return;
}
}
if (offset>=0) {
editor.selectAndReveal(offset, length);
}
page.activate(editor);
}
}
// public static boolean belongsToProject(Unit unit, IProject project) {
// if (project == null) {
// return false;
// }
// return (unit instanceof IResourceAware) &&
// project.equals(((IResourceAware)unit).getProjectResource());
// }
public static IPath getNodePath(Node node, IProject project) {
Unit unit = node.getUnit();
return getUnitPath(project, unit);
}
public static IPath getUnitPath(IProject project, Unit unit) {
if (unit instanceof IResourceAware) {
IFile fileResource = ((IResourceAware) unit).getFileResource();
if (fileResource!=null) {
return fileResource.getLocation();
}
else if (project!=null) {
return project.getLocation().append(unit.getRelativePath());
}
}
if ((unit instanceof ExternalSourceFile ) ||
(unit instanceof CeylonBinaryUnit )) {
IdePhasedUnit externalPhasedUnit = ((CeylonUnit) unit).getPhasedUnit();
return new Path(externalPhasedUnit.getUnitFile().getPath());
}
return null;
}
public static void gotoDeclaration(Referenceable model,
CeylonEditor editor) {
gotoDeclaration(model, editor.getParseController());
}
public static void gotoDeclaration(Referenceable model,
CeylonParseController controller) {
if (model!=null) {
Node refNode = getReferencedNode(model, controller);
if (refNode!=null) {
gotoNode(refNode, controller.getProject());
}
else if (model instanceof Declaration) {
gotoJavaNode((Declaration) model);
}
}
}
/**
* Selects and reveals the given offset and length in the given editor part.
*/
//private static void revealInEditor(IEditorPart editor, final int offset, final int length) {
// if (editor instanceof ITextEditor) {
// ((ITextEditor) editor).selectAndReveal(offset, length);
// return;
// }
// // Support for non-text editor - try IGotoMarker interface
// if (editor instanceof IGotoMarker) {
// final IEditorInput input= editor.getEditorInput();
// if (input instanceof IFileEditorInput) {
// final IGotoMarker gotoMarkerTarget= (IGotoMarker) editor;
// WorkspaceModifyOperation op= new WorkspaceModifyOperation() {
// protected void execute(IProgressMonitor monitor) throws CoreException {
// IMarker marker= null;
// try {
// marker = ((IFileEditorInput) input).getFile().createMarker(IMarker.TEXT);
// String[] attributeNames = new String[] {IMarker.CHAR_START, IMarker.CHAR_END};
// Object[] values = new Object[] {offset, offset + length};
// marker.setAttributes(attributeNames, values);
// gotoMarkerTarget.gotoMarker(marker);
// } finally {
// if (marker!=null)
// marker.delete();
// }
// }
// };
// try {
// op.run(null);
// } catch (InvocationTargetException ex) {
// // reveal failed
// } catch (InterruptedException e) {
// Assert.isTrue(false, "this operation can not be canceled"); //$NON-NLS-1$
// }
// }
// return;
// }
// /*
// * Workaround: send out a text selection XXX: Needs to be improved, see https://bugs.eclipse.org/bugs/show_bug.cgi?id=32214
// */
// if (editor != null && editor.getEditorSite().getSelectionProvider() != null) {
// IEditorSite site= editor.getEditorSite();
// if (site==null)
// return;
// ISelectionProvider provider= editor.getEditorSite().getSelectionProvider();
// if (provider==null)
// return;
// provider.setSelection(new TextSelection(offset, length));
// }
//}
private static IEditorPart openInEditor(IFile file, boolean activate)
throws PartInitException {
if (file!=null) {
IWorkbenchPage p = getWorkbench().getActiveWorkbenchWindow().getActivePage();
if (p!=null) {
IEditorPart editorPart = IDE.openEditor(p, file, activate);
Navigation.initializeHighlightRange(editorPart);
return editorPart;
}
}
return null;
}
/**
* Selects and reveals the given region in the given editor part.
*/
//private static void revealInEditor(IEditorPart part, IRegion region) {
// if (part!=null && region!=null)
// revealInEditor(part, region.getOffset(), region.getLength());
//}
private static IEditorPart openInEditor(IEditorInput input, String editorID, boolean activate)
throws PartInitException {
if (input!=null) {
IWorkbenchPage p = getWorkbench().getActiveWorkbenchWindow().getActivePage();
if (p!=null) {
IEditorPart editorPart = p.openEditor(input, editorID, activate);
Navigation.initializeHighlightRange(editorPart);
return editorPart;
}
}
return null;
}
/**
* Opens an editor suitable for a model element, IFile, IStorage...
*
* @return the IEditorPart or null if wrong element type or opening failed
*/
public static IEditorPart openInEditor(Object inputElement, boolean activate) throws PartInitException {
if (inputElement instanceof IFile)
return openInEditor((IFile) inputElement, activate);
IEditorInput input= EditorUtil.getEditorInput(inputElement);
if (input!=null)
return openInEditor(input, EditorUtil.getEditorID(input, inputElement), activate);
return null;
}
/**
* Opens an editor suitable for a model element, <code>IFile</code>, or <code>IStorage</code>.
* The editor is activated by default.
*
* @return the IEditorPart or null if wrong element type or opening failed
*/
public static IEditorPart openInEditor(Object inputElement) throws PartInitException {
return openInEditor(inputElement, true);
}
public static void gotoLocation(final IResource file, final int offset, int length) {
Map<String, Object> map = new HashMap<String, Object>();
map.put(IMarker.CHAR_START, offset);
map.put(IMarker.CHAR_END, offset+length);
if (file instanceof IFile && CeylonBuilder.isCeylon((IFile) file)) {
map.put(IDE.EDITOR_ID_ATTR, CeylonPlugin.EDITOR_ID);
}
try {
IMarker marker = file.createMarker(IMarker.TEXT);
marker.setAttributes(map);
IDE.openEditor(EditorUtil.getActivePage(), marker);
marker.delete();
}
catch (CoreException ce) {} //deliberately swallow it
/*try {
IEditorPart editor = EditorUtility.isOpenInEditor(path);
if (editor == null) {
editor = EditorUtility.openInEditor(path);
}
EditorUtility.revealInEditor(editor, targetOffset, 0);
}
catch (PartInitException e) {
e.printStackTrace();
}*/
}
public static void gotoLocation(final IResource file, final int offset) {
gotoLocation(file, offset, 0);
}
/**
* Tests if a given input element is currently shown in an editor
*
* @return the IEditorPart if shown, null if element is not open in an editor
*/
//private static IEditorPart isOpenInEditor(Object inputElement) {
// IEditorInput input= null;
// input= getEditorInput(inputElement);
// if (input!=null) {
// IWorkbenchPage p= getWorkbench().getActiveWorkbenchWindow().getActivePage();
// if (p!=null) {
// return p.findEditor(input);
// }
// }
// return null;
//}
private static void initializeHighlightRange(IEditorPart editorPart) {
if (editorPart instanceof ITextEditor) {
IAction toggleAction =
editorPart.getEditorSite().getActionBars()
.getGlobalActionHandler(TOGGLE_SHOW_SELECTED_ELEMENT_ONLY);
boolean enable = toggleAction!=null;
if (enable && editorPart instanceof CeylonEditor) {
// TODO Maybe support show segments?
enable = false; // CeylonPlugin.getInstance().getPreferenceStore().getBoolean(PreferenceConstants.EDITOR_SHOW_SEGMENTS);
}
else {
enable = enable &&
toggleAction.isEnabled() &&
toggleAction.isChecked();
}
if (enable) {
if (toggleAction instanceof TextEditorAction) {
// Reset the action
((TextEditorAction) toggleAction).setEditor(null);
// Restore the action
((TextEditorAction) toggleAction).setEditor((ITextEditor) editorPart);
}
else {
// Un-check
toggleAction.run();
// Check
toggleAction.run();
}
}
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_Navigation.java |
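A sketch of a typical call site; the path and offsets are invented, and like all Eclipse editor APIs this must run with an open workbench on the UI thread:

```java
// open the Ceylon editor on a source file and select 10 characters at offset 120
IPath path = new Path("/workspace/project/source/com/example/run.ceylon");
Navigation.gotoLocation(path, 120, 10);
```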
4,519 | bulkRequest.execute(new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse bulkResponse) {
logger.trace("bulk took " + bulkResponse.getTookInMillis() + "ms");
}
@Override
public void onFailure(Throwable e) {
logger.warn("failed to execute bulk", e);
}
}); | 1no label
| src_main_java_org_elasticsearch_indices_ttl_IndicesTTLService.java |
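A sketch of how such a listener is wired up in the 1.x Java client API; the index and type names and the document body are placeholders:

```java
BulkRequestBuilder bulkRequest = client.prepareBulk();
bulkRequest.add(client.prepareIndex("my-index", "my-type")
        .setSource("{\"field\":\"value\"}"));
bulkRequest.execute(new ActionListener<BulkResponse>() {
    @Override
    public void onResponse(BulkResponse bulkResponse) {
        // also worth checking bulkResponse.hasFailures() here
    }
    @Override
    public void onFailure(Throwable e) {
        // pass e to the logger so the cause is not lost
    }
});
```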
3,624 | public class TypeParsers {
public static final String MULTI_FIELD_CONTENT_TYPE = "multi_field";
public static final Mapper.TypeParser multiFieldConverterTypeParser = new Mapper.TypeParser() {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ContentPath.Type pathType = null;
AbstractFieldMapper.Builder mainFieldBuilder = null;
List<AbstractFieldMapper.Builder> fields = null;
String firstType = null;
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path")) {
pathType = parsePathType(name, fieldNode.toString());
} else if (fieldName.equals("fields")) {
Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
for (Map.Entry<String, Object> entry1 : fieldsNode.entrySet()) {
String propName = entry1.getKey();
Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();
String type;
Object typeNode = propNode.get("type");
if (typeNode != null) {
type = typeNode.toString();
if (firstType == null) {
firstType = type;
}
} else {
throw new MapperParsingException("No type specified for property [" + propName + "]");
}
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + fieldName + "]");
}
if (propName.equals(name)) {
mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
} else {
if (fields == null) {
fields = new ArrayList<AbstractFieldMapper.Builder>(2);
}
fields.add((AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
}
}
}
}
if (mainFieldBuilder == null) {
if (fields == null) {
// No fields at all were specified in multi_field, so let's return a non-indexed string field.
return new StringFieldMapper.Builder(name).index(false);
}
Mapper.TypeParser typeParser = parserContext.typeParser(firstType);
if (typeParser == null) {
// The first multi field's type is unknown
mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
} else {
Mapper.Builder substitute = typeParser.parse(name, Collections.<String, Object>emptyMap(), parserContext);
if (substitute instanceof AbstractFieldMapper.Builder) {
mainFieldBuilder = ((AbstractFieldMapper.Builder) substitute).index(false);
} else {
// The first multi field isn't a core field type
mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
}
}
}
if (fields != null && pathType != null) {
for (Mapper.Builder field : fields) {
mainFieldBuilder.addMultiField(field);
}
mainFieldBuilder.multiFieldPathType(pathType);
} else if (fields != null) {
for (Mapper.Builder field : fields) {
mainFieldBuilder.addMultiField(field);
}
} else if (pathType != null) {
mainFieldBuilder.multiFieldPathType(pathType);
}
return mainFieldBuilder;
}
};
public static final String DOC_VALUES = "doc_values";
public static final String INDEX_OPTIONS_DOCS = "docs";
public static final String INDEX_OPTIONS_FREQS = "freqs";
public static final String INDEX_OPTIONS_POSITIONS = "positions";
public static final String INDEX_OPTIONS_OFFSETS = "offsets";
public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
parseField(builder, name, numberNode, parserContext);
for (Map.Entry<String, Object> entry : numberNode.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("precision_step")) {
builder.precisionStep(nodeIntegerValue(propNode));
} else if (propName.equals("ignore_malformed")) {
builder.ignoreMalformed(nodeBooleanValue(propNode));
} else if (propName.equals("coerce")) {
builder.coerce(nodeBooleanValue(propNode));
} else if (propName.equals("omit_norms")) {
builder.omitNorms(nodeBooleanValue(propNode));
} else if (propName.equals("similarity")) {
builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
} else {
parseMultiField(builder, name, numberNode, parserContext, propName, propNode);
}
}
}
public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
for (Map.Entry<String, Object> entry : fieldNode.entrySet()) {
final String propName = Strings.toUnderscoreCase(entry.getKey());
final Object propNode = entry.getValue();
if (propName.equals("index_name")) {
builder.indexName(propNode.toString());
} else if (propName.equals("store")) {
builder.store(parseStore(name, propNode.toString()));
} else if (propName.equals("index")) {
parseIndex(name, propNode.toString(), builder);
} else if (propName.equals("tokenized")) {
builder.tokenized(nodeBooleanValue(propNode));
} else if (propName.equals(DOC_VALUES)) {
builder.docValues(nodeBooleanValue(propNode));
} else if (propName.equals("term_vector")) {
parseTermVector(name, propNode.toString(), builder);
} else if (propName.equals("boost")) {
builder.boost(nodeFloatValue(propNode));
} else if (propName.equals("store_term_vectors")) {
builder.storeTermVectors(nodeBooleanValue(propNode));
} else if (propName.equals("store_term_vector_offsets")) {
builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
} else if (propName.equals("store_term_vector_positions")) {
builder.storeTermVectorPositions(nodeBooleanValue(propNode));
} else if (propName.equals("store_term_vector_payloads")) {
builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
} else if (propName.equals("omit_norms")) {
builder.omitNorms(nodeBooleanValue(propNode));
} else if (propName.equals("norms")) {
final Map<String, Object> properties = nodeMapValue(propNode, "norms");
for (Map.Entry<String, Object> entry2 : properties.entrySet()) {
final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
final Object propNode2 = entry2.getValue();
if (propName2.equals("enabled")) {
builder.omitNorms(!nodeBooleanValue(propNode2));
} else if (propName2.equals(Loading.KEY)) {
builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
}
}
} else if (propName.equals("omit_term_freq_and_positions")) {
final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_0_0_RC2)) {
throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : '" + op.name() + "'] instead");
}
// deprecated option for BW compat
builder.indexOptions(op);
} else if (propName.equals("index_options")) {
builder.indexOptions(nodeIndexOptionValue(propNode));
} else if (propName.equals("analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.indexAnalyzer(analyzer);
builder.searchAnalyzer(analyzer);
} else if (propName.equals("index_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.indexAnalyzer(analyzer);
} else if (propName.equals("search_analyzer")) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
}
builder.searchAnalyzer(analyzer);
} else if (propName.equals("include_in_all")) {
builder.includeInAll(nodeBooleanValue(propNode));
} else if (propName.equals("postings_format")) {
String postingFormatName = propNode.toString();
builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
} else if (propName.equals(DOC_VALUES_FORMAT)) {
String docValuesFormatName = propNode.toString();
builder.docValuesFormat(parserContext.docValuesFormatService().get(docValuesFormatName));
} else if (propName.equals("similarity")) {
builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
} else if (propName.equals("fielddata")) {
final Settings settings = ImmutableSettings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build();
builder.fieldDataSettings(settings);
} else if (propName.equals("copy_to")) {
parseCopyFields(propNode, builder);
}
}
}
public static void parseMultiField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
if (propName.equals("path")) {
builder.multiFieldPathType(parsePathType(name, propNode.toString()));
} else if (propName.equals("fields")) {
@SuppressWarnings("unchecked")
Map<String, Object> multiFieldsPropNodes = (Map<String, Object>) propNode;
for (Map.Entry<String, Object> multiFieldEntry : multiFieldsPropNodes.entrySet()) {
String multiFieldName = multiFieldEntry.getKey();
if (!(multiFieldEntry.getValue() instanceof Map)) {
throw new MapperParsingException("Illegal field [" + multiFieldName + "], only fields can be specified inside fields");
}
@SuppressWarnings("unchecked")
Map<String, Object> multiFieldNodes = (Map<String, Object>) multiFieldEntry.getValue();
String type;
Object typeNode = multiFieldNodes.get("type");
if (typeNode != null) {
type = typeNode.toString();
} else {
throw new MapperParsingException("No type specified for property [" + multiFieldName + "]");
}
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + multiFieldName + "]");
}
builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
}
}
}
private static IndexOptions nodeIndexOptionValue(final Object propNode) {
final String value = propNode.toString();
if (INDEX_OPTIONS_OFFSETS.equalsIgnoreCase(value)) {
return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
} else if (INDEX_OPTIONS_POSITIONS.equalsIgnoreCase(value)) {
return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
} else if (INDEX_OPTIONS_FREQS.equalsIgnoreCase(value)) {
return IndexOptions.DOCS_AND_FREQS;
} else if (INDEX_OPTIONS_DOCS.equalsIgnoreCase(value)) {
return IndexOptions.DOCS_ONLY;
} else {
throw new ElasticsearchParseException("Failed to parse index option [" + value + "]");
}
}
public static FormatDateTimeFormatter parseDateTimeFormatter(String fieldName, Object node) {
return Joda.forPattern(node.toString());
}
public static void parseTermVector(String fieldName, String termVector, AbstractFieldMapper.Builder builder) throws MapperParsingException {
termVector = Strings.toUnderscoreCase(termVector);
if ("no".equals(termVector)) {
builder.storeTermVectors(false);
} else if ("yes".equals(termVector)) {
builder.storeTermVectors(true);
} else if ("with_offsets".equals(termVector)) {
builder.storeTermVectorOffsets(true);
} else if ("with_positions".equals(termVector)) {
builder.storeTermVectorPositions(true);
} else if ("with_positions_offsets".equals(termVector)) {
builder.storeTermVectorPositions(true);
builder.storeTermVectorOffsets(true);
} else if ("with_positions_payloads".equals(termVector)) {
builder.storeTermVectorPositions(true);
builder.storeTermVectorPayloads(true);
} else if ("with_positions_offsets_payloads".equals(termVector)) {
builder.storeTermVectorPositions(true);
builder.storeTermVectorOffsets(true);
builder.storeTermVectorPayloads(true);
} else {
throw new MapperParsingException("Wrong value for termVector [" + termVector + "] for field [" + fieldName + "]");
}
}
public static void parseIndex(String fieldName, String index, AbstractFieldMapper.Builder builder) throws MapperParsingException {
index = Strings.toUnderscoreCase(index);
if ("no".equals(index)) {
builder.index(false);
} else if ("not_analyzed".equals(index)) {
builder.index(true);
builder.tokenized(false);
} else if ("analyzed".equals(index)) {
builder.index(true);
builder.tokenized(true);
} else {
throw new MapperParsingException("Wrong value for index [" + index + "] for field [" + fieldName + "]");
}
}
public static boolean parseDocValues(String docValues) {
if ("no".equals(docValues)) {
return false;
} else if ("yes".equals(docValues)) {
return true;
} else {
return nodeBooleanValue(docValues);
}
}
public static boolean parseStore(String fieldName, String store) throws MapperParsingException {
if ("no".equals(store)) {
return false;
} else if ("yes".equals(store)) {
return true;
} else {
return nodeBooleanValue(store);
}
}
public static ContentPath.Type parsePathType(String name, String path) throws MapperParsingException {
path = Strings.toUnderscoreCase(path);
if ("just_name".equals(path)) {
return ContentPath.Type.JUST_NAME;
} else if ("full".equals(path)) {
return ContentPath.Type.FULL;
} else {
throw new MapperParsingException("Wrong value for pathType [" + path + "] for object [" + name + "]");
}
}
@SuppressWarnings("unchecked")
public static void parseCopyFields(Object propNode, AbstractFieldMapper.Builder builder) {
AbstractFieldMapper.CopyTo.Builder copyToBuilder = new AbstractFieldMapper.CopyTo.Builder();
if (isArray(propNode)) {
for(Object node : (List<Object>) propNode) {
copyToBuilder.add(nodeStringValue(node, null));
}
} else {
copyToBuilder.add(nodeStringValue(propNode, null));
}
builder.copyTo(copyToBuilder.build());
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_core_TypeParsers.java |
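The node maps these parsers receive are plain java.util maps produced from the mapping JSON. An illustrative fragment of what parseField consumes (the actual call is left commented out because it needs a live ParserContext):

```java
Map<String, Object> fieldNode = new HashMap<String, Object>();
fieldNode.put("store", "yes");            // parseStore  -> true
fieldNode.put("index", "not_analyzed");   // parseIndex  -> indexed, not tokenized
fieldNode.put("boost", 2.0f);             // nodeFloatValue
fieldNode.put("index_options", "freqs");  // nodeIndexOptionValue -> DOCS_AND_FREQS
fieldNode.put("copy_to", "other_field");  // parseCopyFields
// TypeParsers.parseField(builder, "my_field", fieldNode, parserContext);
```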
1,860 | static class AnnotationInstanceStrategy implements AnnotationStrategy {
final Annotation annotation;
AnnotationInstanceStrategy(Annotation annotation) {
this.annotation = checkNotNull(annotation, "annotation");
}
public boolean hasAttributes() {
return true;
}
public AnnotationStrategy withoutAttributes() {
return new AnnotationTypeStrategy(getAnnotationType(), annotation);
}
public Annotation getAnnotation() {
return annotation;
}
public Class<? extends Annotation> getAnnotationType() {
return annotation.annotationType();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof AnnotationInstanceStrategy)) {
return false;
}
AnnotationInstanceStrategy other = (AnnotationInstanceStrategy) o;
return annotation.equals(other.annotation);
}
@Override
public int hashCode() {
return annotation.hashCode();
}
@Override
public String toString() {
return annotation.toString();
}
} | 0true
| src_main_java_org_elasticsearch_common_inject_Key.java |
141 | final class GenericClientExceptionConverter implements ClientExceptionConverter {
@Override
public Object convert(Throwable t) {
StringWriter s = new StringWriter();
t.printStackTrace(new PrintWriter(s));
String clazzName = t.getClass().getName();
return new GenericError(clazzName, t.getMessage(), s.toString(), 0);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_GenericClientExceptionConverter.java |
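Usage sketch; the accessors on GenericError are inferred from the constructor argument order above (class name, message, stack trace, error code) and may not match the real class exactly:

```java
ClientExceptionConverter converter = new GenericClientExceptionConverter();
Object converted = converter.convert(new IllegalStateException("node not active"));
GenericError error = (GenericError) converted;
// error now carries "java.lang.IllegalStateException", the message,
// and the full printed stack trace captured via the StringWriter
```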
5,222 | public class InternalDateHistogram extends InternalHistogram<InternalDateHistogram.Bucket> implements DateHistogram {
final static Type TYPE = new Type("date_histogram", "dhisto");
final static Factory FACTORY = new Factory();
private final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@Override
public InternalDateHistogram readResult(StreamInput in) throws IOException {
InternalDateHistogram histogram = new InternalDateHistogram();
histogram.readFrom(in);
return histogram;
}
};
public static void registerStream() {
AggregationStreams.registerStream(STREAM, TYPE.stream());
}
static class Bucket extends InternalHistogram.Bucket implements DateHistogram.Bucket {
private final ValueFormatter formatter;
Bucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
super(key, docCount, aggregations);
this.formatter = formatter;
}
@Override
public String getKey() {
return formatter != null ? formatter.format(key) : DateFieldMapper.Defaults.DATE_TIME_FORMATTER.printer().print(key);
}
@Override
public DateTime getKeyAsDate() {
return new DateTime(key);
}
}
static class Factory extends InternalHistogram.Factory<InternalDateHistogram.Bucket> {
private Factory() {
}
@Override
public String type() {
return TYPE.name();
}
@Override
public InternalDateHistogram create(String name, List<InternalDateHistogram.Bucket> buckets, InternalOrder order,
long minDocCount, EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed) {
return new InternalDateHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed);
}
@Override
public InternalDateHistogram.Bucket createBucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
return new Bucket(key, docCount, aggregations, formatter);
}
}
InternalDateHistogram() {} // for serialization
InternalDateHistogram(String name, List<InternalDateHistogram.Bucket> buckets, InternalOrder order, long minDocCount,
EmptyBucketInfo emptyBucketInfo, ValueFormatter formatter, boolean keyed) {
super(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed);
}
@Override
public Type type() {
return TYPE;
}
@Override
public DateHistogram.Bucket getBucketByKey(DateTime key) {
return getBucketByKey(key.getMillis());
}
@Override
protected InternalDateHistogram.Bucket createBucket(long key, long docCount, InternalAggregations aggregations, ValueFormatter formatter) {
return new Bucket(key, docCount, aggregations, formatter);
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_histogram_InternalDateHistogram.java |
43 | @Persistent
public class PersistentBufferKey {
@KeyField(1) String feedID;
@KeyField(2) Long timestamp;
public PersistentBufferKey() {
//
}
public PersistentBufferKey(String feedID, Long timestamp) {
this.feedID = feedID;
this.timestamp = timestamp;
}
} | 0true
| timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_PersistentBufferKey.java |
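Composite keys in the BerkeleyDB DPL sort by @KeyField order, feedID first and timestamp second, which is what lets the buffer range-scan one feed's samples in time order. A small sketch:

```java
PersistentBufferKey a = new PersistentBufferKey("feed-A", 1000L);
PersistentBufferKey b = new PersistentBufferKey("feed-A", 2000L);
PersistentBufferKey c = new PersistentBufferKey("feed-B",  500L);
// in a PrimaryIndex<PersistentBufferKey, ...> the stored order is a < b < c
```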
131 | public class OSafeBinaryConverter implements OBinaryConverter {
public static final OSafeBinaryConverter INSTANCE = new OSafeBinaryConverter();
public void putShort(byte[] buffer, int index, short value, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
short2BytesBigEndian(value, buffer, index);
else
short2BytesLittleEndian(value, buffer, index);
}
public short getShort(byte[] buffer, int index, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
return bytes2ShortBigEndian(buffer, index);
return bytes2ShortLittleEndian(buffer, index);
}
public void putInt(byte[] buffer, int pointer, int value, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
int2BytesBigEndian(value, buffer, pointer);
else
int2BytesLittleEndian(value, buffer, pointer);
}
public int getInt(byte[] buffer, int pointer, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
return bytes2IntBigEndian(buffer, pointer);
return bytes2IntLittleEndian(buffer, pointer);
}
public void putLong(byte[] buffer, int index, long value, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
long2BytesBigEndian(value, buffer, index);
else
long2BytesLittleEndian(value, buffer, index);
}
public long getLong(byte[] buffer, int index, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
return bytes2LongBigEndian(buffer, index);
return bytes2LongLittleEndian(buffer, index);
}
public void putChar(byte[] buffer, int index, char character, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN)) {
buffer[index] = (byte) (character >>> 8);
buffer[index + 1] = (byte) character;
} else {
buffer[index + 1] = (byte) (character >>> 8);
buffer[index] = (byte) character;
}
}
public char getChar(byte[] buffer, int index, ByteOrder byteOrder) {
if (byteOrder.equals(ByteOrder.BIG_ENDIAN))
return (char) (((buffer[index] & 0xFF) << 8) + (buffer[index + 1] & 0xFF));
return (char) (((buffer[index + 1] & 0xFF) << 8) + (buffer[index] & 0xFF));
}
public boolean nativeAccelerationUsed() {
return false;
}
private static byte[] short2BytesBigEndian(final short value, final byte[] b, final int iBeginOffset) {
b[iBeginOffset] = (byte) ((value >>> 8) & 0xFF);
b[iBeginOffset + 1] = (byte) (value & 0xFF);
return b;
}
private static byte[] short2BytesLittleEndian(final short value, final byte[] b, final int iBeginOffset) {
b[iBeginOffset + 1] = (byte) ((value >>> 8) & 0xFF);
b[iBeginOffset] = (byte) (value & 0xFF);
return b;
}
private static short bytes2ShortBigEndian(final byte[] b, final int offset) {
return (short) ((b[offset] << 8) | (b[offset + 1] & 0xff));
}
private static short bytes2ShortLittleEndian(final byte[] b, final int offset) {
return (short) ((b[offset + 1] << 8) | (b[offset] & 0xff));
}
private static int bytes2IntBigEndian(final byte[] b, final int offset) {
return (b[offset]) << 24 | (0xff & b[offset + 1]) << 16 | (0xff & b[offset + 2]) << 8 | ((0xff & b[offset + 3]));
}
private static int bytes2IntLittleEndian(final byte[] b, final int offset) {
return (b[offset + 3]) << 24 | (0xff & b[offset + 2]) << 16 | (0xff & b[offset + 1]) << 8 | ((0xff & b[offset]));
}
private static byte[] int2BytesBigEndian(final int value, final byte[] b, final int iBeginOffset) {
b[iBeginOffset] = (byte) ((value >>> 24) & 0xFF);
b[iBeginOffset + 1] = (byte) ((value >>> 16) & 0xFF);
b[iBeginOffset + 2] = (byte) ((value >>> 8) & 0xFF);
b[iBeginOffset + 3] = (byte) (value & 0xFF);
return b;
}
private static byte[] int2BytesLittleEndian(final int value, final byte[] b, final int iBeginOffset) {
b[iBeginOffset + 3] = (byte) ((value >>> 24) & 0xFF);
b[iBeginOffset + 2] = (byte) ((value >>> 16) & 0xFF);
b[iBeginOffset + 1] = (byte) ((value >>> 8) & 0xFF);
b[iBeginOffset] = (byte) (value & 0xFF);
return b;
}
private static byte[] long2BytesBigEndian(final long value, final byte[] b, final int iBeginOffset) {
b[iBeginOffset] = (byte) ((value >>> 56) & 0xFF);
b[iBeginOffset + 1] = (byte) ((value >>> 48) & 0xFF);
b[iBeginOffset + 2] = (byte) ((value >>> 40) & 0xFF);
b[iBeginOffset + 3] = (byte) ((value >>> 32) & 0xFF);
b[iBeginOffset + 4] = (byte) ((value >>> 24) & 0xFF);
b[iBeginOffset + 5] = (byte) ((value >>> 16) & 0xFF);
b[iBeginOffset + 6] = (byte) ((value >>> 8) & 0xFF);
b[iBeginOffset + 7] = (byte) (value & 0xFF);
return b;
}
private static byte[] long2BytesLittleEndian(final long value, final byte[] b, final int iBeginOffset) {
b[iBeginOffset + 7] = (byte) ((value >>> 56) & 0xFF);
b[iBeginOffset + 6] = (byte) ((value >>> 48) & 0xFF);
b[iBeginOffset + 5] = (byte) ((value >>> 40) & 0xFF);
b[iBeginOffset + 4] = (byte) ((value >>> 32) & 0xFF);
b[iBeginOffset + 3] = (byte) ((value >>> 24) & 0xFF);
b[iBeginOffset + 2] = (byte) ((value >>> 16) & 0xFF);
b[iBeginOffset + 1] = (byte) ((value >>> 8) & 0xFF);
b[iBeginOffset] = (byte) (value & 0xFF);
return b;
}
private static long bytes2LongBigEndian(final byte[] b, final int offset) {
return ((0xff & b[offset + 7]) | (0xff & b[offset + 6]) << 8 | (0xff & b[offset + 5]) << 16
| (long) (0xff & b[offset + 4]) << 24 | (long) (0xff & b[offset + 3]) << 32 | (long) (0xff & b[offset + 2]) << 40
| (long) (0xff & b[offset + 1]) << 48 | (long) (0xff & b[offset]) << 56);
}
private static long bytes2LongLittleEndian(final byte[] b, final int offset) {
return ((0xff & b[offset]) | (0xff & b[offset + 1]) << 8 | (0xff & b[offset + 2]) << 16 | (long) (0xff & b[offset + 3]) << 24
| (long) (0xff & b[offset + 4]) << 32 | (long) (0xff & b[offset + 5]) << 40 | (long) (0xff & b[offset + 6]) << 48 | (long) (0xff & b[offset + 7]) << 56);
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_serialization_OSafeBinaryConverter.java |
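A round-trip check of the converter; it is pure Java (nativeAccelerationUsed() returns false), so this runs anywhere:

```java
import java.nio.ByteOrder;

public class ConverterRoundTrip {
    public static void main(String[] args) {
        OSafeBinaryConverter c = OSafeBinaryConverter.INSTANCE;
        byte[] buffer = new byte[8];
        c.putInt(buffer, 0, 0xCAFEBABE, ByteOrder.BIG_ENDIAN);
        System.out.println(c.getInt(buffer, 0, ByteOrder.BIG_ENDIAN) == 0xCAFEBABE);     // true
        c.putLong(buffer, 0, 123456789L, ByteOrder.LITTLE_ENDIAN);
        System.out.println(c.getLong(buffer, 0, ByteOrder.LITTLE_ENDIAN) == 123456789L); // true
    }
}
```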
1,766 | public class TargetModeType implements Serializable {
private static final long serialVersionUID = 1L;
private static final Map<String, TargetModeType> TYPES = new HashMap<String, TargetModeType>();
public static final TargetModeType SANDBOX = new TargetModeType("sandbox", "entity manager target for the sandbox datasource");
public static final TargetModeType STAGE = new TargetModeType("stage", "entity manager target for the stage datasource");
public static final TargetModeType PRODUCTION = new TargetModeType("production", "entity manager target for the production datasource");
public static TargetModeType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public TargetModeType() {
//do nothing
}
public TargetModeType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
TargetModeType other = (TargetModeType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_TargetModeType.java |
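Because each constant's constructor registers itself in the TYPES map during class initialization, string lookups return the canonical instance:

```java
TargetModeType mode = TargetModeType.getInstance("sandbox");
System.out.println(mode == TargetModeType.SANDBOX); // true: same registered instance
System.out.println(mode.getFriendlyType());         // "entity manager target for the sandbox datasource"
```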
1,519 | private static final class RandomAllocationDecider extends AllocationDecider {
private final Random random;
public RandomAllocationDecider(Random random) {
super(ImmutableSettings.EMPTY);
this.random = random;
}
public boolean alwaysSayYes = false;
@Override
public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) {
return getRandomDecision();
}
private Decision getRandomDecision() {
if (alwaysSayYes) {
return Decision.YES;
}
switch (random.nextInt(10)) {
case 9:
case 8:
case 7:
case 6:
case 5:
return Decision.NO;
case 4:
return Decision.THROTTLE;
case 3:
case 2:
case 1:
return Decision.YES;
default:
return Decision.ALWAYS;
}
}
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return getRandomDecision();
}
@Override
public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return getRandomDecision();
}
} | 0true
| src_test_java_org_elasticsearch_cluster_routing_allocation_RandomAllocationDeciderTests.java |
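The switch over nextInt(10) implies a fixed distribution: NO with probability 5/10, YES 3/10, THROTTLE 1/10, ALWAYS 1/10. A quick tally, written as if the decider were visible from the test (it is actually a private nested class):

```java
Random random = new Random(42); // fixed seed for a reproducible tally
RandomAllocationDecider decider = new RandomAllocationDecider(random);
Map<Decision, Integer> counts = new HashMap<Decision, Integer>();
for (int i = 0; i < 10000; i++) {
    Decision d = decider.canRebalance(null, null); // both parameters are ignored
    Integer prev = counts.get(d);
    counts.put(d, prev == null ? 1 : prev + 1);
}
System.out.println(counts); // roughly NO=5000, YES=3000, THROTTLE=1000, ALWAYS=1000
```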
2,835 | WHITESPACE {
@Override
public boolean isTokenChar(int c) {
return Character.isWhitespace(c);
}
}, | 0true
| src_main_java_org_elasticsearch_index_analysis_CharMatcher.java |
/**
 * Tells OrientDB to call the method BEFORE the record is marshalled and written to the database.
 * Applies only to entity objects reachable by the OrientDB engine after they have been registered.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface OBeforeSerialization {
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_annotation_OBeforeSerialization.java |
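A hypothetical registered entity showing where the hook fires; the class and field names are invented:

```java
public class Account {
    private String name;
    private long updatedAt;

    @OBeforeSerialization
    public void beforeSave() {
        // called by OrientDB just before this record is marshalled and written
        updatedAt = System.currentTimeMillis();
    }
}
```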
1,762 | map.addEntryListener(new EntryAdapter() {
public void entryEvicted(EntryEvent event) {
latch.countDown();
}
}, false); | 0true
| hazelcast_src_test_java_com_hazelcast_map_EvictionTest.java |
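The test thread then typically blocks on the latch counted down above; the timeout is an arbitrary illustrative choice:

```java
// fails the test if no eviction event arrives within 30 seconds
assertTrue("entry was never evicted", latch.await(30, TimeUnit.SECONDS));
```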
3,931 | public class RangeFilterParser implements FilterParser {
public static final String NAME = "range";
@Inject
public RangeFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Boolean cache = null;
CacheKeyFilter.Key cacheKey = null;
String fieldName = null;
Object from = null;
Object to = null;
boolean includeLower = true;
boolean includeUpper = true;
String execution = "index";
String filterName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if ("from".equals(currentFieldName)) {
from = parser.objectBytes();
} else if ("to".equals(currentFieldName)) {
to = parser.objectBytes();
} else if ("include_lower".equals(currentFieldName) || "includeLower".equals(currentFieldName)) {
includeLower = parser.booleanValue();
} else if ("include_upper".equals(currentFieldName) || "includeUpper".equals(currentFieldName)) {
includeUpper = parser.booleanValue();
} else if ("gt".equals(currentFieldName)) {
from = parser.objectBytes();
includeLower = false;
} else if ("gte".equals(currentFieldName) || "ge".equals(currentFieldName)) {
from = parser.objectBytes();
includeLower = true;
} else if ("lt".equals(currentFieldName)) {
to = parser.objectBytes();
includeUpper = false;
} else if ("lte".equals(currentFieldName) || "le".equals(currentFieldName)) {
to = parser.objectBytes();
includeUpper = true;
} else {
throw new QueryParsingException(parseContext.index(), "[range] filter does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else if ("execution".equals(currentFieldName)) {
execution = parser.text();
} else {
throw new QueryParsingException(parseContext.index(), "[range] filter does not support [" + currentFieldName + "]");
}
}
}
if (fieldName == null) {
throw new QueryParsingException(parseContext.index(), "[range] filter no field specified for range filter");
}
Filter filter = null;
MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
if (smartNameFieldMappers != null) {
if (smartNameFieldMappers.hasMapper()) {
boolean explicitlyCached = cache != null && cache;
if (execution.equals("index")) {
if (cache == null) {
cache = true;
}
FieldMapper mapper = smartNameFieldMappers.mapper();
if (mapper instanceof DateFieldMapper) {
filter = ((DateFieldMapper) mapper).rangeFilter(from, to, includeLower, includeUpper, parseContext, explicitlyCached);
} else {
filter = mapper.rangeFilter(from, to, includeLower, includeUpper, parseContext);
}
} else if ("fielddata".equals(execution)) {
if (cache == null) {
cache = false;
}
FieldMapper mapper = smartNameFieldMappers.mapper();
if (!(mapper instanceof NumberFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "[range] filter field [" + fieldName + "] is not a numeric type");
}
if (mapper instanceof DateFieldMapper) {
filter = ((DateFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext, explicitlyCached);
} else {
filter = ((NumberFieldMapper) mapper).rangeFilter(parseContext.fieldData(), from, to, includeLower, includeUpper, parseContext);
}
} else {
throw new QueryParsingException(parseContext.index(), "[range] filter doesn't support [" + execution + "] execution");
}
}
}
if (filter == null) {
if (cache == null) {
cache = true;
}
filter = new TermRangeFilter(fieldName, BytesRefs.toBytesRef(from), BytesRefs.toBytesRef(to), includeLower, includeUpper);
}
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
filter = wrapSmartNameFilter(filter, smartNameFieldMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
}
} | 1no label
| src_main_java_org_elasticsearch_index_query_RangeFilterParser.java |
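For reference, the DSL fragment this parser handles, written out as the raw JSON a search request would carry; the field name and bounds are placeholders. Note how gte/lt map onto the includeLower/includeUpper flags above:

```java
String rangeFilter =
    "{\n" +
    "  \"range\" : {\n" +
    "    \"age\" : { \"gte\" : 10, \"lt\" : 20 },\n" +
    "    \"_cache\" : true,\n" +
    "    \"execution\" : \"index\"\n" +
    "  }\n" +
    "}";
```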
45 | public class PackageCompletions {
static final class PackageDescriptorProposal extends CompletionProposal {
PackageDescriptorProposal(int offset, String prefix, String packageName) {
super(offset, prefix, PACKAGE,
"package " + packageName,
"package " + packageName + ";");
}
@Override
protected boolean qualifiedNameIsPath() {
return true;
}
}
static final class PackageProposal extends CompletionProposal {
private final boolean withBody;
private final int len;
private final Package p;
private final String completed;
private final CeylonParseController cpc;
PackageProposal(int offset, String prefix, boolean withBody,
int len, Package p, String completed,
CeylonParseController cpc) {
super(offset, prefix, PACKAGE, completed,
completed.substring(len));
this.withBody = withBody;
this.len = len;
this.p = p;
this.completed = completed;
this.cpc = cpc;
}
@Override
public Point getSelection(IDocument document) {
if (withBody) {
return new Point(offset+completed.length()-prefix.length()-len-5, 3);
}
else {
return new Point(offset+completed.length()-prefix.length()-len, 0);
}
}
@Override
public void apply(IDocument document) {
super.apply(document);
if (withBody &&
EditorsUI.getPreferenceStore()
.getBoolean(LINKED_MODE)) {
final LinkedModeModel linkedModeModel = new LinkedModeModel();
final Point selection = getSelection(document);
List<ICompletionProposal> proposals = new ArrayList<ICompletionProposal>();
for (final Declaration d: p.getMembers()) {
if (Util.isResolvable(d) && d.isShared() &&
!isOverloadedVersion(d)) {
proposals.add(new ICompletionProposal() {
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public Image getImage() {
return getImageForDeclaration(d);
}
@Override
public String getDisplayString() {
return d.getName();
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument document) {
try {
document.replace(selection.x, selection.y,
d.getName());
}
catch (BadLocationException e) {
e.printStackTrace();
}
linkedModeModel.exit(ILinkedModeListener.UPDATE_CARET);
}
});
}
}
if (!proposals.isEmpty()) {
ProposalPosition linkedPosition =
new ProposalPosition(document, selection.x, selection.y, 0,
proposals.toArray(NO_COMPLETIONS));
try {
LinkedMode.addLinkedPosition(linkedModeModel, linkedPosition);
LinkedMode.installLinkedMode((CeylonEditor) EditorUtil.getCurrentEditor(),
document, linkedModeModel, this, new LinkedMode.NullExitPolicy(),
-1, 0);
}
catch (BadLocationException ble) {
ble.printStackTrace();
}
}
}
}
@Override
public String getAdditionalProposalInfo() {
return getDocumentationFor(cpc, p);
}
@Override
protected boolean qualifiedNameIsPath() {
return true;
}
}
static void addPackageCompletions(CeylonParseController cpc,
int offset, String prefix, Tree.ImportPath path, Node node,
List<ICompletionProposal> result, boolean withBody) {
String fullPath = fullPath(offset, prefix, path);
addPackageCompletions(offset, prefix, node, result, fullPath.length(),
fullPath+prefix, cpc, withBody);
}
private static void addPackageCompletions(final int offset, final String prefix,
Node node, List<ICompletionProposal> result, final int len, String pfp,
final CeylonParseController cpc, final boolean withBody) {
//TODO: someday it would be nice to propose from all packages
// and auto-add the module dependency!
/*TypeChecker tc = CeylonBuilder.getProjectTypeChecker(cpc.getProject().getRawProject());
if (tc!=null) {
for (Module m: tc.getContext().getModules().getListOfModules()) {*/
//Set<Package> packages = new HashSet<Package>();
Unit unit = node.getUnit();
if (unit!=null) { //a null unit can occur if we have not finished parsing the file
Module module = unit.getPackage().getModule();
for (final Package p: module.getAllPackages()) {
//if (!packages.contains(p)) {
//packages.add(p);
//if ( p.getModule().equals(module) || p.isShared() ) {
final String pkg = escapePackageName(p);
if (!pkg.isEmpty() && pkg.startsWith(pfp)) {
boolean already = false;
if (!pfp.equals(pkg)) {
//don't add already imported packages, unless
//it is an exact match to the typed path
for (ImportList il: node.getUnit().getImportLists()) {
if (il.getImportedScope()==p) {
already = true;
break;
}
}
}
if (!already) {
result.add(new PackageProposal(offset, prefix, withBody,
len, p, pkg + (withBody ? " { ... }" : ""), cpc));
}
}
//}
}
}
}
static void addPackageDescriptorCompletion(CeylonParseController cpc,
int offset, String prefix, List<ICompletionProposal> result) {
if (!"package".startsWith(prefix)) return;
IFile file = cpc.getProject().getFile(cpc.getPath());
String packageName = getPackageName(file);
if (packageName!=null) {
result.add(new PackageDescriptorProposal(offset, prefix, packageName));
}
}
static void addCurrentPackageNameCompletion(CeylonParseController cpc,
int offset, String prefix, List<ICompletionProposal> result) {
IFile file = cpc.getProject().getFile(cpc.getPath());
String moduleName = getPackageName(file);
if (moduleName!=null) {
result.add(new CompletionProposal(offset, prefix,
isModuleDescriptor(cpc) ? MODULE : PACKAGE,
moduleName, moduleName));
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_PackageCompletions.java |
1,562 | @ManagedDescription("IMap")
public class MapMBean extends HazelcastMBean<IMap> {
private final AtomicLong totalAddedEntryCount = new AtomicLong();
private final AtomicLong totalRemovedEntryCount = new AtomicLong();
private final AtomicLong totalUpdatedEntryCount = new AtomicLong();
private final AtomicLong totalEvictedEntryCount = new AtomicLong();
private final String listenerId;
protected MapMBean(IMap managedObject, ManagementService service) {
super(managedObject, service);
objectName = service.createObjectName("IMap", managedObject.getName());
//todo: using the event system to register the number of adds/removes is a very expensive price to pay.
EntryListener entryListener = new EntryListener() {
public void entryAdded(EntryEvent event) {
totalAddedEntryCount.incrementAndGet();
}
public void entryRemoved(EntryEvent event) {
totalRemovedEntryCount.incrementAndGet();
}
public void entryUpdated(EntryEvent event) {
totalUpdatedEntryCount.incrementAndGet();
}
public void entryEvicted(EntryEvent event) {
totalEvictedEntryCount.incrementAndGet();
}
};
listenerId = managedObject.addEntryListener(entryListener, false);
}
public void preDeregister() throws Exception {
super.preDeregister();
try {
managedObject.removeEntryListener(listenerId);
} catch (Exception ignored) {
}
}
@ManagedAnnotation("localOwnedEntryCount")
@ManagedDescription("number of entries owned on this member")
public long getLocalOwnedEntryCount(){
return managedObject.getLocalMapStats().getOwnedEntryCount();
}
@ManagedAnnotation("localBackupEntryCount")
@ManagedDescription("the number of backup entries hold on this member")
public long getLocalBackupEntryCount(){
return managedObject.getLocalMapStats().getBackupEntryCount();
}
@ManagedAnnotation("localBackupCount")
@ManagedDescription("the number of backups per entry on this member")
public int getLocalBackupCount() {
return managedObject.getLocalMapStats().getBackupCount();
}
@ManagedAnnotation("localOwnedEntryMemoryCost")
@ManagedDescription("memory cost (number of bytes) of owned entries on this member")
public long getLocalOwnedEntryMemoryCost(){
return managedObject.getLocalMapStats().getOwnedEntryMemoryCost();
}
@ManagedAnnotation("localBackupEntryMemoryCost")
@ManagedDescription("memory cost (number of bytes) of backup entries on this member.")
public long getLocalBackupEntryMemoryCost(){
return managedObject.getLocalMapStats().getBackupEntryMemoryCost();
}
@ManagedAnnotation("localCreationTime")
@ManagedDescription("the creation time of this map on this member.")
public long getLocalCreationTime(){
return managedObject.getLocalMapStats().getCreationTime();
}
@ManagedAnnotation("localLastAccessTime")
@ManagedDescription("the last access (read) time of the locally owned entries.")
public long getLocalLastAccessTime(){
return managedObject.getLocalMapStats().getLastAccessTime();
}
@ManagedAnnotation("localLastUpdateTime")
@ManagedDescription("the last update time of the locally owned entries.")
public long getLocalLastUpdateTime(){
return managedObject.getLocalMapStats().getLastUpdateTime();
}
@ManagedAnnotation("localHits")
@ManagedDescription("the number of hits (reads) of the locally owned entries.")
public long getLocalHits(){
return managedObject.getLocalMapStats().getHits();
}
@ManagedAnnotation("localLockedEntryCount")
@ManagedDescription("the number of currently locked locally owned keys.")
public long getLocalLockedEntryCount(){
return managedObject.getLocalMapStats().getLockedEntryCount();
}
@ManagedAnnotation("localDirtyEntryCount")
@ManagedDescription("the number of entries that the member owns and are dirty on this member")
public long getLocalDirtyEntryCount(){
return managedObject.getLocalMapStats().getDirtyEntryCount();
}
@ManagedAnnotation("localPutOperationCount")
@ManagedDescription("the number of put operations on this member")
public long getLocalPutOperationCount(){
return managedObject.getLocalMapStats().getPutOperationCount();
}
@ManagedAnnotation("localGetOperationCount")
@ManagedDescription("number of get operations on this member")
public long getLocalGetOperationCount(){
return managedObject.getLocalMapStats().getGetOperationCount();
}
@ManagedAnnotation("localRemoveOperationCount")
@ManagedDescription("number of remove operations on this member")
public long getLocalRemoveOperationCount(){
return managedObject.getLocalMapStats().getRemoveOperationCount();
}
@ManagedAnnotation("localTotalPutLatency")
@ManagedDescription("the total latency of put operations. To get the average latency, divide to number of puts")
public long getLocalTotalPutLatency(){
return managedObject.getLocalMapStats().getTotalPutLatency();
}
@ManagedAnnotation("localTotalGetLatency")
@ManagedDescription("the total latency of get operations. To get the average latency, divide to number of gets")
public long getLocalTotalGetLatency(){
return managedObject.getLocalMapStats().getTotalGetLatency();
}
@ManagedAnnotation("localTotalRemoveLatency")
@ManagedDescription("the total latency of remove operations. To get the average latency, divide to number of gets")
public long getLocalTotalRemoveLatency(){
return managedObject.getLocalMapStats().getTotalRemoveLatency();
}
@ManagedAnnotation("localMaxPutLatency")
@ManagedDescription("the maximum latency of put operations. To get the average latency, divide to number of puts")
public long getLocalMaxPutLatency(){
return managedObject.getLocalMapStats().getMaxPutLatency();
}
@ManagedAnnotation("localMaxGetLatency")
@ManagedDescription("the maximum latency of get operations. To get the average latency, divide to number of gets")
public long getLocalMaxGetLatency(){
return managedObject.getLocalMapStats().getMaxGetLatency();
}
@ManagedAnnotation("localMaxRemoveLatency")
@ManagedDescription("the maximum latency of remove operations. To get the average latency, divide to number of gets")
public long getLocalMaxRemoveLatency(){
return managedObject.getLocalMapStats().getMaxRemoveLatency();
}
@ManagedAnnotation("localEventOperationCount")
@ManagedDescription("number of events received on this member")
public long getLocalEventOperationCount(){
return managedObject.getLocalMapStats().getEventOperationCount();
}
@ManagedAnnotation("localOtherOperationCount")
@ManagedDescription("the total number of other operations on this member")
public long getLocalOtherOperationCount(){
return managedObject.getLocalMapStats().getOtherOperationCount();
}
@ManagedAnnotation("localTotal")
@ManagedDescription("the total number of operations on this member")
public long localTotal(){
return managedObject.getLocalMapStats().total();
}
@ManagedAnnotation("localHeapCost")
@ManagedDescription("the total heap cost of map, near cache and heap cost")
public long localHeapCost(){
return managedObject.getLocalMapStats().getHeapCost();
}
@ManagedAnnotation("name")
@ManagedDescription("name of the map")
public String getName(){
return managedObject.getName();
}
@ManagedAnnotation("size")
@ManagedDescription("size of the map")
public int getSize(){
return managedObject.size();
}
@ManagedAnnotation("config")
@ManagedDescription("MapConfig")
public String getConfig(){
return service.instance.getConfig().findMapConfig(managedObject.getName()).toString();
}
@ManagedAnnotation("totalAddedEntryCount")
public long getTotalAddedEntryCount(){
return totalAddedEntryCount.get();
}
@ManagedAnnotation("totalRemovedEntryCount")
public long getTotalRemovedEntryCount() {
return totalRemovedEntryCount.get();
}
@ManagedAnnotation("totalUpdatedEntryCount")
public long getTotalUpdatedEntryCount() {
return totalUpdatedEntryCount.get();
}
@ManagedAnnotation("totalEvictedEntryCount")
public long getTotalEvictedEntryCount() {
return totalEvictedEntryCount.get();
}
@ManagedAnnotation(value = "clear", operation = true)
@ManagedDescription("Clear Map")
public void clear(){
managedObject.clear();
}
@ManagedAnnotation(value = "values", operation = true)
public String values(String query){
Collection coll;
if (query != null && !query.isEmpty()){
Predicate predicate = new SqlPredicate(query);
coll = managedObject.values(predicate);
}
else {
coll = managedObject.values();
}
StringBuilder buf = new StringBuilder();
if (coll.size() == 0){
buf.append("Empty");
}
else {
buf.append("[");
for (Object obj: coll){
buf.append(obj);
buf.append(", ");
}
buf.replace(buf.length()-1, buf.length(), "]");
}
return buf.toString();
}
@ManagedAnnotation(value = "entrySet", operation = true)
public String entrySet(String query){
Set<Map.Entry> entrySet;
if (query != null && !query.isEmpty()){
Predicate predicate = new SqlPredicate(query);
entrySet = managedObject.entrySet(predicate);
}
else {
entrySet = managedObject.entrySet();
}
StringBuilder buf = new StringBuilder();
if (entrySet.size() == 0){
buf.append("Empty");
}
else {
buf.append("[");
for (Map.Entry entry: entrySet){
buf.append("{key:");
buf.append(entry.getKey());
buf.append(", value:");
buf.append(entry.getValue());
buf.append("}, ");
}
buf.replace(buf.length()-1, buf.length(), "]");
}
return buf.toString();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_jmx_MapMBean.java |
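A note on usage: every getter in the MBean above delegates to the same LocalMapStats snapshot. Below is a minimal sketch of reading those counters directly from an IMap; the instance setup, map name, and printed fields are illustrative, and package names are assumed per Hazelcast 3.x.

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.monitor.LocalMapStats;

public class LocalMapStatsDemo {
    public static void main(String[] args) {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IMap<String, String> map = hz.getMap("default");
        map.put("k", "v");
        map.get("k");

        // the same statistics object the MBean getters above read from
        LocalMapStats stats = map.getLocalMapStats();
        System.out.println("hits=" + stats.getHits());
        System.out.println("putCount=" + stats.getPutOperationCount());
        System.out.println("totalPutLatency=" + stats.getTotalPutLatency());

        hz.shutdown();
    }
}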
150 | class MoveToUnitProposal implements ICompletionProposal {
private final CeylonEditor editor;
private final String name;
public MoveToUnitProposal(String name, CeylonEditor editor) {
this.editor = editor;
this.name = name;
}
@Override
public Point getSelection(IDocument doc) {
return null;
}
@Override
public Image getImage() {
return CeylonLabelProvider.MOVE;
}
@Override
public String getDisplayString() {
return "Move '" + name + "' to another source file";
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument doc) {
new MoveToUnitRefactoringAction(editor).run();
}
static void add(Collection<ICompletionProposal> proposals,
CeylonEditor editor) {
if (canMoveDeclaration(editor)) {
proposals.add(new MoveToUnitProposal(getDeclarationName(editor),
editor));
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_MoveToUnitProposal.java |
927 | public abstract class BroadcastOperationRequestBuilder<Request extends BroadcastOperationRequest<Request>, Response extends BroadcastOperationResponse, RequestBuilder extends BroadcastOperationRequestBuilder<Request, Response, RequestBuilder>>
extends ActionRequestBuilder<Request, Response, RequestBuilder> {
protected BroadcastOperationRequestBuilder(InternalGenericClient client, Request request) {
super(client, request);
}
@SuppressWarnings("unchecked")
public final RequestBuilder setIndices(String... indices) {
request.indices(indices);
return (RequestBuilder) this;
}
/**
* Controls the operation threading model.
*/
@SuppressWarnings("unchecked")
public final RequestBuilder setOperationThreading(BroadcastOperationThreading operationThreading) {
request.operationThreading(operationThreading);
return (RequestBuilder) this;
}
/**
* Controls the operation threading model.
*/
@SuppressWarnings("unchecked")
public RequestBuilder setOperationThreading(String operationThreading) {
request.operationThreading(operationThreading);
return (RequestBuilder) this;
}
@SuppressWarnings("unchecked")
public final RequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return (RequestBuilder) this;
}
} | 0true
| src_main_java_org_elasticsearch_action_support_broadcast_BroadcastOperationRequestBuilder.java |
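The three-way generic signature above is the self-referential ("curiously recurring") builder idiom: each setter returns the RequestBuilder type parameter, so chained calls keep the concrete builder type without casts at the call site. A standalone sketch of the idiom with illustrative names:

abstract class AbstractBuilder<B extends AbstractBuilder<B>> {
    protected String[] indices;

    @SuppressWarnings("unchecked")
    public final B setIndices(String... indices) {
        this.indices = indices;
        return (B) this; // safe as long as each subclass supplies itself as B
    }
}

final class CountBuilder extends AbstractBuilder<CountBuilder> {
    private String query;

    public CountBuilder setQuery(String query) {
        this.query = query;
        return this; // concrete setters return the concrete type directly
    }
}

class BuilderDemo {
    public static void main(String[] args) {
        // the chain compiles because setIndices already returns CountBuilder
        CountBuilder b = new CountBuilder().setIndices("idx1", "idx2").setQuery("*");
    }
}

The unchecked cast is the price of the idiom; it is what the @SuppressWarnings annotations in the class above are for.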
749 | public class OBonsaiBucketAbstract extends ODurablePage {
public OBonsaiBucketAbstract(ODirectMemoryPointer pagePointer, TrackMode trackMode) {
super(pagePointer, trackMode);
}
protected void setBucketPointer(int pageOffset, OBonsaiBucketPointer value) throws IOException {
setLongValue(pageOffset, value.getPageIndex());
setIntValue(pageOffset + OLongSerializer.LONG_SIZE, value.getPageOffset());
}
protected OBonsaiBucketPointer getBucketPointer(int freePointer) {
final long pageIndex = getLongValue(freePointer);
final int pageOffset = getIntValue(freePointer + OLongSerializer.LONG_SIZE);
return new OBonsaiBucketPointer(pageIndex, pageOffset);
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_sbtreebonsai_local_OBonsaiBucketAbstract.java |
139 | final class WriteLockView implements Lock {
public void lock() { writeLock(); }
public void lockInterruptibly() throws InterruptedException {
writeLockInterruptibly();
}
public boolean tryLock() { return tryWriteLock() != 0L; }
public boolean tryLock(long time, TimeUnit unit)
throws InterruptedException {
return tryWriteLock(time, unit) != 0L;
}
public void unlock() { unstampedUnlockWrite(); }
public Condition newCondition() {
throw new UnsupportedOperationException();
}
} | 0true
| src_main_java_jsr166e_StampedLock.java |
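This jsr166e class is the backport of what shipped as java.util.concurrent.locks.StampedLock in JDK 8, where asWriteLock() hands out exactly this kind of Lock view. A minimal sketch against the JDK 8 API (the backport is assumed to behave the same):

import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.StampedLock;

public class WriteLockViewDemo {
    private final StampedLock sl = new StampedLock();
    private final Lock writeLock = sl.asWriteLock(); // a view like WriteLockView above
    private int counter;

    public void increment() {
        writeLock.lock(); // internally acquires the write stamp
        try {
            counter++;
        } finally {
            writeLock.unlock();
        }
        // writeLock.newCondition() would throw UnsupportedOperationException,
        // matching the view above
    }
}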
1,002 | public class OStreamSerializerListRID implements OStreamSerializer, OBinarySerializer<OMVRBTreeRIDSet> {
public static final String NAME = "y";
public static final OStreamSerializerListRID INSTANCE = new OStreamSerializerListRID();
private static final ORecordSerializerSchemaAware2CSV FORMAT = (ORecordSerializerSchemaAware2CSV) ORecordSerializerFactory
.instance().getFormat(ORecordSerializerSchemaAware2CSV.NAME);
public static final byte ID = 19;
public Object fromStream(final byte[] iStream) throws IOException {
if (iStream == null)
return null;
final String s = OBinaryProtocol.bytes2string(iStream);
return FORMAT.embeddedCollectionFromStream(null, OType.EMBEDDEDSET, null, OType.LINK, s);
}
public byte[] toStream(final Object iObject) throws IOException {
if (iObject == null)
return null;
return ((OMVRBTreeRIDSet) iObject).toStream();
}
public String getName() {
return NAME;
}
@Override
public int getObjectSize(OMVRBTreeRIDSet object, Object... hints) {
final byte[] serializedSet = object.toStream();
return OBinaryTypeSerializer.INSTANCE.getObjectSize(serializedSet);
}
@Override
public int getObjectSize(byte[] stream, int startPosition) {
return OBinaryTypeSerializer.INSTANCE.getObjectSize(stream, startPosition);
}
@Override
public void serialize(OMVRBTreeRIDSet object, byte[] stream, int startPosition, Object... hints) {
final byte[] serializedSet = object.toStream();
OBinaryTypeSerializer.INSTANCE.serialize(serializedSet, stream, startPosition);
}
@Override
public OMVRBTreeRIDSet deserialize(byte[] stream, int startPosition) {
final byte[] serializedSet = OBinaryTypeSerializer.INSTANCE.deserialize(stream, startPosition);
final String s = OBinaryProtocol.bytes2string(serializedSet);
return (OMVRBTreeRIDSet) FORMAT.embeddedCollectionFromStream(null, OType.EMBEDDEDSET, null, OType.LINK, s);
}
@Override
public byte getId() {
return ID;
}
@Override
public boolean isFixedLength() {
return false;
}
@Override
public int getFixedLength() {
return 0;
}
@Override
public void serializeNative(OMVRBTreeRIDSet object, byte[] stream, int startPosition, Object... hints) {
final byte[] serializedSet = object.toStream();
OBinaryTypeSerializer.INSTANCE.serializeNative(serializedSet, stream, startPosition);
}
@Override
public OMVRBTreeRIDSet deserializeNative(byte[] stream, int startPosition) {
final byte[] serializedSet = OBinaryTypeSerializer.INSTANCE.deserializeNative(stream, startPosition);
final String s = OBinaryProtocol.bytes2string(serializedSet);
return (OMVRBTreeRIDSet) FORMAT.embeddedCollectionFromStream(null, OType.EMBEDDEDSET, null, OType.LINK, s);
}
@Override
public int getObjectSizeNative(byte[] stream, int startPosition) {
return OBinaryTypeSerializer.INSTANCE.getObjectSizeNative(stream, startPosition);
}
@Override
public void serializeInDirectMemory(OMVRBTreeRIDSet object, ODirectMemoryPointer pointer, long offset, Object... hints) {
final byte[] serializedSet = object.toStream();
OBinaryTypeSerializer.INSTANCE.serializeInDirectMemory(serializedSet, pointer, offset);
}
@Override
public OMVRBTreeRIDSet deserializeFromDirectMemory(ODirectMemoryPointer pointer, long offset) {
final byte[] serializedSet = OBinaryTypeSerializer.INSTANCE.deserializeFromDirectMemory(pointer, offset);
final String s = OBinaryProtocol.bytes2string(serializedSet);
return (OMVRBTreeRIDSet) FORMAT.embeddedCollectionFromStream(null, OType.EMBEDDEDSET, null, OType.LINK, s);
}
@Override
public int getObjectSizeInDirectMemory(ODirectMemoryPointer pointer, long offset) {
return OBinaryTypeSerializer.INSTANCE.getObjectSizeInDirectMemory(pointer, offset);
}
@Override
public OMVRBTreeRIDSet preprocess(OMVRBTreeRIDSet value, Object... hints) {
return value;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_stream_OStreamSerializerListRID.java |
92 | class ConvertToBlockProposal extends CorrectionProposal {
ConvertToBlockProposal(String desc, int offset, TextChange change) {
super(desc, change, new Region(offset, 0));
}
static void addConvertToBlockProposal(IDocument doc,
Collection<ICompletionProposal> proposals, IFile file,
Node decNode) {
TextChange change = new TextFileChange("Convert to Block", file);
change.setEdit(new MultiTextEdit());
int offset;
int len;
String semi;
boolean isVoid;
String addedKeyword = null;
String desc = "Convert => to block";
if (decNode instanceof Tree.MethodDeclaration) {
Tree.MethodDeclaration md = (Tree.MethodDeclaration) decNode;
Method dm = md.getDeclarationModel();
if (dm==null || dm.isParameter()) return;
isVoid = dm.isDeclaredVoid();
List<Tree.ParameterList> pls = md.getParameterLists();
if (pls.isEmpty()) return;
offset = pls.get(pls.size()-1).getStopIndex()+1;
len = md.getSpecifierExpression().getExpression().getStartIndex() - offset;
semi = "";
}
else if (decNode instanceof Tree.AttributeDeclaration) {
Tree.AttributeDeclaration ad = (Tree.AttributeDeclaration) decNode;
Value dm = ad.getDeclarationModel();
if (dm==null || dm.isParameter()) return;
isVoid = false;
offset = ad.getIdentifier().getStopIndex()+1;
len = ad.getSpecifierOrInitializerExpression().getExpression().getStartIndex() - offset;
semi = "";
}
else if (decNode instanceof Tree.AttributeSetterDefinition) {
Tree.AttributeSetterDefinition asd = (Tree.AttributeSetterDefinition) decNode;
isVoid = true;
offset = asd.getIdentifier().getStopIndex()+1;
len = asd.getSpecifierExpression().getExpression().getStartIndex() - offset;
semi = "";
}
else if (decNode instanceof Tree.MethodArgument) {
Tree.MethodArgument ma = (Tree.MethodArgument) decNode;
Method dm = ma.getDeclarationModel();
if (dm==null) return;
isVoid = dm.isDeclaredVoid();
if (ma.getType().getToken()==null) {
addedKeyword = "function ";
}
List<Tree.ParameterList> pls = ma.getParameterLists();
if (pls.isEmpty()) return;
offset = pls.get(pls.size()-1).getStopIndex()+1;
len = ma.getSpecifierExpression().getExpression().getStartIndex() - offset;
semi = "";
}
else if (decNode instanceof Tree.AttributeArgument) {
Tree.AttributeArgument aa = (Tree.AttributeArgument) decNode;
isVoid = false;
if (aa.getType().getToken()==null) {
addedKeyword = "value ";
}
offset = aa.getIdentifier().getStopIndex()+1;
len = aa.getSpecifierExpression().getExpression().getStartIndex() - offset;
semi = "";
}
else if (decNode instanceof Tree.FunctionArgument) {
Tree.FunctionArgument fun = (Tree.FunctionArgument) decNode;
Method dm = fun.getDeclarationModel();
if (dm==null) return;
isVoid = dm.isDeclaredVoid();
List<Tree.ParameterList> pls = fun.getParameterLists();
if (pls.isEmpty()) return;
offset = pls.get(pls.size()-1).getStopIndex()+1;
len = fun.getExpression().getStartIndex() - offset;
semi = ";";
desc = "Convert anonymous function => to block";
}
else {
return;
}
if (addedKeyword!=null) {
change.addEdit(new InsertEdit(decNode.getStartIndex(), addedKeyword));
}
change.addEdit(new ReplaceEdit(offset, len, " {" + (isVoid?"":" return") + " "));
change.addEdit(new InsertEdit(decNode.getStopIndex()+1, semi + " }"));
proposals.add(new ConvertToBlockProposal(desc, offset + 3, change));
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ConvertToBlockProposal.java |
512 | public class TransportDeleteIndexAction extends TransportMasterNodeOperationAction<DeleteIndexRequest, DeleteIndexResponse> {
private final MetaDataDeleteIndexService deleteIndexService;
private final DestructiveOperations destructiveOperations;
@Inject
public TransportDeleteIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, MetaDataDeleteIndexService deleteIndexService,
NodeSettingsService nodeSettingsService) {
super(settings, transportService, clusterService, threadPool);
this.deleteIndexService = deleteIndexService;
this.destructiveOperations = new DestructiveOperations(logger, settings, nodeSettingsService);
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected String transportAction() {
return DeleteIndexAction.NAME;
}
@Override
protected DeleteIndexRequest newRequest() {
return new DeleteIndexRequest();
}
@Override
protected DeleteIndexResponse newResponse() {
return new DeleteIndexResponse();
}
@Override
protected void doExecute(DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
destructiveOperations.failDestructive(request.indices());
super.doExecute(request, listener);
}
@Override
protected ClusterBlockException checkBlock(DeleteIndexRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices());
}
@Override
protected void masterOperation(final DeleteIndexRequest request, final ClusterState state, final ActionListener<DeleteIndexResponse> listener) throws ElasticsearchException {
request.indices(state.metaData().concreteIndices(request.indices(), request.indicesOptions()));
if (request.indices().length == 0) {
listener.onResponse(new DeleteIndexResponse(true));
return;
}
// TODO: this API should be improved, currently, if one delete index failed, we send a failure, we should send a response array that includes all the indices that were deleted
final CountDown count = new CountDown(request.indices().length);
for (final String index : request.indices()) {
deleteIndexService.deleteIndex(new MetaDataDeleteIndexService.Request(index).timeout(request.timeout()).masterTimeout(request.masterNodeTimeout()), new MetaDataDeleteIndexService.Listener() {
private volatile Throwable lastFailure;
private volatile boolean ack = true;
@Override
public void onResponse(MetaDataDeleteIndexService.Response response) {
if (!response.acknowledged()) {
ack = false;
}
if (count.countDown()) {
if (lastFailure != null) {
listener.onFailure(lastFailure);
} else {
listener.onResponse(new DeleteIndexResponse(ack));
}
}
}
@Override
public void onFailure(Throwable t) {
logger.debug("[{}] failed to delete index", t, index);
lastFailure = t;
if (count.countDown()) {
listener.onFailure(t);
}
}
});
}
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_delete_TransportDeleteIndexAction.java |
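The per-index listeners above share a CountDown so that only the final completion, success or failure, fires the outer listener exactly once. A standalone sketch of the same fan-in idea using only JDK atomics (names are illustrative):

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;

public class CountDownFanIn {
    public static void main(String[] args) {
        final int tasks = 3;
        final AtomicInteger remaining = new AtomicInteger(tasks);
        final AtomicReference<Throwable> lastFailure = new AtomicReference<Throwable>();

        Runnable onEachCompletion = new Runnable() {
            @Override
            public void run() {
                // decrementAndGet() == 0 plays the role of count.countDown() returning true
                if (remaining.decrementAndGet() == 0) {
                    Throwable t = lastFailure.get();
                    System.out.println(t == null ? "all indices deleted" : "failed: " + t);
                }
            }
        };

        for (int i = 0; i < tasks; i++) {
            new Thread(onEachCompletion).start(); // stand-in for the async delete callbacks
        }
    }
}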
101 | public enum Text implements TitanPredicate {
/**
* Whether the text contains a given term as a token in the text (case insensitive)
*/
CONTAINS {
@Override
public boolean evaluate(Object value, Object condition) {
this.preevaluate(value,condition);
if (value == null) return false;
return evaluateRaw(value.toString(),(String)condition);
}
@Override
public boolean evaluateRaw(String value, String terms) {
Set<String> tokens = Sets.newHashSet(tokenize(value.toLowerCase()));
terms = terms.trim();
List<String> tokenTerms = tokenize(terms.toLowerCase());
if (!terms.isEmpty() && tokenTerms.isEmpty()) return false;
for (String term : tokenTerms) {
if (!tokens.contains(term)) return false;
}
return true;
}
@Override
public boolean isValidCondition(Object condition) {
if (condition == null) return false;
else if (condition instanceof String && StringUtils.isNotBlank((String) condition)) return true;
else return false;
}
},
/**
* Whether the text contains a token that starts with a given term (case insensitive)
*/
CONTAINS_PREFIX {
@Override
public boolean evaluate(Object value, Object condition) {
this.preevaluate(value,condition);
if (value == null) return false;
return evaluateRaw(value.toString(),(String)condition);
}
@Override
public boolean evaluateRaw(String value, String prefix) {
for (String token : tokenize(value.toLowerCase())) {
if (PREFIX.evaluateRaw(token,prefix.toLowerCase())) return true;
}
return false;
}
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof String;
}
},
/**
* Whether the text contains a token that matches a regular expression
*/
CONTAINS_REGEX {
@Override
public boolean evaluate(Object value, Object condition) {
this.preevaluate(value,condition);
if (value == null) return false;
return evaluateRaw(value.toString(),(String)condition);
}
@Override
public boolean evaluateRaw(String value, String regex) {
for (String token : tokenize(value.toLowerCase())) {
if (REGEX.evaluateRaw(token,regex)) return true;
}
return false;
}
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof String && StringUtils.isNotBlank(condition.toString());
}
},
/**
* Whether the text starts with a given prefix (case sensitive)
*/
PREFIX {
@Override
public boolean evaluate(Object value, Object condition) {
this.preevaluate(value,condition);
if (value==null) return false;
return evaluateRaw(value.toString(),(String)condition);
}
@Override
public boolean evaluateRaw(String value, String prefix) {
return value.startsWith(prefix.trim());
}
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof String;
}
},
/**
* Whether the text matches a regular expression (case sensitive)
*/
REGEX {
@Override
public boolean evaluate(Object value, Object condition) {
this.preevaluate(value,condition);
if (value == null) return false;
return evaluateRaw(value.toString(),(String)condition);
}
public boolean evaluateRaw(String value, String regex) {
return value.matches(regex);
}
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof String && StringUtils.isNotBlank(condition.toString());
}
};
private static final Logger log = LoggerFactory.getLogger(Text.class);
public void preevaluate(Object value, Object condition) {
Preconditions.checkArgument(this.isValidCondition(condition), "Invalid condition provided: %s", condition);
if (!(value instanceof String)) log.debug("Value not a string: " + value);
}
abstract boolean evaluateRaw(String value, String condition);
private static final int MIN_TOKEN_LENGTH = 1;
public static List<String> tokenize(String str) {
ArrayList<String> tokens = new ArrayList<String>();
int previous = 0;
for (int p = 0; p < str.length(); p++) {
if (!Character.isLetterOrDigit(str.charAt(p))) {
if (p > previous + MIN_TOKEN_LENGTH) tokens.add(str.substring(previous, p));
previous = p + 1;
}
}
if (previous + MIN_TOKEN_LENGTH < str.length()) tokens.add(str.substring(previous, str.length()));
return tokens;
}
@Override
public boolean isValidValueType(Class<?> clazz) {
Preconditions.checkNotNull(clazz);
return clazz.equals(String.class);
}
@Override
public boolean hasNegation() {
return false;
}
@Override
public TitanPredicate negate() {
throw new UnsupportedOperationException();
}
@Override
public boolean isQNF() {
return true;
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Text.java |
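Because tokenize() and the predicate constants above are public, the matching rules can be exercised directly; a small sketch using only methods shown in this enum:

import com.thinkaurelius.titan.core.attribute.Text;

public class TextPredicateDemo {
    public static void main(String[] args) {
        String value = "The quick brown fox";

        // CONTAINS: every search term must appear as a token, case-insensitively
        System.out.println(Text.CONTAINS.evaluate(value, "quick fox"));  // true
        System.out.println(Text.CONTAINS.evaluate(value, "quick cat"));  // false

        // CONTAINS_PREFIX matches a token prefix; PREFIX matches the whole string
        System.out.println(Text.CONTAINS_PREFIX.evaluate(value, "bro")); // true
        System.out.println(Text.PREFIX.evaluate(value, "The qu"));       // true

        // the tokenizer splits on non-alphanumerics: [the, quick, brown, fox]
        System.out.println(Text.tokenize(value.toLowerCase()));
    }
}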
207 | public class EhcacheHydratedCacheManagerImpl extends AbstractHydratedCacheManager {
private static final Log LOG = LogFactory.getLog(EhcacheHydratedCacheManagerImpl.class);
private static final EhcacheHydratedCacheManagerImpl MANAGER = new EhcacheHydratedCacheManagerImpl();
public static EhcacheHydratedCacheManagerImpl getInstance() {
return MANAGER;
}
private Map<String, List<String>> cacheMembersByEntity = Collections.synchronizedMap(new HashMap<String, List<String>>(100));
private Cache heap = null;
private EhcacheHydratedCacheManagerImpl() {
//CacheManager.getInstance() and CacheManager.create() cannot be called in this constructor because it will create two cache manager instances
}
private Cache getHeap() {
if (heap == null) {
if (CacheManager.getInstance().cacheExists("hydrated-cache")) {
heap = CacheManager.getInstance().getCache("hydrated-cache");
} else {
CacheConfiguration config = new CacheConfiguration("hydrated-cache", 0).eternal(true).overflowToDisk(false).maxElementsInMemory(100000);
Cache cache = new Cache(config);
CacheManager.create().addCache(cache);
heap = cache;
}
}
return heap;
}
@Override
public Object getHydratedCacheElementItem(String cacheRegion, String cacheName, Serializable elementKey, String elementItemName) {
Object response = null;
Element element;
String myKey = cacheRegion + '_' + cacheName + '_' + elementItemName + '_' + elementKey;
element = getHeap().get(myKey);
if (element != null) {
response = element.getObjectValue();
}
return response;
}
@Override
public void addHydratedCacheElementItem(String cacheRegion, String cacheName, Serializable elementKey, String elementItemName, Object elementValue) {
String heapKey = cacheRegion + '_' + cacheName + '_' + elementItemName + '_' + elementKey;
String nameKey = cacheRegion + '_' + cacheName + '_' + elementKey;
Element element = new Element(heapKey, elementValue);
if (!cacheMembersByEntity.containsKey(nameKey)) {
List<String> myMembers = new ArrayList<String>(50);
myMembers.add(elementItemName);
cacheMembersByEntity.put(nameKey, myMembers);
} else {
List<String> myMembers = cacheMembersByEntity.get(nameKey);
myMembers.add(elementItemName);
}
getHeap().put(element);
}
protected void removeCache(String cacheRegion, Serializable key) {
String cacheName = cacheRegion;
if (key instanceof CacheKey) {
cacheName = ((CacheKey) key).getEntityOrRoleName();
key = ((CacheKey) key).getKey();
}
String nameKey = cacheRegion + '_' + cacheName + '_' + key;
if (cacheMembersByEntity.containsKey(nameKey)) {
String[] members = new String[cacheMembersByEntity.get(nameKey).size()];
members = cacheMembersByEntity.get(nameKey).toArray(members);
for (String myMember : members) {
String itemKey = cacheRegion + '_' + cacheName + '_' + myMember + '_' + key;
getHeap().remove(itemKey);
}
cacheMembersByEntity.remove(nameKey);
}
}
protected void removeAll(String cacheName) {
//do nothing
}
@Override
public void notifyElementEvicted(Ehcache arg0, Element arg1) {
removeCache(arg0.getName(), arg1.getKey());
}
@Override
public void notifyElementExpired(Ehcache arg0, Element arg1) {
removeCache(arg0.getName(), arg1.getKey());
}
@Override
public void notifyElementPut(Ehcache arg0, Element arg1) throws CacheException {
//do nothing
}
@Override
public void notifyElementRemoved(Ehcache arg0, Element arg1) throws CacheException {
removeCache(arg0.getName(), arg1.getKey());
}
@Override
public void notifyElementUpdated(Ehcache arg0, Element arg1) throws CacheException {
removeCache(arg0.getName(), arg1.getKey());
}
@Override
public void notifyRemoveAll(Ehcache arg0) {
removeAll(arg0.getName());
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_cache_engine_EhcacheHydratedCacheManagerImpl.java |
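The lazy getHeap() above assembles an eternal, memory-only cache through the Ehcache 2.x programmatic API. A minimal standalone sketch of the same calls (cache name, size, and key are illustrative):

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import net.sf.ehcache.config.CacheConfiguration;

public class EternalCacheDemo {
    public static void main(String[] args) {
        CacheConfiguration config = new CacheConfiguration("demo-cache", 0)
                .eternal(true)             // entries never expire
                .overflowToDisk(false)     // memory only
                .maxElementsInMemory(1000);
        Cache cache = new Cache(config);
        CacheManager.create().addCache(cache);

        // a flat string key, like the region/name/item/key compounds used above
        cache.put(new Element("blRegion_Sku_price_123", "hydrated value"));
        Element hit = cache.get("blRegion_Sku_price_123");
        System.out.println(hit == null ? "miss" : hit.getObjectValue());

        CacheManager.getInstance().shutdown();
    }
}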
1,866 | boolean b = h1.executeTransaction(options, new TransactionalTask<Boolean>() {
public Boolean execute(TransactionalTaskContext context) throws TransactionException {
final TransactionalMap<Object, Object> txMap = context.getMap("default");
txMap.put("3", "3");
map2.put("4", "4");
assertEquals(true, txMap.remove("1", "1"));
assertEquals(false, txMap.remove("2", "1"));
assertEquals("1", map2.get("1"));
assertEquals(null, txMap.get("1"));
assertEquals(true, txMap.remove("2", "2"));
assertEquals(false, txMap.remove("3", null));
assertEquals(false, txMap.remove("5", "2"));
assertEquals(2, txMap.size());
return true;
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_map_MapTransactionTest.java |
1,341 | completableFuture.andThen(new ExecutionCallback() {
@Override
public void onResponse(Object response) {
reference1.set(response);
latch2.countDown();
}
@Override
public void onFailure(Throwable t) {
reference1.set(t);
latch2.countDown();
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java |
1,632 | public class ConsoleCommandRequest implements ConsoleRequest {
private String command;
public ConsoleCommandRequest() {
}
public ConsoleCommandRequest(String command) {
this.command = command;
}
@Override
public int getType() {
return ConsoleRequestConstants.REQUEST_TYPE_CONSOLE_COMMAND;
}
@Override
public void writeResponse(ManagementCenterService mcs, ObjectDataOutput dos) throws Exception {
ConsoleCommandHandler handler = mcs.getCommandHandler();
try {
final String output = handler.handleCommand(command);
writeLongString(dos, output);
} catch (Throwable e) {
writeLongString(dos, "Error: " + e.getClass().getSimpleName() + "[" + e.getMessage() + "]");
}
}
@Override
public Object readResponse(ObjectDataInput in) throws IOException {
return readLongString(in);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(command);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
command = in.readUTF();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_management_request_ConsoleCommandRequest.java |
561 | public class ClassPathApplicationContextTask extends Task {
private String path;
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_util_sql_ClassPathApplicationContextTask.java |
9 | @Component("blSkuCustomPersistenceHandler")
public class SkuCustomPersistenceHandler extends CustomPersistenceHandlerAdapter {
private static final Log LOG = LogFactory.getLog(SkuCustomPersistenceHandler.class);
public static String PRODUCT_OPTION_FIELD_PREFIX = "productOption";
@Resource(name="blAdornedTargetListPersistenceModule")
protected AdornedTargetListPersistenceModule adornedPersistenceModule;
/**
* This represents the field that all of the product option values will be stored in. This would be used in the case
* where there are a bunch of product options and displaying each option as a grid header would have everything
* squashed together. Filtering on this field is currently unsupported.
*/
public static String CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME = "consolidatedProductOptions";
public static String CONSOLIDATED_PRODUCT_OPTIONS_DELIMETER = "; ";
@Resource(name="blCatalogService")
protected CatalogService catalogService;
@Resource(name = "blSkuRestrictionFactory")
protected RestrictionFactory skuRestrictionFactory;
@Override
public Boolean canHandleInspect(PersistencePackage persistencePackage) {
return canHandle(persistencePackage, persistencePackage.getPersistencePerspective().getOperationTypes()
.getInspectType());
}
@Override
public Boolean canHandleFetch(PersistencePackage persistencePackage) {
OperationType fetchType = persistencePackage.getPersistencePerspective().getOperationTypes().getFetchType();
return canHandle(persistencePackage, fetchType);
}
@Override
public Boolean canHandleAdd(PersistencePackage persistencePackage) {
OperationType addType = persistencePackage.getPersistencePerspective().getOperationTypes().getAddType();
return canHandle(persistencePackage, addType);
}
@Override
public Boolean canHandleUpdate(PersistencePackage persistencePackage) {
OperationType updateType = persistencePackage.getPersistencePerspective().getOperationTypes().getUpdateType();
return canHandle(persistencePackage, updateType);
}
/**
* Since this is the default for all Skus, it's possible that we are providing custom criteria for this
* Sku lookup. In that case, we probably want to delegate to a child class, so only use this particular
* persistence handler if there is no custom criteria being used and the ceiling entity is an instance of Sku. The
* exception to this rule is when we are pulling back Media, since the admin actually uses Sku for the ceiling entity
* class name. That should be handled by the map structure module though, so only handle things in the Sku custom
* persistence handler for OperationType.BASIC
*
*/
protected Boolean canHandle(PersistencePackage persistencePackage, OperationType operationType) {
String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
try {
Class testClass = Class.forName(ceilingEntityFullyQualifiedClassname);
return Sku.class.isAssignableFrom(testClass) &&
//ArrayUtils.isEmpty(persistencePackage.getCustomCriteria()) &&
OperationType.BASIC.equals(operationType) &&
(persistencePackage.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST) == null);
} catch (ClassNotFoundException e) {
return false;
}
}
/**
* Build out the extra fields for the product options
*/
@Override
public DynamicResultSet inspect(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, InspectHelper helper) throws ServiceException {
try {
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties = new HashMap<MergedPropertyType, Map<String, FieldMetadata>>();
//Grab the default properties for the Sku
Map<String, FieldMetadata> properties = helper.getSimpleMergedProperties(Sku.class.getName(), persistencePerspective);
if (persistencePackage.getCustomCriteria() == null || persistencePackage.getCustomCriteria().length == 0) {
//look up all the ProductOptions and then create new fields for each of them
List<ProductOption> options = catalogService.readAllProductOptions();
int order = 0;
for (ProductOption option : options) {
//add this to the built Sku properties
FieldMetadata md = createIndividualOptionField(option, order);
if (md != null) {
properties.put("productOption" + option.getId(), md);
}
}
} else {
// If we have a product to filter the list of available product options, then use it
Long productId = Long.parseLong(persistencePackage.getCustomCriteria()[0]);
Product product = catalogService.findProductById(productId);
for (ProductOption option : product.getProductOptions()) {
FieldMetadata md = createIndividualOptionField(option, 0);
if (md != null) {
properties.put("productOption" + option.getId(), md);
}
}
}
//also build the consolidated field; if using the SkuBasicClientEntityModule then this field will be
//permanently hidden
properties.put(CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME, createConsolidatedOptionField(SkuImpl.class));
allMergedProperties.put(MergedPropertyType.PRIMARY, properties);
//allow the adorned list to contribute properties as well in the case of Sku bundle items
adornedPersistenceModule.setPersistenceManager((PersistenceManager)helper);
adornedPersistenceModule.updateMergedProperties(persistencePackage, allMergedProperties);
Class<?>[] entityClasses = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(Sku.class);
ClassMetadata mergedMetadata = helper.getMergedClassMetadata(entityClasses, allMergedProperties);
DynamicResultSet results = new DynamicResultSet(mergedMetadata, null, null);
return results;
} catch (Exception e) {
ServiceException ex = new ServiceException("Unable to retrieve inspection results for " +
persistencePackage.getCeilingEntityFullyQualifiedClassname(), e);
throw ex;
}
}
/**
* Creates the metadata necessary for displaying all of the product option values in a single field. The display of this
* field is a single string with every product option value appended to it separated by a semicolon. This method should
* be invoked on an inspect for whatever is utilizing this so that the property will be ready to be populated on fetch.
*
* The metadata that is returned will also be set to prominent by default so that it will be ready to display on whatever
* grid is being inspected. If you do not want this behavior you will need to override this functionality in the metadata
* that is returned.
*
* @param inheritedFromType which type this should appear on. This would normally be SkuImpl.class, but if you want to
* display this field with a different entity then this should be that entity
* @return
*/
public static FieldMetadata createConsolidatedOptionField(Class<?> inheritedFromType) {
BasicFieldMetadata metadata = new BasicFieldMetadata();
metadata.setFieldType(SupportedFieldType.STRING);
metadata.setMutable(false);
metadata.setInheritedFromType(inheritedFromType.getName());
metadata.setAvailableToTypes(new String[] { SkuImpl.class.getName() });
metadata.setForeignKeyCollection(false);
metadata.setMergedPropertyType(MergedPropertyType.PRIMARY);
metadata.setName(CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME);
metadata.setFriendlyName(CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME);
metadata.setGroup("");
metadata.setExplicitFieldType(SupportedFieldType.UNKNOWN);
metadata.setProminent(true);
metadata.setVisibility(VisibilityEnum.FORM_HIDDEN);
metadata.setBroadleafEnumeration("");
metadata.setReadOnly(true);
metadata.setRequiredOverride(false);
metadata.setGridOrder(Integer.MAX_VALUE);
return metadata;
}
/**
* Returns a {@link Property} filled out with a delimited list of the <b>values</b> that are passed in. This should be
* invoked on a fetch and the returned property should be added to the fetched {@link Entity} dto.
*
* @param values
* @return
* @see {@link #createConsolidatedOptionField(Class)};
*/
public static Property getConsolidatedOptionProperty(List<ProductOptionValue> values) {
Property optionValueProperty = new Property();
optionValueProperty.setName(CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME);
//ordering the values by the display order of their corresponding product option is currently disabled:
// Collections.sort(values, new Comparator<ProductOptionValue>() {
//
// @Override
// public int compare(ProductOptionValue value1, ProductOptionValue value2) {
// return new CompareToBuilder().append(value1.getProductOption().getDisplayOrder(),
// value2.getProductOption().getDisplayOrder()).toComparison();
// }
// });
ArrayList<String> stringValues = new ArrayList<String>();
CollectionUtils.collect(values, new Transformer() {
@Override
public Object transform(Object input) {
return ((ProductOptionValue) input).getAttributeValue();
}
}, stringValues);
optionValueProperty.setValue(StringUtils.join(stringValues, CONSOLIDATED_PRODUCT_OPTIONS_DELIMETER));
return optionValueProperty;
}
/**
* @return a blank {@link Property} corresponding to the CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME
*/
public static Property getBlankConsolidatedOptionProperty() {
Property optionValueProperty = new Property();
optionValueProperty.setName(CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME);
optionValueProperty.setValue("");
return optionValueProperty;
}
/**
* <p>Creates an individual property for the specified product option. This should set up an enum field whose values will
* be the option values for this option. This is useful when you would like to display each product option as its
* own field in a grid so that you can further filter by product option values.</p>
* <p>In order for these fields to be utilized properly on the fetch, the GWT frontend must use the
* SkuBasicClientEntityModule for your datasource.</p>
*
* @param option
* @param order
* @return
*/
public static FieldMetadata createIndividualOptionField(ProductOption option, int order) {
BasicFieldMetadata metadata = new BasicFieldMetadata();
List<ProductOptionValue> allowedValues = option.getAllowedValues();
if (CollectionUtils.isNotEmpty(allowedValues)) {
metadata.setFieldType(SupportedFieldType.EXPLICIT_ENUMERATION);
metadata.setMutable(true);
metadata.setInheritedFromType(SkuImpl.class.getName());
metadata.setAvailableToTypes(new String[] { SkuImpl.class.getName() });
metadata.setForeignKeyCollection(false);
metadata.setMergedPropertyType(MergedPropertyType.PRIMARY);
//Set up the enumeration based on the product option values
String[][] optionValues = new String[allowedValues.size()][2];
for (int i = 0; i < allowedValues.size(); i++) {
ProductOptionValue value = option.getAllowedValues().get(i);
optionValues[i][0] = value.getId().toString();
optionValues[i][1] = value.getAttributeValue();
}
metadata.setEnumerationValues(optionValues);
metadata.setName(PRODUCT_OPTION_FIELD_PREFIX + option.getId());
metadata.setFriendlyName(option.getLabel());
metadata.setGroup("productOption_group");
metadata.setGroupOrder(-1);
metadata.setOrder(order);
metadata.setExplicitFieldType(SupportedFieldType.UNKNOWN);
metadata.setProminent(false);
metadata.setVisibility(VisibilityEnum.FORM_EXPLICITLY_SHOWN);
metadata.setBroadleafEnumeration("");
metadata.setReadOnly(false);
metadata.setRequiredOverride(BooleanUtils.isFalse(option.getRequired()));
return metadata;
}
return null;
}
@SuppressWarnings("unchecked")
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
try {
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
//get the default properties from Sku and its subclasses
Map<String, FieldMetadata> originalProps = helper.getSimpleMergedProperties(Sku.class.getName(), persistencePerspective);
//Pull back the Skus based on the criteria from the client
List<FilterMapping> filterMappings = helper.getFilterMappings(persistencePerspective, cto, ceilingEntityFullyQualifiedClassname, originalProps, skuRestrictionFactory);
//allow subclasses to provide additional criteria before executing the query
applyProductOptionValueCriteria(filterMappings, cto, persistencePackage, null);
applyAdditionalFetchCriteria(filterMappings, cto, persistencePackage);
List<Serializable> records = helper.getPersistentRecords(persistencePackage.getCeilingEntityFullyQualifiedClassname(), filterMappings, cto.getFirstResult(), cto.getMaxResults());
//Convert Skus into the client-side Entity representation
Entity[] payload = helper.getRecords(originalProps, records);
int totalRecords = helper.getTotalRecords(persistencePackage.getCeilingEntityFullyQualifiedClassname(), filterMappings);
//Now fill out the relevant properties for the product options for the Skus that were returned
for (int i = 0; i < records.size(); i++) {
Sku sku = (Sku) records.get(i);
Entity entity = payload[i];
List<ProductOptionValue> optionValues = sku.getProductOptionValues();
for (ProductOptionValue value : optionValues) {
Property optionProperty = new Property();
optionProperty.setName(PRODUCT_OPTION_FIELD_PREFIX + value.getProductOption().getId());
optionProperty.setValue(value.getId().toString());
entity.addProperty(optionProperty);
}
if (CollectionUtils.isNotEmpty(optionValues)) {
entity.addProperty(getConsolidatedOptionProperty(optionValues));
} else {
entity.addProperty(getBlankConsolidatedOptionProperty());
}
}
return new DynamicResultSet(payload, totalRecords);
} catch (Exception e) {
throw new ServiceException("Unable to perform fetch for entity: " + ceilingEntityFullyQualifiedClassname, e);
}
}
public static void applyProductOptionValueCriteria(List<FilterMapping> filterMappings, CriteriaTransferObject cto, PersistencePackage persistencePackage, String skuPropertyPrefix) {
//if the frontend filtered on individual product option fields, collect the selected value ids
final List<Long> productOptionValueFilterIDs = new ArrayList<Long>();
for (String filterProperty : cto.getCriteriaMap().keySet()) {
if (filterProperty.startsWith(PRODUCT_OPTION_FIELD_PREFIX)) {
FilterAndSortCriteria criteria = cto.get(filterProperty);
productOptionValueFilterIDs.add(Long.parseLong(criteria.getFilterValues().get(0)));
}
}
//also determine if there is a consolidated POV query
final List<String> productOptionValueFilterValues = new ArrayList<String>();
FilterAndSortCriteria consolidatedCriteria = cto.get(CONSOLIDATED_PRODUCT_OPTIONS_FIELD_NAME);
if (!consolidatedCriteria.getFilterValues().isEmpty()) {
//the criteria in this case would be a semicolon-delimited value list
productOptionValueFilterValues.addAll(Arrays.asList(StringUtils.split(consolidatedCriteria.getFilterValues().get(0), CONSOLIDATED_PRODUCT_OPTIONS_DELIMETER)));
}
if (productOptionValueFilterIDs.size() > 0) {
FilterMapping filterMapping = new FilterMapping()
.withFieldPath(new FieldPath().withTargetProperty(StringUtils.isEmpty(skuPropertyPrefix)?"":skuPropertyPrefix + "productOptionValues.id"))
.withDirectFilterValues(productOptionValueFilterIDs)
.withRestriction(new Restriction()
.withPredicateProvider(new PredicateProvider() {
@Override
public Predicate buildPredicate(CriteriaBuilder builder, FieldPathBuilder fieldPathBuilder,
From root, String ceilingEntity,
String fullPropertyName, Path explicitPath, List directValues) {
return explicitPath.as(Long.class).in(directValues);
}
})
);
filterMappings.add(filterMapping);
}
if (productOptionValueFilterValues.size() > 0) {
FilterMapping filterMapping = new FilterMapping()
.withFieldPath(new FieldPath().withTargetProperty(StringUtils.isEmpty(skuPropertyPrefix)?"":skuPropertyPrefix + "productOptionValues.attributeValue"))
.withDirectFilterValues(productOptionValueFilterValues)
.withRestriction(new Restriction()
.withPredicateProvider(new PredicateProvider() {
@Override
public Predicate buildPredicate(CriteriaBuilder builder, FieldPathBuilder fieldPathBuilder,
From root, String ceilingEntity,
String fullPropertyName, Path explicitPath, List directValues) {
return explicitPath.as(String.class).in(directValues);
}
})
);
filterMappings.add(filterMapping);
}
}
/**
* <p>Available override point for subclasses if they would like to add additional criteria via the query criteria. At the
* point that this method has been called, criteria from the frontend have already been applied, thus allowing you to
* override from there as well.</p>
* <p>Subclasses that choose to override this should also call this super method so that the correct filter criteria
* can be applied for product option values.</p>
*
*/
public void applyAdditionalFetchCriteria(List<FilterMapping> filterMappings, CriteriaTransferObject cto, PersistencePackage persistencePackage) {
//unimplemented
}
@Override
public Entity add(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
Entity entity = persistencePackage.getEntity();
try {
//Fill out the Sku instance from the form
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
Sku adminInstance = (Sku) Class.forName(entity.getType()[0]).newInstance();
Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Sku.class.getName(), persistencePerspective);
adminInstance = (Sku) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false);
//Verify that there isn't already a Sku for this particular product option value combo
Entity errorEntity = validateUniqueProductOptionValueCombination(adminInstance.getProduct(),
getProductOptionProperties(entity),
null);
if (errorEntity != null) {
entity.setValidationErrors(errorEntity.getValidationErrors());
return entity;
}
//persist the newly-created Sku
adminInstance = (Sku) dynamicEntityDao.persist(adminInstance);
//associate the product option values
associateProductOptionValuesToSku(entity, adminInstance, dynamicEntityDao);
//After associating the product option values, save off the Sku
adminInstance = (Sku) dynamicEntityDao.merge(adminInstance);
//Fill out the DTO and add in the product option value properties to it
Entity result = helper.getRecord(adminProperties, adminInstance, null, null);
for (Property property : getProductOptionProperties(entity)) {
result.addProperty(property);
}
return result;
} catch (Exception e) {
throw new ServiceException("Unable to perform fetch for entity: " + Sku.class.getName(), e);
}
}
@Override
public Entity update(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
Entity entity = persistencePackage.getEntity();
try {
//Fill out the Sku instance from the form
PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Sku.class.getName(), persistencePerspective);
Object primaryKey = helper.getPrimaryKey(entity, adminProperties);
Sku adminInstance = (Sku) dynamicEntityDao.retrieve(Class.forName(entity.getType()[0]), primaryKey);
adminInstance = (Sku) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false);
//Verify that there isn't already a Sku for this particular product option value combo
Entity errorEntity = validateUniqueProductOptionValueCombination(adminInstance.getProduct(),
getProductOptionProperties(entity),
adminInstance);
if (errorEntity != null) {
entity.setValidationErrors(errorEntity.getValidationErrors());
return entity;
}
associateProductOptionValuesToSku(entity, adminInstance, dynamicEntityDao);
adminInstance = (Sku) dynamicEntityDao.merge(adminInstance);
//Fill out the DTO and add in the product option value properties to it
Entity result = helper.getRecord(adminProperties, adminInstance, null, null);
for (Property property : getProductOptionProperties(entity)) {
result.addProperty(property);
}
return result;
} catch (Exception e) {
throw new ServiceException("Unable to perform fetch for entity: " + Sku.class.getName(), e);
}
}
/**
* This initially removes all of the product option values that are currently related to the Sku and then re-associates
* the {@link ProductOptionValue}s
* @param entity
* @param adminInstance
*/
protected void associateProductOptionValuesToSku(Entity entity, Sku adminInstance, DynamicEntityDao dynamicEntityDao) {
//Get the list of product option value ids that were selected from the form
List<Long> productOptionValueIds = new ArrayList<Long>();
for (Property property : getProductOptionProperties(entity)) {
productOptionValueIds.add(Long.parseLong(property.getValue()));
}
//remove the current list of product option values from the Sku
if (adminInstance.getProductOptionValues().size() > 0) {
adminInstance.getProductOptionValues().clear();
dynamicEntityDao.merge(adminInstance);
}
//Associate the product option values from the form with the Sku
List<ProductOption> productOptions = adminInstance.getProduct().getProductOptions();
for (ProductOption option : productOptions) {
for (ProductOptionValue value : option.getAllowedValues()) {
if (productOptionValueIds.contains(value.getId())) {
adminInstance.getProductOptionValues().add(value);
}
}
}
}
protected List<Property> getProductOptionProperties(Entity entity) {
List<Property> productOptionProperties = new ArrayList<Property>();
for (Property property : entity.getProperties()) {
if (property.getName().startsWith(PRODUCT_OPTION_FIELD_PREFIX)) {
productOptionProperties.add(property);
}
}
return productOptionProperties;
}
/**
* Ensures that the given list of {@link ProductOptionValue} IDs is unique for the given {@link Product}
* @param product
* @param productOptionProperties
* @param currentSku - for update operations, this is the current Sku that is being updated; should be excluded from
* attempting validation
* @return <b>null</b> if validation succeeds, the error entity otherwise
*/
protected Entity validateUniqueProductOptionValueCombination(Product product, List<Property> productOptionProperties, Sku currentSku) {
//do not attempt POV validation if no PO properties were passed in
if (CollectionUtils.isNotEmpty(productOptionProperties)) {
List<Long> productOptionValueIds = new ArrayList<Long>();
for (Property property : productOptionProperties) {
productOptionValueIds.add(Long.parseLong(property.getValue()));
}
boolean validated = true;
for (Sku sku : product.getAdditionalSkus()) {
if (currentSku == null || !sku.getId().equals(currentSku.getId())) {
List<Long> testList = new ArrayList<Long>();
for (ProductOptionValue optionValue : sku.getProductOptionValues()) {
testList.add(optionValue.getId());
}
if (CollectionUtils.isNotEmpty(testList) &&
productOptionValueIds.containsAll(testList) &&
productOptionValueIds.size() == testList.size()) {
validated = false;
break;
}
}
}
if (!validated) {
Entity errorEntity = new Entity();
for (Property productOptionProperty : productOptionProperties) {
errorEntity.addValidationError(productOptionProperty.getName(), "uniqueSkuError");
}
return errorEntity;
}
}
return null;
}
} | 1no label
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_SkuCustomPersistenceHandler.java |
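getConsolidatedOptionProperty above leans on two library idioms: projecting a collection through a commons-collections Transformer and joining with commons-lang StringUtils.join. A standalone sketch of just that projection-and-join step; OptionValue is a stand-in for ProductOptionValue, and commons-collections 3.x / commons-lang 2.x coordinates are assumed.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Transformer;
import org.apache.commons.lang.StringUtils;

public class ConsolidatedValueDemo {
    static class OptionValue {
        final String attributeValue;
        OptionValue(String v) { this.attributeValue = v; }
    }

    public static void main(String[] args) {
        List<OptionValue> values = Arrays.asList(new OptionValue("Red"), new OptionValue("XL"));

        // collect(input, transformer, output) appends each projected element to output
        List<String> stringValues = new ArrayList<String>();
        CollectionUtils.collect(values, new Transformer() {
            @Override
            public Object transform(Object input) {
                return ((OptionValue) input).attributeValue;
            }
        }, stringValues);

        System.out.println(StringUtils.join(stringValues, "; ")); // Red; XL
    }
}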
1,107 | public class ReconstructCartResponse {
private Order order;
private List<OrderItem> removedItems = new ArrayList<OrderItem>();
public Order getOrder() {
return order;
}
public void setOrder(Order order) {
this.order = order;
}
public List<OrderItem> getRemovedItems() {
return removedItems;
}
public void setRemovedItems(List<OrderItem> removedItems) {
this.removedItems = removedItems;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_call_ReconstructCartResponse.java |
5,195 | public class InternalGeoHashGrid extends InternalAggregation implements GeoHashGrid {
public static final Type TYPE = new Type("geohash_grid", "ghcells");
public static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@Override
public InternalGeoHashGrid readResult(StreamInput in) throws IOException {
InternalGeoHashGrid buckets = new InternalGeoHashGrid();
buckets.readFrom(in);
return buckets;
}
};
public static void registerStreams() {
AggregationStreams.registerStream(STREAM, TYPE.stream());
}
static class Bucket implements GeoHashGrid.Bucket, Comparable<Bucket> {
protected long geohashAsLong;
protected long docCount;
protected InternalAggregations aggregations;
public Bucket(long geohashAsLong, long docCount, InternalAggregations aggregations) {
this.docCount = docCount;
this.aggregations = aggregations;
this.geohashAsLong = geohashAsLong;
}
public String getKey() {
return GeoHashUtils.toString(geohashAsLong);
}
@Override
public Text getKeyAsText() {
return new StringText(getKey());
}
public GeoPoint getKeyAsGeoPoint() {
return GeoHashUtils.decode(geohashAsLong);
}
@Override
public long getDocCount() {
return docCount;
}
@Override
public Aggregations getAggregations() {
return aggregations;
}
@Override
public int compareTo(Bucket other) {
if (this.geohashAsLong > other.geohashAsLong) {
return 1;
}
if (this.geohashAsLong < other.geohashAsLong) {
return -1;
}
return 0;
}
public Bucket reduce(List<? extends Bucket> buckets, CacheRecycler cacheRecycler) {
if (buckets.size() == 1) {
// we still need to reduce the sub aggs
Bucket bucket = buckets.get(0);
bucket.aggregations.reduce(cacheRecycler);
return bucket;
}
Bucket reduced = null;
List<InternalAggregations> aggregationsList = new ArrayList<InternalAggregations>(buckets.size());
for (Bucket bucket : buckets) {
if (reduced == null) {
reduced = bucket;
} else {
reduced.docCount += bucket.docCount;
}
aggregationsList.add(bucket.aggregations);
}
reduced.aggregations = InternalAggregations.reduce(aggregationsList, cacheRecycler);
return reduced;
}
@Override
public Number getKeyAsNumber() {
return geohashAsLong;
}
}
private int requiredSize;
private Collection<Bucket> buckets;
protected Map<String, Bucket> bucketMap;
InternalGeoHashGrid() {
} // for serialization
public InternalGeoHashGrid(String name, int requiredSize, Collection<Bucket> buckets) {
super(name);
this.requiredSize = requiredSize;
this.buckets = buckets;
}
@Override
public Type type() {
return TYPE;
}
@Override
public Collection<GeoHashGrid.Bucket> getBuckets() {
Object o = buckets;
return (Collection<GeoHashGrid.Bucket>) o;
}
@Override
public GeoHashGrid.Bucket getBucketByKey(String geohash) {
if (bucketMap == null) {
bucketMap = new HashMap<String, Bucket>(buckets.size());
for (Bucket bucket : buckets) {
bucketMap.put(bucket.getKey(), bucket);
}
}
return bucketMap.get(geohash);
}
@Override
public GeoHashGrid.Bucket getBucketByKey(Number key) {
return getBucketByKey(GeoHashUtils.toString(key.longValue()));
}
@Override
public GeoHashGrid.Bucket getBucketByKey(GeoPoint key) {
return getBucketByKey(key.geohash());
}
@Override
public InternalGeoHashGrid reduce(ReduceContext reduceContext) {
List<InternalAggregation> aggregations = reduceContext.aggregations();
if (aggregations.size() == 1) {
InternalGeoHashGrid grid = (InternalGeoHashGrid) aggregations.get(0);
grid.reduceAndTrimBuckets(reduceContext.cacheRecycler());
return grid;
}
InternalGeoHashGrid reduced = null;
Recycler.V<LongObjectOpenHashMap<List<Bucket>>> buckets = null;
for (InternalAggregation aggregation : aggregations) {
InternalGeoHashGrid grid = (InternalGeoHashGrid) aggregation;
if (reduced == null) {
reduced = grid;
}
if (buckets == null) {
buckets = reduceContext.cacheRecycler().longObjectMap(grid.buckets.size());
}
for (Bucket bucket : grid.buckets) {
List<Bucket> existingBuckets = buckets.v().get(bucket.geohashAsLong);
if (existingBuckets == null) {
existingBuckets = new ArrayList<Bucket>(aggregations.size());
buckets.v().put(bucket.geohashAsLong, existingBuckets);
}
existingBuckets.add(bucket);
}
}
if (reduced == null) {
// there are only unmapped terms, so we just return the first one (no need to reduce)
return (InternalGeoHashGrid) aggregations.get(0);
}
// TODO: would it be better to sort the backing array buffer of the hppc map directly instead of using a PQ?
final int size = Math.min(requiredSize, buckets.v().size());
BucketPriorityQueue ordered = new BucketPriorityQueue(size);
Object[] internalBuckets = buckets.v().values;
boolean[] states = buckets.v().allocated;
for (int i = 0; i < states.length; i++) {
if (states[i]) {
List<Bucket> sameCellBuckets = (List<Bucket>) internalBuckets[i];
ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext.cacheRecycler()));
}
}
buckets.release();
Bucket[] list = new Bucket[ordered.size()];
for (int i = ordered.size() - 1; i >= 0; i--) {
list[i] = ordered.pop();
}
reduced.buckets = Arrays.asList(list);
return reduced;
}
protected void reduceAndTrimBuckets(CacheRecycler cacheRecycler) {
if (requiredSize > buckets.size()) { // nothing to trim
for (Bucket bucket : buckets) {
bucket.aggregations.reduce(cacheRecycler);
}
return;
}
List<Bucket> trimmedBuckets = new ArrayList<Bucket>(requiredSize);
for (Bucket bucket : buckets) {
if (trimmedBuckets.size() >= requiredSize) {
break;
}
bucket.aggregations.reduce(cacheRecycler);
trimmedBuckets.add(bucket);
}
buckets = trimmedBuckets;
}
@Override
public void readFrom(StreamInput in) throws IOException {
this.name = in.readString();
this.requiredSize = in.readVInt();
int size = in.readVInt();
List<Bucket> buckets = new ArrayList<Bucket>(size);
for (int i = 0; i < size; i++) {
buckets.add(new Bucket(in.readLong(), in.readVLong(), InternalAggregations.readAggregations(in)));
}
this.buckets = buckets;
this.bucketMap = null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeVInt(requiredSize);
out.writeVInt(buckets.size());
for (Bucket bucket : buckets) {
out.writeLong(bucket.geohashAsLong);
out.writeVLong(bucket.getDocCount());
((InternalAggregations) bucket.getAggregations()).writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startArray(CommonFields.BUCKETS);
for (Bucket bucket : buckets) {
builder.startObject();
builder.field(CommonFields.KEY, bucket.getKeyAsText());
builder.field(CommonFields.DOC_COUNT, bucket.getDocCount());
((InternalAggregations) bucket.getAggregations()).toXContentInternal(builder, params);
builder.endObject();
}
builder.endArray();
builder.endObject();
return builder;
}
static class BucketPriorityQueue extends PriorityQueue<Bucket> {
public BucketPriorityQueue(int size) {
super(size);
}
@Override
protected boolean lessThan(Bucket o1, Bucket o2) {
long i = o2.getDocCount() - o1.getDocCount();
if (i == 0) {
i = o2.compareTo(o1);
if (i == 0) {
i = System.identityHashCode(o2) - System.identityHashCode(o1);
}
}
return i > 0;
}
}
} | 1no label
| src_main_java_org_elasticsearch_search_aggregations_bucket_geogrid_InternalGeoHashGrid.java |
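For readers unfamiliar with the reduce step above: each shard returns its own bucket list, and the coordinating node groups buckets by their long-encoded geohash, merges doc counts per cell, and keeps only the top requiredSize cells. A minimal self-contained sketch of that merge, using plain JDK collections and a simplified Bucket rather than the Elasticsearch types:

import java.util.*;

class GeoCellMergeSketch {
    static final class Bucket {
        final long geohashAsLong;
        long docCount;
        Bucket(long geohash, long docCount) { this.geohashAsLong = geohash; this.docCount = docCount; }
    }

    // Merge per-shard bucket lists by cell, then keep the top-N cells by doc count.
    static List<Bucket> reduce(List<List<Bucket>> shardResults, int requiredSize) {
        Map<Long, Bucket> merged = new HashMap<>();
        for (List<Bucket> shard : shardResults) {
            for (Bucket b : shard) {
                merged.merge(b.geohashAsLong, new Bucket(b.geohashAsLong, b.docCount),
                        (a, c) -> { a.docCount += c.docCount; return a; });
            }
        }
        // A smallest-on-top queue of bounded size plays the role of BucketPriorityQueue.
        PriorityQueue<Bucket> topN = new PriorityQueue<>(Comparator.comparingLong((Bucket b) -> b.docCount));
        for (Bucket b : merged.values()) {
            topN.offer(b);
            if (topN.size() > requiredSize) {
                topN.poll(); // evict the currently smallest cell
            }
        }
        // Pop in ascending order and fill the result back-to-front, as reduce() does.
        Bucket[] ordered = new Bucket[topN.size()];
        for (int i = ordered.length - 1; i >= 0; i--) {
            ordered[i] = topN.poll();
        }
        return Arrays.asList(ordered);
    }
}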
106 | public static class Order {
public static final int Basic = 1000;
public static final int Page = 2000;
public static final int Rules = 1000;
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageImpl.java |
30 | final class ValueInverseIterator extends AbstractEntryIterator<K, V, V> {
ValueInverseIterator(final OMVRBTreeEntry<K, V> last) {
super(last);
// we have to set ourselves after current index to make iterator work
if (last != null) {
pageIndex = last.getTree().getPageIndex() + 1;
}
}
@Override
public boolean hasNext() {
return hasPrevious();
}
@Override
public V next() {
return prevValue();
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java |
1,370 | public static enum OPERATION {
PUT, REMOVE, CLEAR
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionIndexChanges.java |
2,119 | public class Loggers {
private final static String commonPrefix = System.getProperty("es.logger.prefix", "org.elasticsearch.");
public static final String SPACE = " ";
private static boolean consoleLoggingEnabled = true;
public static void disableConsoleLogging() {
consoleLoggingEnabled = false;
}
public static void enableConsoleLogging() {
consoleLoggingEnabled = true;
}
public static boolean consoleLoggingEnabled() {
return consoleLoggingEnabled;
}
public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) {
return getLogger(clazz, settings, shardId.index(), Lists.asList(Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}
public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) {
return getLogger(clazz, settings, Lists.asList(SPACE, index.name(), prefixes).toArray(new String[0]));
}
public static ESLogger getLogger(Class clazz, Settings settings, RiverName riverName, String... prefixes) {
List<String> l = Lists.newArrayList();
l.add(SPACE);
l.add(riverName.type());
l.add(riverName.name());
l.addAll(Lists.newArrayList(prefixes));
return getLogger(clazz, settings, l.toArray(new String[l.size()]));
}
public static ESLogger getLogger(Class clazz, Settings settings, String... prefixes) {
return getLogger(buildClassLoggerName(clazz), settings, prefixes);
}
public static ESLogger getLogger(String loggerName, Settings settings, String... prefixes) {
List<String> prefixesList = newArrayList();
if (settings.getAsBoolean("logger.logHostAddress", false)) {
try {
prefixesList.add(InetAddress.getLocalHost().getHostAddress());
} catch (UnknownHostException e) {
// ignore
}
}
if (settings.getAsBoolean("logger.logHostName", false)) {
try {
prefixesList.add(InetAddress.getLocalHost().getHostName());
} catch (UnknownHostException e) {
// ignore
}
}
String name = settings.get("name");
if (name != null) {
prefixesList.add(name);
}
if (prefixes != null && prefixes.length > 0) {
prefixesList.addAll(asList(prefixes));
}
return getLogger(getLoggerName(loggerName), prefixesList.toArray(new String[prefixesList.size()]));
}
public static ESLogger getLogger(ESLogger parentLogger, String s) {
return ESLoggerFactory.getLogger(parentLogger.getPrefix(), getLoggerName(parentLogger.getName() + s));
}
public static ESLogger getLogger(String s) {
return ESLoggerFactory.getLogger(getLoggerName(s));
}
public static ESLogger getLogger(Class clazz) {
return ESLoggerFactory.getLogger(getLoggerName(buildClassLoggerName(clazz)));
}
public static ESLogger getLogger(Class clazz, String... prefixes) {
return getLogger(buildClassLoggerName(clazz), prefixes);
}
public static ESLogger getLogger(String name, String... prefixes) {
String prefix = null;
if (prefixes != null && prefixes.length > 0) {
StringBuilder sb = new StringBuilder();
for (String prefixX : prefixes) {
if (prefixX != null) {
if (prefixX.equals(SPACE)) {
sb.append(" ");
} else {
sb.append("[").append(prefixX).append("]");
}
}
}
if (sb.length() > 0) {
sb.append(" ");
prefix = sb.toString();
}
}
return ESLoggerFactory.getLogger(prefix, getLoggerName(name));
}
private static String buildClassLoggerName(Class clazz) {
String name = clazz.getName();
if (name.startsWith("org.elasticsearch.")) {
name = Classes.getPackageName(clazz);
}
return name;
}
private static String getLoggerName(String name) {
if (name.startsWith("org.elasticsearch.")) {
name = name.substring("org.elasticsearch.".length());
}
return commonPrefix + name;
}
} | 0true
| src_main_java_org_elasticsearch_common_logging_Loggers.java |
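The prefix-building logic in getLogger(String, String...) is worth illustrating: each non-null prefix is wrapped in brackets, the SPACE sentinel inserts a bare space, and a trailing space is appended. A standalone sketch of just that formatting rule, assuming nothing beyond the code above:

public class LoggerPrefixSketch {
    static final String SPACE = " ";

    static String buildPrefix(String... prefixes) {
        if (prefixes == null || prefixes.length == 0) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        for (String p : prefixes) {
            if (p == null) continue;                       // null entries are skipped
            sb.append(p.equals(SPACE) ? " " : "[" + p + "]");
        }
        return sb.length() > 0 ? sb.append(' ').toString() : null;
    }

    public static void main(String[] args) {
        // Mirrors the index/shard case: prints " [my_index][3] " (with quotes)
        System.out.println('"' + buildPrefix(SPACE, "my_index", "3") + '"');
    }
}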
623 | public class SandBoxContext {
private static final ThreadLocal<SandBoxContext> SANDBOXCONTEXT = ThreadLocalManager.createThreadLocal(SandBoxContext.class);
public static SandBoxContext getSandBoxContext() {
return SANDBOXCONTEXT.get();
}
public static void setSandBoxContext(SandBoxContext sandBoxContext) {
SANDBOXCONTEXT.set(sandBoxContext);
}
protected Long sandBoxId;
protected Boolean previewMode = false;
/**
* @return the sandBoxName
*/
public Long getSandBoxId() {
return sandBoxId;
}
/**
* @param sandBoxId the sandBoxName to set
*/
public void setSandBoxId(Long sandBoxId) {
this.sandBoxId = sandBoxId;
}
public Boolean getPreviewMode() {
return previewMode;
}
public void setPreviewMode(Boolean previewMode) {
this.previewMode = previewMode;
}
public SandBoxContext clone() {
SandBoxContext myContext = new SandBoxContext();
myContext.setSandBoxId(getSandBoxId());
myContext.setPreviewMode(getPreviewMode());
return myContext;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_web_SandBoxContext.java |
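SandBoxContext is a typical per-request ThreadLocal holder: something at the edge of the request sets it, downstream code reads it, and it must be cleared afterwards so pooled threads can be reused safely. A hedged sketch of that life cycle; the filter shape is an assumption for illustration, not Broadleaf code:

// Hypothetical request handler illustrating the set/clear discipline for a
// ThreadLocal-backed context such as SandBoxContext.
public class SandBoxContextFilterSketch {
    private static final ThreadLocal<Long> SANDBOX_ID = new ThreadLocal<>();

    void handleRequest(long sandBoxId, Runnable request) {
        SANDBOX_ID.set(sandBoxId);          // bind context to the current thread
        try {
            request.run();                  // downstream code reads SANDBOX_ID.get()
        } finally {
            SANDBOX_ID.remove();            // always clear: threads are pooled
        }
    }
}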
314 | public class TestStoreRecoverer
{
@Test
public void shouldNotWantToRecoverIntactStore() throws Exception
{
        File store = createIntactStore();
StoreRecoverer recoverer = new StoreRecoverer( fileSystem );
assertThat( recoverer.recoveryNeededAt( store, new HashMap<String, String>() ), is( false ) );
}
@Test
public void shouldWantToRecoverBrokenStore() throws Exception
{
File store = createIntactStore();
fileSystem.deleteFile( new File( store, "nioneo_logical.log.active" ) );
StoreRecoverer recoverer = new StoreRecoverer( fileSystem );
assertThat( recoverer.recoveryNeededAt( store, new HashMap<String, String>() ), is( true ) );
}
@Test
public void shouldBeAbleToRecoverBrokenStore() throws Exception
{
File store = createIntactStore();
fileSystem.deleteFile( new File( store, "nioneo_logical.log.active" ) );
StoreRecoverer recoverer = new StoreRecoverer( fileSystem );
assertThat( recoverer.recoveryNeededAt( store, new HashMap<String, String>() ), is( true ) );
// Don't call recoverer.recover, because currently it's hard coded to start an embedded db
new TestGraphDatabaseFactory().setFileSystem( fileSystem ).newImpermanentDatabase( store.getPath() ).shutdown();
assertThat( recoverer.recoveryNeededAt( store, new HashMap<String, String>() ), is( false ) );
}
private File createIntactStore() throws IOException
{
File storeDir = new File( "dir" );
new TestGraphDatabaseFactory().setFileSystem( fileSystem ).newImpermanentDatabase( storeDir.getPath() ).shutdown();
return storeDir;
}
private final EphemeralFileSystemAbstraction fileSystem = new EphemeralFileSystemAbstraction();
} | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_recovery_TestStoreRecoverer.java |
1,587 | pMap = BLCMapUtils.keyedMap(properties, new TypedClosure<String, Property>() {
@Override
public String getKey(Property value) {
return value.getName();
}
}); | 0true
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_Entity.java |
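BLCMapUtils.keyedMap evidently turns a collection into a map keyed by whatever the TypedClosure extracts, here the property name. A generic sketch of such a helper; the body is inferred from the call site above, not copied from Broadleaf:

import java.util.*;
import java.util.function.Function;

public final class KeyedMapSketch {
    // Index a collection by a derived key; later duplicates overwrite earlier ones.
    static <K, V> Map<K, V> keyedMap(Collection<V> values, Function<V, K> keyOf) {
        Map<K, V> map = new LinkedHashMap<>(values.size());
        for (V value : values) {
            map.put(keyOf.apply(value), value);
        }
        return map;
    }

    public static void main(String[] args) {
        Map<Integer, String> byLength = keyedMap(Arrays.asList("a", "bb", "cc"), String::length);
        System.out.println(byLength); // {1=a, 2=cc} -- "cc" replaced "bb"
    }
}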
248 | @Test
public class ODefaultCacheTest {
public void enabledAfterStartup() {
// Given cache created
// And not started
// And not enabled
OCache sut = newCache();
// When started
sut.startup();
// Then it should be enabled
assertTrue(sut.isEnabled());
}
public void disabledAfterShutdown() {
// Given running cache
OCache sut = runningCache();
// When started
sut.shutdown();
// Then it should be disabled
assertFalse(sut.isEnabled());
}
public void disablesOnlyIfWasEnabled() {
// Given enabled cache
OCache sut = enabledCache();
// When disabled more than once
boolean disableConfirmed = sut.disable();
boolean disableNotConfirmed = sut.disable();
// Then should return confirmation of switching from enabled to disabled state for first time
// And no confirmation on subsequent disables
assertTrue(disableConfirmed);
assertFalse(disableNotConfirmed);
}
public void enablesOnlyIfWasDisabled() {
// Given disabled cache
OCache sut = newCache();
// When enabled more than once
boolean enableConfirmed = sut.enable();
boolean enableNotConfirmed = sut.enable();
// Then should return confirmation of switching from disabled to enabled state for first time
// And no confirmation on subsequent enables
assertTrue(enableConfirmed);
assertFalse(enableNotConfirmed);
}
public void doesNothingWhileDisabled() {
// Given cache created
// And not started
// And not enabled
OCache sut = new ODefaultCache(null, 1);
// When any operation called on it
ODocument record = new ODocument();
ORID recordId = record.getIdentity();
sut.put(record);
ORecordInternal<?> recordGot = sut.get(recordId);
int cacheSizeAfterPut = sut.size();
ORecordInternal<?> recordRemoved = sut.remove(recordId);
int cacheSizeAfterRemove = sut.size();
// Then it has no effect on cache's state
assertEquals(sut.isEnabled(), false, "Cache should be disabled at creation");
assertEquals(recordGot, null, "Cache should return empty records while disabled");
assertEquals(recordRemoved, null, "Cache should return empty records while disabled");
assertEquals(cacheSizeAfterPut, 0, "Cache should ignore insert while disabled");
assertEquals(cacheSizeAfterRemove, cacheSizeAfterPut, "Cache should ignore remove while disabled");
}
public void hasZeroSizeAfterClear() {
// Given enabled non-empty cache
OCache sut = enabledNonEmptyCache();
// When cleared
sut.clear();
// Then size of cache should be zero
assertEquals(sut.size(), 0, "Cache was not cleaned up");
}
public void providesAccessToAllKeysInCache() {
// Given enabled non-empty cache
OCache sut = enabledNonEmptyCache();
// When asked for keys
Collection<ORID> keys = sut.keys();
// Then keys count should be same as size of cache
// And records available for keys
assertEquals(keys.size(), sut.size(), "Cache provided not all keys?");
for (ORID key : keys) {
assertNotNull(sut.get(key));
}
}
public void storesRecordsUsingTheirIdentity() {
// Given an enabled cache
OCache sut = enabledCache();
// When new record put into
ORecordId id = new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1));
ODocument record = new ODocument(id);
sut.put(record);
// Then it can be retrieved later by it's id
assertEquals(sut.get(id), record);
}
public void storesRecordsOnlyOnceForEveryIdentity() {
// Given an enabled cache
OCache sut = enabledCache();
final int initialSize = sut.size();
// When some records with same identity put in several times
ODocument first = new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)));
ODocument last = new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)));
sut.put(first);
sut.put(last);
// Then cache ends up storing only one item
assertEquals(sut.size(), initialSize + 1);
}
public void removesOnlyOnce() {
// Given an enabled cache with records in it
OCache sut = enabledCache();
ORecordId id = new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1));
ODocument record = new ODocument(id);
sut.put(record);
sut.remove(id);
// When removing already removed record
ORecordInternal<?> removedSecond = sut.remove(id);
// Then empty result returned
assertNull(removedSecond);
}
public void storesNoMoreElementsThanSpecifiedLimit() {
// Given an enabled cache
OCache sut = enabledCache();
// When stored more distinct elements than cache limit allows
for (int i = sut.limit() + 2; i > 0; i--)
sut.put(new ODocument(new ORecordId(i, OClusterPositionFactory.INSTANCE.valueOf(i))));
// Then size of cache should be exactly as it's limit
assertEquals(sut.size(), sut.limit(), "Cache doesn't meet limit requirements");
}
private ODefaultCache newCache() {
return new ODefaultCache(null, 5);
}
private OCache enabledCache() {
ODefaultCache cache = newCache();
cache.enable();
return cache;
}
private OCache enabledNonEmptyCache() {
OCache cache = enabledCache();
cache.put(new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1))));
cache.put(new ODocument(new ORecordId(2, OClusterPositionFactory.INSTANCE.valueOf(2))));
return cache;
}
private OCache runningCache() {
ODefaultCache cache = newCache();
cache.startup();
return cache;
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_cache_ODefaultCacheTest.java |
727 | public class TxCollectionItem extends CollectionItem {
String transactionId;
boolean removeOperation;
public TxCollectionItem() {
}
public TxCollectionItem(CollectionItem item) {
super(item.itemId, item.value);
}
public TxCollectionItem(long itemId, Data value, String transactionId, boolean removeOperation) {
super(itemId, value);
this.transactionId = transactionId;
this.removeOperation = removeOperation;
}
public String getTransactionId() {
return transactionId;
}
public boolean isRemoveOperation() {
return removeOperation;
}
public TxCollectionItem setTransactionId(String transactionId) {
this.transactionId = transactionId;
return this;
}
public TxCollectionItem setRemoveOperation(boolean removeOperation) {
this.removeOperation = removeOperation;
return this;
}
@Override
public int getId() {
return CollectionDataSerializerHook.TX_COLLECTION_ITEM;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeUTF(transactionId);
out.writeBoolean(removeOperation);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
transactionId = in.readUTF();
removeOperation = in.readBoolean();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof TxCollectionItem)) {
return false;
}
if (!super.equals(o)) {
return false;
}
TxCollectionItem that = (TxCollectionItem) o;
if (removeOperation != that.removeOperation) {
return false;
}
if (!transactionId.equals(that.transactionId)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + transactionId.hashCode();
result = 31 * result + (removeOperation ? 1 : 0);
return result;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_TxCollectionItem.java |
1,213 | public interface ORecordCallback<T> {
public enum OPERATION {
CREATE, READ, UPDATE, DELETE
}
public void call(ORecordId iRID, T iParameter);
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_ORecordCallback.java |
405 | class DumMigrationListener implements MigrationListener {
@Override
public void migrationStarted(MigrationEvent migrationEvent) {
}
@Override
public void migrationCompleted(MigrationEvent migrationEvent) {
}
@Override
public void migrationFailed(MigrationEvent migrationEvent) {
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_partitionservice_PartitionServiceProxyTest.java |
907 | public class ODocumentComparator implements Comparator<OIdentifiable> {
private List<OPair<String, String>> orderCriteria;
public ODocumentComparator(final List<OPair<String, String>> iOrderCriteria) {
this.orderCriteria = iOrderCriteria;
}
@SuppressWarnings("unchecked")
public int compare(final OIdentifiable iDoc1, final OIdentifiable iDoc2) {
if (iDoc1 != null && iDoc1.equals(iDoc2))
return 0;
Object fieldValue1;
Object fieldValue2;
int partialResult = 0;
for (OPair<String, String> field : orderCriteria) {
final String fieldName = field.getKey();
final String ordering = field.getValue();
fieldValue1 = ((ODocument) iDoc1.getRecord()).field(fieldName);
fieldValue2 = ((ODocument) iDoc2.getRecord()).field(fieldName);
if (fieldValue1 == null && fieldValue2 == null) {
continue;
}
if (fieldValue1 == null)
return factor(-1, ordering);
if (fieldValue2 == null)
return factor(1, ordering);
if (!(fieldValue1 instanceof Comparable<?>))
throw new IllegalArgumentException("Cannot sort documents because the field '" + fieldName + "' is not comparable");
partialResult = ((Comparable<Object>) fieldValue1).compareTo(fieldValue2);
partialResult = factor(partialResult, ordering);
if (partialResult != 0)
break;
// CONTINUE WITH THE NEXT FIELD
}
return partialResult;
}
private int factor(final int partialResult, final String iOrdering) {
if (iOrdering.equals(OCommandExecutorSQLSelect.KEYWORD_DESC))
// INVERT THE ORDERING
return partialResult * -1;
return partialResult;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_record_impl_ODocumentComparator.java |
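Used with Collections.sort, the comparator above applies each ORDER BY criterion in turn, inverting the sign for descending fields and treating nulls as smaller than any value. A plain-JDK sketch of the same multi-key pattern, simplified to Map-based "documents":

import java.util.*;

public class MultiKeySortSketch {
    // Each criterion is (fieldName, "ASC" | "DESC"), applied in declaration order.
    @SuppressWarnings("unchecked")
    static int compare(Map<String, Object> a, Map<String, Object> b,
                       List<Map.Entry<String, String>> criteria) {
        for (Map.Entry<String, String> c : criteria) {
            Object v1 = a.get(c.getKey());
            Object v2 = b.get(c.getKey());
            int factor = "DESC".equals(c.getValue()) ? -1 : 1;
            if (v1 == null && v2 == null) continue;
            if (v1 == null) return -factor;   // null sorts first when ascending
            if (v2 == null) return factor;
            int r = ((Comparable<Object>) v1).compareTo(v2) * factor;
            if (r != 0) return r;             // otherwise fall through to the next key
        }
        return 0;
    }
}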
1,183 | public interface MapLoader<K, V> {
/**
     * Loads the value of a given key. If the distributed map doesn't contain the value
     * for the given key, Hazelcast will call the implementation's load(key) method
     * to obtain the value. The implementation can use any means of loading the given key,
     * such as an O/R mapping tool, simple SQL, or reading a file.
     *
     * @param key the key whose value should be loaded
     * @return value of the key
     */
V load(K key);
/**
* Loads given keys. This is batch load operation so that implementation can
* optimize the multiple loads.
*
* @param keys keys of the values entries to load
* @return map of loaded key-value pairs.
*/
Map<K, V> loadAll(Collection<K> keys);
/**
* Loads all of the keys from the store.
*
* @return all the keys
*/
Set<K> loadAllKeys();
} | 0true
| hazelcast_src_main_java_com_hazelcast_core_MapLoader.java |
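A minimal in-memory implementation makes the contract concrete; a real loader would typically hit a database instead. This sketch is an illustration, not Hazelcast code, and the implements clause is omitted so it compiles standalone:

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative MapLoader<Long, String>-shaped loader backed by an in-memory "database".
public class InMemoryMapLoaderSketch {
    private final Map<Long, String> database = new ConcurrentHashMap<>();

    public String load(Long key) {
        return database.get(key);                 // called on a cache miss
    }

    public Map<Long, String> loadAll(Collection<Long> keys) {
        Map<Long, String> result = new HashMap<>();
        for (Long key : keys) {                   // batch variant: one pass, no N+1 lookups
            String value = database.get(key);
            if (value != null) {
                result.put(key, value);
            }
        }
        return result;
    }

    public Set<Long> loadAllKeys() {
        return new HashSet<>(database.keySet());  // drives initial eager loading
    }
}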
2,773 | public class AlreadyExpiredException extends ElasticsearchException implements IgnoreOnRecoveryEngineException {
private String index;
private String type;
private String id;
private final long timestamp;
private final long ttl;
private final long now;
public AlreadyExpiredException(String index, String type, String id, long timestamp, long ttl, long now) {
super("already expired [" + index + "]/[" + type + "]/[" + id + "] due to expire at [" + (timestamp + ttl) + "] and was processed at [" + now + "]");
this.index = index;
this.type = type;
this.id = id;
this.timestamp = timestamp;
this.ttl = ttl;
this.now = now;
}
public String index() {
return index;
}
public String type() {
return type;
}
public String id() {
return id;
}
public long timestamp() {
return timestamp;
}
public long ttl() {
return ttl;
}
public long now() {
return now;
}
} | 0true
| src_main_java_org_elasticsearch_index_AlreadyExpiredException.java |
3,028 | public static class Helper {
/**
* Looks up and creates {@link DocValuesFormatProvider} for the given name.
* <p>
* The settings for the created {@link DocValuesFormatProvider} is taken from the given index settings.
     * All settings with the {@value DocValuesFormatProvider#DOC_VALUES_FORMAT_SETTINGS_PREFIX} prefix
* and the formats name as the key are passed to the factory.
* </p>
*
* @param indexSettings the index settings to configure the postings format
* @param name the name of the doc values format to lookup
* @param docValuesFormatFactories the factory mapping to lookup the {@link Factory} to create the {@link DocValuesFormatProvider}
* @return a fully configured {@link DocValuesFormatProvider} for the given name.
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if the no {@link DocValuesFormatProvider} for the given name parameter could be found.
*/
public static DocValuesFormatProvider lookup(@IndexSettings Settings indexSettings, String name, Map<String, Factory> docValuesFormatFactories) throws ElasticsearchIllegalArgumentException {
Factory factory = docValuesFormatFactories.get(name);
if (factory == null) {
throw new ElasticsearchIllegalArgumentException("failed to find doc_values_format [" + name + "]");
}
Settings settings = indexSettings.getGroups(DOC_VALUES_FORMAT_SETTINGS_PREFIX).get(name);
if (settings == null) {
settings = ImmutableSettings.Builder.EMPTY_SETTINGS;
}
return factory.create(name, settings);
}
} | 0true
| src_main_java_org_elasticsearch_index_codec_docvaluesformat_DocValuesFormatProvider.java |
526 | public static class CBAuthorisation implements Serializable {
private int amount;
public void setAmount(int amount) {
this.amount = amount;
}
public int getAmount() {
return amount;
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_txn_ClientTxnMapTest.java |
87 | public interface ObjectByObjectToInt<A,B> {int apply(A a, B b); } | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
1,063 | public enum EvictionPolicy {
LRU, LFU, NONE
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_MapConfig.java |
3,500 | public static class BuilderContext {
private final Settings indexSettings;
private final ContentPath contentPath;
public BuilderContext(@Nullable Settings indexSettings, ContentPath contentPath) {
this.contentPath = contentPath;
this.indexSettings = indexSettings;
}
public ContentPath path() {
return this.contentPath;
}
@Nullable
public Settings indexSettings() {
return this.indexSettings;
}
@Nullable
public Version indexCreatedVersion() {
if (indexSettings == null) {
return null;
}
return indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null);
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_Mapper.java |
1,273 | @SuppressWarnings("unchecked")
public class InternalTransportIndicesAdminClient extends AbstractIndicesAdminClient implements IndicesAdminClient {
private final TransportClientNodesService nodesService;
private final ThreadPool threadPool;
private final ImmutableMap<IndicesAction, TransportActionNodeProxy> actions;
@Inject
public InternalTransportIndicesAdminClient(Settings settings, TransportClientNodesService nodesService, TransportService transportService, ThreadPool threadPool,
Map<String, GenericAction> actions) {
this.nodesService = nodesService;
this.threadPool = threadPool;
MapBuilder<IndicesAction, TransportActionNodeProxy> actionsBuilder = new MapBuilder<IndicesAction, TransportActionNodeProxy>();
for (GenericAction action : actions.values()) {
if (action instanceof IndicesAction) {
actionsBuilder.put((IndicesAction) action, new TransportActionNodeProxy(settings, action, transportService));
}
}
this.actions = actionsBuilder.immutableMap();
}
@Override
public ThreadPool threadPool() {
return this.threadPool;
}
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(final IndicesAction<Request, Response, RequestBuilder> action, final Request request) {
final TransportActionNodeProxy<Request, Response> proxy = actions.get(action);
return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<Response>>() {
@Override
public ActionFuture<Response> doWithNode(DiscoveryNode node) throws ElasticsearchException {
return proxy.execute(node, request);
}
});
}
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(final IndicesAction<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener) {
final TransportActionNodeProxy<Request, Response> proxy = actions.get(action);
nodesService.execute(new TransportClientNodesService.NodeListenerCallback<Response>() {
@Override
public void doWithNode(DiscoveryNode node, ActionListener<Response> listener) throws ElasticsearchException {
proxy.execute(node, request, listener);
}
}, listener);
}
} | 1no label
| src_main_java_org_elasticsearch_client_transport_support_InternalTransportIndicesAdminClient.java |
505 | public interface SiteConfigProvider {
public void configSite(Site site);
public void init(Map<String, Object> map);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_site_service_provider_SiteConfigProvider.java |
3,331 | static class LongValues extends DenseLongValues {
private final BigFloatArrayList values;
LongValues(BigFloatArrayList values) {
super(false);
this.values = values;
}
@Override
public long nextValue() {
return (long) values.get(docId);
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_FloatArrayAtomicFieldData.java |
977 | public class IsLockedOperation extends BaseLockOperation {
public IsLockedOperation() {
}
public IsLockedOperation(ObjectNamespace namespace, Data key) {
super(namespace, key, ANY_THREAD);
}
public IsLockedOperation(ObjectNamespace namespace, Data key, long threadId) {
super(namespace, key, threadId);
}
@Override
public int getId() {
return LockDataSerializerHook.IS_LOCKED;
}
@Override
public void run() throws Exception {
LockStoreImpl lockStore = getLockStore();
if (threadId == ANY_THREAD) {
response = lockStore.isLocked(key);
} else {
response = lockStore.isLockedBy(key, getCallerUuid(), threadId);
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_operations_IsLockedOperation.java |
1,346 | public static interface Listener {
void onNodeIndexDeleted(String index, String nodeId);
void onNodeIndexStoreDeleted(String index, String nodeId);
} | 0true
| src_main_java_org_elasticsearch_cluster_action_index_NodeIndexDeletedAction.java |
1,600 | public class PersistencePerspective implements Serializable {
private static final long serialVersionUID = 1L;
protected String[] additionalNonPersistentProperties = new String[]{};
protected ForeignKey[] additionalForeignKeys = new ForeignKey[]{};
protected Map<PersistencePerspectiveItemType, PersistencePerspectiveItem> persistencePerspectiveItems = new HashMap<PersistencePerspectiveItemType, PersistencePerspectiveItem>();
protected OperationTypes operationTypes = new OperationTypes();
protected Boolean populateToOneFields = false;
protected String[] excludeFields = new String[]{};
protected String[] includeFields = new String[]{};
protected String configurationKey;
protected Boolean showArchivedFields = false;
protected Boolean useServerSideInspectionCache = true;
public PersistencePerspective() {
}
public PersistencePerspective(OperationTypes operationTypes, String[] additionalNonPersistentProperties, ForeignKey[] additionalForeignKeys) {
setAdditionalNonPersistentProperties(additionalNonPersistentProperties);
setAdditionalForeignKeys(additionalForeignKeys);
this.operationTypes = operationTypes;
}
public String[] getAdditionalNonPersistentProperties() {
return additionalNonPersistentProperties;
}
public void setAdditionalNonPersistentProperties(String[] additionalNonPersistentProperties) {
this.additionalNonPersistentProperties = additionalNonPersistentProperties;
Arrays.sort(this.additionalNonPersistentProperties);
}
public ForeignKey[] getAdditionalForeignKeys() {
return additionalForeignKeys;
}
public void setAdditionalForeignKeys(ForeignKey[] additionalForeignKeys) {
this.additionalForeignKeys = additionalForeignKeys;
Arrays.sort(this.additionalForeignKeys, new Comparator<ForeignKey>() {
public int compare(ForeignKey o1, ForeignKey o2) {
return o1.getManyToField().compareTo(o2.getManyToField());
}
});
}
public OperationTypes getOperationTypes() {
return operationTypes;
}
public void setOperationTypes(OperationTypes operationTypes) {
this.operationTypes = operationTypes;
}
public void addPersistencePerspectiveItem(PersistencePerspectiveItemType type, PersistencePerspectiveItem item) {
persistencePerspectiveItems.put(type, item);
}
public Map<PersistencePerspectiveItemType, PersistencePerspectiveItem> getPersistencePerspectiveItems() {
return persistencePerspectiveItems;
}
public void setPersistencePerspectiveItems(Map<PersistencePerspectiveItemType, PersistencePerspectiveItem> persistencePerspectiveItems) {
this.persistencePerspectiveItems = persistencePerspectiveItems;
}
/**
* Retrieves whether or not ManyToOne and OneToOne field boundaries
* will be traversed when retrieving and populating entity fields.
* Implementation should use the @AdminPresentationClass annotation
* instead.
*
* @return Whether or not ManyToOne and OneToOne field boundaries will be crossed.
*/
@Deprecated
public Boolean getPopulateToOneFields() {
return populateToOneFields;
}
/**
* Sets whether or not ManyToOne and OneToOne field boundaries
* will be traversed when retrieving and populating entity fields.
* Implementation should use the @AdminPresentationClass annotation
* instead.
*
     * @param populateToOneFields whether or not ManyToOne and OneToOne field boundaries will be crossed.
*/
@Deprecated
public void setPopulateToOneFields(Boolean populateToOneFields) {
this.populateToOneFields = populateToOneFields;
}
/**
* Retrieve the list of fields to exclude from the admin presentation.
* Implementations should use the excluded property of the AdminPresentation
* annotation instead, or use an AdminPresentationOverride if re-enabling a
* Broadleaf field is desired. If multiple datasources point to the same
* entity, but different exclusion behavior is required, a custom persistence
* handler may be employed with different inspect method implementations to
* account for the variations.
*
* @return list of fields to exclude from the admin
*/
@Deprecated
public String[] getExcludeFields() {
return excludeFields;
}
/**
* Set the list of fields to exclude from the admin presentation.
* Implementations should use the excluded property of the AdminPresentation
* annotation instead, or use an AdminPresentationOverride if re-enabling a
* Broadleaf field is desired. If multiple datasources point to the same
* entity, but different exclusion behavior is required, a custom persistence
* handler may be employed with different inspect method implementations to
* account for the variations.
*
* @param excludeManyToOneFields
*/
@Deprecated
public void setExcludeFields(String[] excludeManyToOneFields) {
this.excludeFields = excludeManyToOneFields;
Arrays.sort(this.excludeFields);
}
/**
* Get the list of fields to include in the admin presentation.
* Implementations should use excludeFields instead.
*
* @return list of fields to include in the admin
*/
@Deprecated
public String[] getIncludeFields() {
return includeFields;
}
/**
* Set the list of fields to include in the admin presentation.
* Implementations should use excludeFields instead.
*
* @param includeManyToOneFields
*/
@Deprecated
public void setIncludeFields(String[] includeManyToOneFields) {
this.includeFields = includeManyToOneFields;
Arrays.sort(this.includeFields);
}
public String getConfigurationKey() {
return configurationKey;
}
public void setConfigurationKey(String configurationKey) {
this.configurationKey = configurationKey;
}
public Boolean getShowArchivedFields() {
return showArchivedFields;
}
public void setShowArchivedFields(Boolean showArchivedFields) {
this.showArchivedFields = showArchivedFields;
}
public Boolean getUseServerSideInspectionCache() {
return useServerSideInspectionCache;
}
public void setUseServerSideInspectionCache(Boolean useServerSideInspectionCache) {
this.useServerSideInspectionCache = useServerSideInspectionCache;
}
public PersistencePerspective clonePersistencePerspective() {
PersistencePerspective persistencePerspective = new PersistencePerspective();
persistencePerspective.operationTypes = operationTypes.cloneOperationTypes();
if (additionalNonPersistentProperties != null) {
persistencePerspective.additionalNonPersistentProperties = new String[additionalNonPersistentProperties.length];
System.arraycopy(additionalNonPersistentProperties, 0, persistencePerspective.additionalNonPersistentProperties, 0, additionalNonPersistentProperties.length);
}
if (additionalForeignKeys != null) {
persistencePerspective.additionalForeignKeys = new ForeignKey[additionalForeignKeys.length];
for (int j=0; j<additionalForeignKeys.length;j++){
persistencePerspective.additionalForeignKeys[j] = additionalForeignKeys[j].cloneForeignKey();
}
}
if (this.persistencePerspectiveItems != null) {
Map<PersistencePerspectiveItemType, PersistencePerspectiveItem> persistencePerspectiveItems = new HashMap<PersistencePerspectiveItemType, PersistencePerspectiveItem>(this.persistencePerspectiveItems.size());
for (Map.Entry<PersistencePerspectiveItemType, PersistencePerspectiveItem> entry : this.persistencePerspectiveItems.entrySet()) {
persistencePerspectiveItems.put(entry.getKey(), entry.getValue().clonePersistencePerspectiveItem());
}
persistencePerspective.persistencePerspectiveItems = persistencePerspectiveItems;
}
persistencePerspective.populateToOneFields = populateToOneFields;
persistencePerspective.configurationKey = configurationKey;
persistencePerspective.showArchivedFields = showArchivedFields;
persistencePerspective.useServerSideInspectionCache = useServerSideInspectionCache;
if (excludeFields != null) {
persistencePerspective.excludeFields = new String[excludeFields.length];
System.arraycopy(excludeFields, 0, persistencePerspective.excludeFields, 0, excludeFields.length);
}
if (includeFields != null) {
persistencePerspective.includeFields = new String[includeFields.length];
System.arraycopy(includeFields, 0, persistencePerspective.includeFields, 0, includeFields.length);
}
return persistencePerspective;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof PersistencePerspective)) return false;
PersistencePerspective that = (PersistencePerspective) o;
if (!Arrays.equals(additionalForeignKeys, that.additionalForeignKeys)) return false;
if (!Arrays.equals(additionalNonPersistentProperties, that.additionalNonPersistentProperties))
return false;
if (configurationKey != null ? !configurationKey.equals(that.configurationKey) : that.configurationKey != null)
return false;
if (!Arrays.equals(excludeFields, that.excludeFields)) return false;
if (!Arrays.equals(includeFields, that.includeFields)) return false;
if (operationTypes != null ? !operationTypes.equals(that.operationTypes) : that.operationTypes != null)
return false;
if (persistencePerspectiveItems != null ? !persistencePerspectiveItems.equals(that.persistencePerspectiveItems) : that.persistencePerspectiveItems != null)
return false;
if (populateToOneFields != null ? !populateToOneFields.equals(that.populateToOneFields) : that.populateToOneFields != null)
return false;
if (showArchivedFields != null ? !showArchivedFields.equals(that.showArchivedFields) : that.showArchivedFields != null)
return false;
if (useServerSideInspectionCache != null ? !useServerSideInspectionCache.equals(that.useServerSideInspectionCache) : that.useServerSideInspectionCache != null)
return false;
return true;
}
@Override
public int hashCode() {
int result = additionalNonPersistentProperties != null ? Arrays.hashCode(additionalNonPersistentProperties) : 0;
result = 31 * result + (additionalForeignKeys != null ? Arrays.hashCode(additionalForeignKeys) : 0);
result = 31 * result + (persistencePerspectiveItems != null ? persistencePerspectiveItems.hashCode() : 0);
result = 31 * result + (operationTypes != null ? operationTypes.hashCode() : 0);
result = 31 * result + (populateToOneFields != null ? populateToOneFields.hashCode() : 0);
result = 31 * result + (excludeFields != null ? Arrays.hashCode(excludeFields) : 0);
result = 31 * result + (includeFields != null ? Arrays.hashCode(includeFields) : 0);
result = 31 * result + (configurationKey != null ? configurationKey.hashCode() : 0);
result = 31 * result + (showArchivedFields != null ? showArchivedFields.hashCode() : 0);
result = 31 * result + (useServerSideInspectionCache != null ? useServerSideInspectionCache.hashCode() : 0);
return result;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_PersistencePerspective.java |
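clonePersistencePerspective copies every array with System.arraycopy rather than assigning references, so mutations on the clone cannot leak back into the original. A two-line demonstration of why that defensive copy matters:

import java.util.Arrays;

public class DefensiveCopySketch {
    public static void main(String[] args) {
        String[] original = {"a", "b"};
        String[] aliased = original;                       // same array object
        String[] copied = new String[original.length];
        System.arraycopy(original, 0, copied, 0, original.length);

        aliased[0] = "mutated";
        copied[1] = "safe";
        System.out.println(Arrays.toString(original));     // [mutated, b]: the alias leaked
        System.out.println(Arrays.toString(copied));       // [a, safe]: the copy is isolated
    }
}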
1,018 | public class ReleaseOperation extends SemaphoreBackupAwareOperation implements Notifier, IdentifiedDataSerializable {
public ReleaseOperation() {
}
public ReleaseOperation(String name, int permitCount) {
super(name, permitCount);
}
@Override
public void run() throws Exception {
Permit permit = getPermit();
permit.release(permitCount, getCallerUuid());
response = true;
}
@Override
public boolean shouldNotify() {
return permitCount > 0;
}
@Override
public WaitNotifyKey getNotifiedKey() {
return new SemaphoreWaitNotifyKey(name, "acquire");
}
@Override
public boolean shouldBackup() {
return permitCount > 0;
}
@Override
public Operation getBackupOperation() {
return new ReleaseBackupOperation(name, permitCount, getCallerUuid());
}
@Override
public int getFactoryId() {
return SemaphoreDataSerializerHook.F_ID;
}
@Override
public int getId() {
return SemaphoreDataSerializerHook.RELEASE_OPERATION;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_semaphore_operations_ReleaseOperation.java |
17 | public interface BiAction<A,B> { void accept(A a, B b); } | 0true
| src_main_java_jsr166e_CompletableFuture.java |
422 | @Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface ConfigurationItem {
/**
* Item name for the error message (could also be a key to a properties file to support localization)
*/
public static String ERROR_MESSAGE = "errorMessage";
/**
* <p>The name of the validation configuration item</p>
*
* @return the config item name
*/
String itemName();
/**
* <p>The value for the validation configuration item</p>
*
* @return the config item value
*/
String itemValue();
} | 0true
| common_src_main_java_org_broadleafcommerce_common_presentation_ConfigurationItem.java |
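Nested configuration annotations like this are typically embedded in an enclosing validation annotation on an entity field. The sketch below uses local stand-in annotation types (ConfigItem, Validation) so it compiles on its own; the names and the reflective read-out are illustrative, not the Broadleaf API:

import java.lang.annotation.*;
import java.lang.reflect.Field;

public class ConfigurationItemUsageSketch {
    @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD)
    @interface ConfigItem { String itemName(); String itemValue(); }

    @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD)
    @interface Validation { ConfigItem[] configurationItems(); }

    @Validation(configurationItems = {
        @ConfigItem(itemName = "regularExpression", itemValue = "[0-9]+"),
        @ConfigItem(itemName = "errorMessage", itemValue = "Value must be numeric")
    })
    String quantity;

    public static void main(String[] args) throws Exception {
        Field f = ConfigurationItemUsageSketch.class.getDeclaredField("quantity");
        for (ConfigItem item : f.getAnnotation(Validation.class).configurationItems()) {
            System.out.println(item.itemName() + " = " + item.itemValue());
        }
    }
}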
2,267 | public static enum StackType {
IPv4, IPv6, Unknown
} | 0true
| src_main_java_org_elasticsearch_common_network_NetworkUtils.java |
1,425 | public class OChannelBinaryProtocol {
// OUTGOING
public static final byte REQUEST_SHUTDOWN = 1;
public static final byte REQUEST_CONNECT = 2;
public static final byte REQUEST_DB_OPEN = 3;
public static final byte REQUEST_DB_CREATE = 4;
public static final byte REQUEST_DB_CLOSE = 5;
public static final byte REQUEST_DB_EXIST = 6;
public static final byte REQUEST_DB_DROP = 7;
public static final byte REQUEST_DB_SIZE = 8;
public static final byte REQUEST_DB_COUNTRECORDS = 9;
public static final byte REQUEST_DATACLUSTER_ADD = 10;
public static final byte REQUEST_DATACLUSTER_DROP = 11;
public static final byte REQUEST_DATACLUSTER_COUNT = 12;
public static final byte REQUEST_DATACLUSTER_DATARANGE = 13;
public static final byte REQUEST_DATACLUSTER_COPY = 14;
public static final byte REQUEST_DATACLUSTER_LH_CLUSTER_IS_USED = 16; // since 1.2.0
public static final byte REQUEST_DATASEGMENT_ADD = 20;
public static final byte REQUEST_DATASEGMENT_DROP = 21;
public static final byte REQUEST_RECORD_METADATA = 29; // since 1.4.0
public static final byte REQUEST_RECORD_LOAD = 30;
public static final byte REQUEST_RECORD_CREATE = 31;
public static final byte REQUEST_RECORD_UPDATE = 32;
public static final byte REQUEST_RECORD_DELETE = 33;
public static final byte REQUEST_RECORD_COPY = 34;
public static final byte REQUEST_POSITIONS_HIGHER = 36; // since 1.3.0
public static final byte REQUEST_POSITIONS_LOWER = 37; // since 1.3.0
public static final byte REQUEST_RECORD_CLEAN_OUT = 38; // since 1.3.0
public static final byte REQUEST_POSITIONS_FLOOR = 39; // since 1.3.0
public static final byte REQUEST_COUNT = 40; // DEPRECATED: USE REQUEST_DATACLUSTER_COUNT
public static final byte REQUEST_COMMAND = 41;
public static final byte REQUEST_POSITIONS_CEILING = 42; // since 1.3.0
public static final byte REQUEST_TX_COMMIT = 60;
public static final byte REQUEST_CONFIG_GET = 70;
public static final byte REQUEST_CONFIG_SET = 71;
public static final byte REQUEST_CONFIG_LIST = 72;
public static final byte REQUEST_DB_RELOAD = 73; // SINCE 1.0rc4
public static final byte REQUEST_DB_LIST = 74; // SINCE 1.0rc6
public static final byte REQUEST_PUSH_RECORD = 79;
public static final byte REQUEST_PUSH_DISTRIB_CONFIG = 80;
// DISTRIBUTED
public static final byte REQUEST_DB_COPY = 90; // SINCE 1.0rc8
public static final byte REQUEST_REPLICATION = 91; // SINCE 1.0
public static final byte REQUEST_CLUSTER = 92; // SINCE 1.0
public static final byte REQUEST_DB_TRANSFER = 93; // SINCE 1.0.2
// Lock + sync
public static final byte REQUEST_DB_FREEZE = 94; // SINCE 1.1.0
public static final byte REQUEST_DB_RELEASE = 95; // SINCE 1.1.0
public static final byte REQUEST_DATACLUSTER_FREEZE = 96;
public static final byte REQUEST_DATACLUSTER_RELEASE = 97;
// INCOMING
public static final byte RESPONSE_STATUS_OK = 0;
public static final byte RESPONSE_STATUS_ERROR = 1;
public static final byte PUSH_DATA = 3;
// CONSTANTS
public static final short RECORD_NULL = -2;
public static final short RECORD_RID = -3;
// FOR MORE INFO: https://github.com/orientechnologies/orientdb/wiki/Network-Binary-Protocol#wiki-Compatibility
public static final int CURRENT_PROTOCOL_VERSION = 19; // SENT AS SHORT AS FIRST PACKET AFTER SOCKET CONNECTION
public static OIdentifiable readIdentifiable(final OChannelBinaryAsynchClient network) throws IOException {
final int classId = network.readShort();
if (classId == RECORD_NULL)
return null;
if (classId == RECORD_RID) {
return network.readRID();
} else {
final ORecordInternal<?> record = Orient.instance().getRecordFactoryManager().newInstance(network.readByte());
final ORecordId rid = network.readRID();
final ORecordVersion version = network.readVersion();
final byte[] content = network.readBytes();
record.fill(rid, version, content, false);
return record;
}
}
} | 0true
| enterprise_src_main_java_com_orientechnologies_orient_enterprise_channel_binary_OChannelBinaryProtocol.java |
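readIdentifiable decodes a small tagged union: a short class id of RECORD_NULL means no record, RECORD_RID means a bare record id follows, and anything else means a full record (type byte, rid, version, content bytes). A simplified decoder over DataInputStream showing the same dispatch; the payload readers are stubs, not the real OrientDB wire readers:

import java.io.DataInputStream;
import java.io.IOException;

// Simplified tagged-union decoder mirroring readIdentifiable's dispatch.
public class RecordWireSketch {
    static final short RECORD_NULL = -2;
    static final short RECORD_RID = -3;

    static Object readIdentifiable(DataInputStream in) throws IOException {
        short classId = in.readShort();
        if (classId == RECORD_NULL) {
            return null;                       // nothing else on the wire
        }
        if (classId == RECORD_RID) {
            return readRid(in);                // bare record id
        }
        byte recordType = in.readByte();       // full record follows
        String rid = readRid(in);
        int version = in.readInt();
        byte[] content = new byte[in.readInt()];
        in.readFully(content);
        return "record " + recordType + " " + rid + " v" + version + " (" + content.length + " bytes)";
    }

    // Stub: the real protocol encodes a rid as a cluster id plus a cluster position.
    private static String readRid(DataInputStream in) throws IOException {
        return "#" + in.readShort() + ":" + in.readLong();
    }
}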
874 | return new DataSerializableFactory() {
@Override
public IdentifiedDataSerializable create(int typeId) {
switch (typeId) {
case AWAIT_OPERATION:
return new AwaitOperation();
case COUNT_DOWN_LATCH_BACKUP_OPERATION:
return new CountDownLatchBackupOperation();
case COUNT_DOWN_LATCH_REPLICATION_OPERATION:
return new CountDownLatchReplicationOperation();
case COUNT_DOWN_OPERATION:
return new CountDownOperation();
case GET_COUNT_OPERATION:
return new GetCountOperation();
case SET_COUNT_OPERATION:
return new SetCountOperation();
default:
return null;
}
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_countdownlatch_CountDownLatchDataSerializerHook.java |
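The switch above is the standard IdentifiedDataSerializable pattern: a constant type id maps to a no-arg constructor so deserialization can avoid reflection. A generic sketch of the same idea using Supplier references; the registry class is an illustration, not the Hazelcast API:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

// Illustrative type-id registry: the role the switch statement plays above.
public class TypeIdFactorySketch {
    private final Map<Integer, Supplier<Object>> constructors = new HashMap<>();

    void register(int typeId, Supplier<Object> constructor) {
        constructors.put(typeId, constructor);
    }

    Object create(int typeId) {
        Supplier<Object> ctor = constructors.get(typeId);
        return ctor == null ? null : ctor.get();   // null mirrors the default branch
    }

    public static void main(String[] args) {
        TypeIdFactorySketch factory = new TypeIdFactorySketch();
        factory.register(1, StringBuilder::new);   // stand-in for the operation classes
        System.out.println(factory.create(1).getClass().getSimpleName()); // StringBuilder
        System.out.println(factory.create(99));                           // null
    }
}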
1,266 | public class PricingProcessContextFactory implements ProcessContextFactory<Order> {
@Override
public ProcessContext createContext(Order seedData) throws WorkflowException {
PricingContext context = new PricingContext();
context.setSeedData(seedData);
return context;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_workflow_PricingProcessContextFactory.java |
1,290 | clusterService.submitStateUpdateTask(Integer.toString(i), new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
return currentState;
}
@Override
public void onFailure(String source, Throwable t) {
fail();
}
}); | 0true
| src_test_java_org_elasticsearch_cluster_ClusterServiceTests.java |
802 | public class OFunctionUtilWrapper {
private OFunction f;
public OFunctionUtilWrapper(final OFunction f) {
this.f = f;
}
public boolean exists(final Object... iValues) {
if (iValues != null)
for (Object o : iValues)
if (o != null && !o.equals("undefined") && !o.equals("null"))
return true;
return false;
}
public boolean containsArray(final Object[] iArray, final Object value) {
if (iArray != null && value != null)
for (Object o : iArray)
if (o != null && o.equals(value))
return true;
return false;
}
public Object value(final Object iValue) {
return iValue != null ? iValue : null;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_function_OFunctionUtilWrapper.java |
1,631 | public class ClusterPropsRequest implements ConsoleRequest {
public ClusterPropsRequest() {
}
@Override
public int getType() {
return ConsoleRequestConstants.REQUEST_TYPE_CLUSTER_PROPERTIES;
}
@Override
public Object readResponse(ObjectDataInput in) throws IOException {
Map<String, String> properties = new LinkedHashMap<String, String>();
int size = in.readInt();
String[] temp;
for (int i = 0; i < size; i++) {
temp = in.readUTF().split(":#");
properties.put(temp[0], temp.length == 1 ? "" : temp[1]);
}
return properties;
}
@Override
public void writeResponse(ManagementCenterService mcs, ObjectDataOutput dos) throws Exception {
Runtime runtime = Runtime.getRuntime();
RuntimeMXBean runtimeMxBean = ManagementFactory.getRuntimeMXBean();
InternalPartitionService partitionService = mcs.getHazelcastInstance().node.getPartitionService();
Map<String, String> properties = new LinkedHashMap<String, String>();
properties.put("hazelcast.cl_version", mcs.getHazelcastInstance().node.getBuildInfo().getVersion());
properties.put("date.cl_startTime", Long.toString(runtimeMxBean.getStartTime()));
properties.put("seconds.cl_upTime", Long.toString(runtimeMxBean.getUptime()));
properties.put("memory.cl_freeMemory", Long.toString(runtime.freeMemory()));
properties.put("memory.cl_totalMemory", Long.toString(runtime.totalMemory()));
properties.put("memory.cl_maxMemory", Long.toString(runtime.maxMemory()));
properties.put("return.hasOngoingMigration", Boolean.toString(partitionService.hasOnGoingMigration()));
properties.put("data.cl_migrationTasksCount", Long.toString(partitionService.getMigrationQueueSize()));
dos.writeInt(properties.size());
for (Map.Entry<String, String> entry : properties.entrySet()) {
dos.writeUTF(entry.getKey() + ":#" + entry.getValue());
}
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_management_request_ClusterPropsRequest.java |
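The properties are flattened as key + ":#" + value per line; on the read side, split(":#") plus the temp.length == 1 check restores entries whose value was empty. A round-trip sketch of just that encoding; note it would break if a key itself contained ":#":

import java.util.LinkedHashMap;
import java.util.Map;

public class PropsCodecSketch {
    static String encode(String key, String value) {
        return key + ":#" + value;
    }

    static void decodeInto(Map<String, String> target, String line) {
        String[] temp = line.split(":#");
        target.put(temp[0], temp.length == 1 ? "" : temp[1]);  // empty value -> one token
    }

    public static void main(String[] args) {
        Map<String, String> decoded = new LinkedHashMap<>();
        decodeInto(decoded, encode("memory.cl_freeMemory", "123456"));
        decodeInto(decoded, encode("data.cl_emptyValue", ""));
        System.out.println(decoded); // {memory.cl_freeMemory=123456, data.cl_emptyValue=}
    }
}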
996 | class OperationTransportHandler extends BaseTransportRequestHandler<Request> {
@Override
public Request newInstance() {
return newRequestInstance();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void messageReceived(final Request request, final TransportChannel channel) throws Exception {
// no need to have a threaded listener since we just send back a response
request.listenerThreaded(false);
// if we have a local operation, execute it on a thread since we don't spawn
request.operationThreaded(true);
execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Throwable e1) {
logger.warn("Failed to send response for " + transportAction, e1);
}
}
});
}
} | 0true
| src_main_java_org_elasticsearch_action_support_replication_TransportShardReplicationOperationAction.java |
936 | public class OfferRuleType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, OfferRuleType> TYPES = new LinkedHashMap<String, OfferRuleType>();
public static final OfferRuleType ORDER = new OfferRuleType("ORDER", "Order");
public static final OfferRuleType FULFILLMENT_GROUP = new OfferRuleType("FULFILLMENT_GROUP", "Fulfillment Group");
public static final OfferRuleType CUSTOMER = new OfferRuleType("CUSTOMER", "Customer");
public static final OfferRuleType TIME = new OfferRuleType("TIME", "Time");
public static final OfferRuleType REQUEST = new OfferRuleType("REQUEST", "Request");
public static OfferRuleType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public OfferRuleType() {
//do nothing
}
public OfferRuleType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OfferRuleType other = (OfferRuleType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_type_OfferRuleType.java |
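OfferRuleType follows Broadleaf's extensible-enum pattern: instances self-register in a static map, so other modules can mint new "enum values" at class-load time and look them up by string. A compact sketch of the registration-and-lookup core, with a public constructor as in the original:

import java.util.LinkedHashMap;
import java.util.Map;

// Extensible "enum": new instances register themselves and are looked up by type string.
public class ExtensibleEnumSketch {
    private static final Map<String, ExtensibleEnumSketch> TYPES = new LinkedHashMap<>();

    public static ExtensibleEnumSketch getInstance(String type) {
        return TYPES.get(type);
    }

    public static final ExtensibleEnumSketch ORDER = new ExtensibleEnumSketch("ORDER", "Order");

    private final String type;
    private final String friendlyType;

    public ExtensibleEnumSketch(String type, String friendlyType) {
        this.type = type;
        this.friendlyType = friendlyType;
        TYPES.putIfAbsent(type, this);        // first registration wins
    }

    public static void main(String[] args) {
        // A third-party module can add a value without touching this class:
        new ExtensibleEnumSketch("LOYALTY", "Loyalty");
        System.out.println(getInstance("LOYALTY").friendlyType); // Loyalty
    }
}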
887 | private class AsyncAction {
private final SearchScrollRequest request;
private final ActionListener<SearchResponse> listener;
private final ParsedScrollId scrollId;
private final DiscoveryNodes nodes;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
final AtomicArray<QuerySearchResult> queryResults;
final AtomicArray<FetchSearchResult> fetchResults;
private volatile ScoreDoc[] sortedShardList;
private final AtomicInteger successfulOps;
private final long startTime = System.currentTimeMillis();
private AsyncAction(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.request = request;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
this.successfulOps = new AtomicInteger(scrollId.getContext().length);
this.queryResults = new AtomicArray<QuerySearchResult>(scrollId.getContext().length);
this.fetchResults = new AtomicArray<FetchSearchResult>(scrollId.getContext().length);
}
protected final ShardSearchFailure[] buildShardFailures() {
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
for (int i = 0; i < failures.length; i++) {
failures[i] = entries.get(i).value;
}
return failures;
}
// we do our best to return the shard failures, but its ok if its not fully concurrently safe
// we simply try and return as much as possible
protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) {
if (shardFailures == null) {
shardFailures = new AtomicArray<ShardSearchFailure>(scrollId.getContext().length);
}
shardFailures.set(shardIndex, failure);
}
public void start() {
if (scrollId.getContext().length == 0) {
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", null));
return;
}
final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length);
int localOperations = 0;
Tuple<String, Long>[] context = scrollId.getContext();
for (int i = 0; i < context.length; i++) {
Tuple<String, Long> target = context[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null) {
if (nodes.localNodeId().equals(node.id())) {
localOperations++;
} else {
executeQueryPhase(i, counter, node, target.v2());
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]");
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
executeFetchPhase();
}
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
Tuple<String, Long> target = context1[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
executeQueryPhase(i, counter, node, target.v2());
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
final Tuple<String, Long> target = context1[i];
final int shardIndex = i;
final DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executeQueryPhase(shardIndex, counter, node, target.v2());
}
});
} else {
executeQueryPhase(shardIndex, counter, node, target.v2());
}
} catch (Throwable t) {
onQueryPhaseFailure(shardIndex, counter, target.v2(), t);
}
}
}
}
}
}
private void executeQueryPhase(final int shardIndex, final AtomicInteger counter, DiscoveryNode node, final long searchId) {
searchService.sendExecuteQuery(node, internalScrollSearchRequest(searchId, request), new SearchServiceListener<QuerySearchResult>() {
@Override
public void onResult(QuerySearchResult result) {
queryResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
executeFetchPhase();
}
}
@Override
public void onFailure(Throwable t) {
onQueryPhaseFailure(shardIndex, counter, searchId, t);
}
});
}
void onQueryPhaseFailure(final int shardIndex, final AtomicInteger counter, final long searchId, Throwable t) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute query phase", t, searchId);
}
addShardFailure(shardIndex, new ShardSearchFailure(t));
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
executeFetchPhase();
}
}
private void executeFetchPhase() {
sortedShardList = searchPhaseController.sortDocs(queryResults);
AtomicArray<IntArrayList> docIdsToLoad = new AtomicArray<IntArrayList>(queryResults.length());
searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
if (docIdsToLoad.asList().isEmpty()) {
finishHim();
}
final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
IntArrayList docIds = entry.value;
final QuerySearchResult querySearchResult = queryResults.get(entry.index);
FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(request, querySearchResult.id(), docIds);
DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId());
searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() {
@Override
public void onResult(FetchSearchResult result) {
result.shardTarget(querySearchResult.shardTarget());
fetchResults.set(entry.index, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to execute fetch phase", t);
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
});
}
}
private void finishHim() {
try {
innerFinishHim();
} catch (Throwable e) {
listener.onFailure(new ReduceSearchPhaseException("fetch", "", e, buildShardFailures()));
}
}
private void innerFinishHim() {
InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
String scrollId = null;
if (request.scroll() != null) {
scrollId = request.scrollId();
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(),
System.currentTimeMillis() - startTime, buildShardFailures()));
}
} | 1no label
| src_main_java_org_elasticsearch_action_search_type_TransportSearchScrollQueryThenFetchAction.java |
1,461 | public class OGraphCommandExecutorSQLFactory implements OCommandExecutorSQLFactory {
private static final Map<String, Class<? extends OCommandExecutorSQLAbstract>> COMMANDS;
static {
// COMMANDS
final Map<String, Class<? extends OCommandExecutorSQLAbstract>> commands = new HashMap<String, Class<? extends OCommandExecutorSQLAbstract>>();
commands.put(OCommandExecutorSQLCreateEdge.NAME, OCommandExecutorSQLCreateEdge.class);
commands.put(OCommandExecutorSQLDeleteEdge.NAME, OCommandExecutorSQLDeleteEdge.class);
commands.put(OCommandExecutorSQLCreateVertex.NAME, OCommandExecutorSQLCreateVertex.class);
commands.put(OCommandExecutorSQLDeleteVertex.NAME, OCommandExecutorSQLDeleteVertex.class);
COMMANDS = Collections.unmodifiableMap(commands);
}
/**
* {@inheritDoc}
*/
public Set<String> getCommandNames() {
return COMMANDS.keySet();
}
/**
* {@inheritDoc}
*/
public OCommandExecutorSQLAbstract createCommand(final String name) throws OCommandExecutionException {
final Class<? extends OCommandExecutorSQLAbstract> clazz = COMMANDS.get(name);
if (clazz == null) {
throw new OCommandExecutionException("Unknowned command name :" + name);
}
try {
return clazz.newInstance();
} catch (Exception e) {
throw new OCommandExecutionException("Error in creation of command " + name
+ "(). Probably there is not an empty constructor or the constructor generates errors", e);
}
}
/**
* Returns an OrientBaseGraph implementation backed by the database bound to the current thread.
*
* @return a non-transactional graph view of the thread-local database
*/
public static OrientBaseGraph getGraph() {
ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
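// Re-wrap a plain record database so the graph API always receives a document database.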
if (!(database instanceof ODatabaseDocumentTx))
database = new ODatabaseDocumentTx((ODatabaseRecordTx) database);
return new OrientGraphNoTx((ODatabaseDocumentTx) database);
}
} | 1no label
| graphdb_src_main_java_com_orientechnologies_orient_graph_sql_OGraphCommandExecutorSQLFactory.java |
1,030 | private class ShardTransportHandler extends BaseTransportRequestHandler<ShardSingleOperationRequest> {
@Override
public ShardSingleOperationRequest newInstance() {
return new ShardSingleOperationRequest();
}
@Override
public String executor() {
return executor;
}
@Override
public void messageReceived(final ShardSingleOperationRequest request, final TransportChannel channel) throws Exception {
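// Execute the operation against the local shard and send the result straight back over the same channel.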
Response response = shardOperation(request.request(), request.shardId());
channel.sendResponse(response);
}
} | 0true
| src_main_java_org_elasticsearch_action_support_single_shard_TransportShardSingleOperationAction.java |
719 | public abstract class CollectionService implements ManagedService, RemoteService,
EventPublishingService<CollectionEvent, ItemListener>, TransactionalService, MigrationAwareService {
protected NodeEngine nodeEngine;
protected CollectionService(NodeEngine nodeEngine) {
this.nodeEngine = nodeEngine;
}
@Override
public void init(NodeEngine nodeEngine, Properties properties) {
}
@Override
public void reset() {
getContainerMap().clear();
}
@Override
public void shutdown(boolean terminate) {
reset();
}
@Override
public void destroyDistributedObject(String name) {
getContainerMap().remove(name);
nodeEngine.getEventService().deregisterAllListeners(getServiceName(), name);
}
public abstract CollectionContainer getOrCreateContainer(String name, boolean backup);
public abstract Map<String, ? extends CollectionContainer> getContainerMap();
public abstract String getServiceName();
@Override
public void dispatchEvent(CollectionEvent event, ItemListener listener) {
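// Translate the internal collection event into a public ItemEvent for the registered listener.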
ItemEvent itemEvent = new ItemEvent(event.name, event.eventType, nodeEngine.toObject(event.data),
nodeEngine.getClusterService().getMember(event.caller));
if (event.eventType.equals(ItemEventType.ADDED)) {
listener.itemAdded(itemEvent);
} else {
listener.itemRemoved(itemEvent);
}
}
@Override
public void rollbackTransaction(String transactionId) {
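// Route a rollback operation to the partition that owns each collection name.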
final Set<String> collectionNames = getContainerMap().keySet();
InternalPartitionService partitionService = nodeEngine.getPartitionService();
OperationService operationService = nodeEngine.getOperationService();
for (String name : collectionNames) {
int partitionId = partitionService.getPartitionId(StringPartitioningStrategy.getPartitionKey(name));
Operation operation = new CollectionTransactionRollbackOperation(name, transactionId)
.setPartitionId(partitionId)
.setService(this)
.setNodeEngine(nodeEngine);
operationService.executeOperation(operation);
}
}
@Override
public void beforeMigration(PartitionMigrationEvent event) {
}
public Map<String, CollectionContainer> getMigrationData(PartitionReplicationEvent event) {
Map<String, CollectionContainer> migrationData = new HashMap<String, CollectionContainer>();
InternalPartitionService partitionService = nodeEngine.getPartitionService();
for (Map.Entry<String, ? extends CollectionContainer> entry : getContainerMap().entrySet()) {
String name = entry.getKey();
int partitionId = partitionService.getPartitionId(StringPartitioningStrategy.getPartitionKey(name));
CollectionContainer container = entry.getValue();
if (partitionId == event.getPartitionId() && container.getConfig().getTotalBackupCount() >= event.getReplicaIndex()) {
migrationData.put(name, container);
}
}
return migrationData;
}
@Override
public void commitMigration(PartitionMigrationEvent event) {
if (event.getMigrationEndpoint() == MigrationEndpoint.SOURCE) {
clearMigrationData(event.getPartitionId());
}
}
@Override
public void rollbackMigration(PartitionMigrationEvent event) {
if (event.getMigrationEndpoint() == MigrationEndpoint.DESTINATION) {
clearMigrationData(event.getPartitionId());
}
}
@Override
public void clearPartitionReplica(int partitionId) {
clearMigrationData(partitionId);
}
private void clearMigrationData(int partitionId) {
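// Destroy and remove every container whose name hashes to the cleared partition.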
final Set<? extends Map.Entry<String, ? extends CollectionContainer>> entrySet = getContainerMap().entrySet();
final Iterator<? extends Map.Entry<String, ? extends CollectionContainer>> iterator = entrySet.iterator();
InternalPartitionService partitionService = nodeEngine.getPartitionService();
while (iterator.hasNext()) {
final Map.Entry<String, ? extends CollectionContainer> entry = iterator.next();
final String name = entry.getKey();
final CollectionContainer container = entry.getValue();
int containerPartitionId = partitionService.getPartitionId(StringPartitioningStrategy.getPartitionKey(name));
if (containerPartitionId == partitionId) {
container.destroy();
iterator.remove();
}
}
}
public void addContainer(String name, CollectionContainer container) {
final Map map = getContainerMap();
map.put(name, container);
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_collection_CollectionService.java |
101 | serializationConfig.addPortableFactory(5, new PortableFactory() {
public Portable create(int classId) {
return new SamplePortable();
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java |
2,423 | public class MultiMapEventFilter implements EventFilter, DataSerializable {
boolean includeValue;
Data key;
public MultiMapEventFilter() {
}
public MultiMapEventFilter(boolean includeValue, Data key) {
this.includeValue = includeValue;
this.key = key;
}
public boolean isIncludeValue() {
return includeValue;
}
public Data getKey() {
return key;
}
public void writeData(ObjectDataOutput out) throws IOException {
out.writeBoolean(includeValue);
IOUtil.writeNullableData(out, key);
}
public void readData(ObjectDataInput in) throws IOException {
includeValue = in.readBoolean();
key = IOUtil.readNullableData(in);
}
public boolean eval(Object arg) {
return false;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_multimap_MultiMapEventFilter.java |
1,588 | public interface LoggingService {
void addLogListener(Level level, LogListener logListener);
void removeLogListener(LogListener logListener);
ILogger getLogger(String name);
ILogger getLogger(Class type);
} | 0true
| hazelcast_src_main_java_com_hazelcast_logging_LoggingService.java |
2,616 | static class JoinRequest extends TransportRequest {
DiscoveryNode node;
boolean withClusterState;
private JoinRequest() {
}
private JoinRequest(DiscoveryNode node, boolean withClusterState) {
this.node = node;
this.withClusterState = withClusterState;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
node = DiscoveryNode.readNode(in);
withClusterState = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
node.writeTo(out);
out.writeBoolean(withClusterState);
}
} | 0true
| src_main_java_org_elasticsearch_discovery_zen_membership_MembershipAction.java |
606 | @Component("blRequestFilter")
public class BroadleafRequestFilter extends OncePerRequestFilter {
private final Log LOG = LogFactory.getLog(getClass());
/**
* Parameter/Attribute name for the request DTO stored on the current request
*/
public static String REQUEST_DTO_PARAM_NAME = "blRequestDTO";
// Properties to manage URLs that will not be processed by this filter.
private static final String BLC_ADMIN_GWT = "org.broadleafcommerce.admin";
private static final String BLC_ADMIN_PREFIX = "blcadmin";
private static final String BLC_ADMIN_SERVICE = ".service";
private Set<String> ignoreSuffixes;
@Resource(name = "blRequestProcessor")
protected BroadleafRequestProcessor requestProcessor;
@Override
public void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws IOException, ServletException {
if (!shouldProcessURL(request, request.getRequestURI())) {
if (LOG.isTraceEnabled()) {
LOG.trace("Process URL not processing URL " + request.getRequestURI());
}
filterChain.doFilter(request, response);
return;
}
if (LOG.isTraceEnabled()) {
String requestURIWithoutContext;
if (request.getContextPath() != null) {
requestURIWithoutContext = request.getRequestURI().substring(request.getContextPath().length());
} else {
requestURIWithoutContext = request.getRequestURI();
}
// Remove JSESSION-ID or other modifiers
int pos = requestURIWithoutContext.indexOf(";");
if (pos >= 0) {
requestURIWithoutContext = requestURIWithoutContext.substring(0, pos);
}
LOG.trace("Process URL Filter Begin " + requestURIWithoutContext);
}
if (request.getAttribute(REQUEST_DTO_PARAM_NAME) == null) {
request.setAttribute(REQUEST_DTO_PARAM_NAME, new RequestDTOImpl(request));
}
try {
requestProcessor.process(new ServletWebRequest(request, response));
filterChain.doFilter(request, response);
} catch (SiteNotFoundException e) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
} finally {
requestProcessor.postProcess(new ServletWebRequest(request, response));
}
}
/**
* Determines if the passed in URL should be processed by the content management system.
* <p/>
* By default, this method returns false for any BLC-Admin URLs and service calls and for all common image/digital mime-types (as determined by an internal call to {@code getIgnoreSuffixes}).
* <p/>
* This check is called within the {@code doFilterInternal} method to short-circuit the content processing, which can be expensive for requests that do not require it.
*
* @param request
* - the current HttpServletRequest
* @param requestURI
* - the HttpServletRequest.getRequestURI
* @return true if the {@code HttpServletRequest} should be processed
*/
protected boolean shouldProcessURL(HttpServletRequest request, String requestURI) {
if (requestURI.contains(BLC_ADMIN_GWT) || requestURI.endsWith(BLC_ADMIN_SERVICE) || requestURI.contains(BLC_ADMIN_PREFIX)) {
if (LOG.isTraceEnabled()) {
LOG.trace("BroadleafProcessURLFilter ignoring admin request URI " + requestURI);
}
return false;
} else {
int pos = requestURI.lastIndexOf(".");
if (pos > 0) {
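// The suffix check below is currently commented out, so all non-admin URLs fall through to processing.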
// String suffix = requestURI.substring(pos);
// if (getIgnoreSuffixes().contains(suffix.toLowerCase())) {
// if (LOG.isTraceEnabled()) {
// LOG.trace("BroadleafProcessURLFilter ignoring request due to suffix " + requestURI);
// }
// return false;
// }
}
}
return true;
}
/**
* Returns a set of suffixes that can be ignored by content processing. The following are returned:
* <p/>
* <B>List of suffixes ignored:</B>
*
* ".aif", ".aiff", ".asf", ".avi", ".bin", ".bmp", ".doc", ".eps", ".gif", ".hqx", ".jpg", ".jpeg", ".mid", ".midi", ".mov", ".mp3", ".mpg", ".mpeg", ".p65", ".pdf", ".pic", ".pict", ".png", ".ppt", ".psd", ".qxd", ".ram", ".ra", ".rm", ".sea", ".sit", ".stk", ".swf", ".tif", ".tiff", ".txt", ".rtf", ".vob", ".wav", ".wmf", ".xls", ".zip";
*
* @return set of suffixes to ignore.
*/
protected Set<String> getIgnoreSuffixes() {
if (ignoreSuffixes == null || ignoreSuffixes.isEmpty()) {
String[] ignoreSuffixList = { ".aif", ".aiff", ".asf", ".avi", ".bin", ".bmp", ".css", ".doc", ".eps", ".gif", ".hqx", ".js", ".jpg", ".jpeg", ".mid", ".midi", ".mov", ".mp3", ".mpg", ".mpeg", ".p65", ".pdf", ".pic", ".pict", ".png", ".ppt", ".psd", ".qxd", ".ram", ".ra", ".rm", ".sea", ".sit", ".stk", ".swf", ".tif", ".tiff", ".txt", ".rtf", ".vob", ".wav", ".wmf", ".xls", ".zip" };
ignoreSuffixes = new HashSet<String>(Arrays.asList(ignoreSuffixList));
}
return ignoreSuffixes;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_web_BroadleafRequestFilter.java |
2,357 | public static interface PortCallback {
boolean onPortNumber(int portNumber);
} | 0true
| src_main_java_org_elasticsearch_common_transport_PortsRange.java |
209 | public class TemporaryBackendException extends BackendException {
private static final long serialVersionUID = 9286719478969781L;
/**
* @param msg Exception message
*/
public TemporaryBackendException(String msg) {
super(msg);
}
/**
* @param msg Exception message
* @param cause Cause of the exception
*/
public TemporaryBackendException(String msg, Throwable cause) {
super(msg, cause);
}
/**
* Constructs an exception with a generic message
*
* @param cause Cause of the exception
*/
public TemporaryBackendException(Throwable cause) {
this("Temporary failure in storage backend", cause);
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_TemporaryBackendException.java |
1,410 | Job registerCeylonModules = new Job("Load the Ceylon Metamodel for plugin dependencies") {
protected IStatus run(IProgressMonitor monitor) {
Activator.loadBundleAsModule(bundleContext.getBundle());
return Status.OK_STATUS;
}
}; | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_ui_CeylonPlugin.java |
764 | class TransportHandler extends BaseTransportRequestHandler<MultiGetRequest> {
@Override
public MultiGetRequest newInstance() {
return new MultiGetRequest();
}
@Override
public void messageReceived(final MultiGetRequest request, final TransportChannel channel) throws Exception {
// no need to use threaded listener, since we just send a response
request.listenerThreaded(false);
execute(request, new ActionListener<MultiGetResponse>() {
@Override
public void onResponse(MultiGetResponse response) {
try {
channel.sendResponse(response);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send error response for action [" + MultiGetAction.NAME + "] and request [" + request + "]", e1);
}
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
} | 0true
| src_main_java_org_elasticsearch_action_get_TransportMultiGetAction.java |