Unnamed: 0 (int64, 0–6.45k) | func (string, 29–253k chars) | target (class label, 2 classes) | project (string, 36–167 chars) |
---|---|---|---|
429 | public interface KeyInformation {
/**
* Returns the data type of the key's values.
*
* @return the data type of the key's values
*/
public Class<?> getDataType();
/**
* Returns the parameters of the key's configuration.
*
* @return the configuration parameters of this key
*/
public Parameter[] getParameters();
/**
* Returns the {@link com.thinkaurelius.titan.core.Cardinality} for this key.
* @return the cardinality of this key
*/
public Cardinality getCardinality();
public interface StoreRetriever {
/**
* Returns the {@link KeyInformation} for a particular key for this store
* @param key the key whose information is requested
* @return the {@link KeyInformation} for the given key
*/
public KeyInformation get(String key);
}
public interface IndexRetriever {
/**
* Returns the {@link KeyInformation} for a particular key in a given store.
*
* @param store the store containing the key
* @param key the key whose information is requested
* @return the {@link KeyInformation} for the given key in the given store
*/
public KeyInformation get(String store, String key);
/**
* Returns a {@link StoreRetriever} for the given store on this IndexRetriever
* @param store the store to scope the retriever to
* @return a {@link StoreRetriever} for the given store
*/
public StoreRetriever get(String store);
}
public interface Retriever {
/**
* Returns the {@link IndexRetriever} for a given index.
* @param index the name of the index
* @return the {@link IndexRetriever} for the given index
*/
public IndexRetriever get(String index);
}
} | 0 (true)
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_indexing_KeyInformation.java |
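Editor's note: a minimal usage sketch of the retriever hierarchy above; the index, store, and key names are hypothetical, and the Retriever instance is assumed to be supplied by the indexing layer.
// Hypothetical lookup chain: Retriever -> IndexRetriever -> StoreRetriever -> KeyInformation.
static Class<?> lookupDataType(KeyInformation.Retriever retriever) {
    KeyInformation.IndexRetriever index = retriever.get("search"); // index name is illustrative
    KeyInformation.StoreRetriever store = index.get("vertex");     // store name is illustrative
    KeyInformation info = store.get("name");                       // key name is illustrative
    return info.getDataType();                                     // e.g. String.class
}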
878 | public class PromotionQualifier implements Serializable{
private static final long serialVersionUID = 1L;
private Offer promotion;
private OfferItemCriteria itemCriteria;
private int quantity;
private int finalizedQuantity;
public Offer getPromotion() {
return promotion;
}
public void setPromotion(Offer promotion) {
this.promotion = promotion;
}
public OfferItemCriteria getItemCriteria() {
return itemCriteria;
}
public void setItemCriteria(OfferItemCriteria itemCriteria) {
this.itemCriteria = itemCriteria;
}
public int getQuantity() {
return quantity;
}
public void setQuantity(int quantity) {
this.quantity = quantity;
}
public int getFinalizedQuantity() {
return finalizedQuantity;
}
public void setFinalizedQuantity(int finalizedQuantity) {
this.finalizedQuantity = finalizedQuantity;
}
public void incrementQuantity(int quantity) {
this.quantity = this.quantity + quantity;
}
public PromotionQualifier copy() {
PromotionQualifier pq = new PromotionQualifier();
pq.setItemCriteria(itemCriteria);
pq.setPromotion(promotion);
pq.setQuantity(quantity);
pq.setFinalizedQuantity(finalizedQuantity);
return pq;
}
public void resetQty(int qty) {
quantity = qty;
finalizedQuantity = qty;
}
public PromotionQualifier split(int splitItemQty) {
PromotionQualifier returnQualifier = copy();
int newQty = finalizedQuantity - splitItemQty;
if (newQty <= 0) {
throw new IllegalArgumentException("Splitting PromotionQualifier resulted in a negative quantity");
}
setFinalizedQuantity(newQty);
setQuantity(newQty);
returnQualifier.setQuantity(splitItemQty);
returnQualifier.setFinalizedQuantity(splitItemQty);
return returnQualifier;
}
} | 0 (true)
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_discount_PromotionQualifier.java |
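Editor's note: a brief sketch of how split() above behaves; it carves the requested units into a copy and keeps the remainder, throwing if nothing would remain.
// Sketch: a qualifier covering 5 units is split into a remainder of 2 and a carved-off 3.
PromotionQualifier original = new PromotionQualifier();
original.resetQty(5);                          // quantity = finalizedQuantity = 5
PromotionQualifier carved = original.split(3); // original now holds 2, carved holds 3
// original.split(5) would throw: newQty = 5 - 5 = 0 is rejected by the <= 0 check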
3,989 | public class FunctionScoreQueryParser implements QueryParser {
public static final String NAME = "function_score";
ScoreFunctionParserMapper funtionParserMapper;
@Inject
public FunctionScoreQueryParser(ScoreFunctionParserMapper funtionParserMapper) {
this.funtionParserMapper = funtionParserMapper;
}
@Override
public String[] names() {
return new String[] { NAME, Strings.toCamelCase(NAME) };
}
private static final ImmutableMap<String, CombineFunction> combineFunctionsMap;
static {
CombineFunction[] values = CombineFunction.values();
Builder<String, CombineFunction> combineFunctionMapBuilder = ImmutableMap.<String, CombineFunction>builder();
for (CombineFunction combineFunction : values) {
combineFunctionMapBuilder.put(combineFunction.getName(), combineFunction);
}
combineFunctionsMap = combineFunctionMapBuilder.build();
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Query query = null;
float boost = 1.0f;
FiltersFunctionScoreQuery.ScoreMode scoreMode = FiltersFunctionScoreQuery.ScoreMode.Multiply;
ArrayList<FiltersFunctionScoreQuery.FilterFunction> filterFunctions = new ArrayList<FiltersFunctionScoreQuery.FilterFunction>();
float maxBoost = Float.MAX_VALUE;
String currentFieldName = null;
XContentParser.Token token;
CombineFunction combineFunction = CombineFunction.MULT;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if ("query".equals(currentFieldName)) {
query = parseContext.parseInnerQuery();
} else if ("filter".equals(currentFieldName)) {
query = new XConstantScoreQuery(parseContext.parseInnerFilter());
} else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
scoreMode = parseScoreMode(parseContext, parser);
} else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) {
combineFunction = parseBoostMode(parseContext, parser);
} else if ("max_boost".equals(currentFieldName) || "maxBoost".equals(currentFieldName)) {
maxBoost = parser.floatValue();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if ("functions".equals(currentFieldName)) {
currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctions, currentFieldName);
} else {
// we try to parse a score function. If there is no score
// function for the current field name,
// funtionParserMapper.get() will throw an exception.
filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(null, funtionParserMapper.get(parseContext.index(),
currentFieldName).parse(parseContext, parser)));
}
}
if (query == null) {
query = Queries.newMatchAllQuery();
}
// if all filter elements returned null, just use the query
if (filterFunctions.isEmpty()) {
return query;
}
// handle cases where only one score function and no filter was
// provided. In this case we create a FunctionScoreQuery.
if (filterFunctions.size() == 1 && filterFunctions.get(0).filter == null) {
FunctionScoreQuery theQuery = new FunctionScoreQuery(query, filterFunctions.get(0).function);
if (combineFunction != null) {
theQuery.setCombineFunction(combineFunction);
}
theQuery.setBoost(boost);
theQuery.setMaxBoost(maxBoost);
return theQuery;
// in all other cases we create a FiltersFunctionScoreQuery.
} else {
FiltersFunctionScoreQuery functionScoreQuery = new FiltersFunctionScoreQuery(query, scoreMode,
filterFunctions.toArray(new FiltersFunctionScoreQuery.FilterFunction[filterFunctions.size()]), maxBoost);
if (combineFunction != null) {
functionScoreQuery.setCombineFunction(combineFunction);
}
functionScoreQuery.setBoost(boost);
return functionScoreQuery;
}
}
private String parseFiltersAndFunctions(QueryParseContext parseContext, XContentParser parser,
ArrayList<FiltersFunctionScoreQuery.FilterFunction> filterFunctions, String currentFieldName) throws IOException {
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
Filter filter = null;
ScoreFunction scoreFunction = null;
if (token != XContentParser.Token.START_OBJECT) {
throw new QueryParsingException(parseContext.index(), NAME + ": malformed query, expected a "
+ XContentParser.Token.START_OBJECT + " while parsing functions but got a " + token);
} else {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if ("filter".equals(currentFieldName)) {
filter = parseContext.parseInnerFilter();
} else {
// no null check needed here:
// funtionParserMapper throws an exception if the
// parser is non-existent
ScoreFunctionParser functionParser = funtionParserMapper.get(parseContext.index(), currentFieldName);
scoreFunction = functionParser.parse(parseContext, parser);
}
}
}
}
if (filter == null) {
filter = Queries.MATCH_ALL_FILTER;
}
filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(filter, scoreFunction));
}
return currentFieldName;
}
private FiltersFunctionScoreQuery.ScoreMode parseScoreMode(QueryParseContext parseContext, XContentParser parser) throws IOException {
String scoreMode = parser.text();
if ("avg".equals(scoreMode)) {
return FiltersFunctionScoreQuery.ScoreMode.Avg;
} else if ("max".equals(scoreMode)) {
return FiltersFunctionScoreQuery.ScoreMode.Max;
} else if ("min".equals(scoreMode)) {
return FiltersFunctionScoreQuery.ScoreMode.Min;
} else if ("sum".equals(scoreMode)) {
return FiltersFunctionScoreQuery.ScoreMode.Sum;
} else if ("multiply".equals(scoreMode)) {
return FiltersFunctionScoreQuery.ScoreMode.Multiply;
} else if ("first".equals(scoreMode)) {
return FiltersFunctionScoreQuery.ScoreMode.First;
} else {
throw new QueryParsingException(parseContext.index(), NAME + " illegal score_mode [" + scoreMode + "]");
}
}
private CombineFunction parseBoostMode(QueryParseContext parseContext, XContentParser parser) throws IOException {
String boostMode = parser.text();
CombineFunction cf = combineFunctionsMap.get(boostMode);
if (cf == null) {
throw new QueryParsingException(parseContext.index(), NAME + " illegal boost_mode [" + boostMode + "]");
}
return cf;
}
} | 1 (no label)
| src_main_java_org_elasticsearch_index_query_functionscore_FunctionScoreQueryParser.java |
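Editor's note: for orientation, a hand-written illustration (not from the dataset) of the JSON object shape that parse() above consumes; with no functions entries and no bare score function field, the parser returns the inner query unchanged.
// Illustrative request body; field names mirror the branches in parse() above.
String functionScoreBody =
        "{"
      + "  \"query\": { \"match_all\": {} },"
      + "  \"score_mode\": \"sum\","
      + "  \"boost_mode\": \"multiply\","
      + "  \"max_boost\": 10.0,"
      + "  \"boost\": 2.0"
      + "}";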
3,330 | static class DoubleValues extends DenseDoubleValues {
private final BigFloatArrayList values;
DoubleValues(BigFloatArrayList values) {
super(false);
this.values = values;
}
@Override
public double nextValue() {
return values.get(docId);
}
} | 0 (true)
| src_main_java_org_elasticsearch_index_fielddata_plain_FloatArrayAtomicFieldData.java |
98 | @Repository("blPageDao")
public class PageDaoImpl implements PageDao {
private static final SandBox DUMMY_SANDBOX = new SandBoxImpl();
static {
DUMMY_SANDBOX.setId(-1L);
}
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public Page readPageById(Long id) {
return em.find(PageImpl.class, id);
}
@Override
public PageTemplate readPageTemplateById(Long id) {
return em.find(PageTemplateImpl.class, id);
}
@Override
public PageTemplate savePageTemplate(PageTemplate template) {
return em.merge(template);
}
@Override
public Map<String, PageField> readPageFieldsByPage(Page page) {
Query query = em.createNamedQuery("BC_READ_PAGE_FIELDS_BY_PAGE_ID");
query.setParameter("page", page);
query.setHint(QueryHints.HINT_CACHEABLE, true);
List<PageField> pageFields = query.getResultList();
Map<String, PageField> pageFieldMap = new HashMap<String, PageField>();
for (PageField pageField : pageFields) {
pageFieldMap.put(pageField.getFieldKey(), pageField);
}
return pageFieldMap;
}
@Override
public Page updatePage(Page page) {
return em.merge(page);
}
@Override
public void delete(Page page) {
if (!em.contains(page)) {
page = readPageById(page.getId());
}
em.remove(page);
}
@Override
public Page addPage(Page clonedPage) {
return em.merge(clonedPage);
}
@Override
public List<Page> findPageByURI(SandBox sandBox, Locale fullLocale, Locale languageOnlyLocale, String uri) {
Query query;
if (languageOnlyLocale == null) {
languageOnlyLocale = fullLocale;
}
// locale
if (sandBox == null) {
query = em.createNamedQuery("BC_READ_PAGE_BY_URI");
} else if (SandBoxType.PRODUCTION.equals(sandBox.getSandBoxType())) {
query = em.createNamedQuery("BC_READ_PAGE_BY_URI_AND_PRODUCTION_SANDBOX");
query.setParameter("sandbox", sandBox);
} else {
query = em.createNamedQuery("BC_READ_PAGE_BY_URI_AND_USER_SANDBOX");
query.setParameter("sandboxId", sandBox.getId());
}
query.setParameter("fullLocale", fullLocale);
query.setParameter("languageOnlyLocale", languageOnlyLocale);
query.setParameter("uri", uri);
return query.getResultList();
}
@Override
public List<Page> readAllPages() {
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Page> criteria = builder.createQuery(Page.class);
Root<PageImpl> page = criteria.from(PageImpl.class);
criteria.select(page);
try {
return em.createQuery(criteria).getResultList();
} catch (NoResultException e) {
return new ArrayList<Page>();
}
}
@Override
public List<PageTemplate> readAllPageTemplates() {
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<PageTemplate> criteria = builder.createQuery(PageTemplate.class);
Root<PageTemplateImpl> template = criteria.from(PageTemplateImpl.class);
criteria.select(template);
try {
return em.createQuery(criteria).getResultList();
} catch (NoResultException e) {
return new ArrayList<PageTemplate>();
}
}
@Override
public List<Page> findPageByURI(SandBox sandBox, Locale locale, String uri) {
return findPageByURI(sandBox, locale, null, uri);
}
@Override
public void detachPage(Page page) {
em.detach(page);
}
} | 0 (true)
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_dao_PageDaoImpl.java |
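Editor's note: a hedged usage sketch of findPageByURI above; the PageDao wiring, the URI, and both Locale entities (Broadleaf's Locale type, not java.util.Locale) are assumptions.
// Sketch: a null sandbox selects the plain BC_READ_PAGE_BY_URI named query.
static List<Page> resolve(PageDao pageDao, Locale fullLocale, Locale languageOnlyLocale) {
    return pageDao.findPageByURI(null, fullLocale, languageOnlyLocale, "/about-us");
}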
482 | private static class RowIterator implements KeyIterator {
private final Iterator<Map.Entry<StaticBuffer, ColumnValueStore>> rows;
private final SliceQuery columnSlice;
private final StoreTransaction transaction;
private Map.Entry<StaticBuffer, ColumnValueStore> currentRow;
private Map.Entry<StaticBuffer, ColumnValueStore> nextRow;
private boolean isClosed;
public RowIterator(Iterator<Map.Entry<StaticBuffer, ColumnValueStore>> rows,
@Nullable SliceQuery columns,
final StoreTransaction transaction) {
this.rows = Iterators.filter(rows, new Predicate<Map.Entry<StaticBuffer, ColumnValueStore>>() {
@Override
public boolean apply(@Nullable Map.Entry<StaticBuffer, ColumnValueStore> entry) {
return entry != null && !entry.getValue().isEmpty(transaction);
}
});
this.columnSlice = columns;
this.transaction = transaction;
}
@Override
public RecordIterator<Entry> getEntries() {
ensureOpen();
if (columnSlice == null)
throw new IllegalStateException("getEntries() requires SliceQuery to be set.");
final KeySliceQuery keySlice = new KeySliceQuery(currentRow.getKey(), columnSlice);
return new RecordIterator<Entry>() {
private final Iterator<Entry> items = currentRow.getValue().getSlice(keySlice, transaction).iterator();
@Override
public boolean hasNext() {
ensureOpen();
return items.hasNext();
}
@Override
public Entry next() {
ensureOpen();
return items.next();
}
@Override
public void close() {
isClosed = true;
}
@Override
public void remove() {
throw new UnsupportedOperationException("Column removal not supported");
}
};
}
@Override
public boolean hasNext() {
ensureOpen();
if (null != nextRow)
return true;
while (rows.hasNext()) {
nextRow = rows.next();
List<Entry> ents = nextRow.getValue().getSlice(new KeySliceQuery(nextRow.getKey(), columnSlice), transaction);
if (null != ents && 0 < ents.size())
break;
nextRow = null; // this row had no matching entries; keep scanning
}
return null != nextRow;
}
@Override
public StaticBuffer next() {
ensureOpen();
Preconditions.checkNotNull(nextRow);
currentRow = nextRow;
nextRow = null;
return currentRow.getKey();
}
@Override
public void close() {
isClosed = true;
}
private void ensureOpen() {
if (isClosed)
throw new IllegalStateException("Iterator has been closed.");
}
@Override
public void remove() {
throw new UnsupportedOperationException("Key removal not supported");
}
} | 0 (true)
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_inmemory_InMemoryKeyColumnValueStore.java |
19 | final class InverseEntryIterator extends AbstractEntryIterator<K, V, Map.Entry<K, V>> {
InverseEntryIterator(final OMVRBTreeEntry<K, V> last) {
super(last);
// we have to position ourselves just past the current index for reverse iteration to work
if (last != null) {
pageIndex = last.getTree().getPageIndex() + 1;
}
}
public Map.Entry<K, V> next() {
return prevEntry();
}
} | 0 (true)
| commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java |
3,266 | public class TopicPermission extends InstancePermission {
private static final int PUBLISH = 0x4;
private static final int LISTEN = 0x8;
private static final int ALL = CREATE | DESTROY | LISTEN | PUBLISH;
public TopicPermission(String name, String... actions) {
super(name, actions);
}
@Override
protected int initMask(String[] actions) {
int mask = NONE;
for (String action : actions) {
if (ActionConstants.ACTION_ALL.equals(action)) {
return ALL;
}
if (ActionConstants.ACTION_CREATE.equals(action)) {
mask |= CREATE;
} else if (ActionConstants.ACTION_PUBLISH.equals(action)) {
mask |= PUBLISH;
} else if (ActionConstants.ACTION_DESTROY.equals(action)) {
mask |= DESTROY;
} else if (ActionConstants.ACTION_LISTEN.equals(action)) {
mask |= LISTEN;
}
}
return mask;
}
} | 1 (no label)
| hazelcast_src_main_java_com_hazelcast_security_permission_TopicPermission.java |
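Editor's note: a small illustration of initMask() above; the action strings are assumed to match Hazelcast's ActionConstants values ("publish", "listen", "all").
// "publish" | "listen" folds to PUBLISH | LISTEN (0x4 | 0x8 = 0xC);
// "all" alone short-circuits to CREATE | DESTROY | LISTEN | PUBLISH.
TopicPermission pubSub = new TopicPermission("news-topic", "publish", "listen");
TopicPermission everything = new TopicPermission("news-topic", "all");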
308 | new Thread() {
public void run() {
map.lock(key);
lockedLatch.countDown();
}
}.start(); | 0 (true)
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapLockTest.java |
381 | public interface OIdentifiable extends Comparable<OIdentifiable>, Comparator<OIdentifiable> {
/**
* Returns the record identity.
*
* @return ORID instance
*/
public ORID getIdentity();
/**
* Returns the record instance.
*
* @return ORecord instance
*/
public <T extends ORecord<?>> T getRecord();
} | 0 (true)
| core_src_main_java_com_orientechnologies_orient_core_db_record_OIdentifiable.java |
272 | result.append(desc, new Styler() {
@Override
public void applyStyles(TextStyle textStyle) {
textStyle.font = new Font(title.getDisplay(), fontDatas);
}
}); | 0 (true)
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_PeekDefinitionPopup.java |
723 | public interface SkuFee extends Serializable {
public Long getId();
public void setId(Long id);
/**
* Get the name of the surcharge
*
* @return the surcharge name
*/
public String getName();
/**
* Sets the name of the surcharge
*
* @param name the surcharge name
*/
public void setName(String name);
/**
* Get the description of the surcharge
*
* @return the surcharge description
*/
public String getDescription();
/**
* Sets the fee description
*
* @param description the surcharge description
*/
public void setDescription(String description);
/**
* Gets the amount to charge for this surcharge
*
* @return the fee amount
*/
public Money getAmount();
/**
* Sets the amount to charge for this surcharge
*
* @param amount the fee amount
*/
public void setAmount(Money amount);
/**
* Gets whether or not this surcharge is taxable.
*
* @return true if the surcharge is taxable, false otherwise. Defaults to <b>false</b>
*/
public Boolean getTaxable();
/**
* Sets whether or not this surcharge should be included in tax calculations
*
* @param taxable whether this surcharge should be taxed
*/
public void setTaxable(Boolean taxable);
/**
* Gets the optional MVEL expression used as additional criteria to determine if
* this fee applies
*
* @return the MVEL expression of extra criteria to determine if this
* fee applies
*/
public String getExpression();
/**
* Sets the MVEL expression used to determine if this fee should be applied. If this is
* null or empty, this fee will always be applied
*
* @param expression - a valid MVEL expression
*/
public void setExpression(String expression);
public SkuFeeType getFeeType();
public void setFeeType(SkuFeeType feeType);
/**
* Gets the Skus associated with this surcharge
*
* @return Skus that have this particular surcharge
*/
public List<Sku> getSkus();
/**
* Sets the Skus associated with this surcharge
*
* @param skus the Skus that carry this surcharge
*/
public void setSkus(List<Sku> skus);
BroadleafCurrency getCurrency();
void setCurrency(BroadleafCurrency currency);
} | 0 (true)
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_SkuFee.java |
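Editor's note: a sketch of the optional MVEL hook documented above; SkuFeeImpl, the Money(String) constructor, and the expression's property names are assumptions, shown only to illustrate the API.
SkuFee fee = new SkuFeeImpl();    // hypothetical concrete implementation
fee.setName("oversize-handling");
fee.setAmount(new Money("5.00")); // assumes a String-accepting Money constructor
fee.setTaxable(Boolean.FALSE);
// null/empty expression means "always applies"; this MVEL criterion is illustrative
fee.setExpression("fulfillmentGroup.order.subTotal.amount > 100");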
1,405 | public interface OBackupable {
void backup(OutputStream out, Map<String, Object> options, Callable<Object> callable) throws IOException;
void restore(InputStream in, Map<String, Object> options, Callable<Object> callable) throws IOException;
} | 0 (true)
| core_src_main_java_com_orientechnologies_orient_core_util_OBackupable.java |
986 | @SuppressWarnings({ "unchecked", "serial" })
public abstract class ORecordSerializerCSVAbstract extends ORecordSerializerStringAbstract {
public static final char FIELD_VALUE_SEPARATOR = ':';
private final boolean preferSBTreeRIDSet = OGlobalConfiguration.PREFER_SBTREE_SET.getValueAsBoolean();
protected abstract ORecordSchemaAware<?> newObject(final String iClassName);
public Object fieldFromStream(final ORecordInternal<?> iSourceRecord, final OType iType, OClass iLinkedClass, OType iLinkedType,
final String iName, final String iValue) {
if (iValue == null)
return null;
switch (iType) {
case EMBEDDEDLIST:
case EMBEDDEDSET:
return embeddedCollectionFromStream((ODocument) iSourceRecord, iType, iLinkedClass, iLinkedType, iValue);
case LINKSET:
if (iValue.startsWith(OStringSerializerHelper.LINKSET_PREFIX))
return OSBTreeRIDSet.fromStream(iValue, iSourceRecord);
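// intentional fall-through (assumed from the structure): a LINKSET value without the SBTree prefix is parsed by the LINKLIST branch below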
case LINKLIST: {
if (iValue.length() == 0)
return null;
// REMOVE BEGIN & END COLLECTIONS CHARACTERS IF IT'S A COLLECTION
final String value = iValue.startsWith("[") || iValue.startsWith("<") ? iValue.substring(1, iValue.length() - 1) : iValue;
return iType == OType.LINKLIST ? new ORecordLazyList((ODocument) iSourceRecord).setStreamedContent(new StringBuilder(value))
: new OMVRBTreeRIDSet(iSourceRecord).fromStream(new StringBuilder(iValue));
}
case LINKMAP: {
if (iValue.length() == 0)
return null;
// REMOVE BEGIN & END MAP CHARACTERS
String value = iValue.substring(1, iValue.length() - 1);
@SuppressWarnings("rawtypes")
final Map map = new ORecordLazyMap((ODocument) iSourceRecord, ODocument.RECORD_TYPE);
if (value.length() == 0)
return map;
final List<String> items = OStringSerializerHelper.smartSplit(value, OStringSerializerHelper.RECORD_SEPARATOR, true);
// EMBEDDED LITERALS
for (String item : items) {
if (item != null && !item.isEmpty()) {
final List<String> entry = OStringSerializerHelper.smartSplit(item, OStringSerializerHelper.ENTRY_SEPARATOR);
if (!entry.isEmpty()) {
String mapValue = entry.get(1);
if (mapValue != null && !mapValue.isEmpty())
mapValue = mapValue.substring(1);
map.put(fieldTypeFromStream((ODocument) iSourceRecord, OType.STRING, entry.get(0)), new ORecordId(mapValue));
}
}
}
return map;
}
case EMBEDDEDMAP:
return embeddedMapFromStream((ODocument) iSourceRecord, iLinkedType, iValue, iName);
case LINK:
if (iValue.length() > 1) {
int pos = iValue.indexOf(OStringSerializerHelper.CLASS_SEPARATOR);
if (pos > -1)
ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSchema().getClass(iValue.substring(1, pos));
else
pos = 0;
final String linkAsString = iValue.substring(pos + 1);
try {
return new ORecordId(linkAsString);
} catch (NumberFormatException e) {
OLogManager.instance().error(this, "Error on unmarshalling field '%s' of record '%s': value '%s' is not a link", iName,
iSourceRecord, linkAsString);
return new ORecordId();
}
} else
return null;
case EMBEDDED:
if (iValue.length() > 2) {
// REMOVE BEGIN & END EMBEDDED CHARACTERS
final String value = iValue.substring(1, iValue.length() - 1);
// RECORD
return ((ODocument) OStringSerializerEmbedded.INSTANCE.fromStream(value)).addOwner(iSourceRecord);
} else
return null;
default:
return fieldTypeFromStream((ODocument) iSourceRecord, iType, iValue);
}
}
public Map<String, Object> embeddedMapFromStream(final ODocument iSourceDocument, final OType iLinkedType, final String iValue,
final String iName) {
if (iValue.length() == 0)
return null;
// REMOVE BEGIN & END MAP CHARACTERS
String value = iValue.substring(1, iValue.length() - 1);
@SuppressWarnings("rawtypes")
Map map;
if (iLinkedType == OType.LINK || iLinkedType == OType.EMBEDDED)
map = new ORecordLazyMap(iSourceDocument, ODocument.RECORD_TYPE);
else
map = new OTrackedMap<Object>(iSourceDocument);
if (value.length() == 0)
return map;
final List<String> items = OStringSerializerHelper.smartSplit(value, OStringSerializerHelper.RECORD_SEPARATOR, true);
// EMBEDDED LITERALS
if (map instanceof ORecordElement)
((ORecordElement) map).setInternalStatus(STATUS.UNMARSHALLING);
for (String item : items) {
if (item != null && !item.isEmpty()) {
final List<String> entries = OStringSerializerHelper.smartSplit(item, OStringSerializerHelper.ENTRY_SEPARATOR, true);
if (!entries.isEmpty()) {
final Object mapValueObject;
if (entries.size() > 1) {
String mapValue = entries.get(1);
final OType linkedType;
if (iLinkedType == null)
if (!mapValue.isEmpty()) {
linkedType = getType(mapValue);
if ((iName == null || iSourceDocument.fieldType(iName) == null || iSourceDocument.fieldType(iName) != OType.EMBEDDEDMAP)
&& isConvertToLinkedMap(map, linkedType)) {
// CONVERT IT TO A LAZY MAP
map = new ORecordLazyMap(iSourceDocument, ODocument.RECORD_TYPE);
((ORecordElement) map).setInternalStatus(STATUS.UNMARSHALLING);
}
} else
linkedType = OType.EMBEDDED;
else
linkedType = iLinkedType;
if (linkedType == OType.EMBEDDED && mapValue.length() >= 2)
mapValue = mapValue.substring(1, mapValue.length() - 1);
mapValueObject = fieldTypeFromStream(iSourceDocument, linkedType, mapValue);
if (mapValueObject != null && mapValueObject instanceof ODocument)
((ODocument) mapValueObject).addOwner(iSourceDocument);
} else
mapValueObject = null;
final Object key = fieldTypeFromStream(iSourceDocument, OType.STRING, entries.get(0));
try {
map.put(key, mapValueObject);
} catch (ClassCastException e) {
throw new OSerializationException("Cannot load map because the type was not the expected: key=" + key + "(type "
+ key.getClass().toString() + "), value=" + mapValueObject + "(type " + key.getClass() + ")", e);
}
}
}
}
if (map instanceof ORecordElement)
((ORecordElement) map).setInternalStatus(STATUS.LOADED);
return map;
}
protected boolean isConvertToLinkedMap(Map<?, ?> map, final OType linkedType) {
boolean convert = (linkedType == OType.LINK && !(map instanceof ORecordLazyMap));
if (convert) {
for (Object value : map.values())
if (!(value instanceof OIdentifiable))
return false;
}
return convert;
}
public void fieldToStream(final ODocument iRecord, final StringBuilder iOutput, OUserObject2RecordHandler iObjHandler,
final OType iType, final OClass iLinkedClass, final OType iLinkedType, final String iName, final Object iValue,
final Set<ODocument> iMarshalledRecords, final boolean iSaveOnlyDirty) {
if (iValue == null)
return;
final long timer = PROFILER.startChrono();
switch (iType) {
case LINK: {
if (!(iValue instanceof OIdentifiable))
throw new OSerializationException(
"Found an unexpected type during marshalling of a LINK where an OIdentifiable (ORID or any Record) was expected. The string representation of the object is: "
+ iValue);
if (!((OIdentifiable) iValue).getIdentity().isValid() && iValue instanceof ODocument && ((ODocument) iValue).isEmbedded()) {
// WRONG: IT'S EMBEDDED!
fieldToStream(iRecord, iOutput, iObjHandler, OType.EMBEDDED, iLinkedClass, iLinkedType, iName, iValue, iMarshalledRecords,
iSaveOnlyDirty);
} else {
final Object link = linkToStream(iOutput, iRecord, iValue);
if (link != null)
// OVERWRITE CONTENT
iRecord.field(iName, link);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.link2string"), "Serialize link to string", timer);
}
break;
}
case LINKLIST: {
iOutput.append(OStringSerializerHelper.LIST_BEGIN);
if (iValue instanceof ORecordLazyList && ((ORecordLazyList) iValue).getStreamedContent() != null) {
iOutput.append(((ORecordLazyList) iValue).getStreamedContent());
PROFILER.updateCounter(PROFILER.getProcessMetric("serializer.record.string.linkList2string.cached"),
"Serialize linklist to string in stream mode", +1);
} else {
final ORecordLazyList coll;
final Iterator<OIdentifiable> it;
if (iValue instanceof OMultiCollectionIterator<?>) {
final OMultiCollectionIterator<OIdentifiable> iterator = (OMultiCollectionIterator<OIdentifiable>) iValue;
iterator.reset();
it = iterator;
coll = null;
} else if (!(iValue instanceof ORecordLazyList)) {
// FIRST TIME: CONVERT THE ENTIRE COLLECTION
coll = new ORecordLazyList(iRecord);
if (iValue.getClass().isArray()) {
Iterable<Object> iterab = OMultiValue.getMultiValueIterable(iValue);
for (Object i : iterab) {
coll.add((OIdentifiable) i);
}
} else {
coll.addAll((Collection<? extends OIdentifiable>) iValue);
((Collection<? extends OIdentifiable>) iValue).clear();
}
iRecord.field(iName, coll);
it = coll.rawIterator();
} else {
// LAZY LIST
coll = (ORecordLazyList) iValue;
if (coll.getStreamedContent() != null) {
// APPEND STREAMED CONTENT
iOutput.append(coll.getStreamedContent());
PROFILER.updateCounter(PROFILER.getProcessMetric("serializer.record.string.linkList2string.cached"),
"Serialize linklist to string in stream mode", +1);
it = coll.newItemsIterator();
} else
it = coll.rawIterator();
}
if (it != null && it.hasNext()) {
final StringBuilder buffer = new StringBuilder();
for (int items = 0; it.hasNext(); items++) {
if (items > 0)
buffer.append(OStringSerializerHelper.RECORD_SEPARATOR);
final OIdentifiable item = it.next();
final OIdentifiable newRid = linkToStream(buffer, iRecord, item);
if (newRid != null)
((OLazyIterator<OIdentifiable>) it).update(newRid);
}
if (coll != null)
coll.convertRecords2Links();
iOutput.append(buffer);
// UPDATE THE STREAM
if (coll != null)
coll.setStreamedContent(buffer);
}
}
iOutput.append(OStringSerializerHelper.LIST_END);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkList2string"), "Serialize linklist to string",
timer);
break;
}
case LINKSET: {
final OStringBuilderSerializable coll;
if (!(iValue instanceof OMVRBTreeRIDSet || iValue instanceof OSBTreeRIDSet)) {
// FIRST TIME: CONVERT THE ENTIRE COLLECTION
if (preferSBTreeRIDSet && iRecord.getDatabase().getStorage() instanceof OLocalPaginatedStorage)
coll = new OSBTreeRIDSet(iRecord, (Collection<OIdentifiable>) iValue);
else
coll = new OMVRBTreeRIDSet(iRecord, (Collection<OIdentifiable>) iValue);
iRecord.field(iName, coll);
} else
// LAZY SET
coll = (OStringBuilderSerializable) iValue;
coll.toStream(iOutput);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkSet2string"), "Serialize linkset to string",
timer);
break;
}
case LINKMAP: {
iOutput.append(OStringSerializerHelper.MAP_BEGIN);
Map<Object, Object> map = (Map<Object, Object>) iValue;
// LINKED MAP
if (map instanceof OLazyObjectMapInterface<?>)
((OLazyObjectMapInterface<?>) map).setConvertToRecord(false);
boolean invalidMap = false;
try {
int items = 0;
for (Map.Entry<Object, Object> entry : map.entrySet()) {
if (items++ > 0)
iOutput.append(OStringSerializerHelper.RECORD_SEPARATOR);
fieldTypeToString(iOutput, OType.STRING, entry.getKey());
iOutput.append(OStringSerializerHelper.ENTRY_SEPARATOR);
final Object link = linkToStream(iOutput, iRecord, entry.getValue());
if (link != null && !invalidMap)
// IDENTITY IS CHANGED, RE-SET INTO THE COLLECTION TO RECOMPUTE THE HASH
invalidMap = true;
}
} finally {
if (map instanceof OLazyObjectMapInterface<?>) {
((OLazyObjectMapInterface<?>) map).setConvertToRecord(true);
}
}
if (invalidMap) {
final ORecordLazyMap newMap = new ORecordLazyMap(iRecord, ODocument.RECORD_TYPE);
// REPLACE ALL CHANGED ITEMS
for (Map.Entry<Object, Object> entry : map.entrySet()) {
newMap.put(entry.getKey(), (OIdentifiable) entry.getValue());
}
map.clear();
iRecord.field(iName, newMap);
}
iOutput.append(OStringSerializerHelper.MAP_END);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.linkMap2string"), "Serialize linkmap to string",
timer);
break;
}
case EMBEDDED:
if (iValue instanceof ORecordInternal<?>) {
iOutput.append(OStringSerializerHelper.EMBEDDED_BEGIN);
toString((ORecordInternal<?>) iValue, iOutput, null, iObjHandler, iMarshalledRecords, false, true);
iOutput.append(OStringSerializerHelper.EMBEDDED_END);
} else if (iValue != null)
iOutput.append(iValue.toString());
PROFILER
.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embed2string"), "Serialize embedded to string", timer);
break;
case EMBEDDEDLIST:
embeddedCollectionToStream(null, iObjHandler, iOutput, iLinkedClass, iLinkedType, iValue, iMarshalledRecords, iSaveOnlyDirty,
false);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedList2string"),
"Serialize embeddedlist to string", timer);
break;
case EMBEDDEDSET:
embeddedCollectionToStream(null, iObjHandler, iOutput, iLinkedClass, iLinkedType, iValue, iMarshalledRecords, iSaveOnlyDirty,
true);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedSet2string"), "Serialize embeddedset to string",
timer);
break;
case EMBEDDEDMAP: {
embeddedMapToStream(null, iObjHandler, iOutput, iLinkedClass, iLinkedType, iValue, iMarshalledRecords, iSaveOnlyDirty);
PROFILER.stopChrono(PROFILER.getProcessMetric("serializer.record.string.embedMap2string"), "Serialize embeddedmap to string",
timer);
break;
}
default:
fieldTypeToString(iOutput, iType, iValue);
}
}
public void embeddedMapToStream(ODatabaseComplex<?> iDatabase, final OUserObject2RecordHandler iObjHandler,
final StringBuilder iOutput, final OClass iLinkedClass, OType iLinkedType, final Object iValue,
final Set<ODocument> iMarshalledRecords, final boolean iSaveOnlyDirty) {
iOutput.append(OStringSerializerHelper.MAP_BEGIN);
if (iValue != null) {
int items = 0;
// EMBEDDED OBJECTS
for (Entry<String, Object> o : ((Map<String, Object>) iValue).entrySet()) {
if (items > 0)
iOutput.append(OStringSerializerHelper.RECORD_SEPARATOR);
if (o != null) {
fieldTypeToString(iOutput, OType.STRING, o.getKey());
iOutput.append(OStringSerializerHelper.ENTRY_SEPARATOR);
if (o.getValue() instanceof ORecord<?>) {
final ODocument record;
if (o.getValue() instanceof ODocument)
record = (ODocument) o.getValue();
else {
if (iDatabase == null && ODatabaseRecordThreadLocal.INSTANCE.isDefined())
iDatabase = ODatabaseRecordThreadLocal.INSTANCE.get();
record = OObjectSerializerHelperManager.getInstance().toStream(
o.getValue(),
new ODocument(o.getValue().getClass().getSimpleName()),
iDatabase instanceof ODatabaseObject ? ((ODatabaseObject) iDatabase).getEntityManager()
: OEntityManagerInternal.INSTANCE, iLinkedClass,
iObjHandler != null ? iObjHandler : new OUserObject2RecordHandler() {
public Object getUserObjectByRecord(OIdentifiable iRecord, final String iFetchPlan) {
return iRecord;
}
public ORecordInternal<?> getRecordByUserObject(Object iPojo, boolean iCreateIfNotAvailable) {
return new ODocument(iLinkedClass);
}
public boolean existsUserObjectByRID(ORID iRID) {
return false;
}
public void registerUserObject(Object iObject, ORecordInternal<?> iRecord) {
}
public void registerUserObjectAfterLinkSave(ORecordInternal<?> iRecord) {
}
}, null, iSaveOnlyDirty);
}
iOutput.append(OStringSerializerHelper.EMBEDDED_BEGIN);
toString(record, iOutput, null, iObjHandler, iMarshalledRecords, false, true);
iOutput.append(OStringSerializerHelper.EMBEDDED_END);
} else if (o.getValue() instanceof Set<?>) {
// SUB SET
fieldTypeToString(iOutput, OType.EMBEDDEDSET, o.getValue());
} else if (o.getValue() instanceof Collection<?>) {
// SUB LIST
fieldTypeToString(iOutput, OType.EMBEDDEDLIST, o.getValue());
} else if (o.getValue() instanceof Map<?, ?>) {
// SUB MAP
fieldTypeToString(iOutput, OType.EMBEDDEDMAP, o.getValue());
} else {
// EMBEDDED LITERALS
if (iLinkedType == null && o.getValue() != null) {
fieldTypeToString(iOutput, OType.getTypeByClass(o.getValue().getClass()), o.getValue());
} else {
fieldTypeToString(iOutput, iLinkedType, o.getValue());
}
}
}
items++;
}
}
iOutput.append(OStringSerializerHelper.MAP_END);
}
public Object embeddedCollectionFromStream(final ODocument iDocument, final OType iType, OClass iLinkedClass,
final OType iLinkedType, final String iValue) {
if (iValue.length() == 0)
return null;
// REMOVE BEGIN & END COLLECTIONS CHARACTERS IF IT'S A COLLECTION
final String value = iValue.charAt(0) == OStringSerializerHelper.LIST_BEGIN
|| iValue.charAt(0) == OStringSerializerHelper.SET_BEGIN ? iValue.substring(1, iValue.length() - 1) : iValue;
Collection<?> coll;
if (iLinkedType == OType.LINK) {
if (iDocument != null)
coll = (Collection<?>) (iType == OType.EMBEDDEDLIST ? new ORecordLazyList(iDocument).setStreamedContent(new StringBuilder(
value)) : new OMVRBTreeRIDSet(iDocument).fromStream(new StringBuilder(value)));
else {
if (iType == OType.EMBEDDEDLIST)
coll = (Collection<?>) new ORecordLazyList().setStreamedContent(new StringBuilder(value));
else {
final OMVRBTreeRIDSet set = new OMVRBTreeRIDSet();
set.setAutoConvertToRecord(false);
set.fromStream(new StringBuilder(value));
return set;
}
}
} else
coll = iType == OType.EMBEDDEDLIST ? new OTrackedList<Object>(iDocument) : new OTrackedSet<Object>(iDocument);
if (value.length() == 0)
return coll;
OType linkedType;
if (coll instanceof ORecordElement)
((ORecordElement) coll).setInternalStatus(STATUS.UNMARSHALLING);
final List<String> items = OStringSerializerHelper.smartSplit(value, OStringSerializerHelper.RECORD_SEPARATOR, true);
for (String item : items) {
Object objectToAdd = null;
linkedType = null;
if (item.equals("null"))
// NULL VALUE
objectToAdd = null;
else if (item.length() > 2 && item.charAt(0) == OStringSerializerHelper.EMBEDDED_BEGIN) {
// REMOVE EMBEDDED BEGIN/END CHARS
item = item.substring(1, item.length() - 1);
if (!item.isEmpty()) {
// EMBEDDED RECORD, EXTRACT THE CLASS NAME IF DIFFERENT BY THE PASSED (SUB-CLASS OR IT WAS PASSED NULL)
iLinkedClass = OStringSerializerHelper.getRecordClassName(item, iLinkedClass);
if (iLinkedClass != null)
objectToAdd = fromString(item, new ODocument(iLinkedClass.getName()), null);
else
// EMBEDDED OBJECT
objectToAdd = fieldTypeFromStream(iDocument, OType.EMBEDDED, item);
}
} else {
if (linkedType == null) {
final char begin = item.charAt(0);
// AUTO-DETERMINE LINKED TYPE
if (begin == OStringSerializerHelper.LINK)
linkedType = OType.LINK;
else
linkedType = getType(item);
if (linkedType == null)
throw new IllegalArgumentException(
"Linked type cannot be null. Probably the serialized type has not stored the type along with data");
}
if (iLinkedType == OType.CUSTOM)
item = item.substring(1, item.length() - 1);
objectToAdd = fieldTypeFromStream(iDocument, linkedType, item);
}
if (objectToAdd != null && objectToAdd instanceof ODocument && coll instanceof ORecordElement)
((ODocument) objectToAdd).addOwner((ORecordElement) coll);
((Collection<Object>) coll).add(objectToAdd);
}
if (coll instanceof ORecordElement)
((ORecordElement) coll).setInternalStatus(STATUS.LOADED);
return coll;
}
public StringBuilder embeddedCollectionToStream(ODatabaseComplex<?> iDatabase, final OUserObject2RecordHandler iObjHandler,
final StringBuilder iOutput, final OClass iLinkedClass, final OType iLinkedType, final Object iValue,
final Set<ODocument> iMarshalledRecords, final boolean iSaveOnlyDirty, final boolean iSet) {
iOutput.append(iSet ? OStringSerializerHelper.SET_BEGIN : OStringSerializerHelper.LIST_BEGIN);
final Iterator<Object> iterator = OMultiValue.getMultiValueIterator(iValue);
OType linkedType = iLinkedType;
for (int i = 0; iterator.hasNext(); ++i) {
final Object o = iterator.next();
if (i > 0)
iOutput.append(OStringSerializerHelper.RECORD_SEPARATOR);
if (o == null) {
iOutput.append("null");
continue;
}
OIdentifiable id = null;
ODocument doc = null;
final OClass linkedClass;
if (!(o instanceof OIdentifiable)) {
final String fieldBound = OObjectSerializerHelperManager.getInstance().getDocumentBoundField(o.getClass());
if (fieldBound != null) {
OObjectSerializerHelperManager.getInstance().invokeCallback(o, null, OBeforeSerialization.class);
doc = (ODocument) OObjectSerializerHelperManager.getInstance().getFieldValue(o, fieldBound);
OObjectSerializerHelperManager.getInstance().invokeCallback(o, doc, OAfterSerialization.class);
id = doc;
} else if (iLinkedType == null)
linkedType = OType.getTypeByClass(o.getClass());
linkedClass = iLinkedClass;
} else {
id = (OIdentifiable) o;
if (iLinkedType == null)
// AUTO-DETERMINE LINKED TYPE
if (id.getIdentity().isValid())
linkedType = OType.LINK;
else
linkedType = OType.EMBEDDED;
if (id instanceof ODocument) {
doc = (ODocument) id;
if (id.getIdentity().isTemporary())
doc.save();
linkedClass = doc.getSchemaClass();
} else
linkedClass = null;
}
if (id != null && linkedType != OType.LINK)
iOutput.append(OStringSerializerHelper.EMBEDDED_BEGIN);
if (linkedType == OType.EMBEDDED && o instanceof OIdentifiable)
toString((ORecordInternal<?>) ((OIdentifiable) o).getRecord(), iOutput, null);
else if (linkedType != OType.LINK && (linkedClass != null || doc != null)) {
if (id == null) {
// EMBEDDED OBJECTS
if (iDatabase == null && ODatabaseRecordThreadLocal.INSTANCE.isDefined())
iDatabase = ODatabaseRecordThreadLocal.INSTANCE.get();
id = OObjectSerializerHelperManager.getInstance().toStream(
o,
new ODocument(o.getClass().getSimpleName()),
iDatabase instanceof ODatabaseObject ? ((ODatabaseObject) iDatabase).getEntityManager()
: OEntityManagerInternal.INSTANCE, iLinkedClass,
iObjHandler != null ? iObjHandler : new OUserObject2RecordHandler() {
public Object getUserObjectByRecord(OIdentifiable iRecord, final String iFetchPlan) {
return iRecord;
}
public ORecordInternal<?> getRecordByUserObject(Object iPojo, boolean iCreateIfNotAvailable) {
return new ODocument(linkedClass);
}
public boolean existsUserObjectByRID(ORID iRID) {
return false;
}
public void registerUserObject(Object iObject, ORecordInternal<?> iRecord) {
}
public void registerUserObjectAfterLinkSave(ORecordInternal<?> iRecord) {
}
}, null, iSaveOnlyDirty);
}
toString(doc, iOutput, null, iObjHandler, iMarshalledRecords, false, true);
} else {
// EMBEDDED LITERALS
if (iLinkedType == null) {
if (o != null)
linkedType = OType.getTypeByClass(o.getClass());
} else if (iLinkedType == OType.CUSTOM)
iOutput.append(OStringSerializerHelper.CUSTOM_TYPE);
fieldTypeToString(iOutput, linkedType, o);
}
if (id != null && linkedType != OType.LINK)
iOutput.append(OStringSerializerHelper.EMBEDDED_END);
}
iOutput.append(iSet ? OStringSerializerHelper.SET_END : OStringSerializerHelper.LIST_END);
return iOutput;
}
/**
* Serialize the link.
*
* @param buffer
* @param iParentRecord
* @param iLinked
* Can be an instance of ORID or a Record<?>
* @return
*/
private static OIdentifiable linkToStream(final StringBuilder buffer, final ORecordSchemaAware<?> iParentRecord, Object iLinked) {
if (iLinked == null)
// NULL REFERENCE
return null;
OIdentifiable resultRid = null;
ORID rid;
if (iLinked instanceof ORID) {
// JUST THE REFERENCE
rid = (ORID) iLinked;
if (rid.isValid() && rid.isNew()) {
// SAVE AT THE FLY AND STORE THE NEW RID
final ORecord<?> record = rid.getRecord();
final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
// USE THE DEFAULT CLUSTER (both branches were identical with or without an active transaction)
database.save((ORecordInternal<?>) record);
if (record != null)
rid = record.getIdentity();
resultRid = rid;
}
} else {
if (iLinked instanceof String)
iLinked = new ORecordId((String) iLinked);
else if (!(iLinked instanceof ORecordInternal<?>)) {
// NOT RECORD: TRY TO EXTRACT THE DOCUMENT IF ANY
final String boundDocumentField = OObjectSerializerHelperManager.getInstance().getDocumentBoundField(iLinked.getClass());
if (boundDocumentField != null)
iLinked = OObjectSerializerHelperManager.getInstance().getFieldValue(iLinked, boundDocumentField);
}
if (!(iLinked instanceof OIdentifiable))
throw new IllegalArgumentException("Invalid object received. Expected an OIdentifiable but received type="
+ iLinked.getClass().getName() + " and value=" + iLinked);
// RECORD
ORecordInternal<?> iLinkedRecord = ((OIdentifiable) iLinked).getRecord();
rid = iLinkedRecord.getIdentity();
if ((rid.isNew() && !rid.isTemporary()) || iLinkedRecord.isDirty()) {
final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
if (iLinkedRecord instanceof ODocument) {
final OClass schemaClass = ((ODocument) iLinkedRecord).getSchemaClass();
database.save(iLinkedRecord, schemaClass != null ? database.getClusterNameById(schemaClass.getDefaultClusterId()) : null);
} else
// STORE THE TRAVERSED OBJECT TO KNOW THE RECORD ID. CALL THIS VERSION TO AVOID CLEAR OF STACK IN THREAD-LOCAL
database.save(iLinkedRecord);
final ODatabaseComplex<?> dbOwner = database.getDatabaseOwner();
dbOwner.registerUserObjectAfterLinkSave(iLinkedRecord);
resultRid = iLinkedRecord;
}
final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
if (iParentRecord != null && database instanceof ODatabaseRecord) {
final ODatabaseRecord db = database;
if (!db.isRetainRecords())
// REPLACE CURRENT RECORD WITH ITS ID: THIS SAVES A LOT OF MEMORY
resultRid = iLinkedRecord.getIdentity();
}
}
if (rid.isValid())
rid.toString(buffer);
return resultRid;
}
} | 0 (true)
| core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_record_string_ORecordSerializerCSVAbstract.java |
3,311 | static final class LongValues extends DenseLongValues {
private final BigDoubleArrayList values;
LongValues(BigDoubleArrayList values) {
super(false);
this.values = values;
}
@Override
public long nextValue() {
return (long) values.get(docId);
}
} | 0 (true)
| src_main_java_org_elasticsearch_index_fielddata_plain_DoubleArrayAtomicFieldData.java |
2,946 | @AnalysisSettingsRequired
public class StemmerOverrideTokenFilterFactory extends AbstractTokenFilterFactory {
private final StemmerOverrideMap overrideMap;
@Inject
public StemmerOverrideTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) throws IOException {
super(index, indexSettings, name, settings);
List<String> rules = Analysis.getWordList(env, settings, "rules");
if (rules == null) {
throw new ElasticsearchIllegalArgumentException("stemmer override filter requires either `rules` or `rules_path` to be configured");
}
StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(false);
parseRules(rules, builder, "=>");
overrideMap = builder.build();
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new StemmerOverrideFilter(tokenStream, overrideMap);
}
static void parseRules(List<String> rules, StemmerOverrideFilter.Builder builder, String mappingSep) {
for (String rule : rules) {
String key, override;
List<String> mapping = Strings.splitSmart(rule, mappingSep, false);
if (mapping.size() == 2) {
key = mapping.get(0).trim();
override = mapping.get(1).trim();
} else {
throw new RuntimeException("Invalid Keyword override Rule:" + rule);
}
if (key.isEmpty() || override.isEmpty()) {
throw new RuntimeException("Invalid Keyword override Rule:" + rule);
} else {
builder.add(key, override);
}
}
}
} | 0 (true)
| src_main_java_org_elasticsearch_index_analysis_StemmerOverrideTokenFilterFactory.java |
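Editor's note: a sketch of the rule format parseRules() above expects ("token => stem"); it assumes same-package access, since parseRules is package-private.
static StemmerOverrideMap buildOverrides() throws IOException {
    List<String> rules = Arrays.asList("running => run", "mice => mouse"); // one mapping per rule
    StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(false);
    StemmerOverrideTokenFilterFactory.parseRules(rules, builder, "=>");
    return builder.build(); // malformed or empty-sided rules throw inside parseRules before this point
}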
1,829 | return new Initializable<T>() {
public T get(Errors errors) throws ErrorsException {
return instance;
}
@Override
public String toString() {
return String.valueOf(instance);
}
}; | 0 (true)
| src_main_java_org_elasticsearch_common_inject_Initializables.java |
855 | public class ParsedScrollId {
public static final String QUERY_THEN_FETCH_TYPE = "queryThenFetch";
public static final String QUERY_AND_FETCH_TYPE = "queryAndFetch";
public static final String SCAN = "scan";
private final String source;
private final String type;
private final Tuple<String, Long>[] context;
private final Map<String, String> attributes;
public ParsedScrollId(String source, String type, Tuple<String, Long>[] context, Map<String, String> attributes) {
this.source = source;
this.type = type;
this.context = context;
this.attributes = attributes;
}
public String getSource() {
return source;
}
public String getType() {
return type;
}
public Tuple<String, Long>[] getContext() {
return context;
}
public Map<String, String> getAttributes() {
return this.attributes;
}
} | 0 (true)
| src_main_java_org_elasticsearch_action_search_type_ParsedScrollId.java |
100 | static class Segment<K,V> extends ReentrantLock implements Serializable {
private static final long serialVersionUID = 2249069246763182397L;
final float loadFactor;
Segment(float lf) { this.loadFactor = lf; }
} | 0 (true)
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
209 | public class QueryParserSettings {
public static final boolean DEFAULT_ALLOW_LEADING_WILDCARD = true;
public static final boolean DEFAULT_ANALYZE_WILDCARD = false;
public static final float DEFAULT_BOOST = 1.f;
private String queryString;
private String defaultField;
private float boost = DEFAULT_BOOST;
private MapperQueryParser.Operator defaultOperator = QueryParser.Operator.OR;
private boolean autoGeneratePhraseQueries = false;
private boolean allowLeadingWildcard = DEFAULT_ALLOW_LEADING_WILDCARD;
private boolean lowercaseExpandedTerms = true;
private boolean enablePositionIncrements = true;
private int phraseSlop = 0;
private float fuzzyMinSim = FuzzyQuery.defaultMinSimilarity;
private int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;
private MultiTermQuery.RewriteMethod fuzzyRewriteMethod = null;
private boolean analyzeWildcard = DEFAULT_ANALYZE_WILDCARD;
private boolean escape = false;
private Analyzer defaultAnalyzer = null;
private Analyzer defaultQuoteAnalyzer = null;
private Analyzer forcedAnalyzer = null;
private Analyzer forcedQuoteAnalyzer = null;
private String quoteFieldSuffix = null;
private MultiTermQuery.RewriteMethod rewriteMethod = MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT;
private String minimumShouldMatch;
private boolean lenient;
List<String> fields = null;
Collection<String> queryTypes = null;
ObjectFloatOpenHashMap<String> boosts = null;
float tieBreaker = 0.0f;
boolean useDisMax = true;
public boolean isCacheable() {
// a hack for now :) to determine if a query string is cacheable
return !queryString.contains("now");
}
public String queryString() {
return queryString;
}
public void queryString(String queryString) {
this.queryString = queryString;
}
public String defaultField() {
return defaultField;
}
public void defaultField(String defaultField) {
this.defaultField = defaultField;
}
public float boost() {
return boost;
}
public void boost(float boost) {
this.boost = boost;
}
public QueryParser.Operator defaultOperator() {
return defaultOperator;
}
public void defaultOperator(QueryParser.Operator defaultOperator) {
this.defaultOperator = defaultOperator;
}
public boolean autoGeneratePhraseQueries() {
return autoGeneratePhraseQueries;
}
public void autoGeneratePhraseQueries(boolean autoGeneratePhraseQueries) {
this.autoGeneratePhraseQueries = autoGeneratePhraseQueries;
}
public boolean allowLeadingWildcard() {
return allowLeadingWildcard;
}
public void allowLeadingWildcard(boolean allowLeadingWildcard) {
this.allowLeadingWildcard = allowLeadingWildcard;
}
public boolean lowercaseExpandedTerms() {
return lowercaseExpandedTerms;
}
public void lowercaseExpandedTerms(boolean lowercaseExpandedTerms) {
this.lowercaseExpandedTerms = lowercaseExpandedTerms;
}
public boolean enablePositionIncrements() {
return enablePositionIncrements;
}
public void enablePositionIncrements(boolean enablePositionIncrements) {
this.enablePositionIncrements = enablePositionIncrements;
}
public int phraseSlop() {
return phraseSlop;
}
public void phraseSlop(int phraseSlop) {
this.phraseSlop = phraseSlop;
}
public float fuzzyMinSim() {
return fuzzyMinSim;
}
public void fuzzyMinSim(float fuzzyMinSim) {
this.fuzzyMinSim = fuzzyMinSim;
}
public int fuzzyPrefixLength() {
return fuzzyPrefixLength;
}
public void fuzzyPrefixLength(int fuzzyPrefixLength) {
this.fuzzyPrefixLength = fuzzyPrefixLength;
}
public int fuzzyMaxExpansions() {
return fuzzyMaxExpansions;
}
public void fuzzyMaxExpansions(int fuzzyMaxExpansions) {
this.fuzzyMaxExpansions = fuzzyMaxExpansions;
}
public MultiTermQuery.RewriteMethod fuzzyRewriteMethod() {
return fuzzyRewriteMethod;
}
public void fuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
this.fuzzyRewriteMethod = fuzzyRewriteMethod;
}
public boolean escape() {
return escape;
}
public void escape(boolean escape) {
this.escape = escape;
}
public Analyzer defaultAnalyzer() {
return defaultAnalyzer;
}
public void defaultAnalyzer(Analyzer defaultAnalyzer) {
this.defaultAnalyzer = defaultAnalyzer;
}
public Analyzer defaultQuoteAnalyzer() {
return defaultQuoteAnalyzer;
}
public void defaultQuoteAnalyzer(Analyzer defaultAnalyzer) {
this.defaultQuoteAnalyzer = defaultAnalyzer;
}
public Analyzer forcedAnalyzer() {
return forcedAnalyzer;
}
public void forcedAnalyzer(Analyzer forcedAnalyzer) {
this.forcedAnalyzer = forcedAnalyzer;
}
public Analyzer forcedQuoteAnalyzer() {
return forcedQuoteAnalyzer;
}
public void forcedQuoteAnalyzer(Analyzer forcedAnalyzer) {
this.forcedQuoteAnalyzer = forcedAnalyzer;
}
public boolean analyzeWildcard() {
return this.analyzeWildcard;
}
public void analyzeWildcard(boolean analyzeWildcard) {
this.analyzeWildcard = analyzeWildcard;
}
public MultiTermQuery.RewriteMethod rewriteMethod() {
return this.rewriteMethod;
}
public void rewriteMethod(MultiTermQuery.RewriteMethod rewriteMethod) {
this.rewriteMethod = rewriteMethod;
}
public String minimumShouldMatch() {
return this.minimumShouldMatch;
}
public void minimumShouldMatch(String minimumShouldMatch) {
this.minimumShouldMatch = minimumShouldMatch;
}
public void quoteFieldSuffix(String quoteFieldSuffix) {
this.quoteFieldSuffix = quoteFieldSuffix;
}
public String quoteFieldSuffix() {
return this.quoteFieldSuffix;
}
public void lenient(boolean lenient) {
this.lenient = lenient;
}
public boolean lenient() {
return this.lenient;
}
public List<String> fields() {
return fields;
}
public void fields(List<String> fields) {
this.fields = fields;
}
public Collection<String> queryTypes() {
return queryTypes;
}
public void queryTypes(Collection<String> queryTypes) {
this.queryTypes = queryTypes;
}
public ObjectFloatOpenHashMap<String> boosts() {
return boosts;
}
public void boosts(ObjectFloatOpenHashMap<String> boosts) {
this.boosts = boosts;
}
public float tieBreaker() {
return tieBreaker;
}
public void tieBreaker(float tieBreaker) {
this.tieBreaker = tieBreaker;
}
public boolean useDisMax() {
return useDisMax;
}
public void useDisMax(boolean useDisMax) {
this.useDisMax = useDisMax;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
QueryParserSettings that = (QueryParserSettings) o;
if (autoGeneratePhraseQueries != that.autoGeneratePhraseQueries) return false;
if (allowLeadingWildcard != that.allowLeadingWildcard) return false;
if (Float.compare(that.boost, boost) != 0) return false;
if (enablePositionIncrements != that.enablePositionIncrements) return false;
if (escape != that.escape) return false;
if (analyzeWildcard != that.analyzeWildcard) return false;
if (Float.compare(that.fuzzyMinSim, fuzzyMinSim) != 0) return false;
if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false;
if (fuzzyMaxExpansions != that.fuzzyMaxExpansions) return false;
if (fuzzyRewriteMethod != null ? !fuzzyRewriteMethod.equals(that.fuzzyRewriteMethod) : that.fuzzyRewriteMethod != null)
return false;
if (lowercaseExpandedTerms != that.lowercaseExpandedTerms) return false;
if (phraseSlop != that.phraseSlop) return false;
if (defaultAnalyzer != null ? !defaultAnalyzer.equals(that.defaultAnalyzer) : that.defaultAnalyzer != null)
return false;
if (defaultQuoteAnalyzer != null ? !defaultQuoteAnalyzer.equals(that.defaultQuoteAnalyzer) : that.defaultQuoteAnalyzer != null)
return false;
if (forcedAnalyzer != null ? !forcedAnalyzer.equals(that.forcedAnalyzer) : that.forcedAnalyzer != null)
return false;
if (forcedQuoteAnalyzer != null ? !forcedQuoteAnalyzer.equals(that.forcedQuoteAnalyzer) : that.forcedQuoteAnalyzer != null)
return false;
if (defaultField != null ? !defaultField.equals(that.defaultField) : that.defaultField != null) return false;
if (defaultOperator != that.defaultOperator) return false;
if (queryString != null ? !queryString.equals(that.queryString) : that.queryString != null) return false;
if (rewriteMethod != null ? !rewriteMethod.equals(that.rewriteMethod) : that.rewriteMethod != null)
return false;
if (minimumShouldMatch != null ? !minimumShouldMatch.equals(that.minimumShouldMatch) : that.minimumShouldMatch != null)
return false;
if (quoteFieldSuffix != null ? !quoteFieldSuffix.equals(that.quoteFieldSuffix) : that.quoteFieldSuffix != null)
return false;
if (lenient != that.lenient) {
return false;
}
if (Float.compare(that.tieBreaker, tieBreaker) != 0) return false;
if (useDisMax != that.useDisMax) return false;
if (boosts != null ? !boosts.equals(that.boosts) : that.boosts != null) return false;
if (fields != null ? !fields.equals(that.fields) : that.fields != null) return false;
if (queryTypes != null ? !queryTypes.equals(that.queryTypes) : that.queryTypes != null) return false;
return true;
}
@Override
public int hashCode() {
int result = queryString != null ? queryString.hashCode() : 0;
result = 31 * result + (defaultField != null ? defaultField.hashCode() : 0);
result = 31 * result + (boost != +0.0f ? Float.floatToIntBits(boost) : 0);
result = 31 * result + (defaultOperator != null ? defaultOperator.hashCode() : 0);
result = 31 * result + (autoGeneratePhraseQueries ? 1 : 0);
result = 31 * result + (allowLeadingWildcard ? 1 : 0);
result = 31 * result + (lowercaseExpandedTerms ? 1 : 0);
result = 31 * result + (enablePositionIncrements ? 1 : 0);
result = 31 * result + phraseSlop;
result = 31 * result + (fuzzyMinSim != +0.0f ? Float.floatToIntBits(fuzzyMinSim) : 0);
result = 31 * result + fuzzyPrefixLength;
result = 31 * result + (escape ? 1 : 0);
result = 31 * result + (defaultAnalyzer != null ? defaultAnalyzer.hashCode() : 0);
result = 31 * result + (defaultQuoteAnalyzer != null ? defaultQuoteAnalyzer.hashCode() : 0);
result = 31 * result + (forcedAnalyzer != null ? forcedAnalyzer.hashCode() : 0);
result = 31 * result + (forcedQuoteAnalyzer != null ? forcedQuoteAnalyzer.hashCode() : 0);
result = 31 * result + (analyzeWildcard ? 1 : 0);
result = 31 * result + (fields != null ? fields.hashCode() : 0);
result = 31 * result + (queryTypes != null ? queryTypes.hashCode() : 0);
result = 31 * result + (boosts != null ? boosts.hashCode() : 0);
result = 31 * result + (tieBreaker != +0.0f ? Float.floatToIntBits(tieBreaker) : 0);
result = 31 * result + (useDisMax ? 1 : 0);
return result;
}
} | 0true
| src_main_java_org_apache_lucene_queryparser_classic_QueryParserSettings.java |
378 | .ackTimeout(request.timeout()), new ActionListener<RepositoriesService.RegisterRepositoryResponse>() {
@Override
public void onResponse(RepositoriesService.RegisterRepositoryResponse response) {
listener.onResponse(new PutRepositoryResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(e);
}
}); | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_repositories_put_TransportPutRepositoryAction.java |
1,075 | public class MemberAttributeConfigReadOnly extends MemberAttributeConfig {
MemberAttributeConfigReadOnly(MemberAttributeConfig source) {
super(source);
}
@Override
public void setStringAttribute(String key, String value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setBooleanAttribute(String key, boolean value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setByteAttribute(String key, byte value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setShortAttribute(String key, short value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setIntAttribute(String key, int value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setLongAttribute(String key, long value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setFloatAttribute(String key, float value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void setDoubleAttribute(String key, double value) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public void removeAttribute(String key) {
throw new UnsupportedOperationException("This config is read-only");
}
@Override
public Map<String, Object> getAttributes() {
return Collections.unmodifiableMap(super.getAttributes());
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_MemberAttributeConfigReadOnly.java |
6,029 | public final class NoisyChannelSpellChecker {
public static final double REAL_WORD_LIKELYHOOD = 0.95d;
public static final int DEFAULT_TOKEN_LIMIT = 10;
private final double realWordLikelihood;
private final boolean requireUnigram;
private final int tokenLimit;
public NoisyChannelSpellChecker() {
this(REAL_WORD_LIKELYHOOD);
}
public NoisyChannelSpellChecker(double nonErrorLikelihood) {
this(nonErrorLikelihood, true, DEFAULT_TOKEN_LIMIT);
}
public NoisyChannelSpellChecker(double nonErrorLikelihood, boolean requireUnigram, int tokenLimit) {
this.realWordLikelihood = nonErrorLikelihood;
this.requireUnigram = requireUnigram;
this.tokenLimit = tokenLimit;
}
public Result getCorrections(TokenStream stream, final CandidateGenerator generator,
float maxErrors, int numCorrections, IndexReader reader, WordScorer wordScorer, BytesRef separator, float confidence, int gramSize) throws IOException {
final List<CandidateSet> candidateSetsList = new ArrayList<DirectCandidateGenerator.CandidateSet>();
SuggestUtils.analyze(stream, new SuggestUtils.TokenConsumer() {
CandidateSet currentSet = null;
private TypeAttribute typeAttribute;
private final BytesRef termsRef = new BytesRef();
private boolean anyUnigram = false;
private boolean anyTokens = false;
@Override
public void reset(TokenStream stream) {
super.reset(stream);
typeAttribute = stream.addAttribute(TypeAttribute.class);
}
@Override
public void nextToken() throws IOException {
anyTokens = true;
BytesRef term = fillBytesRef(termsRef);
if (requireUnigram && typeAttribute.type() == ShingleFilter.DEFAULT_TOKEN_TYPE) {
return;
}
anyUnigram = true;
if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) {
assert currentSet != null;
long freq = 0;
if ((freq = generator.frequency(term)) > 0) {
currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), freq, realWordLikelihood));
}
} else {
if (currentSet != null) {
candidateSetsList.add(currentSet);
}
currentSet = new CandidateSet(Candidate.EMPTY, generator.createCandidate(BytesRef.deepCopyOf(term), true));
}
}
@Override
public void end() {
if (currentSet != null) {
candidateSetsList.add(currentSet);
}
if (requireUnigram && !anyUnigram && anyTokens) {
throw new IllegalStateException("At least one unigram is required but all tokens were ngrams");
}
}
});
if (candidateSetsList.isEmpty() || candidateSetsList.size() >= tokenLimit) {
return Result.EMPTY;
}
for (CandidateSet candidateSet : candidateSetsList) {
generator.drawCandidates(candidateSet);
}
double cutoffScore = Double.MIN_VALUE;
CandidateScorer scorer = new CandidateScorer(wordScorer, numCorrections, gramSize);
CandidateSet[] candidateSets = candidateSetsList.toArray(new CandidateSet[candidateSetsList.size()]);
if (confidence > 0.0) {
Candidate[] candidates = new Candidate[candidateSets.length];
for (int i = 0; i < candidates.length; i++) {
candidates[i] = candidateSets[i].originalTerm;
}
double inputPhraseScore = scorer.score(candidates, candidateSets);
cutoffScore = inputPhraseScore * confidence;
}
Correction[] findBestCandiates = scorer.findBestCandiates(candidateSets, maxErrors, cutoffScore);
return new Result(findBestCandiates, cutoffScore);
}
public Result getCorrections(Analyzer analyzer, BytesRef query, CandidateGenerator generator,
float maxErrors, int numCorrections, IndexReader reader, String analysisField, WordScorer scorer, float confidence, int gramSize) throws IOException {
return getCorrections(tokenStream(analyzer, query, new CharsRef(), analysisField), generator, maxErrors, numCorrections, reader, scorer, new BytesRef(" "), confidence, gramSize);
}
public TokenStream tokenStream(Analyzer analyzer, BytesRef query, CharsRef spare, String field) throws IOException {
UnicodeUtil.UTF8toUTF16(query, spare);
return analyzer.tokenStream(field, new FastCharArrayReader(spare.chars, spare.offset, spare.length));
}
public static class Result {
public static final Result EMPTY = new Result(Correction.EMPTY, Double.MIN_VALUE);
public final Correction[] corrections;
public final double cutoffScore;
public Result(Correction[] corrections, double cutoffScore) {
this.corrections = corrections;
this.cutoffScore = cutoffScore;
}
}
} | 1no label
| src_main_java_org_elasticsearch_search_suggest_phrase_NoisyChannelSpellChecker.java |
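A note on the confidence handling above: the score of the unchanged input phrase is multiplied by the caller-supplied confidence factor, and any candidate correction that cannot beat that cutoff is dropped. Below is a minimal standalone sketch of just that arithmetic; all scores are made-up numbers, not output of the real WordScorer.
public class ConfidenceCutoffSketch {
    public static void main(String[] args) {
        double inputPhraseScore = 2.5e-7; // hypothetical language-model score of the input
        float confidence = 2.0f;          // corrections must score at least 2x the input
        double cutoffScore = confidence > 0.0 ? inputPhraseScore * confidence
                                              : Double.MIN_VALUE;
        double candidateScore = 6.1e-7;
        System.out.println(candidateScore > cutoffScore); // true: 6.1e-7 > 5.0e-7
    }
}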
803 | public class AtomicLongPortableHook implements PortableHook {
static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.ATOMIC_LONG_PORTABLE_FACTORY, -17);
static final int ADD_AND_GET = 1;
static final int COMPARE_AND_SET = 2;
static final int GET_AND_ADD = 3;
static final int GET_AND_SET = 4;
static final int SET = 5;
static final int APPLY = 6;
static final int ALTER = 7;
static final int ALTER_AND_GET = 8;
static final int GET_AND_ALTER = 9;
@Override
public int getFactoryId() {
return F_ID;
}
@Override
public PortableFactory createFactory() {
return new PortableFactory() {
@Override
public Portable create(int classId) {
switch (classId) {
case ADD_AND_GET:
return new AddAndGetRequest();
case COMPARE_AND_SET:
return new CompareAndSetRequest();
case GET_AND_ADD:
return new GetAndAddRequest();
case GET_AND_SET:
return new GetAndSetRequest();
case SET:
return new SetRequest();
case APPLY:
return new ApplyRequest();
case ALTER:
return new AlterRequest();
case ALTER_AND_GET:
return new AlterAndGetRequest();
case GET_AND_ALTER:
return new GetAndAlterRequest();
default:
return null;
}
}
};
}
@Override
public Collection<ClassDefinition> getBuiltinDefinitions() {
return null;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_client_AtomicLongPortableHook.java |
889 | threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executeQueryPhase(shardIndex, counter, node, target.v2());
}
}); | 0true
| src_main_java_org_elasticsearch_action_search_type_TransportSearchScrollQueryThenFetchAction.java |
700 | public class BulkShardRequest extends ShardReplicationOperationRequest<BulkShardRequest> {
private int shardId;
private BulkItemRequest[] items;
private boolean refresh;
BulkShardRequest() {
}
BulkShardRequest(String index, int shardId, boolean refresh, BulkItemRequest[] items) {
this.index = index;
this.shardId = shardId;
this.items = items;
this.refresh = refresh;
}
boolean refresh() {
return this.refresh;
}
int shardId() {
return shardId;
}
BulkItemRequest[] items() {
return items;
}
/**
* Before we fork on a local thread, make sure we copy over the bytes if they are unsafe
*/
@Override
public void beforeLocalFork() {
for (BulkItemRequest item : items) {
if (item.request() instanceof InstanceShardOperationRequest) {
((InstanceShardOperationRequest) item.request()).beforeLocalFork();
} else {
((ShardReplicationOperationRequest) item.request()).beforeLocalFork();
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(shardId);
out.writeVInt(items.length);
for (BulkItemRequest item : items) {
if (item != null) {
out.writeBoolean(true);
item.writeTo(out);
} else {
out.writeBoolean(false);
}
}
out.writeBoolean(refresh);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = in.readVInt();
items = new BulkItemRequest[in.readVInt()];
for (int i = 0; i < items.length; i++) {
if (in.readBoolean()) {
items[i] = BulkItemRequest.readBulkItem(in);
}
}
refresh = in.readBoolean();
}
} | 0true
| src_main_java_org_elasticsearch_action_bulk_BulkShardRequest.java |
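writeTo/readFrom above use a per-slot boolean marker so null array elements survive the round trip. A plain-JDK sketch of the same pattern, with DataOutputStream/DataInputStream standing in for Elasticsearch's internal StreamOutput/StreamInput:
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class NullableArraySketch {
    public static void write(DataOutputStream out, String[] items) throws IOException {
        out.writeInt(items.length);
        for (String item : items) {
            if (item != null) {
                out.writeBoolean(true);  // marker: a value follows
                out.writeUTF(item);
            } else {
                out.writeBoolean(false); // marker: the slot stays null on the reader side
            }
        }
    }

    public static String[] read(DataInputStream in) throws IOException {
        String[] items = new String[in.readInt()];
        for (int i = 0; i < items.length; i++) {
            if (in.readBoolean()) {
                items[i] = in.readUTF();
            }
        }
        return items;
    }
}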
648 | public class CollectionContainsOperation extends CollectionOperation {
Set<Data> valueSet;
public CollectionContainsOperation() {
}
public CollectionContainsOperation(String name, Set<Data> valueSet) {
super(name);
this.valueSet = valueSet;
}
@Override
public int getId() {
return CollectionDataSerializerHook.COLLECTION_CONTAINS;
}
@Override
public void beforeRun() throws Exception {
}
@Override
public void run() throws Exception {
response = getOrCreateContainer().contains(valueSet);
}
@Override
public void afterRun() throws Exception {
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeInt(valueSet.size());
for (Data value : valueSet) {
value.writeData(out);
}
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
final int size = in.readInt();
valueSet = new HashSet<Data>(size);
for (int i = 0; i < size; i++) {
final Data value = new Data();
value.readData(in);
valueSet.add(value);
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionContainsOperation.java |
422 | public class ClientMapReduceProxy
extends ClientProxy
implements JobTracker {
private final ConcurrentMap<String, ClientTrackableJob> trackableJobs = new ConcurrentHashMap<String, ClientTrackableJob>();
public ClientMapReduceProxy(String instanceName, String serviceName, String objectName) {
super(instanceName, serviceName, objectName);
}
@Override
protected void onDestroy() {
for (ClientTrackableJob trackableJob : trackableJobs.values()) {
trackableJob.completableFuture.cancel(false);
}
}
@Override
public <K, V> Job<K, V> newJob(KeyValueSource<K, V> source) {
return new ClientJob<K, V>(getName(), source);
}
@Override
public <V> TrackableJob<V> getTrackableJob(String jobId) {
return trackableJobs.get(jobId);
}
@Override
public String toString() {
return "JobTracker{" + "name='" + getName() + '\'' + '}';
}
/*
* Removed for now since it is moved to Hazelcast 3.3
@Override
public <K, V> ProcessJob<K, V> newProcessJob(KeyValueSource<K, V> source) {
// TODO
return null;
}*/
private <T> T invoke(InvocationClientRequest request, String jobId) throws Exception {
ClientContext context = getContext();
ClientInvocationService cis = context.getInvocationService();
ClientTrackableJob trackableJob = trackableJobs.get(jobId);
if (trackableJob != null) {
Address runningMember = trackableJob.jobOwner;
ICompletableFuture<T> future = cis.invokeOnTarget(request, runningMember);
return future.get();
}
return null;
}
private class ClientJob<KeyIn, ValueIn> extends AbstractJob<KeyIn, ValueIn> {
public ClientJob(String name, KeyValueSource<KeyIn, ValueIn> keyValueSource) {
super(name, ClientMapReduceProxy.this, keyValueSource);
}
@Override
protected <T> JobCompletableFuture<T> invoke(final Collator collator) {
try {
final String jobId = UuidUtil.buildRandomUuidString();
ClientContext context = getContext();
ClientInvocationService cis = context.getInvocationService();
ClientMapReduceRequest request = new ClientMapReduceRequest(name, jobId, keys,
predicate, mapper, combinerFactory, reducerFactory, keyValueSource,
chunkSize, topologyChangedStrategy);
final ClientCompletableFuture completableFuture = new ClientCompletableFuture(jobId);
ClientCallFuture future = (ClientCallFuture) cis.invokeOnRandomTarget(request, null);
future.andThen(new ExecutionCallback() {
@Override
public void onResponse(Object response) {
try {
if (collator != null) {
response = collator.collate(((Map) response).entrySet());
}
} finally {
completableFuture.setResult(response);
trackableJobs.remove(jobId);
}
}
@Override
public void onFailure(Throwable t) {
try {
if (t instanceof ExecutionException
&& t.getCause() instanceof CancellationException) {
t = t.getCause();
}
completableFuture.setResult(t);
} finally {
trackableJobs.remove(jobId);
}
}
});
Address runningMember = future.getConnection().getRemoteEndpoint();
trackableJobs.putIfAbsent(jobId, new ClientTrackableJob<T>(jobId, runningMember, completableFuture));
return completableFuture;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
private class ClientCompletableFuture<V>
extends AbstractCompletableFuture<V>
implements JobCompletableFuture<V> {
private final String jobId;
private final CountDownLatch latch;
private volatile boolean cancelled;
protected ClientCompletableFuture(String jobId) {
super(null, Logger.getLogger(ClientCompletableFuture.class));
this.jobId = jobId;
this.latch = new CountDownLatch(1);
}
@Override
public String getJobId() {
return jobId;
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
try {
cancelled = (Boolean) invoke(new ClientCancellationRequest(getName(), jobId), jobId);
} catch (Exception ignore) {
}
return cancelled;
}
@Override
public boolean isCancelled() {
return cancelled;
}
@Override
public void setResult(Object result) {
super.setResult(result);
latch.countDown();
}
@Override
public V get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
ValidationUtil.isNotNull(unit, "unit");
if (!latch.await(timeout, unit) || !isDone()) {
throw new TimeoutException("timeout reached");
}
return getResult();
}
@Override
protected ExecutorService getAsyncExecutor() {
return getContext().getExecutionService().getAsyncExecutor();
}
}
private final class ClientTrackableJob<V>
implements TrackableJob<V> {
private final String jobId;
private final Address jobOwner;
private final AbstractCompletableFuture<V> completableFuture;
private ClientTrackableJob(String jobId, Address jobOwner,
AbstractCompletableFuture<V> completableFuture) {
this.jobId = jobId;
this.jobOwner = jobOwner;
this.completableFuture = completableFuture;
}
@Override
public JobTracker getJobTracker() {
return ClientMapReduceProxy.this;
}
@Override
public String getName() {
return ClientMapReduceProxy.this.getName();
}
@Override
public String getJobId() {
return jobId;
}
@Override
public ICompletableFuture<V> getCompletableFuture() {
return completableFuture;
}
@Override
public JobProcessInformation getJobProcessInformation() {
try {
return invoke(new ClientJobProcessInformationRequest(getName(), jobId), jobId);
} catch (Exception ignore) {
}
return null;
}
}
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientMapReduceProxy.java |
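A hedged usage sketch of the JobTracker API this proxy implements, against the Hazelcast 3.x map-reduce package; method shapes can differ slightly between minor versions, and every name below (map name, mapper, sample data) is illustrative.
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ICompletableFuture;
import com.hazelcast.core.IMap;
import com.hazelcast.mapreduce.Context;
import com.hazelcast.mapreduce.Job;
import com.hazelcast.mapreduce.JobTracker;
import com.hazelcast.mapreduce.KeyValueSource;
import com.hazelcast.mapreduce.Mapper;
import java.util.List;
import java.util.Map;

public class WordCountSketch {
    static class TokenMapper implements Mapper<String, String, String, Integer> {
        public void map(String key, String value, Context<String, Integer> ctx) {
            for (String token : value.split(" ")) {
                ctx.emit(token, 1); // one occurrence per token
            }
        }
    }

    public static void main(String[] args) throws Exception {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IMap<String, String> lines = hz.getMap("lines");
        lines.put("1", "a b a");

        JobTracker tracker = hz.getJobTracker("default"); // resolves to the proxy above on a client
        Job<String, String> job = tracker.newJob(KeyValueSource.fromMap(lines));
        ICompletableFuture<Map<String, List<Integer>>> future =
                job.mapper(new TokenMapper()).submit();
        System.out.println(future.get()); // e.g. {a=[1, 1], b=[1]}
        hz.getLifecycleService().shutdown();
    }
}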
450 | public class KeySliceQuery extends SliceQuery {
private final StaticBuffer key;
public KeySliceQuery(StaticBuffer key, StaticBuffer sliceStart, StaticBuffer sliceEnd) {
super(sliceStart, sliceEnd);
Preconditions.checkNotNull(key);
this.key=key;
}
public KeySliceQuery(StaticBuffer key, SliceQuery query) {
super(query);
Preconditions.checkNotNull(key);
this.key=key;
}
/**
*
* @return the key of this query
*/
public StaticBuffer getKey() {
return key;
}
@Override
public KeySliceQuery setLimit(int limit) {
super.setLimit(limit);
return this;
}
@Override
public KeySliceQuery updateLimit(int newLimit) {
return new KeySliceQuery(key,this).setLimit(newLimit);
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(key).appendSuper(super.hashCode()).toHashCode();
}
@Override
public boolean equals(Object other) {
if (this==other) return true;
else if (other==null) return false;
else if (!getClass().isInstance(other)) return false;
KeySliceQuery oth = (KeySliceQuery)other;
return key.equals(oth.key) && super.equals(oth);
}
public boolean subsumes(KeySliceQuery oth) {
return key.equals(oth.key) && super.subsumes(oth);
}
@Override
public String toString() {
return String.format("KeySliceQuery(key: %s, start: %s, end: %s, limit:%d)", key, getSliceStart(), getSliceEnd(), getLimit());
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_KeySliceQuery.java |
2,041 | public final class InjectionRequest<T> implements Element {
private final Object source;
private final TypeLiteral<T> type;
private final T instance;
public InjectionRequest(Object source, TypeLiteral<T> type, T instance) {
this.source = checkNotNull(source, "source");
this.type = checkNotNull(type, "type");
this.instance = checkNotNull(instance, "instance");
}
public Object getSource() {
return source;
}
public T getInstance() {
return instance;
}
public TypeLiteral<T> getType() {
return type;
}
/**
* Returns the instance methods and fields of {@code instance} that will be injected to fulfill
* this request.
*
* @return a possibly empty set of injection points. The set has a specified iteration order. All
* fields are returned and then all methods. Within the fields, supertype fields are returned
* before subtype fields. Similarly, supertype methods are returned before subtype methods.
* @throws ConfigurationException if there is a malformed injection point on the class of {@code
* instance}, such as a field with multiple binding annotations. The exception's {@link
* ConfigurationException#getPartialValue() partial value} is a {@code Set<InjectionPoint>}
* of the valid injection points.
*/
public Set<InjectionPoint> getInjectionPoints() throws ConfigurationException {
return InjectionPoint.forInstanceMethodsAndFields(instance.getClass());
}
public <R> R acceptVisitor(ElementVisitor<R> visitor) {
return visitor.visit(this);
}
public void applyTo(Binder binder) {
binder.withSource(getSource()).requestInjection(type, instance);
}
} | 0true
| src_main_java_org_elasticsearch_common_inject_spi_InjectionRequest.java |
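For context, this element is what Guice records when a module calls requestInjection. A hedged sketch using stock Google Guice, whose API the vendored org.elasticsearch.common.inject package mirrors; the Service class and apiKey binding are invented for illustration.
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Inject;

public class RequestInjectionSketch {
    static class Service {
        @Inject
        String apiKey; // populated when the injector processes the InjectionRequest
    }

    public static void main(String[] args) {
        final Service existing = new Service(); // constructed outside Guice
        Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bind(String.class).toInstance("secret");
                requestInjection(existing); // recorded as an InjectionRequest element
            }
        });
        System.out.println(existing.apiKey); // "secret"
    }
}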
1,932 | public class MapContainsValueRequest extends AllPartitionsClientRequest implements Portable, RetryableRequest, SecureRequest {
private String name;
private Data value;
public MapContainsValueRequest() {
}
public MapContainsValueRequest(String name, Data value) {
this.name = name;
this.value = value;
}
@Override
protected OperationFactory createOperationFactory() {
return new ContainsValueOperationFactory(name, value);
}
@Override
protected Object reduce(Map<Integer, Object> map) {
for (Object contains : map.values()) {
            if (Boolean.TRUE.equals(contains)) {
                return true;
            }
}
return false;
}
public String getServiceName() {
return MapService.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public int getClassId() {
return MapPortableHook.CONTAINS_VALUE;
}
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
final ObjectDataOutput out = writer.getRawDataOutput();
value.writeData(out);
}
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
final ObjectDataInput in = reader.getRawDataInput();
value = new Data();
value.readData(in);
}
public Permission getRequiredPermission() {
return new MapPermission(name, ActionConstants.ACTION_READ);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_client_MapContainsValueRequest.java |
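The reduce step above collapses the per-partition answers into a single boolean and short-circuits on the first true. The same logic in standalone, plain-JDK form:
import java.util.HashMap;
import java.util.Map;

public class ReduceAnySketch {
    static boolean reduce(Map<Integer, Object> partitionResults) {
        for (Object contains : partitionResults.values()) {
            if (Boolean.TRUE.equals(contains)) {
                return true; // one matching partition is enough
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Map<Integer, Object> results = new HashMap<Integer, Object>();
        results.put(0, Boolean.FALSE);
        results.put(1, Boolean.TRUE);
        System.out.println(reduce(results)); // true
    }
}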
1,869 | boolean b = h1.executeTransaction(options, new TransactionalTask<Boolean>() {
public Boolean execute(TransactionalTaskContext context) throws TransactionException {
final TransactionalMap<Object, Object> txMap = context.getMap("default");
assertNull(txMap.replace("1", "value"));
txMap.put("1", "value2");
assertEquals("value2", txMap.replace("1", "value3"));
assertEquals("value3", txMap.get("1"));
assertNull(map2.get("1"));
assertNull(map2.get("2"));
return true;
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_map_MapTransactionTest.java |
456 | public static class AdminPresentationToOneLookup {
public static final String LOOKUPDISPLAYPROPERTY = "lookupDisplayProperty";
public static final String USESERVERSIDEINSPECTIONCACHE = "useServerSideInspectionCache";
public static final String LOOKUPTYPE = "lookupType";
public static final String CUSTOMCRITERIA = "customCriteria";
public static final String FORCEPOPULATECHILDPROPERTIES = "forcePopulateChildProperties";
} | 0true
| common_src_main_java_org_broadleafcommerce_common_presentation_override_PropertyType.java |
23 | static class EdgeVertex extends Vertex {
private SortedSet<Edge> outEdges = new ConcurrentSkipListSet<Edge>(new Comparator<Edge>() {
@Override
public int compare(Edge e1, Edge e2) {
return e1.getEnd().compareTo(e2.getEnd());
}
});
EdgeVertex(long id) {
super(id);
}
@Override
public Iterable<Vertex> getNeighbors(final int value) {
return Iterables.transform(Iterables.filter(outEdges, new Predicate<Edge>() {
@Override
public boolean apply(@Nullable Edge edge) {
return !CHECK_VALUE || ((Integer) edge.getProperty("number")).intValue() == value;
}
}), new Function<Edge, Vertex>() {
@Override
public Vertex apply(@Nullable Edge edge) {
return edge.getEnd();
}
});
}
void addOutEdge(Edge e) {
outEdges.add(e);
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_TestByteBuffer.java |
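getNeighbors above lazily chains Guava's filter and transform over the sorted edge set. A self-contained sketch of the same chain, with strings standing in for edges and vertices:
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import java.util.Arrays;

public class FilterTransformSketch {
    public static void main(String[] args) {
        Iterable<String> edges = Arrays.asList("a:1", "b:2", "a:3");
        Iterable<String> targets = Iterables.transform(
                Iterables.filter(edges, new Predicate<String>() {
                    public boolean apply(String e) { return e.startsWith("a"); } // keep only "a" edges
                }),
                new Function<String, String>() {
                    public String apply(String e) { return e.substring(2); } // project to the endpoint
                });
        System.out.println(Lists.newArrayList(targets)); // [1, 3]
    }
}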
1,044 | public class OCommandExecutorSQLGrant extends OCommandExecutorSQLPermissionAbstract {
public static final String KEYWORD_GRANT = "GRANT";
private static final String KEYWORD_TO = "TO";
@SuppressWarnings("unchecked")
public OCommandExecutorSQLGrant parse(final OCommandRequest iRequest) {
init((OCommandRequestText) iRequest);
privilege = ORole.PERMISSION_NONE;
resource = null;
role = null;
StringBuilder word = new StringBuilder();
int oldPos = 0;
int pos = nextWord(parserText, parserTextUpperCase, oldPos, word, true);
if (pos == -1 || !word.toString().equals(KEYWORD_GRANT))
throw new OCommandSQLParsingException("Keyword " + KEYWORD_GRANT + " not found. Use " + getSyntax(), parserText, oldPos);
pos = nextWord(parserText, parserTextUpperCase, pos, word, true);
if (pos == -1)
throw new OCommandSQLParsingException("Invalid privilege", parserText, oldPos);
parsePrivilege(word, oldPos);
pos = nextWord(parserText, parserTextUpperCase, pos, word, true);
if (pos == -1 || !word.toString().equals(KEYWORD_ON))
throw new OCommandSQLParsingException("Keyword " + KEYWORD_ON + " not found. Use " + getSyntax(), parserText, oldPos);
pos = nextWord(parserText, parserText, pos, word, true);
if (pos == -1)
throw new OCommandSQLParsingException("Invalid resource", parserText, oldPos);
resource = word.toString();
pos = nextWord(parserText, parserTextUpperCase, pos, word, true);
if (pos == -1 || !word.toString().equals(KEYWORD_TO))
throw new OCommandSQLParsingException("Keyword " + KEYWORD_TO + " not found. Use " + getSyntax(), parserText, oldPos);
pos = nextWord(parserText, parserText, pos, word, true);
if (pos == -1)
throw new OCommandSQLParsingException("Invalid role", parserText, oldPos);
final String roleName = word.toString();
role = getDatabase().getMetadata().getSecurity().getRole(roleName);
if (role == null)
throw new OCommandSQLParsingException("Invalid role: " + roleName);
return this;
}
/**
* Execute the GRANT.
*/
public Object execute(final Map<Object, Object> iArgs) {
if (role == null)
throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
role.grant(resource, privilege);
role.save();
return role;
}
public String getSyntax() {
return "GRANT <permission> ON <resource> TO <role>";
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLGrant.java |
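A hedged sketch of driving the parser above through OrientDB's public command API; the database URL, credentials, role, and resource below are placeholders.
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.sql.OCommandSQL;

public class GrantSketch {
    public static void main(String[] args) {
        ODatabaseDocumentTx db =
                new ODatabaseDocumentTx("plocal:/tmp/demo").open("admin", "admin");
        try {
            // Matches the parser's expected shape: GRANT <permission> ON <resource> TO <role>
            db.command(new OCommandSQL("GRANT UPDATE ON database.class.Account TO writer")).execute();
        } finally {
            db.close();
        }
    }
}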
1,110 | differences.add(new OSQLFunctionDifference() {
@Override
protected boolean returnDistributedResult() {
return true;
}
}); | 0true
| core_src_test_java_com_orientechnologies_orient_core_sql_functions_coll_SQLFunctionDifferenceTest.java |
476 | public interface EventHandler<E> {
void handle(E event);
/**
     * This method is called when the registration request's response is successfully returned from the node.
     *
     * Note that this method is also called when the first registered node dies
     * and the listener re-registers with a second node.
*/
void onListenerRegister();
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_spi_EventHandler.java |
33 | @edu.umd.cs.findbugs.annotations.SuppressWarnings("EI_EXPOSE_REP")
public class MemcacheEntry implements DataSerializable, TextCommandConstants {
private byte[] bytes;
private byte[] value;
private int flag;
public MemcacheEntry(String key, byte[] value, int flag) {
byte[] flagBytes = stringToBytes(" " + flag + " ");
byte[] valueLen = stringToBytes(String.valueOf(value.length));
byte[] keyBytes = stringToBytes(key);
this.value = value.clone();
int size = VALUE_SPACE.length
+ keyBytes.length
+ flagBytes.length
+ valueLen.length
+ RETURN.length
+ value.length
+ RETURN.length;
ByteBuffer entryBuffer = ByteBuffer.allocate(size);
entryBuffer.put(VALUE_SPACE);
entryBuffer.put(keyBytes);
entryBuffer.put(flagBytes);
entryBuffer.put(valueLen);
entryBuffer.put(RETURN);
entryBuffer.put(value);
entryBuffer.put(RETURN);
this.bytes = entryBuffer.array();
this.flag = flag;
}
public MemcacheEntry() {
}
public void readData(ObjectDataInput in) throws IOException {
int size = in.readInt();
bytes = new byte[size];
in.readFully(bytes);
size = in.readInt();
value = new byte[size];
in.readFully(value);
flag = in.readInt();
}
public void writeData(ObjectDataOutput out) throws IOException {
out.writeInt(bytes.length);
out.write(bytes);
out.writeInt(value.length);
out.write(value);
out.writeInt(flag);
}
public ByteBuffer toNewBuffer() {
return ByteBuffer.wrap(bytes);
}
public int getFlag() {
return flag;
}
public byte[] getBytes() {
return bytes;
}
public byte[] getValue() {
return value;
}
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
MemcacheEntry that = (MemcacheEntry) o;
if (flag != that.flag) {
return false;
}
if (!Arrays.equals(bytes, that.bytes)) {
return false;
}
if (!Arrays.equals(value, that.value)) {
return false;
}
return true;
}
public int hashCode() {
int result = bytes != null ? Arrays.hashCode(bytes) : 0;
result = 31 * result + (value != null ? Arrays.hashCode(value) : 0);
result = 31 * result + flag;
return result;
}
public String toString() {
return "MemcacheEntry{"
+ "bytes="
+ bytesToString(bytes)
+ ", flag="
+ flag
+ '}';
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_MemcacheEntry.java |
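The constructor above pre-renders the memcached text-protocol GET response frame, VALUE <key> <flags> <bytes>\r\n<data>\r\n. A plain-JDK sketch of that layout (key and payload are made up):
import java.nio.charset.Charset;

public class MemcacheFrameSketch {
    public static void main(String[] args) {
        String key = "greeting";
        byte[] value = "hello".getBytes(Charset.forName("UTF-8"));
        int flag = 0;
        // "VALUE <key> <flags> <bytes>\r\n" header, then the data block and a trailing "\r\n"
        String header = "VALUE " + key + " " + flag + " " + value.length + "\r\n";
        System.out.print(header + new String(value, Charset.forName("UTF-8")) + "\r\n");
    }
}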
3,474 | public class ContentPath {
public static enum Type {
JUST_NAME,
FULL,
}
private Type pathType;
private final char delimiter;
private final StringBuilder sb;
private final int offset;
private int index = 0;
private String[] path = new String[10];
private String sourcePath;
public ContentPath() {
this(0);
}
/**
* Constructs a json path with an offset. The offset will result an <tt>offset</tt>
* number of path elements to not be included in {@link #pathAsText(String)}.
*/
public ContentPath(int offset) {
this.delimiter = '.';
this.sb = new StringBuilder();
this.offset = offset;
reset();
}
public void reset() {
this.index = 0;
this.sourcePath = null;
}
public void add(String name) {
path[index++] = name;
if (index == path.length) { // expand if needed
String[] newPath = new String[path.length + 10];
System.arraycopy(path, 0, newPath, 0, path.length);
path = newPath;
}
}
public void remove() {
        path[--index] = null; // clear the removed element, not the empty slot above it
}
public String pathAsText(String name) {
if (pathType == Type.JUST_NAME) {
return name;
}
return fullPathAsText(name);
}
public String fullPathAsText(String name) {
sb.setLength(0);
for (int i = offset; i < index; i++) {
sb.append(path[i]).append(delimiter);
}
sb.append(name);
return sb.toString();
}
public Type pathType() {
return pathType;
}
public void pathType(Type type) {
this.pathType = type;
}
public String sourcePath(String sourcePath) {
String orig = this.sourcePath;
this.sourcePath = sourcePath;
return orig;
}
public String sourcePath() {
return this.sourcePath;
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_ContentPath.java |
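A short usage sketch grounded directly in the class above; the field names come from an imagined nested mapping.
import org.elasticsearch.index.mapper.ContentPath;

public class ContentPathSketch {
    public static void main(String[] args) {
        ContentPath path = new ContentPath();
        path.add("user");
        path.add("address");
        System.out.println(path.fullPathAsText("city")); // user.address.city
        path.pathType(ContentPath.Type.JUST_NAME);
        System.out.println(path.pathAsText("city"));     // city
    }
}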
438 | public enum UnspecifiedBooleanType {
    TRUE, FALSE, UNSPECIFIED
} | 0true
| common_src_main_java_org_broadleafcommerce_common_presentation_client_UnspecifiedBooleanType.java |
1,602 | public class OReplicationConflictException extends ODistributedException {
private static final String MESSAGE_REMOTE_VERSION = "remote=v";
private static final String MESSAGE_LOCAL_VERSION = "local=v";
private static final long serialVersionUID = 1L;
private final ORID localRID;
private final int localVersion;
private final ORID remoteRID;
private final int remoteVersion;
/**
* Rebuilds the original exception from the message.
*/
public OReplicationConflictException(final String message) {
super(message);
int beginPos = message.indexOf(ORID.PREFIX);
int endPos = message.indexOf(' ', beginPos);
localRID = new ORecordId(message.substring(beginPos, endPos));
beginPos = message.indexOf(MESSAGE_LOCAL_VERSION, endPos) + MESSAGE_LOCAL_VERSION.length();
endPos = message.indexOf(' ', beginPos);
localVersion = Integer.parseInt(message.substring(beginPos, endPos));
beginPos = message.indexOf(MESSAGE_REMOTE_VERSION, endPos) + MESSAGE_REMOTE_VERSION.length();
endPos = message.indexOf(')', beginPos);
remoteVersion = Integer.parseInt(message.substring(beginPos, endPos));
remoteRID = null;
}
public OReplicationConflictException(final String message, final ORID iRID, final int iDatabaseVersion, final int iRecordVersion) {
super(message);
localRID = iRID;
remoteRID = null;
localVersion = iDatabaseVersion;
remoteVersion = iRecordVersion;
}
public OReplicationConflictException(final String message, final ORID iOriginalRID, final ORID iRemoteRID) {
super(message);
localRID = iOriginalRID;
remoteRID = iRemoteRID;
localVersion = remoteVersion = 0;
}
@Override
public String getMessage() {
final StringBuilder buffer = new StringBuilder(super.getMessage());
if (remoteRID != null) {
// RID CONFLICT
buffer.append("local RID=");
buffer.append(localRID);
buffer.append(" remote RID=");
buffer.append(remoteRID);
} else {
// VERSION CONFLICT
buffer.append("local=v");
buffer.append(localVersion);
buffer.append(" remote=v");
buffer.append(remoteVersion);
}
return buffer.toString();
}
@Override
public String toString() {
return getMessage();
}
public int getLocalVersion() {
return localVersion;
}
public int getRemoteVersion() {
return remoteVersion;
}
public ORID getLocalRID() {
return localRID;
}
public ORID getRemoteRID() {
return remoteRID;
}
} | 0true
| server_src_main_java_com_orientechnologies_orient_server_distributed_conflict_OReplicationConflictException.java |
207 | public class MapperQueryParser extends QueryParser {
public static final ImmutableMap<String, FieldQueryExtension> fieldQueryExtensions;
static {
fieldQueryExtensions = ImmutableMap.<String, FieldQueryExtension>builder()
.put(ExistsFieldQueryExtension.NAME, new ExistsFieldQueryExtension())
.put(MissingFieldQueryExtension.NAME, new MissingFieldQueryExtension())
.build();
}
private final QueryParseContext parseContext;
private QueryParserSettings settings;
private Analyzer quoteAnalyzer;
private boolean forcedAnalyzer;
private boolean forcedQuoteAnalyzer;
private FieldMapper currentMapper;
private boolean analyzeWildcard;
private String quoteFieldSuffix;
public MapperQueryParser(QueryParseContext parseContext) {
super(Lucene.QUERYPARSER_VERSION, null, null);
this.parseContext = parseContext;
}
public MapperQueryParser(QueryParserSettings settings, QueryParseContext parseContext) {
super(Lucene.QUERYPARSER_VERSION, settings.defaultField(), settings.defaultAnalyzer());
this.parseContext = parseContext;
reset(settings);
}
public void reset(QueryParserSettings settings) {
this.settings = settings;
this.field = settings.defaultField();
if (settings.fields() != null) {
if (settings.fields.size() == 1) {
// just mark it as the default field
this.field = settings.fields().get(0);
} else {
// otherwise, we need to have the default field being null...
this.field = null;
}
}
this.forcedAnalyzer = settings.forcedAnalyzer() != null;
this.setAnalyzer(forcedAnalyzer ? settings.forcedAnalyzer() : settings.defaultAnalyzer());
if (settings.forcedQuoteAnalyzer() != null) {
this.forcedQuoteAnalyzer = true;
this.quoteAnalyzer = settings.forcedQuoteAnalyzer();
} else if (forcedAnalyzer) {
this.forcedQuoteAnalyzer = true;
this.quoteAnalyzer = settings.forcedAnalyzer();
} else {
            this.forcedQuoteAnalyzer = false;
this.quoteAnalyzer = settings.defaultQuoteAnalyzer();
}
this.quoteFieldSuffix = settings.quoteFieldSuffix();
setMultiTermRewriteMethod(settings.rewriteMethod());
setEnablePositionIncrements(settings.enablePositionIncrements());
setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries());
setAllowLeadingWildcard(settings.allowLeadingWildcard());
setLowercaseExpandedTerms(settings.lowercaseExpandedTerms());
setPhraseSlop(settings.phraseSlop());
setDefaultOperator(settings.defaultOperator());
setFuzzyMinSim(settings.fuzzyMinSim());
setFuzzyPrefixLength(settings.fuzzyPrefixLength());
this.analyzeWildcard = settings.analyzeWildcard();
}
/**
     * We override this one so the fuzzy part is treated as a string, letting people write: "age:10~5" or "timestamp:2012-10-10~5d"
*/
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
if (fuzzySlop.image.length() == 1) {
return getFuzzyQuery(qfield, termImage, Float.toString(fuzzyMinSim));
}
return getFuzzyQuery(qfield, termImage, fuzzySlop.image.substring(1));
}
@Override
protected Query newTermQuery(Term term) {
if (currentMapper != null) {
Query termQuery = currentMapper.queryStringTermQuery(term);
if (termQuery != null) {
return termQuery;
}
}
return super.newTermQuery(term);
}
@Override
protected Query newMatchAllDocsQuery() {
return Queries.newMatchAllQuery();
}
@Override
public Query getFieldQuery(String field, String queryText, boolean quoted) throws ParseException {
FieldQueryExtension fieldQueryExtension = fieldQueryExtensions.get(field);
if (fieldQueryExtension != null) {
return fieldQueryExtension.query(parseContext, queryText);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getFieldQuerySingle(fields.iterator().next(), queryText, quoted);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getFieldQuerySingle(mField, queryText, quoted);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = getFieldQuerySingle(mField, queryText, quoted);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getFieldQuerySingle(field, queryText, quoted);
}
}
private Query getFieldQuerySingle(String field, String queryText, boolean quoted) throws ParseException {
if (!quoted && queryText.length() > 1) {
if (queryText.charAt(0) == '>') {
if (queryText.length() > 2) {
if (queryText.charAt(1) == '=') {
return getRangeQuerySingle(field, queryText.substring(2), null, true, true);
}
}
return getRangeQuerySingle(field, queryText.substring(1), null, false, true);
} else if (queryText.charAt(0) == '<') {
if (queryText.length() > 2) {
if (queryText.charAt(1) == '=') {
return getRangeQuerySingle(field, null, queryText.substring(2), true, true);
}
}
return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
}
}
currentMapper = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
MapperService.SmartNameFieldMappers fieldMappers = null;
if (quoted) {
setAnalyzer(quoteAnalyzer);
if (quoteFieldSuffix != null) {
fieldMappers = parseContext.smartFieldMappers(field + quoteFieldSuffix);
}
}
if (fieldMappers == null) {
fieldMappers = parseContext.smartFieldMappers(field);
}
if (fieldMappers != null) {
if (quoted) {
if (!forcedQuoteAnalyzer) {
setAnalyzer(fieldMappers.searchQuoteAnalyzer());
}
} else {
if (!forcedAnalyzer) {
setAnalyzer(fieldMappers.searchAnalyzer());
}
}
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
Query query = null;
if (currentMapper.useTermQueryWithQueryString()) {
try {
if (fieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{fieldMappers.docMapper().type()});
try {
query = currentMapper.termQuery(queryText, parseContext);
} finally {
QueryParseContext.setTypes(previousTypes);
}
} else {
query = currentMapper.termQuery(queryText, parseContext);
}
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
} else {
throw e;
}
}
}
if (query == null) {
query = super.getFieldQuery(currentMapper.names().indexName(), queryText, quoted);
}
return wrapSmartNameQuery(query, fieldMappers, parseContext);
}
}
return super.getFieldQuery(field, queryText, quoted);
} finally {
setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException {
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = super.getFieldQuery(mField, queryText, slop);
if (q != null) {
added = true;
applyBoost(mField, q);
applySlop(q, slop);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = super.getFieldQuery(mField, queryText, slop);
if (q != null) {
applyBoost(mField, q);
applySlop(q, slop);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return super.getFieldQuery(field, queryText, slop);
}
}
@Override
protected Query getRangeQuery(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) throws ParseException {
if ("*".equals(part1)) {
part1 = null;
}
if ("*".equals(part2)) {
part2 = null;
}
Collection<String> fields = extractMultiFields(field);
if (fields == null) {
return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive);
}
if (fields.size() == 1) {
return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
}
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
currentMapper = null;
MapperService.SmartNameFieldMappers fieldMappers = parseContext.smartFieldMappers(field);
if (fieldMappers != null) {
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
if (lowercaseExpandedTerms && !currentMapper.isNumeric()) {
part1 = part1 == null ? null : part1.toLowerCase(locale);
part2 = part2 == null ? null : part2.toLowerCase(locale);
}
try {
Query rangeQuery = currentMapper.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext);
return wrapSmartNameQuery(rangeQuery, fieldMappers, parseContext);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
}
}
}
return newRangeQuery(field, part1, part2, startInclusive, endInclusive);
}
protected Query getFuzzyQuery(String field, String termStr, String minSimilarity) throws ParseException {
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getFuzzyQuerySingle(fields.iterator().next(), termStr, minSimilarity);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses, true);
}
} else {
return getFuzzyQuerySingle(field, termStr, minSimilarity);
}
}
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
currentMapper = null;
MapperService.SmartNameFieldMappers fieldMappers = parseContext.smartFieldMappers(field);
if (fieldMappers != null) {
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
try {
//LUCENE 4 UPGRADE I disabled transpositions here by default - maybe this needs to be changed
Query fuzzyQuery = currentMapper.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), false);
return wrapSmartNameQuery(fuzzyQuery, fieldMappers, parseContext);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
}
}
}
return super.getFuzzyQuery(field, termStr, Float.parseFloat(minSimilarity));
}
@Override
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
String text = term.text();
int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, text.codePointCount(0, text.length()));
//LUCENE 4 UPGRADE I disabled transpositions here by default - maybe this needs to be changed
FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, settings.fuzzyMaxExpansions(), false);
QueryParsers.setRewriteMethod(query, settings.fuzzyRewriteMethod());
return query;
}
@Override
protected Query getPrefixQuery(String field, String termStr) throws ParseException {
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getPrefixQuerySingle(fields.iterator().next(), termStr);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getPrefixQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = getPrefixQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getPrefixQuerySingle(field, termStr);
}
}
private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {
currentMapper = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
MapperService.SmartNameFieldMappers fieldMappers = parseContext.smartFieldMappers(field);
if (fieldMappers != null) {
if (!forcedAnalyzer) {
setAnalyzer(fieldMappers.searchAnalyzer());
}
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
Query query = null;
if (currentMapper.useTermQueryWithQueryString()) {
if (fieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{fieldMappers.docMapper().type()});
try {
query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
} finally {
QueryParseContext.setTypes(previousTypes);
}
} else {
query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
}
}
if (query == null) {
query = getPossiblyAnalyzedPrefixQuery(currentMapper.names().indexName(), termStr);
}
return wrapSmartNameQuery(query, fieldMappers, parseContext);
}
}
return getPossiblyAnalyzedPrefixQuery(field, termStr);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr) throws ParseException {
if (!analyzeWildcard) {
return super.getPrefixQuery(field, termStr);
}
// get Analyzer from superclass and tokenize the term
TokenStream source;
try {
source = getAnalyzer().tokenStream(field, termStr);
source.reset();
} catch (IOException e) {
return super.getPrefixQuery(field, termStr);
}
List<String> tlist = new ArrayList<String>();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
while (true) {
try {
if (!source.incrementToken()) break;
} catch (IOException e) {
break;
}
tlist.add(termAtt.toString());
}
try {
source.close();
} catch (IOException e) {
// ignore
}
if (tlist.size() == 1) {
return super.getPrefixQuery(field, tlist.get(0));
} else {
// build a boolean query with prefix on each one...
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String token : tlist) {
clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses, true);
//return super.getPrefixQuery(field, termStr);
/* this means that the analyzer used either added or consumed
* (common for a stemmer) tokens, and we can't build a PrefixQuery */
// throw new ParseException("Cannot build PrefixQuery with analyzer "
// + getAnalyzer().getClass()
// + (tlist.size() > 1 ? " - token(s) added" : " - token consumed"));
}
}
@Override
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
if (termStr.equals("*")) {
// we want to optimize for match all query for the "*:*", and "*" cases
if ("*".equals(field) || Objects.equal(field, this.field)) {
String actualField = field;
if (actualField == null) {
actualField = this.field;
}
if (actualField == null) {
return newMatchAllDocsQuery();
}
if ("*".equals(actualField) || "_all".equals(actualField)) {
return newMatchAllDocsQuery();
}
// effectively, we check if a field exists or not
return fieldQueryExtensions.get(ExistsFieldQueryExtension.NAME).query(parseContext, actualField);
}
}
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getWildcardQuerySingle(fields.iterator().next(), termStr);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getWildcardQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = getWildcardQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getWildcardQuerySingle(field, termStr);
}
}
private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {
String indexedNameField = field;
currentMapper = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
MapperService.SmartNameFieldMappers fieldMappers = parseContext.smartFieldMappers(field);
if (fieldMappers != null) {
if (!forcedAnalyzer) {
setAnalyzer(fieldMappers.searchAnalyzer());
}
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
indexedNameField = currentMapper.names().indexName();
}
return wrapSmartNameQuery(getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr), fieldMappers, parseContext);
}
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
private Query getPossiblyAnalyzedWildcardQuery(String field, String termStr) throws ParseException {
if (!analyzeWildcard) {
return super.getWildcardQuery(field, termStr);
}
boolean isWithinToken = (!termStr.startsWith("?") && !termStr.startsWith("*"));
StringBuilder aggStr = new StringBuilder();
StringBuilder tmp = new StringBuilder();
for (int i = 0; i < termStr.length(); i++) {
char c = termStr.charAt(i);
if (c == '?' || c == '*') {
if (isWithinToken) {
try {
TokenStream source = getAnalyzer().tokenStream(field, tmp.toString());
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
if (source.incrementToken()) {
String term = termAtt.toString();
if (term.length() == 0) {
// no tokens, just use what we have now
aggStr.append(tmp);
} else {
aggStr.append(term);
}
} else {
// no tokens, just use what we have now
aggStr.append(tmp);
}
source.close();
} catch (IOException e) {
aggStr.append(tmp);
}
tmp.setLength(0);
}
isWithinToken = false;
aggStr.append(c);
} else {
tmp.append(c);
isWithinToken = true;
}
}
if (isWithinToken) {
try {
TokenStream source = getAnalyzer().tokenStream(field, tmp.toString());
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
if (source.incrementToken()) {
String term = termAtt.toString();
if (term.length() == 0) {
// no tokens, just use what we have now
aggStr.append(tmp);
} else {
aggStr.append(term);
}
} else {
// no tokens, just use what we have now
aggStr.append(tmp);
}
source.close();
} catch (IOException e) {
aggStr.append(tmp);
}
}
return super.getWildcardQuery(field, aggStr.toString());
}
@Override
protected Query getRegexpQuery(String field, String termStr) throws ParseException {
if (lowercaseExpandedTerms) {
termStr = termStr.toLowerCase(locale);
}
Collection<String> fields = extractMultiFields(field);
if (fields != null) {
if (fields.size() == 1) {
return getRegexpQuerySingle(fields.iterator().next(), termStr);
}
if (settings.useDisMax()) {
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(settings.tieBreaker());
boolean added = false;
for (String mField : fields) {
Query q = getRegexpQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
}
}
if (!added) {
return null;
}
return disMaxQuery;
} else {
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
for (String mField : fields) {
Query q = getRegexpQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
return getBooleanQuery(clauses, true);
}
} else {
return getRegexpQuerySingle(field, termStr);
}
}
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
currentMapper = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
MapperService.SmartNameFieldMappers fieldMappers = parseContext.smartFieldMappers(field);
if (fieldMappers != null) {
if (!forcedAnalyzer) {
setAnalyzer(fieldMappers.searchAnalyzer());
}
currentMapper = fieldMappers.fieldMappers().mapper();
if (currentMapper != null) {
Query query = null;
if (currentMapper.useTermQueryWithQueryString()) {
if (fieldMappers.explicitTypeInNameWithDocMapper()) {
String[] previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{fieldMappers.docMapper().type()});
try {
query = currentMapper.regexpQuery(termStr, RegExp.ALL, multiTermRewriteMethod, parseContext);
} finally {
QueryParseContext.setTypes(previousTypes);
}
} else {
query = currentMapper.regexpQuery(termStr, RegExp.ALL, multiTermRewriteMethod, parseContext);
}
}
if (query == null) {
query = super.getRegexpQuery(field, termStr);
}
return wrapSmartNameQuery(query, fieldMappers, parseContext);
}
}
return super.getRegexpQuery(field, termStr);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
}
throw e;
} finally {
setAnalyzer(oldAnalyzer);
}
}
@Override
protected Query getBooleanQuery(List<BooleanClause> clauses, boolean disableCoord) throws ParseException {
Query q = super.getBooleanQuery(clauses, disableCoord);
if (q == null) {
return null;
}
return optimizeQuery(fixNegativeQueryIfNeeded(q));
}
private void applyBoost(String field, Query q) {
if (settings.boosts() != null) {
float boost = 1f;
if (settings.boosts().containsKey(field)) {
boost = settings.boosts().lget();
}
q.setBoost(boost);
}
}
private void applySlop(Query q, int slop) {
if (q instanceof XFilteredQuery) {
applySlop(((XFilteredQuery)q).getQuery(), slop);
}
if (q instanceof PhraseQuery) {
((PhraseQuery) q).setSlop(slop);
} else if (q instanceof MultiPhraseQuery) {
((MultiPhraseQuery) q).setSlop(slop);
}
}
private Collection<String> extractMultiFields(String field) {
Collection<String> fields = null;
if (field != null) {
fields = parseContext.simpleMatchToIndexNames(field);
} else {
fields = settings.fields();
}
return fields;
}
public Query parse(String query) throws ParseException {
if (query.trim().isEmpty()) {
// if the query string is empty we return no docs / empty result
// the behavior is simple to change in the client if all docs is required
// or a default query
return new MatchNoDocsQuery();
}
return super.parse(query);
}
} | 0true
| src_main_java_org_apache_lucene_queryparser_classic_MapperQueryParser.java |
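getFieldQuerySingle above special-cases a leading >, >=, < or <= on an unquoted term and rewrites it into a range query. A hedged sketch of triggering that path through the 1.x-era query_string builder (field name and bounds are illustrative; QueryBuilders.queryString was later renamed queryStringQuery):
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class QueryStringRangeSketch {
    public static void main(String[] args) {
        // "price:>=100" parses as getRangeQuerySingle("price", "100", null, true, true)
        QueryBuilder atLeast = QueryBuilders.queryString("price:>=100");
        // "price:<50" parses as getRangeQuerySingle("price", null, "50", true, false)
        QueryBuilder below = QueryBuilders.queryString("price:<50");
        System.out.println(atLeast + "\n" + below); // renders the equivalent JSON
    }
}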
488 | public final class ClientClusterServiceImpl implements ClientClusterService {
private static final ILogger LOGGER = Logger.getLogger(ClientClusterService.class);
private static final int SLEEP_TIME = 1000;
private final HazelcastClient client;
private final ClientConnectionManagerImpl connectionManager;
private final ClusterListenerThread clusterThread;
private final AtomicReference<Map<Address, MemberImpl>> membersRef = new AtomicReference<Map<Address, MemberImpl>>();
private final ConcurrentMap<String, MembershipListener> listeners = new ConcurrentHashMap<String, MembershipListener>();
public ClientClusterServiceImpl(HazelcastClient client) {
this.client = client;
this.connectionManager = (ClientConnectionManagerImpl) client.getConnectionManager();
clusterThread = new ClusterListenerThread(client.getThreadGroup(), client.getName() + ".cluster-listener");
final ClientConfig clientConfig = getClientConfig();
final List<ListenerConfig> listenerConfigs = client.getClientConfig().getListenerConfigs();
if (listenerConfigs != null && !listenerConfigs.isEmpty()) {
for (ListenerConfig listenerConfig : listenerConfigs) {
EventListener listener = listenerConfig.getImplementation();
if (listener == null) {
try {
listener = ClassLoaderUtil.newInstance(clientConfig.getClassLoader(), listenerConfig.getClassName());
} catch (Exception e) {
LOGGER.severe(e);
}
}
if (listener instanceof MembershipListener) {
addMembershipListenerWithoutInit((MembershipListener) listener);
}
}
}
}
public MemberImpl getMember(Address address) {
final Map<Address, MemberImpl> members = membersRef.get();
return members != null ? members.get(address) : null;
}
public MemberImpl getMember(String uuid) {
final Collection<MemberImpl> memberList = getMemberList();
for (MemberImpl member : memberList) {
if (uuid.equals(member.getUuid())) {
return member;
}
}
return null;
}
public Collection<MemberImpl> getMemberList() {
final Map<Address, MemberImpl> members = membersRef.get();
return members != null ? members.values() : Collections.<MemberImpl>emptySet();
}
public Address getMasterAddress() {
final Collection<MemberImpl> memberList = getMemberList();
return !memberList.isEmpty() ? memberList.iterator().next().getAddress() : null;
}
public int getSize() {
return getMemberList().size();
}
public long getClusterTime() {
return Clock.currentTimeMillis();
}
public Client getLocalClient() {
ClientPrincipal cp = connectionManager.getPrincipal();
ClientConnection conn = clusterThread.conn;
return new ClientImpl(cp != null ? cp.getUuid() : null, conn != null ? conn.getLocalSocketAddress() : null);
}
private SerializationService getSerializationService() {
return client.getSerializationService();
}
public String addMembershipListenerWithInit(MembershipListener listener) {
final String id = UuidUtil.buildRandomUuidString();
listeners.put(id, listener);
if (listener instanceof InitialMembershipListener) {
// TODO: needs sync with membership events...
final Cluster cluster = client.getCluster();
((InitialMembershipListener) listener).init(new InitialMembershipEvent(cluster, cluster.getMembers()));
}
return id;
}
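// Registration sketch (only this class's API is real; the listener body is illustrative):
//   String id = clusterService.addMembershipListenerWithInit(new InitialMembershipListener() {
//       public void init(InitialMembershipEvent e) { /* sees the current member set immediately */ }
//       public void memberAdded(MembershipEvent e) { }
//       public void memberRemoved(MembershipEvent e) { }
//       public void memberAttributeChanged(MemberAttributeEvent e) { }
//   });
//   // later: clusterService.removeMembershipListener(id);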
public String addMembershipListenerWithoutInit(MembershipListener listener) {
final String id = UUID.randomUUID().toString();
listeners.put(id, listener);
return id;
}
private void initMembershipListener() {
for (MembershipListener membershipListener : listeners.values()) {
if (membershipListener instanceof InitialMembershipListener) {
// TODO: needs sync with membership events...
final Cluster cluster = client.getCluster();
((InitialMembershipListener) membershipListener).init(new InitialMembershipEvent(cluster, cluster.getMembers()));
}
}
}
public boolean removeMembershipListener(String registrationId) {
return listeners.remove(registrationId) != null;
}
public void start() {
clusterThread.start();
try {
clusterThread.await();
} catch (InterruptedException e) {
throw new HazelcastException(e);
}
initMembershipListener();
// started
}
public void stop() {
clusterThread.shutdown();
}
private final class ClusterListenerThread extends Thread {
private volatile ClientConnection conn;
private final List<MemberImpl> members = new LinkedList<MemberImpl>();
private final CountDownLatch latch = new CountDownLatch(1);
private ClusterListenerThread(ThreadGroup group, String name) {
super(group, name);
}
public void await() throws InterruptedException {
latch.await();
}
public void run() {
while (!Thread.currentThread().isInterrupted()) {
try {
if (conn == null) {
try {
conn = pickConnection();
} catch (Exception e) {
LOGGER.severe("Error while connecting to cluster!", e);
client.getLifecycleService().shutdown();
latch.countDown();
return;
}
}
getInvocationService().triggerFailedListeners();
loadInitialMemberList();
listenMembershipEvents();
} catch (Exception e) {
if (client.getLifecycleService().isRunning()) {
if (LOGGER.isFinestEnabled()) {
LOGGER.warning("Error while listening cluster events! -> " + conn, e);
} else {
LOGGER.warning("Error while listening cluster events! -> " + conn + ", Error: " + e.toString());
}
}
connectionManager.markOwnerConnectionAsClosed();
IOUtil.closeResource(conn);
conn = null;
fireConnectionEvent(true);
}
try {
Thread.sleep(SLEEP_TIME);
} catch (InterruptedException e) {
latch.countDown();
break;
}
}
}
private ClientInvocationServiceImpl getInvocationService() {
return (ClientInvocationServiceImpl) client.getInvocationService();
}
private ClientConnection pickConnection() throws Exception {
final List<InetSocketAddress> socketAddresses = new LinkedList<InetSocketAddress>();
if (!members.isEmpty()) {
for (MemberImpl member : members) {
socketAddresses.add(member.getInetSocketAddress());
}
Collections.shuffle(socketAddresses);
}
socketAddresses.addAll(getConfigAddresses());
return connectToOne(socketAddresses);
}
private void loadInitialMemberList() throws Exception {
final SerializationService serializationService = getSerializationService();
final AddMembershipListenerRequest request = new AddMembershipListenerRequest();
final SerializableCollection coll = (SerializableCollection) connectionManager.sendAndReceive(request, conn);
Map<String, MemberImpl> prevMembers = Collections.emptyMap();
if (!members.isEmpty()) {
prevMembers = new HashMap<String, MemberImpl>(members.size());
for (MemberImpl member : members) {
prevMembers.put(member.getUuid(), member);
}
members.clear();
}
for (Data data : coll) {
members.add((MemberImpl) serializationService.toObject(data));
}
updateMembersRef();
LOGGER.info(membersString());
final List<MembershipEvent> events = new LinkedList<MembershipEvent>();
final Set<Member> eventMembers = Collections.unmodifiableSet(new LinkedHashSet<Member>(members));
for (MemberImpl member : members) {
final MemberImpl former = prevMembers.remove(member.getUuid());
if (former == null) {
events.add(new MembershipEvent(client.getCluster(), member, MembershipEvent.MEMBER_ADDED, eventMembers));
}
}
for (MemberImpl member : prevMembers.values()) {
events.add(new MembershipEvent(client.getCluster(), member, MembershipEvent.MEMBER_REMOVED, eventMembers));
}
for (MembershipEvent event : events) {
fireMembershipEvent(event);
}
latch.countDown();
}
private void listenMembershipEvents() throws IOException {
final SerializationService serializationService = getSerializationService();
while (!Thread.currentThread().isInterrupted()) {
final Data clientResponseData = conn.read();
final ClientResponse clientResponse = serializationService.toObject(clientResponseData);
final Object eventObject = serializationService.toObject(clientResponse.getResponse());
final ClientMembershipEvent event = (ClientMembershipEvent) eventObject;
final MemberImpl member = (MemberImpl) event.getMember();
boolean membersUpdated = false;
if (event.getEventType() == MembershipEvent.MEMBER_ADDED) {
members.add(member);
membersUpdated = true;
} else if (event.getEventType() == ClientMembershipEvent.MEMBER_REMOVED) {
members.remove(member);
membersUpdated = true;
// getConnectionManager().removeConnectionPool(member.getAddress()); //TODO
} else if (event.getEventType() == ClientMembershipEvent.MEMBER_ATTRIBUTE_CHANGED) {
MemberAttributeChange memberAttributeChange = event.getMemberAttributeChange();
Map<Address, MemberImpl> memberMap = membersRef.get();
if (memberMap != null) {
for (MemberImpl target : memberMap.values()) {
if (target.getUuid().equals(memberAttributeChange.getUuid())) {
final MemberAttributeOperationType operationType = memberAttributeChange.getOperationType();
final String key = memberAttributeChange.getKey();
final Object value = memberAttributeChange.getValue();
target.updateAttribute(operationType, key, value);
MemberAttributeEvent memberAttributeEvent = new MemberAttributeEvent(
client.getCluster(), target, operationType, key, value);
fireMemberAttributeEvent(memberAttributeEvent);
break;
}
}
}
}
if (membersUpdated) {
((ClientPartitionServiceImpl) client.getClientPartitionService()).refreshPartitions();
updateMembersRef();
LOGGER.info(membersString());
fireMembershipEvent(new MembershipEvent(client.getCluster(), member, event.getEventType(),
Collections.unmodifiableSet(new LinkedHashSet<Member>(members))));
}
}
}
private void fireMembershipEvent(final MembershipEvent event) {
client.getClientExecutionService().executeInternal(new Runnable() {
public void run() {
for (MembershipListener listener : listeners.values()) {
if (event.getEventType() == MembershipEvent.MEMBER_ADDED) {
listener.memberAdded(event);
} else {
listener.memberRemoved(event);
}
}
}
});
}
private void fireMemberAttributeEvent(final MemberAttributeEvent event) {
client.getClientExecutionService().executeInternal(new Runnable() {
@Override
public void run() {
for (MembershipListener listener : listeners.values()) {
listener.memberAttributeChanged(event);
}
}
});
}
private void updateMembersRef() {
final Map<Address, MemberImpl> map = new LinkedHashMap<Address, MemberImpl>(members.size());
for (MemberImpl member : members) {
map.put(member.getAddress(), member);
}
membersRef.set(Collections.unmodifiableMap(map));
}
void shutdown() {
interrupt();
final ClientConnection c = conn;
if (c != null) {
c.close();
}
}
}
private ClientConnection connectToOne(final Collection<InetSocketAddress> socketAddresses) throws Exception {
final ClientNetworkConfig networkConfig = getClientConfig().getNetworkConfig();
final int connectionAttemptLimit = networkConfig.getConnectionAttemptLimit();
final int connectionAttemptPeriod = networkConfig.getConnectionAttemptPeriod();
int attempt = 0;
Throwable lastError = null;
while (true) {
final long nextTry = Clock.currentTimeMillis() + connectionAttemptPeriod;
for (InetSocketAddress isa : socketAddresses) {
Address address = new Address(isa);
try {
final ClientConnection connection = connectionManager.ownerConnection(address);
fireConnectionEvent(false);
return connection;
} catch (IOException e) {
lastError = e;
LOGGER.finest("IO error during initial connection...", e);
} catch (AuthenticationException e) {
lastError = e;
LOGGER.warning("Authentication error on " + address, e);
}
}
if (attempt++ >= connectionAttemptLimit) {
break;
}
final long remainingTime = nextTry - Clock.currentTimeMillis();
LOGGER.warning(
String.format("Unable to get alive cluster connection,"
+ " try in %d ms later, attempt %d of %d.",
Math.max(0, remainingTime), attempt, connectionAttemptLimit));
if (remainingTime > 0) {
try {
Thread.sleep(remainingTime);
} catch (InterruptedException e) {
break;
}
}
}
throw new IllegalStateException("Unable to connect to any address in the config!", lastError);
}
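// Timing sketch, assuming connectionAttemptPeriod = 3000 ms: each pass over the
// address list is budgeted one period; if the pass finishes early the thread
// sleeps for the remainder (nextTry - now), so failed rounds are spaced roughly
// 3 s apart until connectionAttemptLimit is exhausted and the
// IllegalStateException above is thrown with the last error as its cause.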
private void fireConnectionEvent(boolean disconnected) {
final LifecycleServiceImpl lifecycleService = (LifecycleServiceImpl) client.getLifecycleService();
final LifecycleState state = disconnected ? LifecycleState.CLIENT_DISCONNECTED : LifecycleState.CLIENT_CONNECTED;
lifecycleService.fireLifecycleEvent(state);
}
private Collection<InetSocketAddress> getConfigAddresses() {
final List<InetSocketAddress> socketAddresses = new LinkedList<InetSocketAddress>();
final List<String> addresses = getClientConfig().getAddresses();
Collections.shuffle(addresses);
for (String address : addresses) {
socketAddresses.addAll(AddressHelper.getSocketAddresses(address));
}
return socketAddresses;
}
private ClientConfig getClientConfig() {
return client.getClientConfig();
}
private String membersString() {
StringBuilder sb = new StringBuilder("\n\nMembers [");
final Collection<MemberImpl> members = getMemberList();
sb.append(members != null ? members.size() : 0);
sb.append("] {");
if (members != null) {
for (Member member : members) {
sb.append("\n\t").append(member);
}
}
sb.append("\n}\n");
return sb.toString();
}
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientClusterServiceImpl.java |
1,430 | public class ProductHandlerMapping extends BLCAbstractHandlerMapping {
private String controllerName="blProductController";
@Resource(name = "blCatalogService")
private CatalogService catalogService;
public static final String CURRENT_PRODUCT_ATTRIBUTE_NAME = "currentProduct";
@Override
protected Object getHandlerInternal(HttpServletRequest request) throws Exception {
BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
if (context != null && context.getRequestURIWithoutContext() != null) {
Product product = catalogService.findProductByURI(context.getRequestURIWithoutContext());
if (product != null) {
context.getRequest().setAttribute(CURRENT_PRODUCT_ATTRIBUTE_NAME, product);
return controllerName;
}
}
return null;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_catalog_ProductHandlerMapping.java |
4,189 | public class BlobStoreIndexShardRepository extends AbstractComponent implements IndexShardRepository {
private BlobStore blobStore;
private BlobPath basePath;
private final String repositoryName;
private ByteSizeValue chunkSize;
private final IndicesService indicesService;
private RateLimiter snapshotRateLimiter;
private RateLimiter restoreRateLimiter;
private RateLimiterListener rateLimiterListener;
private RateLimitingInputStream.Listener snapshotThrottleListener;
private static final String SNAPSHOT_PREFIX = "snapshot-";
@Inject
BlobStoreIndexShardRepository(Settings settings, RepositoryName repositoryName, IndicesService indicesService) {
super(settings);
this.repositoryName = repositoryName.name();
this.indicesService = indicesService;
}
/**
* Called by {@link org.elasticsearch.repositories.blobstore.BlobStoreRepository} on repository startup
*
* @param blobStore blob store
* @param basePath base path to blob store
* @param chunkSize chunk size
* @param snapshotRateLimiter rate limiter applied while writing snapshot data
* @param restoreRateLimiter rate limiter applied while reading restore data
* @param rateLimiterListener listener notified whenever a rate limiter pauses
*/
public void initialize(BlobStore blobStore, BlobPath basePath, ByteSizeValue chunkSize,
RateLimiter snapshotRateLimiter, RateLimiter restoreRateLimiter,
final RateLimiterListener rateLimiterListener) {
this.blobStore = blobStore;
this.basePath = basePath;
this.chunkSize = chunkSize;
this.snapshotRateLimiter = snapshotRateLimiter;
this.restoreRateLimiter = restoreRateLimiter;
this.rateLimiterListener = rateLimiterListener;
this.snapshotThrottleListener = new RateLimitingInputStream.Listener() {
@Override
public void onPause(long nanos) {
rateLimiterListener.onSnapshotPause(nanos);
}
};
}
/**
* {@inheritDoc}
*/
@Override
public void snapshot(SnapshotId snapshotId, ShardId shardId, SnapshotIndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {
SnapshotContext snapshotContext = new SnapshotContext(snapshotId, shardId, snapshotStatus);
snapshotStatus.startTime(System.currentTimeMillis());
try {
snapshotContext.snapshot(snapshotIndexCommit);
snapshotStatus.time(System.currentTimeMillis() - snapshotStatus.startTime());
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.DONE);
} catch (Throwable e) {
snapshotStatus.time(System.currentTimeMillis() - snapshotStatus.startTime());
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.FAILURE);
if (e instanceof IndexShardSnapshotFailedException) {
throw (IndexShardSnapshotFailedException) e;
} else {
throw new IndexShardSnapshotFailedException(shardId, e.getMessage(), e);
}
}
}
/**
* {@inheritDoc}
*/
@Override
public void restore(SnapshotId snapshotId, ShardId shardId, ShardId snapshotShardId, RecoveryStatus recoveryStatus) {
RestoreContext snapshotContext = new RestoreContext(snapshotId, shardId, snapshotShardId, recoveryStatus);
try {
recoveryStatus.index().startTime(System.currentTimeMillis());
snapshotContext.restore();
recoveryStatus.index().time(System.currentTimeMillis() - recoveryStatus.index().startTime());
} catch (Throwable e) {
throw new IndexShardRestoreFailedException(shardId, "failed to restore snapshot [" + snapshotId.getSnapshot() + "]", e);
}
}
/**
* Delete shard snapshot
*
* @param snapshotId snapshot id
* @param shardId shard id
*/
public void delete(SnapshotId snapshotId, ShardId shardId) {
Context context = new Context(snapshotId, shardId, shardId);
context.delete();
}
@Override
public String toString() {
return "BlobStoreIndexShardRepository[" +
"[" + repositoryName +
"], [" + blobStore + ']' +
']';
}
/**
* Returns shard snapshot metadata file name
*
* @param snapshotId snapshot id
* @return shard snapshot metadata file name
*/
private String snapshotBlobName(SnapshotId snapshotId) {
return SNAPSHOT_PREFIX + snapshotId.getSnapshot();
}
/**
* Serializes snapshot to JSON
*
* @param snapshot snapshot
* @return JSON representation of the snapshot
* @throws IOException
*/
public static byte[] writeSnapshot(BlobStoreIndexShardSnapshot snapshot) throws IOException {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
BlobStoreIndexShardSnapshot.toXContent(snapshot, builder, ToXContent.EMPTY_PARAMS);
return builder.bytes().toBytes();
}
/**
* Parses JSON representation of a snapshot
*
* @param data JSON
* @return snapshot
* @throws IOException
*/
public static BlobStoreIndexShardSnapshot readSnapshot(byte[] data) throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data);
try {
parser.nextToken();
return BlobStoreIndexShardSnapshot.fromXContent(parser);
} finally {
parser.close();
}
}
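// Round-trip sketch using the two helpers above (the snapshot value is illustrative):
//   BlobStoreIndexShardSnapshot original = ...; // built during snapshotting
//   byte[] json = writeSnapshot(original);
//   BlobStoreIndexShardSnapshot copy = readSnapshot(json);
//   // copy.snapshot() and copy.indexFiles() mirror the serialized instance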
/**
* Context for snapshot/restore operations
*/
private class Context {
protected final SnapshotId snapshotId;
protected final ShardId shardId;
protected final ImmutableBlobContainer blobContainer;
public Context(SnapshotId snapshotId, ShardId shardId) {
this(snapshotId, shardId, shardId);
}
public Context(SnapshotId snapshotId, ShardId shardId, ShardId snapshotShardId) {
this.snapshotId = snapshotId;
this.shardId = shardId;
blobContainer = blobStore.immutableBlobContainer(basePath.add("indices").add(snapshotShardId.getIndex()).add(Integer.toString(snapshotShardId.getId())));
}
/**
* Delete shard snapshot
*/
public void delete() {
final ImmutableMap<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotException(shardId, "Failed to list content of gateway", e);
}
BlobStoreIndexShardSnapshots snapshots = buildBlobStoreIndexShardSnapshots(blobs);
String commitPointName = snapshotBlobName(snapshotId);
try {
blobContainer.deleteBlob(commitPointName);
} catch (IOException e) {
logger.debug("[{}] [{}] failed to delete shard snapshot file", shardId, snapshotId);
}
// delete all files that are not referenced by any commit point
// build a new list of snapshots that excludes the one being deleted
List<BlobStoreIndexShardSnapshot> newSnapshotsList = Lists.newArrayList();
for (BlobStoreIndexShardSnapshot point : snapshots) {
if (!point.snapshot().equals(snapshotId.getSnapshot())) {
newSnapshotsList.add(point);
}
}
cleanup(newSnapshotsList, blobs);
}
/**
* Removes all unreferenced files from the repository
*
* @param snapshots list of active snapshots in the container
* @param blobs list of blobs in the container
*/
protected void cleanup(List<BlobStoreIndexShardSnapshot> snapshots, ImmutableMap<String, BlobMetaData> blobs) {
BlobStoreIndexShardSnapshots newSnapshots = new BlobStoreIndexShardSnapshots(snapshots);
// now go over all the blobs, and if they don't exist in a snapshot, delete them
for (String blobName : blobs.keySet()) {
if (!blobName.startsWith("__")) {
continue;
}
if (newSnapshots.findNameFile(FileInfo.canonicalName(blobName)) == null) {
try {
blobContainer.deleteBlob(blobName);
} catch (IOException e) {
logger.debug("[{}] [{}] error deleting blob [{}] during cleanup", e, snapshotId, shardId, blobName);
}
}
}
}
/**
* Generates blob name
*
* @param generation the blob number
* @return the blob name
*/
protected String fileNameFromGeneration(long generation) {
return "__" + Long.toString(generation, Character.MAX_RADIX);
}
/**
* Finds the next available blob number
*
* @param blobs list of blobs in the repository
* @return next available blob number
*/
protected long findLatestFileNameGeneration(ImmutableMap<String, BlobMetaData> blobs) {
long generation = -1;
for (String name : blobs.keySet()) {
if (!name.startsWith("__")) {
continue;
}
name = FileInfo.canonicalName(name);
try {
long currentGen = Long.parseLong(name.substring(2) /*__*/, Character.MAX_RADIX);
if (currentGen > generation) {
generation = currentGen;
}
} catch (NumberFormatException e) {
logger.warn("file [{}] does not conform to the '__' schema");
}
}
return generation;
}
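// Naming sketch: generations are base-36 encoded, e.g. fileNameFromGeneration(255)
// yields "__73" (255 = 7*36 + 3), and findLatestFileNameGeneration reverses that
// by parsing the substring after "__" with Character.MAX_RADIX.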
/**
* Loads all available snapshots in the repository
*
* @param blobs list of blobs in repository
* @return BlobStoreIndexShardSnapshots
*/
protected BlobStoreIndexShardSnapshots buildBlobStoreIndexShardSnapshots(ImmutableMap<String, BlobMetaData> blobs) {
List<BlobStoreIndexShardSnapshot> snapshots = Lists.newArrayList();
for (String name : blobs.keySet()) {
if (name.startsWith(SNAPSHOT_PREFIX)) {
try {
snapshots.add(readSnapshot(blobContainer.readBlobFully(name)));
} catch (IOException e) {
logger.warn("failed to read commit point [{}]", e, name);
}
}
}
return new BlobStoreIndexShardSnapshots(snapshots);
}
}
/**
* Context for snapshot operations
*/
private class SnapshotContext extends Context {
private final Store store;
private final IndexShardSnapshotStatus snapshotStatus;
/**
* Constructs new context
*
* @param snapshotId snapshot id
* @param shardId shard to be snapshotted
* @param snapshotStatus snapshot status to report progress
*/
public SnapshotContext(SnapshotId snapshotId, ShardId shardId, IndexShardSnapshotStatus snapshotStatus) {
super(snapshotId, shardId);
store = indicesService.indexServiceSafe(shardId.getIndex()).shardInjectorSafe(shardId.id()).getInstance(Store.class);
this.snapshotStatus = snapshotStatus;
}
/**
* Create snapshot from index commit point
*
* @param snapshotIndexCommit the commit point to snapshot
*/
public void snapshot(SnapshotIndexCommit snapshotIndexCommit) {
logger.debug("[{}] [{}] snapshot to [{}] ...", shardId, snapshotId, repositoryName);
final ImmutableMap<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e);
}
long generation = findLatestFileNameGeneration(blobs);
BlobStoreIndexShardSnapshots snapshots = buildBlobStoreIndexShardSnapshots(blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.STARTED);
final CountDownLatch indexLatch = new CountDownLatch(snapshotIndexCommit.getFiles().length);
final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = newArrayList();
int indexNumberOfFiles = 0;
long indexTotalFilesSize = 0;
for (String fileName : snapshotIndexCommit.getFiles()) {
if (snapshotStatus.aborted()) {
logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileName);
throw new IndexShardSnapshotFailedException(shardId, "Aborted");
}
logger.trace("[{}] [{}] Processing [{}]", shardId, snapshotId, fileName);
final StoreFileMetaData md;
try {
md = store.metaData(fileName);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to get store file metadata", e);
}
boolean snapshotRequired = false;
// TODO: For now segment files are copied on each commit because segment files don't have checksum
// if (snapshot.indexChanged() && fileName.equals(snapshotIndexCommit.getSegmentsFileName())) {
// snapshotRequired = true; // we want to always snapshot the segment file if the index changed
// }
BlobStoreIndexShardSnapshot.FileInfo fileInfo = snapshots.findPhysicalIndexFile(fileName);
if (fileInfo == null || !fileInfo.isSame(md) || !snapshotFileExistsInBlobs(fileInfo, blobs)) {
// commit point file does not exist in any commit point, or has a different length, or does not fully exist in the listed blobs
snapshotRequired = true;
}
if (snapshotRequired) {
indexNumberOfFiles++;
indexTotalFilesSize += md.length();
// create a new FileInfo
try {
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), fileName, md.length(), chunkSize, md.checksum());
indexCommitPointFiles.add(snapshotFileInfo);
snapshotFile(snapshotFileInfo, indexLatch, failures);
} catch (IOException e) {
failures.add(e);
}
} else {
indexCommitPointFiles.add(fileInfo);
indexLatch.countDown();
}
}
snapshotStatus.files(indexNumberOfFiles, indexTotalFilesSize);
snapshotStatus.indexVersion(snapshotIndexCommit.getGeneration());
try {
indexLatch.await();
} catch (InterruptedException e) {
failures.add(e);
Thread.currentThread().interrupt();
}
if (!failures.isEmpty()) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to perform snapshot (index files)", failures.get(0));
}
// now create and write the commit point
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.FINALIZE);
String commitPointName = snapshotBlobName(snapshotId);
BlobStoreIndexShardSnapshot snapshot = new BlobStoreIndexShardSnapshot(snapshotId.getSnapshot(), snapshotIndexCommit.getGeneration(), indexCommitPointFiles);
try {
byte[] snapshotData = writeSnapshot(snapshot);
logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId);
blobContainer.writeBlob(commitPointName, new BytesStreamInput(snapshotData, false), snapshotData.length);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to write commit point", e);
}
// delete all files that are not referenced by any commit point
// build a new BlobStoreIndexShardSnapshots that includes this one and all the saved ones
List<BlobStoreIndexShardSnapshot> newSnapshotsList = Lists.newArrayList();
newSnapshotsList.add(snapshot);
for (BlobStoreIndexShardSnapshot point : snapshots) {
newSnapshotsList.add(point);
}
cleanup(newSnapshotsList, blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.DONE);
}
/**
* Snapshot individual file
* <p/>
* This is an asynchronous method. Upon completion of the operation the latch is counted down and any failures are
* added to the {@code failures} list
*
* @param fileInfo file to be snapshotted
* @param latch latch that should be counted down once the file is snapshotted
* @param failures thread-safe list of failures
* @throws IOException
*/
private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo, final CountDownLatch latch, final List<Throwable> failures) throws IOException {
final AtomicLong counter = new AtomicLong(fileInfo.numberOfParts());
for (long i = 0; i < fileInfo.numberOfParts(); i++) {
IndexInput indexInput = null;
try {
indexInput = store.openInputRaw(fileInfo.physicalName(), IOContext.READONCE);
indexInput.seek(i * fileInfo.partBytes());
InputStreamIndexInput inputStreamIndexInput = new ThreadSafeInputStreamIndexInput(indexInput, fileInfo.partBytes());
final IndexInput fIndexInput = indexInput;
long size = inputStreamIndexInput.actualSizeToRead();
InputStream inputStream;
if (snapshotRateLimiter != null) {
inputStream = new RateLimitingInputStream(inputStreamIndexInput, snapshotRateLimiter, snapshotThrottleListener);
} else {
inputStream = inputStreamIndexInput;
}
blobContainer.writeBlob(fileInfo.partName(i), inputStream, size, new ImmutableBlobContainer.WriterListener() {
@Override
public void onCompleted() {
IOUtils.closeWhileHandlingException(fIndexInput);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
@Override
public void onFailure(Throwable t) {
IOUtils.closeWhileHandlingException(fIndexInput);
failures.add(t);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
});
} catch (Throwable e) {
IOUtils.closeWhileHandlingException(indexInput);
failures.add(e);
latch.countDown();
}
}
}
/**
* Checks if snapshot file already exists in the list of blobs
*
* @param fileInfo file to check
* @param blobs list of blobs
* @return true if file exists in the list of blobs
*/
private boolean snapshotFileExistsInBlobs(BlobStoreIndexShardSnapshot.FileInfo fileInfo, ImmutableMap<String, BlobMetaData> blobs) {
BlobMetaData blobMetaData = blobs.get(fileInfo.name());
if (blobMetaData != null) {
return blobMetaData.length() == fileInfo.length();
} else if (blobs.containsKey(fileInfo.partName(0))) {
// multi part file sum up the size and check
int part = 0;
long totalSize = 0;
while (true) {
blobMetaData = blobs.get(fileInfo.partName(part++));
if (blobMetaData == null) {
break;
}
totalSize += blobMetaData.length();
}
return totalSize == fileInfo.length();
}
// no file, not exact and not multipart
return false;
}
}
/**
* Context for restore operations
*/
private class RestoreContext extends Context {
private final Store store;
private final RecoveryStatus recoveryStatus;
/**
* Constructs new restore context
*
* @param snapshotId snapshot id
* @param shardId shard to be restored
* @param snapshotShardId shard in the snapshot that data should be restored from
* @param recoveryStatus recovery status to report progress
*/
public RestoreContext(SnapshotId snapshotId, ShardId shardId, ShardId snapshotShardId, RecoveryStatus recoveryStatus) {
super(snapshotId, shardId, snapshotShardId);
store = indicesService.indexServiceSafe(shardId.getIndex()).shardInjectorSafe(shardId.id()).getInstance(Store.class);
this.recoveryStatus = recoveryStatus;
}
/**
* Performs restore operation
*/
public void restore() {
logger.debug("[{}] [{}] restoring to [{}] ...", snapshotId, repositoryName, shardId);
BlobStoreIndexShardSnapshot snapshot;
try {
snapshot = readSnapshot(blobContainer.readBlobFully(snapshotBlobName(snapshotId)));
} catch (IOException ex) {
throw new IndexShardRestoreFailedException(shardId, "failed to read shard snapshot file", ex);
}
recoveryStatus.updateStage(RecoveryStatus.Stage.INDEX);
int numberOfFiles = 0;
long totalSize = 0;
int numberOfReusedFiles = 0;
long reusedTotalSize = 0;
List<FileInfo> filesToRecover = Lists.newArrayList();
for (FileInfo fileInfo : snapshot.indexFiles()) {
String fileName = fileInfo.physicalName();
StoreFileMetaData md = null;
try {
md = store.metaData(fileName);
} catch (IOException e) {
// no file
}
numberOfFiles++;
// we don't compute checksum for segments, so always recover them
if (!fileName.startsWith("segments") && md != null && fileInfo.isSame(md)) {
totalSize += md.length();
numberOfReusedFiles++;
reusedTotalSize += md.length();
if (logger.isTraceEnabled()) {
logger.trace("not_recovering [{}], exists in local store and is same", fileInfo.physicalName());
}
} else {
totalSize += fileInfo.length();
filesToRecover.add(fileInfo);
if (logger.isTraceEnabled()) {
if (md == null) {
logger.trace("recovering [{}], does not exists in local store", fileInfo.physicalName());
} else {
logger.trace("recovering [{}], exists in local store but is different", fileInfo.physicalName());
}
}
}
}
recoveryStatus.index().files(numberOfFiles, totalSize, numberOfReusedFiles, reusedTotalSize);
if (filesToRecover.isEmpty()) {
logger.trace("no files to recover, all exists within the local store");
}
if (logger.isTraceEnabled()) {
logger.trace("[{}] [{}] recovering_files [{}] with total_size [{}], reusing_files [{}] with reused_size [{}]", shardId, snapshotId, numberOfFiles, new ByteSizeValue(totalSize), numberOfReusedFiles, new ByteSizeValue(reusedTotalSize));
}
final CountDownLatch latch = new CountDownLatch(filesToRecover.size());
final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
for (final FileInfo fileToRecover : filesToRecover) {
logger.trace("[{}] [{}] restoring file [{}]", shardId, snapshotId, fileToRecover.name());
restoreFile(fileToRecover, latch, failures);
}
try {
latch.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!failures.isEmpty()) {
throw new IndexShardRestoreFailedException(shardId, "Failed to recover index", failures.get(0));
}
// read the snapshot data persisted
long version = -1;
try {
if (Lucene.indexExists(store.directory())) {
version = Lucene.readSegmentInfos(store.directory()).getVersion();
}
} catch (IOException e) {
throw new IndexShardRestoreFailedException(shardId, "Failed to fetch index version after copying it over", e);
}
recoveryStatus.index().updateVersion(version);
// now, go over and clean files that are in the store, but were not in the snapshot
try {
for (String storeFile : store.directory().listAll()) {
if (!snapshot.containPhysicalIndexFile(storeFile)) {
try {
store.directory().deleteFile(storeFile);
} catch (IOException e) {
// ignore
}
}
}
} catch (IOException e) {
// ignore
}
}
/**
* Restores a file
* This is an asynchronous method. Upon completion of the operation the latch is counted down and any failures are
* added to the {@code failures} list
*
* @param fileInfo file to be restored
* @param latch latch that should be counted down once the file is restored
* @param failures thread-safe list of failures
*/
private void restoreFile(final FileInfo fileInfo, final CountDownLatch latch, final List<Throwable> failures) {
final IndexOutput indexOutput;
try {
// we create an output with no checksum, this is because the pure binary data of the file is not
// the checksum (because of seek). We will create the checksum file once copying is done
indexOutput = store.createOutputRaw(fileInfo.physicalName());
} catch (IOException e) {
failures.add(e);
latch.countDown();
return;
}
String firstFileToRecover = fileInfo.partName(0);
final AtomicInteger partIndex = new AtomicInteger();
blobContainer.readBlob(firstFileToRecover, new BlobContainer.ReadBlobListener() {
@Override
public synchronized void onPartial(byte[] data, int offset, int size) throws IOException {
recoveryStatus.index().addCurrentFilesSize(size);
indexOutput.writeBytes(data, offset, size);
if (restoreRateLimiter != null) {
rateLimiterListener.onRestorePause(restoreRateLimiter.pause(size));
}
}
@Override
public synchronized void onCompleted() {
int part = partIndex.incrementAndGet();
if (part < fileInfo.numberOfParts()) {
String partName = fileInfo.partName(part);
// continue with the new part
blobContainer.readBlob(partName, this);
return;
} else {
// we are done...
try {
indexOutput.close();
// write the checksum
if (fileInfo.checksum() != null) {
store.writeChecksum(fileInfo.physicalName(), fileInfo.checksum());
}
store.directory().sync(Collections.singleton(fileInfo.physicalName()));
} catch (IOException e) {
onFailure(e);
return;
}
}
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
failures.add(t);
latch.countDown();
}
});
}
}
public interface RateLimiterListener {
void onRestorePause(long nanos);
void onSnapshotPause(long nanos);
}
} | 1no label
| src_main_java_org_elasticsearch_index_snapshots_blobstore_BlobStoreIndexShardRepository.java |
433 | public class RawQuery extends BaseQuery {
private final String store;
private final String query;
private final Parameter[] parameters;
private int offset;
public RawQuery(String store, String query, Parameter[] parameters) {
Preconditions.checkNotNull(store);
Preconditions.checkArgument(StringUtils.isNotBlank(query));
Preconditions.checkNotNull(parameters);
this.store = store;
this.query = query;
this.parameters = parameters;
this.offset = 0;
}
public RawQuery setOffset(int offset) {
Preconditions.checkArgument(offset>=0,"Invalid offset: %s",offset);
this.offset=offset;
return this;
}
@Override
public RawQuery setLimit(int limit) {
super.setLimit(limit);
return this;
}
public int getOffset() {
return offset;
}
public String getStore() {
return store;
}
public String getQuery() {
return query;
}
public Parameter[] getParameters() {
return parameters;
}
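// Construction sketch (the store/query values are hypothetical):
//   RawQuery q = new RawQuery("vertex", "name:john", new Parameter[0]);
//   q.setOffset(10).setLimit(20); // paging: skip 10, return at most 20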
public static class Result<O> {
private final O result;
private final double score;
public Result(O result, double score) {
this.result = result;
this.score = score;
}
public O getResult() {
return result;
}
public double getScore() {
return score;
}
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_indexing_RawQuery.java |
295 | new Thread() {
public void run() {
if (!l.tryLock()) {
latch.countDown();
}
try {
if (l.tryLock(5, TimeUnit.SECONDS)) {
latch.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start(); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_lock_ClientLockTest.java |
1,422 | public class OChannelBinaryAsynchClientSynch extends OChannelBinaryAsynchClient {
public OChannelBinaryAsynchClientSynch(final String remoteHost, final int remotePort, final OContextConfiguration iConfig)
throws IOException {
super(remoteHost, remotePort, iConfig, OChannelBinaryProtocol.CURRENT_PROTOCOL_VERSION);
}
} | 0true
| enterprise_src_main_java_com_orientechnologies_orient_enterprise_channel_binary_OChannelBinaryAsynchClientSynch.java |
916 | public class LockProxy extends AbstractDistributedObject<LockServiceImpl> implements ILock {
private final String name;
private final LockProxySupport lockSupport;
private final Data key;
private final int partitionId;
public LockProxy(NodeEngine nodeEngine, LockServiceImpl lockService, String name) {
super(nodeEngine, lockService);
this.name = name;
this.key = getNameAsPartitionAwareData();
this.lockSupport = new LockProxySupport(new InternalLockNamespace(name));
this.partitionId = getNodeEngine().getPartitionService().getPartitionId(key);
}
@Override
public boolean isLocked() {
return lockSupport.isLocked(getNodeEngine(), key);
}
@Override
public boolean isLockedByCurrentThread() {
return lockSupport.isLockedByCurrentThread(getNodeEngine(), key);
}
@Override
public int getLockCount() {
return lockSupport.getLockCount(getNodeEngine(), key);
}
@Override
public long getRemainingLeaseTime() {
return lockSupport.getRemainingLeaseTime(getNodeEngine(), key);
}
@Override
public void lock() {
lockSupport.lock(getNodeEngine(), key);
}
@Override
public void lock(long leaseTime, TimeUnit timeUnit) {
shouldBePositive(leaseTime, "leaseTime");
lockSupport.lock(getNodeEngine(), key, timeUnit.toMillis(leaseTime));
}
@Override
public void lockInterruptibly() throws InterruptedException {
lock();
}
@Override
public boolean tryLock() {
return lockSupport.tryLock(getNodeEngine(), key);
}
@Override
public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
if (unit == null) {
throw new NullPointerException("unit can't be null");
}
return lockSupport.tryLock(getNodeEngine(), key, time, unit);
}
@Override
public void unlock() {
lockSupport.unlock(getNodeEngine(), key);
}
@Override
public void forceUnlock() {
lockSupport.forceUnlock(getNodeEngine(), key);
}
@Override
public Condition newCondition() {
throw new UnsupportedOperationException("Use ICondition.newCondition(String name) instead!");
}
@Override
public ICondition newCondition(String name) {
if (name == null) {
throw new NullPointerException("Condition name can't be null");
}
return new ConditionImpl(this, name);
}
@Override
public String getName() {
return name;
}
@Override
public String getServiceName() {
return LockService.SERVICE_NAME;
}
@Deprecated
public Object getKey() {
return getName();
}
public Data getKeyData() {
return key;
}
public int getPartitionId() {
return partitionId;
}
ObjectNamespace getNamespace() {
return lockSupport.getNamespace();
}
// will be removed when HazelcastInstance.getLock(Object key) is removed from API
public static String convertToStringKey(Object key, SerializationService serializationService) {
if (key instanceof String) {
return String.valueOf(key);
} else {
Data data = serializationService.toData(key, PARTITIONING_STRATEGY);
// name = Integer.toString(data.hashCode());
byte[] buffer = data.getBuffer();
return Arrays.toString(buffer);
}
}
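// Behavior sketch (inputs are illustrative): a String key passes through
// unchanged, anything else becomes the textual form of its serialized bytes:
//   convertToStringKey("orders", ss) -> "orders"
//   convertToStringKey(42L, ss)      -> e.g. "[0, 0, 0, ...]" (byte array dump)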
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ILock{");
sb.append("name='").append(name).append('\'');
sb.append('}');
return sb.toString();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_LockProxy.java |
172 | public static class Stats {
public AtomicLong puts = new AtomicLong();
public AtomicLong gets = new AtomicLong();
public AtomicLong removes = new AtomicLong();
public Stats getAndReset() {
long putsNow = puts.getAndSet(0);
long getsNow = gets.getAndSet(0);
long removesNow = removes.getAndSet(0);
Stats newOne = new Stats();
newOne.puts.set(putsNow);
newOne.gets.set(getsNow);
newOne.removes.set(removesNow);
return newOne;
}
public long total() {
return puts.get() + gets.get() + removes.get();
}
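// Sampling sketch: a monitor thread can call getAndReset() once per interval and
// report the returned snapshot while counting continues in the live instance:
//   Stats lastInterval = stats.getAndReset();
//   System.out.println("ops in interval: " + lastInterval.total());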
public String toString() {
return "total= " + total() + ", gets:" + gets.get() + ", puts: " + puts.get() + ", removes:" + removes.get();
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_SimpleMapTestFromClient.java |
215 | protected abstract class PreviousSubWordAction extends TextNavigationAction {
protected CeylonWordIterator fIterator= new CeylonWordIterator();
/**
* Creates a new previous sub-word action.
*
* @param code Action code for the default operation. Must be an action code from {@link org.eclipse.swt.custom.ST}.
*/
protected PreviousSubWordAction(final int code) {
super(getSourceViewer().getTextWidget(), code);
}
@Override
public void run() {
// Check whether we are in a java code partition and the preference is enabled
final IPreferenceStore store= getPreferenceStore();
if (!store.getBoolean(SUB_WORD_NAVIGATION)) {
super.run();
return;
}
final ISourceViewer viewer= getSourceViewer();
final IDocument document= viewer.getDocument();
try {
fIterator.setText((CharacterIterator) new DocumentCharacterIterator(document));
int position= widgetOffset2ModelOffset(viewer, viewer.getTextWidget().getCaretOffset());
if (position == -1)
return;
int previous= findPreviousPosition(position);
if (isBlockSelectionModeEnabled() &&
document.getLineOfOffset(previous)!=document.getLineOfOffset(position)) {
super.run(); // may navigate into virtual white space
} else if (previous != BreakIterator.DONE) {
setCaretPosition(previous);
getTextWidget().showSelection();
fireSelectionChanged();
}
} catch (BadLocationException x) {
// ignore - getLineOfOffset failed
}
}
/**
* Finds the previous position before the given position.
*
* @param position the current position
* @return the previous position
*/
protected int findPreviousPosition(int position) {
ISourceViewer viewer= getSourceViewer();
int widget= -1;
int previous= position;
while (previous != BreakIterator.DONE && widget == -1) { // XXX: optimize
previous= fIterator.preceding(previous);
if (previous != BreakIterator.DONE)
widget= modelOffset2WidgetOffset(viewer, previous);
}
IDocument document= viewer.getDocument();
LinkedModeModel model= LinkedModeModel.getModel(document, position);
if (model != null && previous != BreakIterator.DONE) {
LinkedPosition linkedPosition=
model.findPosition(new LinkedPosition(document, position, 0));
if (linkedPosition != null) {
int linkedPositionOffset= linkedPosition.getOffset();
if (position != linkedPositionOffset && previous < linkedPositionOffset)
previous= linkedPositionOffset;
} else {
LinkedPosition previousLinkedPosition=
model.findPosition(new LinkedPosition(document, previous, 0));
if (previousLinkedPosition != null) {
int previousLinkedPositionEnd=
previousLinkedPosition.getOffset() + previousLinkedPosition.getLength();
if (position != previousLinkedPositionEnd && previous < previousLinkedPositionEnd)
previous= previousLinkedPositionEnd;
}
}
}
return previous;
}
/**
* Sets the caret position to the sub-word boundary given with <code>position</code>.
*
* @param position Position where the action should move the caret
*/
protected abstract void setCaretPosition(int position);
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonEditor.java |
3,090 | static interface Operation {
static enum Type {
CREATE,
INDEX,
DELETE
}
static enum Origin {
PRIMARY,
REPLICA,
RECOVERY
}
Type opType();
Origin origin();
} | 0true
| src_main_java_org_elasticsearch_index_engine_Engine.java |
3,514 | public class MapperServiceModule extends AbstractModule {
@Override
protected void configure() {
bind(MapperService.class).asEagerSingleton();
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_MapperServiceModule.java |
3,412 | public class CommitPointsTests extends ElasticsearchTestCase {
private final ESLogger logger = Loggers.getLogger(CommitPointsTests.class);
@Test
public void testCommitPointXContent() throws Exception {
ArrayList<CommitPoint.FileInfo> indexFiles = Lists.newArrayList();
indexFiles.add(new CommitPoint.FileInfo("file1", "file1_p", 100, "ck1"));
indexFiles.add(new CommitPoint.FileInfo("file2", "file2_p", 200, "ck2"));
ArrayList<CommitPoint.FileInfo> translogFiles = Lists.newArrayList();
translogFiles.add(new CommitPoint.FileInfo("t_file1", "t_file1_p", 100, null));
translogFiles.add(new CommitPoint.FileInfo("t_file2", "t_file2_p", 200, null));
CommitPoint commitPoint = new CommitPoint(1, "test", CommitPoint.Type.GENERATED, indexFiles, translogFiles);
byte[] serialized = CommitPoints.toXContent(commitPoint);
logger.info("serialized commit_point {}", new String(serialized, Charsets.UTF_8));
CommitPoint desCp = CommitPoints.fromXContent(serialized);
assertThat(desCp.version(), equalTo(commitPoint.version()));
assertThat(desCp.name(), equalTo(commitPoint.name()));
assertThat(desCp.indexFiles().size(), equalTo(commitPoint.indexFiles().size()));
for (int i = 0; i < desCp.indexFiles().size(); i++) {
assertThat(desCp.indexFiles().get(i).name(), equalTo(commitPoint.indexFiles().get(i).name()));
assertThat(desCp.indexFiles().get(i).physicalName(), equalTo(commitPoint.indexFiles().get(i).physicalName()));
assertThat(desCp.indexFiles().get(i).length(), equalTo(commitPoint.indexFiles().get(i).length()));
assertThat(desCp.indexFiles().get(i).checksum(), equalTo(commitPoint.indexFiles().get(i).checksum()));
}
assertThat(desCp.translogFiles().size(), equalTo(commitPoint.translogFiles().size()));
for (int i = 0; i < desCp.translogFiles().size(); i++) {
assertThat(desCp.translogFiles().get(i).name(), equalTo(commitPoint.translogFiles().get(i).name()));
assertThat(desCp.translogFiles().get(i).physicalName(), equalTo(commitPoint.translogFiles().get(i).physicalName()));
assertThat(desCp.translogFiles().get(i).length(), equalTo(commitPoint.translogFiles().get(i).length()));
assertThat(desCp.translogFiles().get(i).checksum(), nullValue());
}
}
} | 0true
| src_test_java_org_elasticsearch_index_gateway_CommitPointsTests.java |
1,925 | public interface AnnotatedBindingBuilder<T> extends LinkedBindingBuilder<T> {
/**
* See the EDSL examples at {@link org.elasticsearch.common.inject.Binder}.
*/
LinkedBindingBuilder<T> annotatedWith(
Class<? extends Annotation> annotationType);
/**
* See the EDSL examples at {@link org.elasticsearch.common.inject.Binder}.
*/
LinkedBindingBuilder<T> annotatedWith(Annotation annotation);
} | 0true
| src_main_java_org_elasticsearch_common_inject_binder_AnnotatedBindingBuilder.java |
2,033 | return new AnnotatedElementBuilder() {
public void annotatedWith(Class<? extends Annotation> annotationType) {
}
public void annotatedWith(Annotation annotation) {
}
}; | 0true
| src_main_java_org_elasticsearch_common_inject_spi_Elements.java |
396 | public class ClusterSearchShardsResponse extends ActionResponse implements ToXContent {
private ClusterSearchShardsGroup[] groups;
private DiscoveryNode[] nodes;
ClusterSearchShardsResponse() {
}
public ClusterSearchShardsGroup[] getGroups() {
return groups;
}
public DiscoveryNode[] getNodes() {
return nodes;
}
public ClusterSearchShardsResponse(ClusterSearchShardsGroup[] groups, DiscoveryNode[] nodes) {
this.groups = groups;
this.nodes = nodes;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
groups = new ClusterSearchShardsGroup[in.readVInt()];
for (int i = 0; i < groups.length; i++) {
groups[i] = ClusterSearchShardsGroup.readSearchShardsGroupResponse(in);
}
nodes = new DiscoveryNode[in.readVInt()];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = DiscoveryNode.readNode(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(groups.length);
for (ClusterSearchShardsGroup response : groups) {
response.writeTo(out);
}
out.writeVInt(nodes.length);
for (DiscoveryNode node : nodes) {
node.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("nodes");
for (DiscoveryNode node : nodes) {
builder.startObject(node.getId(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("name", node.name());
builder.field("transport_address", node.getAddress());
if (!node.attributes().isEmpty()) {
builder.startObject("attributes");
for (Map.Entry<String, String> attr : node.attributes().entrySet()) {
builder.field(attr.getKey(), attr.getValue());
}
builder.endObject();
}
builder.endObject();
}
builder.endObject();
builder.startArray("shards");
for (ClusterSearchShardsGroup group : groups) {
group.toXContent(builder, params);
}
builder.endArray();
return builder;
}
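// Shape of the emitted JSON fields (values are illustrative; the enclosing
// object is opened by the caller):
//   "nodes": { "<nodeId>": { "name": "n1", "transport_address": "...",
//                            "attributes": { ... } } },
//   "shards": [ ...one entry per ClusterSearchShardsGroup... ]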
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_shards_ClusterSearchShardsResponse.java |
158 | public abstract class MultiPartitionClientRequest extends ClientRequest {
@Override
final void process() throws Exception {
ClientEndpoint endpoint = getEndpoint();
OperationFactory operationFactory = new OperationFactoryWrapper(createOperationFactory(), endpoint.getUuid());
Map<Integer, Object> map = clientEngine.invokeOnPartitions(getServiceName(), operationFactory, getPartitions());
Object result = reduce(map);
endpoint.sendResponse(result, getCallId());
}
protected abstract OperationFactory createOperationFactory();
protected abstract Object reduce(Map<Integer, Object> map);
public abstract Collection<Integer> getPartitions();
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_MultiPartitionClientRequest.java |
2,078 | public class PartitionCheckIfLoadedOperationFactory implements OperationFactory {
String name;
public PartitionCheckIfLoadedOperationFactory() {
}
public PartitionCheckIfLoadedOperationFactory(String name) {
this.name = name;
}
@Override
public Operation createOperation() {
return new PartitionCheckIfLoadedOperation(name);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(name);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
name = in.readUTF();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_operation_PartitionCheckIfLoadedOperationFactory.java |
1,812 | class ConstructorInjector<T> {
private final ImmutableSet<InjectionPoint> injectableMembers;
private final SingleParameterInjector<?>[] parameterInjectors;
private final ConstructionProxy<T> constructionProxy;
private final MembersInjectorImpl<T> membersInjector;
ConstructorInjector(ImmutableSet<InjectionPoint> injectableMembers,
ConstructionProxy<T> constructionProxy,
SingleParameterInjector<?>[] parameterInjectors,
MembersInjectorImpl<T> membersInjector)
throws ErrorsException {
this.injectableMembers = injectableMembers;
this.constructionProxy = constructionProxy;
this.parameterInjectors = parameterInjectors;
this.membersInjector = membersInjector;
}
public ImmutableSet<InjectionPoint> getInjectableMembers() {
return injectableMembers;
}
ConstructionProxy<T> getConstructionProxy() {
return constructionProxy;
}
/**
* Construct an instance. Returns {@code Object} instead of {@code T} because
* it may return a proxy.
*/
Object construct(Errors errors, InternalContext context, Class<?> expectedType)
throws ErrorsException {
ConstructionContext<T> constructionContext = context.getConstructionContext(this);
// We have a circular reference between constructors. Return a proxy.
if (constructionContext.isConstructing()) {
// TODO (crazybob): if we can't proxy this object, can we proxy the other object?
return constructionContext.createProxy(errors, expectedType);
}
// If we're re-entering this factory while injecting fields or methods,
// return the same instance. This prevents infinite loops.
T t = constructionContext.getCurrentReference();
if (t != null) {
return t;
}
try {
// First time through...
constructionContext.startConstruction();
try {
Object[] parameters = SingleParameterInjector.getAll(errors, context, parameterInjectors);
t = constructionProxy.newInstance(parameters);
constructionContext.setProxyDelegates(t);
} finally {
constructionContext.finishConstruction();
}
// Store reference. If an injector re-enters this factory, they'll get the same reference.
constructionContext.setCurrentReference(t);
membersInjector.injectMembers(t, errors, context);
membersInjector.notifyListeners(t, errors);
return t;
} catch (InvocationTargetException userException) {
Throwable cause = userException.getCause() != null
? userException.getCause()
: userException;
throw errors.withSource(constructionProxy.getInjectionPoint())
.errorInjectingConstructor(cause).toException();
} finally {
constructionContext.removeCurrentReference();
}
}
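// Circular-reference sketch: if A's constructor needs B and B's needs A, the
// second entry into construct() observes isConstructing() and hands back a
// proxy for the expected interface; the proxy's delegate is filled in by
// setProxyDelegates(t) once the real instance finishes constructing.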
} | 0true
| src_main_java_org_elasticsearch_common_inject_ConstructorInjector.java |
981 | public class ORecordSerializerFactory {
private static final ORecordSerializerFactory instance = new ORecordSerializerFactory();
private Map<String, ORecordSerializer> implementations = new HashMap<String, ORecordSerializer>();
private ORecordSerializer defaultRecordFormat;
public ORecordSerializerFactory() {
defaultRecordFormat = new ORecordSerializerRaw();
register(ORecordSerializerSchemaAware2CSV.NAME, new ORecordSerializerSchemaAware2CSV());
register(ORecordSerializerJSON.NAME, new ORecordSerializerJSON());
register(ORecordSerializerRaw.NAME, defaultRecordFormat);
}
/**
* Registers a record serializer implementation.
*
* @param iName
* Name to register, use JSON to overwrite default JSON serializer
* @param iInstance
* Serializer implementation
*/
public void register(final String iName, final ORecordSerializer iInstance) {
implementations.put(iName, iInstance);
}
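// Registration sketch (MyRecordSerializer is a hypothetical implementation):
//   ORecordSerializerFactory.instance().register("MyFormat", new MyRecordSerializer());
//   ORecordSerializer fmt = ORecordSerializerFactory.instance().getFormat("MyFormat");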
public Collection<ORecordSerializer> getFormats() {
return implementations.values();
}
public ORecordSerializer getFormat(final String iFormatName) {
if (iFormatName == null)
return null;
return implementations.get(iFormatName);
}
public ORecordSerializer getFormatForObject(final Object iObject, final String iFormatName) {
if (iObject == null)
return null;
ORecordSerializer recordFormat = null;
if (iFormatName != null)
recordFormat = implementations.get(iObject.getClass().getSimpleName() + "2" + iFormatName);
if (recordFormat == null)
recordFormat = defaultRecordFormat;
return recordFormat;
}
public ORecordSerializer getDefaultRecordFormat() {
return defaultRecordFormat;
}
public void setDefaultRecordFormat(final ORecordSerializer iDefaultFormat) {
this.defaultRecordFormat = iDefaultFormat;
}
public static ORecordSerializerFactory instance() {
return instance;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_record_ORecordSerializerFactory.java |
1,362 | public static class MemberCheck implements Callable<Member>, Serializable, HazelcastInstanceAware {
private Member localMember;
public Member call() throws Exception {
return localMember;
}
@Override
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
localMember = hazelcastInstance.getCluster().getLocalMember();
}
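// Usage sketch (the executor name is hypothetical): submitting MemberCheck to an
// IExecutorService returns the local Member of whichever node ran it, because
// setHazelcastInstance is invoked on the executing member before call():
//   Future<Member> f = hz.getExecutorService("default").submit(new MemberCheck());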
} | 0true
| hazelcast_src_test_java_com_hazelcast_executor_ExecutorServiceTest.java |
1,354 | public abstract class SourceFile extends CeylonUnit {
public SourceFile(IdePhasedUnit phasedUnit) {
createPhasedUnitRef(phasedUnit);
}
@Override
protected IdePhasedUnit setPhasedUnitIfNecessary() { return phasedUnitRef.get(); }
@Override
public String getSourceFullPath() {
return getFullPath();
}
@Override
public String getCeylonFileName() {
return getFilename();
}
} | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_SourceFile.java |
835 | public class MVELTest extends BaseTest {
private StringBuffer functions = new StringBuffer();
public MVELTest() {
InputStream is = this.getClass().getResourceAsStream("/org/broadleafcommerce/core/offer/service/mvelFunctions.mvel");
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
String line;
while ((line = reader.readLine()) != null) {
functions.append(line);
}
functions.append(" ");
} catch(Exception e){
throw new RuntimeException(e);
} finally {
if (is != null) {
try {
is.close();
} catch (Exception e){
logger.error(e);
}
}
}
}
@Test
public void testOfferAppliesToItemsInCategoryAndOrderValueGreaterThanFifty() {
//----------------------------------------------------------------------------------------------------
// Mock up some order data
OrderImpl order = new OrderImpl();
CategoryImpl category = new CategoryImpl();
category.setName("t-shirt");
DiscreteOrderItemImpl orderItem = new DiscreteOrderItemImpl();
ProductImpl product = new ProductImpl();
ArrayList<CategoryProductXref> categories = new ArrayList<CategoryProductXref>();
CategoryProductXref categoryXref = new CategoryProductXrefImpl();
categoryXref.setProduct(product);
categoryXref.setCategory(category);
categories.add(categoryXref);
product.setAllParentCategoryXrefs(categories);
orderItem.setProduct(product);
order.getOrderItems().add(orderItem);
order.setSubTotal(new Money(110D));
//Set up MVEL Context
ParserContext context = new ParserContext();
//Import OfferType into the MVEL context since it may be used
context.addImport("OfferType", OfferType.class);
//Compile the MVEL Expression
Serializable domainExp1 = MVEL.compileExpression("result = false; for (cat : currentItem.product.allParentCategories) {if (cat.name == 't-shirt') {result = true;}}; return result and order.subTotal.amount >= 50", context);
//Add variables to a HashMap that should be passed in to execute the expression
HashMap<String, Object> domainVars = new HashMap<String, Object>();
domainVars.put("order", order);
domainVars.put("currentItem", orderItem);
//Execute the expression
Boolean expressionOutcome1 = (Boolean)MVEL.executeExpression(domainExp1, domainVars);
assert expressionOutcome1 != null && expressionOutcome1;
//Do the same thing using a different expression.
Serializable domainExp2 = MVEL.compileExpression("($ in currentItem.product.allParentCategories if $.name == 't-shirt') != empty and order.subTotal.amount >= 50", context);
Boolean expressionOutcome2 = (Boolean)MVEL.executeExpression(domainExp2, domainVars);
assert expressionOutcome2 != null && expressionOutcome2;
}
@Test
public void testBasicMVELFunctions() {
//First, set up our functions
HashMap<String, Object> functionMap = new HashMap<String, Object>();
StringBuffer functions = new StringBuffer("def any(x, y) { return x or y } def all(x, y) { return x and y } ");
MVEL.eval(functions.toString(), functionMap); //This stores the functions in the map we pass in.
HashMap<String, Object> vars = new HashMap<String, Object>(functionMap); //Now, we need to pass the functions in to our variable map
vars.put("fg", "Hello");
StringBuffer expression = new StringBuffer();
expression.append("return all(fg == 'Hello', true)");
Boolean result = (Boolean)MVEL.eval(expression.toString(), vars);
assert result != null && result;
expression = new StringBuffer();
expression.append("return any(fg == 'Goodbye', false)");
Boolean result2 = (Boolean)MVEL.eval(expression.toString(), vars);
assert result2 != null && ! result2;
}
@Test
public void testOfferAppliesToSpecificItems() {
DiscreteOrderItemImpl orderItem = new DiscreteOrderItemImpl();
Sku sku = new SkuImpl();
sku.setRetailPrice(new Money("1"));
sku.setId(1234L);
orderItem.setSku(sku);
OfferImpl offer = new OfferImpl();
offer.setType(OfferType.ORDER_ITEM);
//Set up MVEL Context
ParserContext context = new ParserContext();
//Import OfferType into the MVEL context since it may be used
context.addImport("OfferType", OfferType.class);
context.addImport("FulfillmentType", FulfillmentType.class);
//Compile the MVEL Expression
Serializable domainExp1 = MVEL.compileExpression("offer.type == OfferType.ORDER_ITEM and (currentItem.sku.id in [1234, 2345, 5678])", context);
//Add variables to a HashMap that should be passed in to execute the expression
HashMap<String, Object> domainVars = new HashMap<String, Object>();
domainVars.put("currentItem", orderItem);
domainVars.put("offer", offer);
//Execute the expression
Boolean expressionOutcome1 = (Boolean)MVEL.executeExpression(domainExp1, domainVars);
assert expressionOutcome1 != null && expressionOutcome1;
}
//@Test
//TODO fix this test
public void testOfferAppliesToHatsWhenOneLawnmowerIsPurchased() {
OrderImpl order = new OrderImpl();
ArrayList<OrderItem> items = new ArrayList<OrderItem>();
order.setOrderItems(items);
DiscreteOrderItemImpl item = new DiscreteOrderItemImpl();
Money amount = new Money(10D);
items.add(item);
item.setSalePrice(amount);
ProductImpl product = new ProductImpl();
CategoryImpl category = new CategoryImpl();
category.setName("hat");
product.setDefaultCategory(category);
item.setProduct(product);
item.setQuantity(3);
DiscreteOrderItemImpl item2 = new DiscreteOrderItemImpl();
Money amount2 = new Money(250D);
items.add(item2);
item2.setSalePrice(amount2);
ProductImpl product2 = new ProductImpl();
CategoryImpl category2 = new CategoryImpl();
category2.setName("lawnmower");
product2.setDefaultCategory(category2);
item2.setProduct(product2);
item2.setQuantity(1);
HashMap<String, Object> vars = new HashMap<String, Object>();
vars.put("currentItem", item);
vars.put("order", order);
vars.put("doMark", false);
//This test makes use of the static MVEL function "orderContains(quantity)".
StringBuffer expression = new StringBuffer(functions);
expression.append("def evalItemForOrderContains(item) {")
.append(" return item.product.defaultCategory.name == 'lawnmower'")
.append(" } ")
.append(" return (orderContainsPlusMark(1) and currentItem.product.defaultCategory.name == 'hat');");
Boolean result = (Boolean)MVEL.eval(expression.toString(), vars);
assert result != null && result;
}
//@Test
//No longer a valid test
// public void testMarkLawnmowerWhenOfferAppliesToHats() {
// OrderImpl order = new OrderImpl();
// ArrayList<OrderItem> items = new ArrayList<OrderItem>();
// order.setOrderItems(items);
// DiscreteOrderItemImpl item = new DiscreteOrderItemImpl();
// Money amount = new Money(10D);
// items.add(item);
// item.setSalePrice(amount);
// ProductImpl product = new ProductImpl();
// CategoryImpl category = new CategoryImpl();
// category.setName("hat");
// product.setDefaultCategory(category);
// item.setProduct(product);
// item.setQuantity(3);
//
// DiscreteOrderItemImpl item2 = new DiscreteOrderItemImpl();
// Money amount2 = new Money(250D);
// items.add(item2);
// item2.setSalePrice(amount2);
// ProductImpl product2 = new ProductImpl();
// CategoryImpl category2 = new CategoryImpl();
// category2.setName("lawnmower");
// product2.setDefaultCategory(category2);
// item2.setProduct(product2);
// item2.setQuantity(1);
//
// HashMap<String, Object> vars = new HashMap<String, Object>();
// vars.put("currentItem", item);
// vars.put("order", order);
// vars.put("doMark", true);
//
// //This test makes use of the static MVEL function "orderContains(quantity)".
// StringBuffer expression = new StringBuffer(functions);
// expression.append("def evalItemForOrderContains(item) {")
// .append(" return item.product.defaultCategory.name == 'lawnmower'")
// .append(" } ")
// .append(" return (orderContainsPlusMark(1) and currentItem.product.defaultCategory.name == 'hat');");
//
// Boolean result = (Boolean)MVEL.eval(expression.toString(), vars);
// assert result != null && result;
// assert item2.getMarkedForOffer() == 1;
// assert item.getMarkedForOffer() == 0;
// }
@Test
public void testOfferAppliesToFulfillmentGroup() {
OrderImpl order = new OrderImpl();
order.setSubTotal(new Money(110D));
FulfillmentGroupImpl group = new FulfillmentGroupImpl();
group.setPrimary(true);
OfferImpl offer = new OfferImpl();
offer.setType(OfferType.FULFILLMENT_GROUP);
order.getFulfillmentGroups().add(group);
//Set up MVEL Context
ParserContext context = new ParserContext();
//Import OfferType into the MVEL context since it may be used
context.addImport("OfferType", OfferType.class);
context.addImport("FulfillmentType", FulfillmentType.class);
//Compile the MVEL Expression
//This could test SHIPPING, or PICK_UP_AT_STORE, etc.
//Could also apply to order instead of FULFILLMENT_GROUP
Serializable domainExp1 = MVEL.compileExpression("offer.type.equals(OfferType.FULFILLMENT_GROUP) and (($ in order.fulfillmentGroups if $.type.equals(FulfillmentType.PHYSICAL)) != empty)", context);
//Add variables to a HashMap that should be passed in to execute the expression
HashMap<String, Object> domainVars = new HashMap<String, Object>();
domainVars.put("order", order);
domainVars.put("offer", offer);
//Execute the expression
Boolean expressionOutcome1 = (Boolean)MVEL.executeExpression(domainExp1, domainVars);
assert expressionOutcome1 != null && expressionOutcome1;
}
} | 0true
| integration_src_test_java_org_broadleafcommerce_core_offer_service_MVELTest.java |
6,204 | public final class MockInternalEngine extends InternalEngine implements Engine {
public static final ConcurrentMap<AssertingSearcher, RuntimeException> INFLIGHT_ENGINE_SEARCHERS = new ConcurrentHashMap<AssertingSearcher, RuntimeException>();
public static final String WRAP_READER_RATIO = "index.engine.mock.random.wrap_reader_ratio";
public static final String READER_WRAPPER_TYPE = "index.engine.mock.random.wrapper";
private final Random random;
private final boolean wrapReader;
private final Class<? extends FilterDirectoryReader> wrapper;
@Inject
public MockInternalEngine(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool,
IndexSettingsService indexSettingsService, ShardIndexingService indexingService, @Nullable IndicesWarmer warmer, Store store,
SnapshotDeletionPolicy deletionPolicy, Translog translog, MergePolicyProvider mergePolicyProvider,
MergeSchedulerProvider mergeScheduler, AnalysisService analysisService, SimilarityService similarityService,
CodecService codecService) throws EngineException {
super(shardId, indexSettings, threadPool, indexSettingsService, indexingService, warmer, store,
deletionPolicy, translog, mergePolicyProvider, mergeScheduler, analysisService, similarityService, codecService);
final long seed = indexSettings.getAsLong(ElasticsearchIntegrationTest.INDEX_SEED_SETTING, 0l);
random = new Random(seed);
final double ratio = indexSettings.getAsDouble(WRAP_READER_RATIO, 0.0d); // DISABLED by default - AssertingDR is crazy slow
wrapper = indexSettings.getAsClass(READER_WRAPPER_TYPE, AssertingDirectoryReader.class);
wrapReader = random.nextDouble() < ratio;
if (logger.isTraceEnabled()) {
logger.trace("Using [{}] for shard [{}] seed: [{}] wrapReader: [{}]", this.getClass().getName(), shardId, seed, wrapReader);
}
}
public void close() throws ElasticsearchException {
try {
super.close();
} finally {
if (logger.isTraceEnabled()) {
// log any pending searchers at trace level
for (Entry<MockInternalEngine.AssertingSearcher, RuntimeException> entry : MockInternalEngine.INFLIGHT_ENGINE_SEARCHERS.entrySet()) {
logger.trace("Unreleased Searchers instance for shard [{}]", entry.getValue(), entry.getKey().shardId);
}
}
}
}
@Override
protected Searcher newSearcher(String source, IndexSearcher searcher, SearcherManager manager) throws EngineException {
IndexReader reader = searcher.getIndexReader();
IndexReader wrappedReader = reader;
if (reader instanceof DirectoryReader && wrapReader) {
wrappedReader = wrapReader((DirectoryReader) reader);
}
// this executes basic query checks and asserts that weights are normalized only once etc.
final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(random, wrappedReader);
assertingIndexSearcher.setSimilarity(searcher.getSimilarity());
// pass the original searcher to the super.newSearcher() method to make sure this is the searcher that will
// be released later on. If we wrap an index reader here must not pass the wrapped version to the manager
// on release otherwise the reader will be closed too early. - good news, stuff will fail all over the place if we don't get this right here
return new AssertingSearcher(assertingIndexSearcher, super.newSearcher(source, searcher, manager), shardId);
}
private DirectoryReader wrapReader(DirectoryReader reader) {
try {
Constructor<?>[] constructors = wrapper.getConstructors();
Constructor<?> nonRandom = null;
for (Constructor<?> constructor : constructors) {
Class<?>[] parameterTypes = constructor.getParameterTypes();
if (parameterTypes.length > 0 && parameterTypes[0] == DirectoryReader.class) {
if (parameterTypes.length == 1) {
nonRandom = constructor;
} else if (parameterTypes.length == 2 && parameterTypes[1] == Settings.class) {
return (DirectoryReader) constructor.newInstance(reader, indexSettings);
}
}
}
if (nonRandom != null) {
return (DirectoryReader) nonRandom.newInstance(reader);
}
} catch (Exception e) {
throw new ElasticsearchException("Can not wrap reader", e);
}
return reader;
}
public final class AssertingSearcher implements Searcher {
private final Searcher wrappedSearcher;
private final ShardId shardId;
private final IndexSearcher indexSearcher;
private RuntimeException firstReleaseStack;
private final Object lock = new Object();
private final int initialRefCount;
public AssertingSearcher(IndexSearcher indexSearcher, Searcher wrappedSearcher, ShardId shardId) {
// we only use the given index searcher here instead of the IS of the wrapped searcher. the IS might be a wrapped searcher
// with a wrapped reader.
this.wrappedSearcher = wrappedSearcher;
this.shardId = shardId;
initialRefCount = wrappedSearcher.reader().getRefCount();
this.indexSearcher = indexSearcher;
assert initialRefCount > 0 : "IndexReader#getRefCount() was [" + initialRefCount + "] expected a value > [0] - reader is already closed";
INFLIGHT_ENGINE_SEARCHERS.put(this, new RuntimeException("Unreleased Searcher, source [" + wrappedSearcher.source() + "]"));
}
@Override
public String source() {
return wrappedSearcher.source();
}
@Override
public boolean release() throws ElasticsearchException {
RuntimeException remove = INFLIGHT_ENGINE_SEARCHERS.remove(this);
synchronized (lock) {
// make sure we only get this once and store the stack of the first caller!
if (remove == null) {
assert firstReleaseStack != null;
AssertionError error = new AssertionError("Released Searcher more than once, source [" + wrappedSearcher.source() + "]");
error.initCause(firstReleaseStack);
throw error;
} else {
assert firstReleaseStack == null;
firstReleaseStack = new RuntimeException("Searcher Released first here, source [" + wrappedSearcher.source() + "]");
}
}
final int refCount = wrappedSearcher.reader().getRefCount();
// this assert seems to be paranoid but given LUCENE-5362 we better add some assertions here to make sure we catch any potential
// problems.
assert refCount > 0 : "IndexReader#getRefCount() was [" + refCount + "] expected a value > [0] - reader is already closed. Initial refCount was: [" + initialRefCount + "]";
try {
return wrappedSearcher.release();
} catch (RuntimeException ex) {
logger.debug("Failed to release searcher", ex);
throw ex;
}
}
@Override
public IndexReader reader() {
return indexSearcher.getIndexReader();
}
@Override
public IndexSearcher searcher() {
return indexSearcher;
}
public ShardId shardId() {
return shardId;
}
}
public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader {
protected final SubReaderWrapper subReaderWrapper;
public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) {
super(in, subReaderWrapper);
this.subReaderWrapper = subReaderWrapper;
}
@Override
public Object getCoreCacheKey() {
return in.getCoreCacheKey();
}
@Override
public Object getCombinedCoreAndDeletesKey() {
return in.getCombinedCoreAndDeletesKey();
}
}
} | 1no label
| src_test_java_org_elasticsearch_test_engine_MockInternalEngine.java |
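The wrapReader method above selects a wrapper constructor by scanning parameter types; the same reflective pattern, reduced to a standalone, library-free sketch (all names here are hypothetical):
import java.lang.reflect.Constructor;

public class ConstructorPicker {
    /** Prefers a (target, String) constructor, falls back to (target), mirroring wrapReader above. */
    static Object instantiateWrapper(Class<?> wrapper, Object wrapped, String extra) throws Exception {
        Constructor<?> oneArg = null;
        for (Constructor<?> c : wrapper.getConstructors()) {
            Class<?>[] p = c.getParameterTypes();
            if (p.length > 0 && p[0].isAssignableFrom(wrapped.getClass())) {
                if (p.length == 1) {
                    oneArg = c;
                } else if (p.length == 2 && p[1] == String.class) {
                    return c.newInstance(wrapped, extra);   // richer constructor wins immediately
                }
            }
        }
        if (oneArg != null) {
            return oneArg.newInstance(wrapped);
        }
        return wrapped;   // no suitable constructor: hand back the original object unwrapped
    }

    public static class Holder {
        final CharSequence inner;
        public Holder(CharSequence inner) { this.inner = inner; }
    }

    public static void main(String[] args) throws Exception {
        // Picks Holder(CharSequence) via the one-arg fallback.
        System.out.println(instantiateWrapper(Holder.class, "abc", null).getClass().getSimpleName()); // Holder
    }
}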
4,664 | private final PercolatorType matchPercolator = new PercolatorType() {
@Override
public byte id() {
return 0x03;
}
@Override
public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
long foundMatches = 0;
int numMatches = 0;
for (PercolateShardResponse response : shardResults) {
foundMatches += response.count();
numMatches += response.matches().length;
}
int requestedSize = shardResults.get(0).requestedSize();
// Use a custom impl of AbstractBigArray for Object[]?
List<PercolateResponse.Match> finalMatches = new ArrayList<PercolateResponse.Match>(requestedSize == 0 ? numMatches : requestedSize);
outer:
for (PercolateShardResponse response : shardResults) {
Text index = new StringText(response.getIndex());
for (int i = 0; i < response.matches().length; i++) {
float score = response.scores().length == 0 ? NO_SCORE : response.scores()[i];
Text match = new BytesText(new BytesArray(response.matches()[i]));
Map<String, HighlightField> hl = response.hls().isEmpty() ? null : response.hls().get(i);
finalMatches.add(new PercolateResponse.Match(index, match, score, hl));
if (requestedSize != 0 && finalMatches.size() == requestedSize) {
break outer;
}
}
}
assert !shardResults.isEmpty();
InternalFacets reducedFacets = reduceFacets(shardResults);
InternalAggregations reducedAggregations = reduceAggregations(shardResults);
return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedFacets, reducedAggregations);
}
@Override
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
long count = 0;
List<BytesRef> matches = new ArrayList<BytesRef>();
List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
for (Map.Entry<HashedBytesRef, Query> entry : context.percolateQueries().entrySet()) {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(entry.getValue(), ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
try {
context.docSearcher().search(entry.getValue(), collector);
} catch (Throwable e) {
logger.warn("[" + entry.getKey() + "] failed to execute query", e);
}
if (collector.exists()) {
if (!context.limit || count < context.size) {
matches.add(entry.getKey().bytes);
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
count++;
}
}
BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
return new PercolateShardResponse(finalMatches, hls, count, context, request.index(), request.shardId());
}
}; | 1no label
| src_main_java_org_elasticsearch_percolator_PercolatorService.java |
1,117 | public class TopicConfigReadOnly extends TopicConfig {
public TopicConfigReadOnly(TopicConfig config) {
super(config);
}
public List<ListenerConfig> getMessageListenerConfigs() {
final List<ListenerConfig> messageListenerConfigs = super.getMessageListenerConfigs();
final List<ListenerConfig> readOnlyMessageListenerConfigs = new ArrayList<ListenerConfig>(messageListenerConfigs.size());
for (ListenerConfig messageListenerConfig : messageListenerConfigs) {
readOnlyMessageListenerConfigs.add(messageListenerConfig.getAsReadOnly());
}
return Collections.unmodifiableList(readOnlyMessageListenerConfigs);
}
public TopicConfig setName(String name) {
throw new UnsupportedOperationException("This config is read-only topic: " + getName());
}
public TopicConfig setGlobalOrderingEnabled(boolean globalOrderingEnabled) {
throw new UnsupportedOperationException("This config is read-only topic: " + getName());
}
public TopicConfig addMessageListenerConfig(ListenerConfig listenerConfig) {
throw new UnsupportedOperationException("This config is read-only topic: " + getName());
}
public TopicConfig setMessageListenerConfigs(List<ListenerConfig> listenerConfigs) {
throw new UnsupportedOperationException("This config is read-only topic: " + getName());
}
public TopicConfig setStatisticsEnabled(boolean statisticsEnabled) {
throw new UnsupportedOperationException("This config is read-only topic: " + getName());
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_TopicConfigReadOnly.java |
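The guard-every-setter pattern used by TopicConfigReadOnly, shown as a self-contained sketch with a hypothetical Config type (not Hazelcast's):
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Hypothetical mutable config with a read-only subclass, illustrating the pattern above.
class Config {
    private String name;
    private final List<String> listeners = new ArrayList<String>();
    public String getName() { return name; }
    public Config setName(String name) { this.name = name; return this; }
    public List<String> getListeners() { return listeners; }
}

class ConfigReadOnly extends Config {
    ConfigReadOnly(Config source) {
        super.setName(source.getName());                     // explicit super call bypasses the throwing override
        super.getListeners().addAll(source.getListeners());
    }
    @Override
    public Config setName(String name) {
        throw new UnsupportedOperationException("This config is read-only: " + getName());
    }
    @Override
    public List<String> getListeners() {
        return Collections.unmodifiableList(super.getListeners());   // defensive view, like the listener list above
    }
}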
2,895 | public class NumericAnalyzerTests extends ElasticsearchTestCase {
@Test
public void testAttributeEqual() throws IOException {
final int precisionStep = 8;
final double value = randomDouble();
NumericDoubleAnalyzer analyzer = new NumericDoubleAnalyzer(precisionStep);
final TokenStream ts1 = analyzer.tokenStream("dummy", String.valueOf(value));
final NumericTokenStream ts2 = new NumericTokenStream(precisionStep);
ts2.setDoubleValue(value);
final NumericTermAttribute numTerm1 = ts1.addAttribute(NumericTermAttribute.class);
final NumericTermAttribute numTerm2 = ts2.addAttribute(NumericTermAttribute.class);
final PositionIncrementAttribute posInc1 = ts1.addAttribute(PositionIncrementAttribute.class);
final PositionIncrementAttribute posInc2 = ts2.addAttribute(PositionIncrementAttribute.class);
ts1.reset();
ts2.reset();
while (ts1.incrementToken()) {
assertThat(ts2.incrementToken(), is(true));
assertThat(posInc1, equalTo(posInc2));
// can't use equalTo directly on the numeric attribute cause it doesn't implement equals (LUCENE-5070)
assertThat(numTerm1.getRawValue(), equalTo(numTerm2.getRawValue()));
assertThat(numTerm1.getShift(), equalTo(numTerm2.getShift()));
}
assertThat(ts2.incrementToken(), is(false));
ts1.end();
ts2.end();
}
} | 0true
| src_test_java_org_elasticsearch_index_analysis_NumericAnalyzerTests.java |
63 | FindDeclarationNodeVisitor fdv = new FindDeclarationNodeVisitor(typeDec) {
@Override
public void visit(Tree.ObjectDefinition that) {
if (that.getDeclarationModel().getType().getDeclaration().equals(typeDec)) {
declarationNode = that;
}
super.visit(that);
}
}; | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddSatisfiesProposal.java |
1,220 | Executors.newSingleThreadExecutor().execute(new Runnable() {
public void run() {
while (true) {
try {
//noinspection BusyWait
Thread.sleep(STATS_SECONDS * ONE_SECOND);
System.out.println("cluster SIZE:"
+ allTest.hazelcast.getCluster().getMembers().size());
allTest.mapStats();
allTest.qStats();
allTest.topicStats();
} catch (InterruptedException ignored) {
return;
}
}
}
}); | 0true
| hazelcast_src_main_java_com_hazelcast_examples_AllTest.java |
613 | indexEngine.getValuesMajor(iRangeFrom, isInclusive, MultiValuesTransformer.INSTANCE, new OIndexEngine.ValuesResultListener() {
@Override
public boolean addResult(OIdentifiable identifiable) {
return valuesResultListener.addResult(identifiable);
}
}); | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexMultiValues.java |
2,217 | public final class XFilteredQuery extends Query {
private final Filter rawFilter;
private final FilteredQuery delegate;
private final FilterStrategy strategy;
/**
* Constructs a new query which applies a filter to the results of the original query.
* {@link Filter#getDocIdSet} will be called every time this query is used in a search.
*
* @param query Query to be filtered, cannot be <code>null</code>.
* @param filter Filter to apply to query results, cannot be <code>null</code>.
*/
public XFilteredQuery(Query query, Filter filter) {
this(query, filter, FilteredQuery.RANDOM_ACCESS_FILTER_STRATEGY);
}
/**
* Expert: Constructs a new query which applies a filter to the results of the original query.
* {@link Filter#getDocIdSet} will be called every time this query is used in a search.
*
* @param query Query to be filtered, cannot be <code>null</code>.
* @param filter Filter to apply to query results, cannot be <code>null</code>.
* @param strategy a filter strategy used to create a filtered scorer.
* @see FilterStrategy
*/
public XFilteredQuery(Query query, Filter filter, FilterStrategy strategy) {
this(new FilteredQuery(query, new ApplyAcceptedDocsFilter(filter), strategy), filter, strategy);
}
private XFilteredQuery(FilteredQuery delegate, Filter filter, FilterStrategy strategy) {
this.delegate = delegate;
// CHANGE: we need to wrap it in post application of accepted docs
this.rawFilter = filter;
this.strategy = strategy;
}
/**
* Returns a Weight that applies the filter to the enclosed query's Weight.
* This is accomplished by overriding the Scorer returned by the Weight.
*/
@Override
public Weight createWeight(final IndexSearcher searcher) throws IOException {
return delegate.createWeight(searcher);
}
/**
* Rewrites the query. If the wrapped query is an instance of
* {@link MatchAllDocsQuery} it returns a {@link ConstantScoreQuery}. Otherwise
* it returns a new {@code FilteredQuery} wrapping the rewritten query.
*/
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query query = delegate.getQuery();
final Query queryRewritten = query.rewrite(reader);
// CHANGE: if we push back to Lucene, would love to have an extension for "isMatchAllQuery"
if (queryRewritten instanceof MatchAllDocsQuery || Queries.isConstantMatchAllQuery(queryRewritten)) {
// Special case: If the query is a MatchAllDocsQuery, we only
// return a CSQ(filter).
final Query rewritten = new ConstantScoreQuery(delegate.getFilter());
// Combine boost of MatchAllDocsQuery and the wrapped rewritten query:
rewritten.setBoost(delegate.getBoost() * queryRewritten.getBoost());
return rewritten;
}
if (queryRewritten != query) {
// rewrite to a new FilteredQuery wrapping the rewritten query
final Query rewritten = new XFilteredQuery(queryRewritten, rawFilter, strategy);
rewritten.setBoost(delegate.getBoost());
return rewritten;
} else {
// nothing to rewrite, we are done!
return this;
}
}
@Override
public void setBoost(float b) {
delegate.setBoost(b);
}
@Override
public float getBoost() {
return delegate.getBoost();
}
/**
* Returns this FilteredQuery's (unfiltered) Query
*/
public final Query getQuery() {
return delegate.getQuery();
}
/**
* Returns this FilteredQuery's filter
*/
public final Filter getFilter() {
// CHANGE: unwrap the accepted docs filter
if (rawFilter instanceof ApplyAcceptedDocsFilter) {
return ((ApplyAcceptedDocsFilter) rawFilter).filter();
}
return rawFilter;
}
// inherit javadoc
@Override
public void extractTerms(Set<Term> terms) {
delegate.extractTerms(terms);
}
/**
* Prints a user-readable version of this query.
*/
@Override
public String toString(String s) {
return delegate.toString(s);
}
/**
* Returns true iff <code>o</code> is equal to this.
*/
@Override
public boolean equals(Object o) {
if (!(o instanceof XFilteredQuery)) {
return false;
} else {
return delegate.equals(((XFilteredQuery)o).delegate);
}
}
/**
* Returns a hash code value for this object.
*/
@Override
public int hashCode() {
return delegate.hashCode();
}
// CHANGE: Add custom random access strategy, allowing to set the threshold
// CHANGE: Add filter first filter strategy
public static final FilterStrategy ALWAYS_RANDOM_ACCESS_FILTER_STRATEGY = new CustomRandomAccessFilterStrategy(0);
public static final CustomRandomAccessFilterStrategy CUSTOM_FILTER_STRATEGY = new CustomRandomAccessFilterStrategy();
/**
* Extends {@link org.apache.lucene.search.FilteredQuery.RandomAccessFilterStrategy}.
* <p/>
* Adds a threshold value, which defaults to -1. When set to -1, it checks whether the filter docSet is
* a fast docSet; if it is not, it uses {@link FilteredQuery#QUERY_FIRST_FILTER_STRATEGY} (since
* the assumption is that it's a "slow" filter better computed only on whatever matched the query).
* <p/>
* If the threshold value is 0, it always tries to pass "down" the filter as acceptDocs, and if the filter
* can't be represented as Bits (never really), then it uses {@link FilteredQuery#LEAP_FROG_QUERY_FIRST_STRATEGY}.
* <p/>
* If the above conditions are not met, then it reverts to the {@link FilteredQuery.RandomAccessFilterStrategy} logic,
* with the threshold used to control {@link #useRandomAccess(org.apache.lucene.util.Bits, int)}.
*/
public static class CustomRandomAccessFilterStrategy extends FilteredQuery.RandomAccessFilterStrategy {
private final int threshold;
public CustomRandomAccessFilterStrategy() {
this.threshold = -1;
}
public CustomRandomAccessFilterStrategy(int threshold) {
this.threshold = threshold;
}
@Override
public Scorer filteredScorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Weight weight, DocIdSet docIdSet) throws IOException {
// CHANGE: If threshold is 0, always pass down the accept docs, don't pay the price of calling nextDoc even...
if (threshold == 0) {
final Bits filterAcceptDocs = docIdSet.bits();
if (filterAcceptDocs != null) {
return weight.scorer(context, scoreDocsInOrder, topScorer, filterAcceptDocs);
} else {
return FilteredQuery.LEAP_FROG_QUERY_FIRST_STRATEGY.filteredScorer(context, scoreDocsInOrder, topScorer, weight, docIdSet);
}
}
// CHANGE: handle "default" value
if (threshold == -1) {
// default value: only apply the filter after the query if it's not a "fast" docIdSet
if (!DocIdSets.isFastIterator(docIdSet)) {
return FilteredQuery.QUERY_FIRST_FILTER_STRATEGY.filteredScorer(context, scoreDocsInOrder, topScorer, weight, docIdSet);
}
}
return super.filteredScorer(context, scoreDocsInOrder, topScorer, weight, docIdSet);
}
/**
* Expert: decides if a filter should be executed as "random-access" or not.
* random-access means the filter "filters" in a similar way as deleted docs are filtered
* in Lucene. This is faster when the filter accepts many documents.
* However, when the filter is very sparse, it can be faster to execute the query+filter
* as a conjunction in some cases.
* <p/>
* The default implementation returns <code>true</code> if the first document accepted by the
* filter is < threshold, if threshold is -1 (the default), then it checks for < 100.
*/
protected boolean useRandomAccess(Bits bits, int firstFilterDoc) {
// "default"
if (threshold == -1) {
return firstFilterDoc < 100;
}
//TODO once we have a cost API on filters and scorers we should rethink this heuristic
return firstFilterDoc < threshold;
}
}
@Override
public Query clone() {
return new XFilteredQuery((FilteredQuery) delegate.clone(), rawFilter, strategy);
}
} | 0true
| src_main_java_org_elasticsearch_common_lucene_search_XFilteredQuery.java |
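A fragment-level usage sketch for the query above, using the two strategy constants visible in the snippet; TermQuery, Term, and QueryWrapperFilter are stock Lucene 4.x classes, and the field names are invented:
// Invented field names; real classes: org.apache.lucene.search.{TermQuery,QueryWrapperFilter}, org.apache.lucene.index.Term.
Query query = new TermQuery(new Term("body", "elasticsearch"));
Filter filter = new QueryWrapperFilter(new TermQuery(new Term("status", "published")));
// Always push the filter down as random-access bits:
Query eager = new XFilteredQuery(query, filter, XFilteredQuery.ALWAYS_RANDOM_ACCESS_FILTER_STRATEGY);
// Or let the threshold heuristic choose query-first for "slow" filters:
Query adaptive = new XFilteredQuery(query, filter, XFilteredQuery.CUSTOM_FILTER_STRATEGY);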
1,173 | public static class Group {
public static class Name {
public static final String Items = "PaymentInfoImpl_Items";
}
public static class Order {
public static final int Items = 1000;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_domain_PaymentInfoImpl.java |
65 | public class RFC2104HMAC {
private RFC2104HMAC(){}
public static String calculateRFC2104HMAC(String data, String key)
throws SignatureException {
String result;
try {
SecretKeySpec signingKey = new SecretKeySpec(stringToBytes(key),
SIGNATURE_METHOD);
Mac mac = Mac.getInstance(SIGNATURE_METHOD);
mac.init(signingKey);
byte[] rawSignature = mac.doFinal(stringToBytes(data));
result = bytesToString(encode(rawSignature));
result = result.trim();
} catch (Exception e) {
throw new SignatureException("Failed to generate HMAC : "
+ e.getMessage());
}
return result;
}
} | 0true
| hazelcast-cloud_src_main_java_com_hazelcast_aws_security_RFC2104HMAC.java |
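For comparison, the same RFC 2104 pattern written against standard JDK APIs only; this sketch assumes SIGNATURE_METHOD above resolves to an HMAC algorithm name such as "HmacSHA1", and it substitutes StandardCharsets/Base64 for the helper methods not shown in the snippet:
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class HmacSketch {
    /** RFC 2104 HMAC over UTF-8 bytes, Base64-encoded and trimmed like the snippet above. */
    static String hmacSha1Base64(String data, String key) throws Exception {
        Mac mac = Mac.getInstance("HmacSHA1");   // "HmacSHA1" stands in for the unseen SIGNATURE_METHOD constant
        mac.init(new SecretKeySpec(key.getBytes(StandardCharsets.UTF_8), "HmacSHA1"));
        byte[] raw = mac.doFinal(data.getBytes(StandardCharsets.UTF_8));
        return Base64.getEncoder().encodeToString(raw).trim();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(hmacSha1Base64("GET\n/bucket/object", "secret-key"));
    }
}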
816 | public static class Order {
public static final int Description = 1000;
public static final int Amount = 2000;
public static final int ActivityRange = 3000;
public static final int Qualifiers = 4000;
public static final int ItemTarget = 5000;
public static final int Advanced = 1000;
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferImpl.java |
2,680 | interface GatewayStateRecoveredListener {
void onSuccess(ClusterState recoveredState);
void onFailure(String message);
} | 0true
| src_main_java_org_elasticsearch_gateway_Gateway.java |
31 | public class IncrementCommandProcessor extends MemcacheCommandProcessor<IncrementCommand> {
private final ILogger logger;
public IncrementCommandProcessor(TextCommandServiceImpl textCommandService) {
super(textCommandService);
logger = textCommandService.getNode().getLogger(this.getClass().getName());
}
public void handle(IncrementCommand incrementCommand) {
String key = null;
try {
key = URLDecoder.decode(incrementCommand.getKey(), "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new HazelcastException(e);
}
String mapName = DEFAULT_MAP_NAME;
int index = key.indexOf(':');
if (index != -1) {
mapName = MAP_NAME_PRECEDER + key.substring(0, index);
key = key.substring(index + 1);
}
try {
textCommandService.lock(mapName, key);
} catch (Exception e) {
incrementCommand.setResponse(NOT_FOUND);
if (incrementCommand.shouldReply()) {
textCommandService.sendResponse(incrementCommand);
}
return;
}
Object value = textCommandService.get(mapName, key);
MemcacheEntry entry = null;
if (value != null) {
if (value instanceof MemcacheEntry) {
entry = (MemcacheEntry) value;
} else if (value instanceof byte[]) {
entry = new MemcacheEntry(incrementCommand.getKey(), (byte[]) value, 0);
} else if (value instanceof String) {
entry = new MemcacheEntry(incrementCommand.getKey(), stringToBytes((String) value), 0);
} else {
try {
entry = new MemcacheEntry(incrementCommand.getKey(), textCommandService.toByteArray(value), 0);
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
final byte[] value1 = entry.getValue();
final long current = (value1 == null || value1.length == 0) ? 0 : byteArrayToLong(value1);
long result = -1;
if (incrementCommand.getType() == TextCommandType.INCREMENT) {
result = current + incrementCommand.getValue();
result = 0 > result ? Long.MAX_VALUE : result;
textCommandService.incrementIncHitCount();
} else if (incrementCommand.getType() == TextCommandType.DECREMENT) {
result = current - incrementCommand.getValue();
result = 0 > result ? 0 : result;
textCommandService.incrementDecrHitCount();
}
incrementCommand.setResponse(ByteUtil.concatenate(stringToBytes(String.valueOf(result)), RETURN));
MemcacheEntry newValue = new MemcacheEntry(key, longToByteArray(result), entry.getFlag());
textCommandService.put(mapName, key, newValue);
} else {
if (incrementCommand.getType() == TextCommandType.INCREMENT) {
textCommandService.incrementIncMissCount();
} else {
textCommandService.incrementDecrMissCount();
}
incrementCommand.setResponse(NOT_FOUND);
}
textCommandService.unlock(mapName, key);
if (incrementCommand.shouldReply()) {
textCommandService.sendResponse(incrementCommand);
}
}
public void handleRejection(IncrementCommand incrementCommand) {
incrementCommand.setResponse(NOT_FOUND);
if (incrementCommand.shouldReply()) {
textCommandService.sendResponse(incrementCommand);
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_IncrementCommandProcessor.java |
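The "mapName:key" routing convention handled above, as a standalone sketch; the two constant values are placeholders, since the real DEFAULT_MAP_NAME and MAP_NAME_PRECEDER are not shown in the snippet:
public class MemcacheKeyDemo {
    static final String DEFAULT_MAP_NAME = "hz_memcache_default"; // placeholder; real constant not shown above
    static final String MAP_NAME_PRECEDER = "hz_memcache_";       // placeholder; real constant not shown above

    /** Splits "mapName:key" into {targetMap, key}, falling back to the default map. */
    static String[] resolve(String rawKey) {
        int index = rawKey.indexOf(':');
        if (index == -1) {
            return new String[] { DEFAULT_MAP_NAME, rawKey };
        }
        return new String[] { MAP_NAME_PRECEDER + rawKey.substring(0, index), rawKey.substring(index + 1) };
    }

    public static void main(String[] args) {
        String[] r = resolve("sessions:user42");
        System.out.println(r[0] + " / " + r[1]); // hz_memcache_sessions / user42
    }
}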
405 | public class UnmaskRange {
public static final int BEGINNINGTYPE = 0;
public static final int ENDTYPE = 1;
private int positionType;
private int length;
public UnmaskRange(int positionType, int length) {
this.positionType = positionType;
this.length = length;
}
public int getPositionType() {
return positionType;
}
public int getLength() {
return length;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_payment_UnmaskRange.java |
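A hypothetical helper showing how an UnmaskRange might drive account-number masking; the helper itself is not part of Broadleaf, and it uses only the accessors shown above:
// Assumes: import org.broadleafcommerce.common.payment.UnmaskRange; (package taken from the file path above)
static String applyMask(String accountNumber, UnmaskRange range) {
    char[] out = new char[accountNumber.length()];
    java.util.Arrays.fill(out, '*');
    if (range.getPositionType() == UnmaskRange.BEGINNINGTYPE) {
        // keep the first range.getLength() characters visible
        for (int i = 0; i < range.getLength(); i++) out[i] = accountNumber.charAt(i);
    } else {
        // ENDTYPE: keep the last range.getLength() characters visible
        for (int i = accountNumber.length() - range.getLength(); i < accountNumber.length(); i++) {
            out[i] = accountNumber.charAt(i);
        }
    }
    return new String(out);
}
// applyMask("4111111111111111", new UnmaskRange(UnmaskRange.ENDTYPE, 4)) -> "************1111"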
1,222 | public class SimplePaymentContext implements ProcessContext {
public final static long serialVersionUID = 1L;
private boolean stopEntireProcess = false;
private PaymentSeed seedData;
public void setSeedData(Object seedObject) {
this.seedData = (PaymentSeed) seedObject;
}
public boolean stopProcess() {
this.stopEntireProcess = true;
return stopEntireProcess;
}
public boolean isStopped() {
return stopEntireProcess;
}
public PaymentSeed getSeedData() {
return seedData;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_workflow_SimplePaymentContext.java |
210 | @Deprecated
public class HydratedCacheElement extends Hashtable<String, Object> {
private static final long serialVersionUID = 1L;
public Object getCacheElementItem(String elementItemName, Serializable parentKey) {
return get(elementItemName + "_" + parentKey);
}
public Object putCacheElementItem(String elementItemName, Serializable parentKey, Object value) {
return put(elementItemName +"_"+parentKey, value);
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_cache_engine_HydratedCacheElement.java |
256 | return new RecordIterator<Entry>() {
final Iterator<Entry> columns = CassandraHelper.makeEntryIterator(
Iterables.filter(currentRow.cf.getSortedColumns(), new FilterDeletedColumns(nowMillis)),
entryGetter,
sliceQuery.getSliceEnd(),
sliceQuery.getLimit());
//cfToEntries(currentRow.cf, sliceQuery).iterator();
@Override
public boolean hasNext() {
ensureOpen();
return columns.hasNext();
}
@Override
public Entry next() {
ensureOpen();
return columns.next();
}
@Override
public void close() {
isClosed = true;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}; | 0true
| titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_embedded_CassandraEmbeddedKeyColumnValueStore.java |
17 | public class TestByteBuffer {
private static final int NUM = 1000;
private static final double FRACTION = 0.2;
private static final int ROUNDSIZE = 5;
private static final int TRIALS = 5;
private static final boolean CHECK_VALUE = true;
private static final Random random = new Random();
public static void main(String[] args) {
SummaryStatistics statObject = new SummaryStatistics();
SummaryStatistics statByte = new SummaryStatistics();
for (int i = 0; i < 10; i++) {
statByte.addValue(testByte());
statObject.addValue(testObject());
}
System.out.println("Time (ms) Object: " + statObject.getMean() + " | " + statObject.getStandardDeviation());
System.out.println("Time (ms) Byte: " + statByte.getMean() + " | " + statByte.getStandardDeviation());
}
private static long testObject() {
EdgeVertex[] vertices = new EdgeVertex[NUM];
for (int i = 0; i < NUM; i++) {
vertices[i] = new EdgeVertex(i);
}
for (int i = 0; i < NUM; i++) {
for (int j = 0; j < NUM; j++) {
if (i == j) continue;
if (Math.random() < FRACTION) {
Edge e = new Edge(vertices[i], "connect", vertices[j]);
e.setProperty("number", random.nextInt(ROUNDSIZE));
vertices[i].addOutEdge(e);
}
}
}
long time = System.currentTimeMillis();
long sum = 0;
for (int t = 0; t < TRIALS; t++) {
for (int i = 0; i < NUM; i++) {
for (Vertex v : vertices[i].getNeighbors(0)) {
sum += v.getId();
}
}
}
time = System.currentTimeMillis() - time;
return time;
}
private static long testByte() {
LongObjectMap<ConcurrentSkipListSet<ByteEntry>> tx = new LongObjectOpenHashMap<ConcurrentSkipListSet<ByteEntry>>(NUM);
for (int i = 0; i < NUM; i++) {
tx.put(i, new ConcurrentSkipListSet<ByteEntry>());
}
for (int i = 0; i < NUM; i++) {
for (int j = 0; j < NUM; j++) {
if (i == j) continue;
if (Math.random() < FRACTION) {
ByteBuffer key = ByteBuffer.allocate(16);
key.putLong(5).putLong(j).flip();
ByteBuffer value = ByteBuffer.allocate(4);
value.putInt(random.nextInt(ROUNDSIZE)).flip();
tx.get(i).add(new ByteEntry(key, value));
}
}
}
long time = System.currentTimeMillis();
long sum = 0;
for (int t = 0; t < TRIALS; t++) {
for (int i = 0; i < NUM; i++) {
for (Vertex v : (new ByteVertex(i, tx)).getNeighbors(0)) {
sum += v.getId();
}
}
}
time = System.currentTimeMillis() - time;
return time;
}
static abstract class Vertex implements Comparable<Vertex> {
protected final long id;
Vertex(long id) {
this.id = id;
}
@Override
public int compareTo(Vertex vertex) {
return Long.valueOf(id).compareTo(vertex.id);
}
public long getId() {
return id;
}
public abstract Iterable<Vertex> getNeighbors(int value);
}
static class EdgeVertex extends Vertex {
private SortedSet<Edge> outEdges = new ConcurrentSkipListSet<Edge>(new Comparator<Edge>() {
@Override
public int compare(Edge e1, Edge e2) {
return e1.getEnd().compareTo(e2.getEnd());
}
});
EdgeVertex(long id) {
super(id);
}
@Override
public Iterable<Vertex> getNeighbors(final int value) {
return Iterables.transform(Iterables.filter(outEdges, new Predicate<Edge>() {
@Override
public boolean apply(@Nullable Edge edge) {
return !CHECK_VALUE || ((Integer) edge.getProperty("number")).intValue() == value;
}
}), new Function<Edge, Vertex>() {
@Override
public Vertex apply(@Nullable Edge edge) {
return edge.getEnd();
}
});
}
void addOutEdge(Edge e) {
outEdges.add(e);
}
}
static class ByteVertex extends Vertex {
private final LongObjectMap<ConcurrentSkipListSet<ByteEntry>> tx;
private final SortedSet<ByteEntry> set;
ByteVertex(long id, LongObjectMap<ConcurrentSkipListSet<ByteEntry>> tx) {
super(id);
this.tx = tx;
this.set = (SortedSet<ByteEntry>) tx.get(id);
}
@Override
public Iterable<Vertex> getNeighbors(final int value) {
// SortedSet<ByteEntry> set = (SortedSet<ByteEntry>) tx.get(id);
return Iterables.transform(Iterables.filter(set, new Predicate<ByteEntry>() {
@Override
public boolean apply(@Nullable ByteEntry entry) {
return !CHECK_VALUE || entry.value.getInt(0) == value;
}
}), new Function<ByteEntry, Vertex>() {
@Override
public Vertex apply(@Nullable ByteEntry entry) {
return new ByteVertex(entry.key.getLong(8), tx);
}
});
}
}
static class Edge {
private final Vertex start;
private final Vertex end;
private final String label;
private final Map<String, Object> properties = new HashMap<String, Object>();
Edge(Vertex start, String label, Vertex end) {
this.label = label;
this.end = end;
this.start = start;
}
public String getLabel() {
return label;
}
void setProperty(String key, Object value) {
properties.put(key, value);
}
public Object getProperty(String key) {
return properties.get(key);
}
public Vertex getStart() {
return start;
}
public Vertex getEnd() {
return end;
}
public Vertex getOther(Vertex v) {
if (start.equals(v)) return end;
else if (end.equals(v)) return start;
throw new IllegalArgumentException();
}
}
static class ByteEntry implements Comparable<ByteEntry> {
final ByteBuffer key;
final ByteBuffer value;
ByteEntry(ByteBuffer key, ByteBuffer value) {
this.value = value;
this.key = key;
}
@Override
public int compareTo(ByteEntry byteEntry) {
return key.compareTo(byteEntry.key);
}
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_TestByteBuffer.java |
1,902 | public enum Stage {
/**
* We're running in a tool (an IDE plugin for example). We need binding meta data but not a
* functioning Injector. Do not inject members of instances. Do not load eager singletons. Do as
* little as possible so our tools run nice and snappy. Injectors created in this stage cannot
* be used to satisfy injections.
*/
TOOL,
/**
* We want fast startup times at the expense of runtime performance and some up front error
* checking.
*/
DEVELOPMENT,
/**
* We want to catch errors as early as possible and take performance hits up front.
*/
PRODUCTION
} | 0true
| src_main_java_org_elasticsearch_common_inject_Stage.java |
780 | @SuppressWarnings("deprecation")
public class OfferDataProvider {
@DataProvider(name = "offerDataProvider")
public static Object[][] provideBasicOffer(){
List<Offer> allOffers = new ArrayList<Offer>();
OfferImpl o = new OfferImpl();
o.setDiscountType(OfferDiscountType.AMOUNT_OFF);
o.setValue(new BigDecimal("5.00"));
o.setName("Some test offer");
o.setPriority(100);
o.setStackable(true);
o.setStartDate(SystemTime.asDate());
o.setEndDate(new Date(SystemTime.asMillis()+100000000));
o.setApplyDiscountToMarkedItems(false);
o.setTargetSystem("WEB");
o.setType(OfferType.ORDER_ITEM);
o.setAppliesToOrderRules(
"package org.broadleafcommerce.core.offer.service;"+
"import org.broadleafcommerce.core.offer.domain.Offer;"+
"import org.broadleafcommerce.core.order.domain.Order;"+
"import org.broadleafcommerce.core.order.domain.OrderItem;"+
"import org.broadleafcommerce.type.OfferType;"+
"import java.util.List;"+
"global List orderItems;"+
"global List offerPackages;"+
"rule \"Offer 1 Rule\" "+
"salience 100"+
"when "+
" orderItem : OrderItem(sku == 1) "+
" "+
" then"+
" System.err.println(\"applying offer 1\");"+
" orderItem.addRulesCandidateOffer"+
"end");
allOffers.add(o);
o = new OfferImpl();
o.setDiscountType(OfferDiscountType.AMOUNT_OFF);
o.setValue(new BigDecimal("5.00"));
o.setName("Second test offer");
o.setPriority(100);
o.setStackable(false);
o.setStartDate(SystemTime.asDate());
o.setEndDate(new Date(SystemTime.asMillis()+100000000));
o.setApplyDiscountToMarkedItems(false);
o.setTargetSystem("WEB");
o.setType(OfferType.FULFILLMENT_GROUP);
o.setAppliesToOrderRules(
"package org.broadleafcommerce.core.offer.service;"+
"import org.broadleafcommerce.core.offer.domain.Offer;"+
"import org.broadleafcommerce.core.order.domain.Order;"+
"import org.broadleafcommerce.core.order.domain.OrderItem;"+
"import org.broadleafcommerce.type.OfferType;"+
"import java.util.List;"+
"global List orderItems;"+
"global List offerPackages;"+
"rule \"Offer 1 Rule\" "+
"salience 100"+
"when "+
" orderItem : OrderItem(retailPrice >= 100)"+
" then"+
" System.err.println(\"applying offer 2\");"+
" insert(offer);"+
"end");
allOffers.add(o);
return new Object[][] {{allOffers}};
}
} | 0true
| integration_src_test_java_org_broadleafcommerce_core_offer_OfferDataProvider.java |
2,764 | static final class Fields {
static final XContentBuilderString HTTP = new XContentBuilderString("http");
static final XContentBuilderString CURRENT_OPEN = new XContentBuilderString("current_open");
static final XContentBuilderString TOTAL_OPENED = new XContentBuilderString("total_opened");
} | 0true
| src_main_java_org_elasticsearch_http_HttpStats.java |
1,550 | private class BeanInfo {
final String name;
final String description;
transient Method method;
public BeanInfo(String name, String description, Method method){
this.name = name;
this.description = description;
this.method = method;
}
public MBeanAttributeInfo getAttributeInfo() {
try {
return new MBeanAttributeInfo(name, description, method, null);
} catch (IntrospectionException e) {
throw new IllegalArgumentException(e);
}
}
public MBeanOperationInfo getOperationInfo(){
return new MBeanOperationInfo(description, method);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_jmx_HazelcastMBean.java |
77 | public interface AttributeSerializer<V> extends AttributeHandler<V> {
/**
* Reads an attribute from the given ReadBuffer.
* <p/>
* It is expected that this read operation adjusts the position in the ReadBuffer to after the attribute value.
*
* @param buffer ReadBuffer to read attribute from
* @return Read attribute
*/
public V read(ScanBuffer buffer);
/**
* Writes the attribute value to the given WriteBuffer.
* <p/>
* It is expected that this write operation adjusts the position in the WriteBuffer to after the attribute value.
*
* @param buffer WriteBuffer to write attribute to
* @param attribute Attribute to write to WriteBuffer
*/
public void write(WriteBuffer buffer, V attribute);
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_AttributeSerializer.java |
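A custom serializer sketch against the interface above for a simple 2D point; getInt/putInt on ScanBuffer/WriteBuffer and the verifyAttribute/convert methods inherited from AttributeHandler are assumptions based on the surrounding Titan code, not verified signatures:
// Reads and writes must advance the buffer position past the value, per the javadoc above.
public class PointSerializer implements AttributeSerializer<PointSerializer.Point> {

    public static class Point {
        public final int x, y;
        public Point(int x, int y) { this.x = x; this.y = y; }
    }

    @Override
    public Point read(ScanBuffer buffer) {
        int x = buffer.getInt();    // read in the same order as written
        int y = buffer.getInt();
        return new Point(x, y);
    }

    @Override
    public void write(WriteBuffer buffer, Point attribute) {
        buffer.putInt(attribute.x); // position advances past each value
        buffer.putInt(attribute.y);
    }

    @Override
    public void verifyAttribute(Point value) {   // assumed from AttributeHandler
        if (value == null) throw new IllegalArgumentException("Point cannot be null");
    }

    @Override
    public Point convert(Object value) {         // assumed from AttributeHandler
        return (value instanceof Point) ? (Point) value : null;
    }
}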
696 | public interface ProductBundle extends Product, Serializable {
/**
* @return The pricing model for this bundle
*
* ITEM_SUM indicates that the bundle is priced by the sum of the contained items.
* BUNDLE indicates that the bundle is priced by the price on the bundle itself.
*/
public ProductBundlePricingModelType getPricingModel();
/**
*
* @param pricingModel
* <b>ITEM_SUM</b> if the retailPrice and salePrice of this
* bundle should be the composition of its items, <b>BUNDLE</b>
* if this retailPrice and salePrice should come from the default
* Sku
*/
public void setPricingModel(ProductBundlePricingModelType pricingModel);
/**
* @return the sum of the retail prices of the bundle items
*/
public Money getBundleItemsRetailPrice();
/**
* @return the sum of the sale prices of the bundle items
*/
public Money getBundleItemsSalePrice();
/**
* Gets whether or not this should be bundled together if the individual
* Products are added to the cart. For instance, if this Bundle is composed
* of Item1 and Item2, and the user adds Item1 and Item2 to the cart
* separately, if this is true then these items will be bundled into a
* single BundleOrderItem instead of unique items in the cart
*
* <b>NOTE: THIS IS NOT YET SUPPORTED BY BROADLEAF</b>
*
* @return <b>true</b> if the items in this bundle should be automatically
* bundled together when added to the cart separately, <b>false</b>
* otherwise
*/
public Boolean getAutoBundle();
/**
* Sets whether or not this should be bundled together if the individual
* Products are added to the cart. For instance, if this Bundle is composed
* of Item1 and Item2, and the user adds Item1 and Item2 to the cart
* separately, if this is true then these items will be bundled into a
* single BundleOrderItem instead of unique items in the cart
*
* <b>NOTE: THIS IS NOT YET SUPPORTED BY BROADLEAF</b>
*
* @param autoBundle
* Whether or not the items in the bundle should be auto-bundled
* if added to the cart separately
*/
public void setAutoBundle(Boolean autoBundle);
/**
* Gets whether or not the items in this bundle should be considered for
* promotions using the promotion engine <br />
* <br />
* Note: this is only applicable when the pricing model is the sum of the
* bundle items
*
* <b>NOTE: THIS IS NOT YET SUPPORTED BY BROADLEAF</b>
*
* @return <b>true</b> if the items should be included in the promotion
* engine, <b>false</b> otherwise
*/
public Boolean getItemsPromotable();
/**
* Sets whether or not the items in this bundle should be considered for
* promotions using the promotion engine
*
* <b>NOTE: THIS IS NOT YET SUPPORTED BY BROADLEAF</b>
*
* @param itemsPromotable
* Whether or not the items in the bundle should be considered
* for promotions
*/
public void setItemsPromotable(Boolean itemsPromotable);
/**
* Gets whether or not the bundle itself should be promotable. <br>
* <b>Note:</b> this should only be used if the pricing model for the bundle
* uses the pricing on the bundle itself and not on the sum of its bundle
* items
*
* <b>NOTE: THIS IS NOT YET SUPPORTED BY BROADLEAF</b>
*
* @return <b>true</b> if the bundle itself should be available for
* promotion, <b>false</b> otherwise
*/
public Boolean getBundlePromotable();
/**
* Gets whether or not the bundle itself should be promotable. <br>
* <b>Note:</b> this should only be used if the pricing model for the bundle
* uses the pricing on the bundle itself and not on the sum of its bundle
* items
*
* <b>NOTE: THIS IS NOT YET SUPPORTED BY BROADLEAF</b>
*
* @param bundlePromotable
* Whether or not the bundle itself should be available for
* promotion
*/
public void setBundlePromotable(Boolean bundlePromotable);
public List<SkuBundleItem> getSkuBundleItems();
public void setSkuBundleItems(List<SkuBundleItem> bundleItems);
/**
* Used to determine the order for automatic bundling.
* @return
*/
public Integer getPriority();
public void setPriority(Integer priority);
/**
* Calculates the potential savings by summing up the retail prices of the
* contained items and comparing to the actual bundle prices.
*
* Used to determine the order for automatic bundling in case items might
* qualify for multiple bundles.
*
* @return
*/
public BigDecimal getPotentialSavings();
/**
* @return whether or not the product bundle is on sale
*/
public boolean isOnSale();
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_ProductBundle.java |
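A fragment-level sketch of honoring the pricing model described in the javadoc; ProductBundlePricingModelType.ITEM_SUM is named above, while getDefaultSku() is assumed from the Product side of the interface:
static Money effectiveRetailPrice(ProductBundle bundle) {
    if (ProductBundlePricingModelType.ITEM_SUM.equals(bundle.getPricingModel())) {
        return bundle.getBundleItemsRetailPrice();      // sum of the contained items
    }
    return bundle.getDefaultSku().getRetailPrice();     // BUNDLE model: price lives on the bundle's own Sku
}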
274 | public interface JMSEmailServiceProducer extends EmailServiceProducer {
/**
* @return the emailServiceTemplate
*/
public JmsTemplate getEmailServiceTemplate();
/**
* @param emailServiceTemplate the emailServiceTemplate to set
*/
public void setEmailServiceTemplate(JmsTemplate emailServiceTemplate);
/**
* @return the emailServiceDestination
*/
public Destination getEmailServiceDestination();
/**
* @param emailServiceDestination the emailServiceDestination to set
*/
public void setEmailServiceDestination(Destination emailServiceDestination);
} | 0true
| common_src_main_java_org_broadleafcommerce_common_email_service_jms_JMSEmailServiceProducer.java |
2,592 | threadPool.generic().execute(new Runnable() {
@Override
public void run() {
for (Listener listener : listeners) {
listener.onDisconnectedFromMaster();
}
}
}); | 0true
| src_main_java_org_elasticsearch_discovery_zen_fd_MasterFaultDetection.java |
1,411 | private static final SoftLock LOCK_SUCCESS = new SoftLock() {}; | 1no label
| hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_distributed_IMapRegionCache.java |
558 | public abstract class OAbstractIndexDefinition extends ODocumentWrapperNoClass implements OIndexDefinition {
protected OCollate collate = new ODefaultCollate();
protected OAbstractIndexDefinition() {
super(new ODocument());
}
public OCollate getCollate() {
return collate;
}
public void setCollate(final OCollate collate) {
if (collate == null)
throw new IllegalArgumentException("COLLATE cannot be null");
this.collate = collate;
}
public void setCollate(String iCollate) {
if (iCollate == null)
iCollate = ODefaultCollate.NAME;
setCollate(OSQLEngine.getCollate(iCollate));
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_OAbstractIndexDefinition.java |
1,470 | new OGremlinHelper.OGremlinCallback() {
@Override
public boolean call(ScriptEngine iEngine, OrientBaseGraph iGraph) {
final ODocument document = (ODocument) iCurrentRecord;
if (document.getSchemaClass() != null && document.getSchemaClass().isSubClassOf("E")) {
// EDGE TYPE, CREATE THE BLUEPRINTS WRAPPER
OrientEdge graphElement = (OrientEdge) new OrientElementIterable<OrientEdge>(iGraph, Arrays
.asList(new ODocument[] { document })).iterator().next();
iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("current", graphElement);
iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("it", graphElement); // FRAMES LIKE SYNTAX
} else {
// VERTEX TYPE, CREATE THE BLUEPRINTS'S WRAPPER
OrientVertex graphElement = (OrientVertex) new OrientElementIterable<OrientVertex>(iGraph, Arrays
.asList(new ODocument[] { document })).iterator().next();
iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("current", graphElement);
iEngine.getBindings(ScriptContext.ENGINE_SCOPE).put("it", graphElement); // FRAMES LIKE SYNTAX
}
return true;
}
}, null); | 1no label
| graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionGremlin.java |
665 | sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
return addToResult(transformer, valuesResultListener, entry.getValue());
}
}); | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_engine_OSBTreeIndexEngine.java |
2,508 | public class XContentString extends SerializedString {
public XContentString(String v) {
super(v);
}
} | 0true
| src_main_java_org_elasticsearch_common_xcontent_XContentString.java |
611 | @Component("blSandBoxResolver")
public class BroadleafSandBoxResolverImpl implements BroadleafSandBoxResolver {
private final Log LOG = LogFactory.getLog(BroadleafSandBoxResolverImpl.class);
/**
* Property used to disable sandbox mode. Some implementations will want to
* turn off sandboxes in production.
*/
protected Boolean sandBoxPreviewEnabled = true;
// Request Parameters and Attributes for Sandbox Mode properties - mostly values to manage dates.
private static String SANDBOX_ID_VAR = "blSandboxId";
private static String SANDBOX_DATE_TIME_VAR = "blSandboxDateTime";
private static final SimpleDateFormat CONTENT_DATE_FORMATTER = new SimpleDateFormat("yyyyMMddHHmm");
private static final SimpleDateFormat CONTENT_DATE_DISPLAY_FORMATTER = new SimpleDateFormat("MM/dd/yyyy");
private static final SimpleDateFormat CONTENT_DATE_DISPLAY_HOURS_FORMATTER = new SimpleDateFormat("h");
private static final SimpleDateFormat CONTENT_DATE_DISPLAY_MINUTES_FORMATTER = new SimpleDateFormat("mm");
private static final SimpleDateFormat CONTENT_DATE_PARSE_FORMAT = new SimpleDateFormat("MM/dd/yyyy hh:mm aa");
private static String SANDBOX_DATE_TIME_RIBBON_OVERRIDE_PARAM = "blSandboxDateTimeRibbonOverride";
private static final String SANDBOX_DISPLAY_DATE_TIME_DATE_PARAM = "blSandboxDisplayDateTimeDate";
private static final String SANDBOX_DISPLAY_DATE_TIME_HOURS_PARAM = "blSandboxDisplayDateTimeHours";
private static final String SANDBOX_DISPLAY_DATE_TIME_MINUTES_PARAM = "blSandboxDisplayDateTimeMinutes";
private static final String SANDBOX_DISPLAY_DATE_TIME_AMPM_PARAM = "blSandboxDisplayDateTimeAMPM";
/**
* Request attribute to store the current sandbox
*/
public static String SANDBOX_VAR = "blSandbox";
@Resource(name = "blSandBoxDao")
private SandBoxDao sandBoxDao;
/**
* Determines the current sandbox based on other parameters on the request, such as
* the blSandBoxId parameter.
*
* If {@link #getSandBoxPreviewEnabled()} returns false, this method will not return a user
* SandBox.
*
*/
@Override
public SandBox resolveSandBox(HttpServletRequest request, Site site) {
return resolveSandBox(new ServletWebRequest(request), site);
}
@Override
public SandBox resolveSandBox(WebRequest request, Site site) {
SandBox currentSandbox = null;
if (!sandBoxPreviewEnabled) {
if (LOG.isTraceEnabled()) {
LOG.trace("Sandbox preview disabled. Setting sandbox to production");
}
request.setAttribute(SANDBOX_VAR, currentSandbox, WebRequest.SCOPE_REQUEST);
} else {
Long sandboxId = null;
// Clear the sandBox - second parameter is to support legacy implementations.
if ( (request.getParameter("blClearSandBox") == null) || (request.getParameter("blSandboxDateTimeRibbonProduction") == null)) {
sandboxId = lookupSandboxId(request);
} else {
if (LOG.isTraceEnabled()) {
LOG.trace("Removing sandbox from session.");
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
request.removeAttribute(SANDBOX_DATE_TIME_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
request.removeAttribute(SANDBOX_ID_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
}
}
if (sandboxId != null) {
currentSandbox = sandBoxDao.retrieve(sandboxId);
request.setAttribute(SANDBOX_VAR, currentSandbox, WebRequest.SCOPE_REQUEST);
if (currentSandbox != null && !SandBoxType.PRODUCTION.equals(currentSandbox.getSandBoxType())) {
setContentTime(request);
}
}
if (currentSandbox == null && site != null) {
currentSandbox = site.getProductionSandbox();
}
}
if (LOG.isTraceEnabled()) {
if (currentSandbox != null) {
LOG.trace("Serving request using sandbox: " + currentSandbox);
} else {
LOG.trace("Serving request without a sandbox.");
}
}
Date currentSystemDateTime = SystemTime.asDate(true);
Calendar sandboxDateTimeCalendar = Calendar.getInstance();
sandboxDateTimeCalendar.setTime(currentSystemDateTime);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_DATE_PARAM, CONTENT_DATE_DISPLAY_FORMATTER.format(currentSystemDateTime), WebRequest.SCOPE_REQUEST);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_HOURS_PARAM, CONTENT_DATE_DISPLAY_HOURS_FORMATTER.format(currentSystemDateTime), WebRequest.SCOPE_REQUEST);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_MINUTES_PARAM, CONTENT_DATE_DISPLAY_MINUTES_FORMATTER.format(currentSystemDateTime), WebRequest.SCOPE_REQUEST);
request.setAttribute(SANDBOX_DISPLAY_DATE_TIME_AMPM_PARAM, sandboxDateTimeCalendar.get(Calendar.AM_PM), WebRequest.SCOPE_REQUEST);
return currentSandbox;
}
/**
* If another filter has already set the language as a request attribute, that will be honored.
* Otherwise, the request parameter is checked followed by the session attribute.
*
* @param request
* @param site
* @return
*/
private Long lookupSandboxId(WebRequest request) {
String sandboxIdStr = request.getParameter(SANDBOX_ID_VAR);
Long sandboxId = null;
if (sandboxIdStr != null) {
try {
sandboxId = Long.valueOf(sandboxIdStr);
if (LOG.isTraceEnabled()) {
LOG.trace("SandboxId found on request " + sandboxId);
}
} catch (NumberFormatException nfe) {
LOG.warn("blcSandboxId parameter could not be converted into a Long", nfe);
}
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
if (sandboxId == null) {
// check the session
sandboxId = (Long) request.getAttribute(SANDBOX_ID_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
if (LOG.isTraceEnabled()) {
if (sandboxId != null) {
LOG.trace("SandboxId found in session " + sandboxId);
}
}
} else {
request.setAttribute(SANDBOX_ID_VAR, sandboxId, WebRequest.SCOPE_GLOBAL_SESSION);
}
}
return sandboxId;
}
/**
* Allows a user in SandBox mode to override the current time and date being used by the system.
*
* @param request
*/
private void setContentTime(WebRequest request) {
String sandboxDateTimeParam = request.getParameter(SANDBOX_DATE_TIME_VAR);
if (sandBoxPreviewEnabled) {
sandboxDateTimeParam = null;
}
Date overrideTime = null;
try {
if (request.getParameter(SANDBOX_DATE_TIME_RIBBON_OVERRIDE_PARAM) != null) {
overrideTime = readDateFromRequest(request);
} else if (sandboxDateTimeParam != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Setting date/time using " + sandboxDateTimeParam);
}
overrideTime = CONTENT_DATE_FORMATTER.parse(sandboxDateTimeParam);
}
} catch (ParseException e) {
LOG.debug(e);
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
if (overrideTime == null) {
overrideTime = (Date) request.getAttribute(SANDBOX_DATE_TIME_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Setting date-time for sandbox mode to " + overrideTime + " for sandboxDateTimeParam = " + sandboxDateTimeParam);
}
request.setAttribute(SANDBOX_DATE_TIME_VAR, overrideTime, WebRequest.SCOPE_GLOBAL_SESSION);
}
}
if (overrideTime != null) {
FixedTimeSource ft = new FixedTimeSource(overrideTime.getTime());
SystemTime.setLocalTimeSource(ft);
} else {
SystemTime.resetLocalTimeSource();
}
}
private Date readDateFromRequest(WebRequest request) throws ParseException {
String date = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_DATE_PARAM);
String minutes = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_MINUTES_PARAM);
String hours = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_HOURS_PARAM);
String ampm = request.getParameter(SANDBOX_DISPLAY_DATE_TIME_AMPM_PARAM);
if (StringUtils.isEmpty(minutes)) {
minutes = Integer.toString(SystemTime.asCalendar().get(Calendar.MINUTE));
}
if (StringUtils.isEmpty(hours)) {
hours = Integer.toString(SystemTime.asCalendar().get(Calendar.HOUR_OF_DAY));
}
String dateString = date + " " + hours + ":" + minutes + " " + ampm;
if (LOG.isDebugEnabled()) {
LOG.debug("Setting date/time using " + dateString);
}
Date parsedDate = CONTENT_DATE_PARSE_FORMAT.parse(dateString);
return parsedDate;
}
/**
* Sets whether or not the site can be viewed in preview mode.
* @return
*/
public Boolean getSandBoxPreviewEnabled() {
return sandBoxPreviewEnabled;
}
public void setSandBoxPreviewEnabled(Boolean sandBoxPreviewEnabled) {
this.sandBoxPreviewEnabled = sandBoxPreviewEnabled;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_web_BroadleafSandBoxResolverImpl.java |
465 | public static class AliasActions {
private String[] indices = Strings.EMPTY_ARRAY;
private String[] aliases = Strings.EMPTY_ARRAY;
private AliasAction aliasAction;
public AliasActions(AliasAction.Type type, String[] indices, String[] aliases) {
aliasAction = new AliasAction(type);
indices(indices);
aliases(aliases);
}
public AliasActions(AliasAction.Type type, String index, String alias) {
aliasAction = new AliasAction(type);
indices(index);
aliases(alias);
}
AliasActions(AliasAction.Type type, String[] index, String alias) {
aliasAction = new AliasAction(type);
indices(index);
aliases(alias);
}
public AliasActions(AliasAction action) {
this.aliasAction = action;
indices(action.index());
aliases(action.alias());
}
public AliasActions(Type type, String index, String[] aliases) {
aliasAction = new AliasAction(type);
indices(index);
aliases(aliases);
}
public AliasActions() {
}
public AliasActions filter(Map<String, Object> filter) {
aliasAction.filter(filter);
return this;
}
public AliasActions filter(FilterBuilder filter) {
aliasAction.filter(filter);
return this;
}
public Type actionType() {
return aliasAction.actionType();
}
public void routing(String routing) {
aliasAction.routing(routing);
}
public void searchRouting(String searchRouting) {
aliasAction.searchRouting(searchRouting);
}
public void indexRouting(String indexRouting) {
aliasAction.indexRouting(indexRouting);
}
public AliasActions filter(String filter) {
aliasAction.filter(filter);
return this;
}
public void indices(String... indices) {
List<String> finalIndices = new ArrayList<String>();
for (String index : indices) {
if (index != null) {
finalIndices.add(index);
}
}
this.indices = finalIndices.toArray(new String[finalIndices.size()]);
}
public void aliases(String... aliases) {
this.aliases = aliases;
}
public String[] aliases() {
return aliases;
}
public String[] indices() {
return indices;
}
public AliasAction aliasAction() {
return aliasAction;
}
public String[] concreteAliases(MetaData metaData, String concreteIndex) {
if (aliasAction.actionType() == Type.REMOVE) {
//for DELETE we expand the aliases
String[] indexAsArray = {concreteIndex};
ImmutableOpenMap<String, ImmutableList<AliasMetaData>> aliasMetaData = metaData.findAliases(aliases, indexAsArray);
List<String> finalAliases = new ArrayList<String> ();
for (ObjectCursor<ImmutableList<AliasMetaData>> curAliases : aliasMetaData.values()) {
for (AliasMetaData aliasMeta: curAliases.value) {
finalAliases.add(aliasMeta.alias());
}
}
return finalAliases.toArray(new String[finalAliases.size()]);
} else {
//for add we just return the current aliases
return aliases;
}
}
public AliasActions readFrom(StreamInput in) throws IOException {
indices = in.readStringArray();
aliases = in.readStringArray();
aliasAction = readAliasAction(in);
return this;
}
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(indices);
out.writeStringArray(aliases);
this.aliasAction.writeTo(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_alias_IndicesAliasesRequest.java |
1,849 | interface MethodInvoker {
Object invoke(Object target, Object... parameters)
throws IllegalAccessException, InvocationTargetException;
} | 0true
| src_main_java_org_elasticsearch_common_inject_InjectorImpl.java |
1,454 | public class OrderMultishipOptionForm implements Serializable {
private static final long serialVersionUID = -5989681894142759293L;
protected List<OrderMultishipOptionDTO> options;
public List<OrderMultishipOptionDTO> getOptions() {
return options;
}
public void setOptions(List<OrderMultishipOptionDTO> options) {
this.options = options;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_checkout_model_OrderMultishipOptionForm.java |
241 | public interface OCacheLevelTwoLocator {
public OCache primaryCache(final String storageName);
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_cache_OCacheLevelTwoLocator.java |