Unnamed: 0 (int64, 0–6.45k) | func (string, lengths 29–253k) | target (class label, 2 classes) | project (string, lengths 36–167)
---|---|---|---|
5,815 | public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
private final Highlighters highlighters;
@Inject
public HighlightPhase(Settings settings, Highlighters highlighters) {
super(settings);
this.highlighters = highlighters;
}
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of("highlight", new HighlighterParseElement());
}
@Override
public boolean hitsExecutionNeeded(SearchContext context) {
return false;
}
@Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticsearchException {
}
@Override
public boolean hitExecutionNeeded(SearchContext context) {
return context.highlight() != null;
}
@Override
public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticsearchException {
Map<String, HighlightField> highlightFields = newHashMap();
for (SearchContextHighlight.Field field : context.highlight().fields()) {
Set<String> fieldNamesToHighlight;
if (Regex.isSimpleMatchPattern(field.field())) {
DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().type());
fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field());
} else {
fieldNamesToHighlight = ImmutableSet.of(field.field());
}
if (field.forceSource()) {
SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper();
if (!sourceFieldMapper.enabled()) {
throw new ElasticsearchIllegalArgumentException("source is forced for field [" + field.field() + "] but type [" + hitContext.hit().type() + "] has disabled _source");
}
}
for (String fieldName : fieldNamesToHighlight) {
FieldMapper<?> fieldMapper = getMapperForField(fieldName, context, hitContext);
if (fieldMapper == null) {
continue;
}
if (field.highlighterType() == null) {
boolean useFastVectorHighlighter = fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions();
if (useFastVectorHighlighter) {
field.highlighterType("fvh");
} else if (fieldMapper.fieldType().indexOptions() == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
field.highlighterType("postings");
} else {
field.highlighterType("plain");
}
}
Highlighter highlighter = highlighters.get(field.highlighterType());
if (highlighter == null) {
throw new ElasticsearchIllegalArgumentException("unknown highlighter type [" + field.highlighterType() + "] for the field [" + fieldName + "]");
}
HighlighterContext.HighlightQuery highlightQuery;
if (field.highlightQuery() == null) {
highlightQuery = new HighlighterContext.HighlightQuery(context.parsedQuery().query(), context.query(), context.queryRewritten());
} else {
highlightQuery = new HighlighterContext.HighlightQuery(field.highlightQuery(), field.highlightQuery(), false);
}
HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, hitContext, highlightQuery);
HighlightField highlightField = highlighter.highlight(highlighterContext);
if (highlightField != null) {
highlightFields.put(highlightField.name(), highlightField);
}
}
}
hitContext.hit().highlightFields(highlightFields);
}
private FieldMapper<?> getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) {
DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().type());
FieldMapper<?> mapper = documentMapper.mappers().smartNameFieldMapper(fieldName);
if (mapper == null) {
MapperService.SmartNameFieldMappers fullMapper = searchContext.mapperService().smartName(fieldName);
if (fullMapper == null || !fullMapper.hasDocMapper() || fullMapper.docMapper().type().equals(hitContext.hit().type())) {
return null;
}
mapper = fullMapper.mapper();
}
return mapper;
}
} | 1no label
| src_main_java_org_elasticsearch_search_highlight_HighlightPhase.java |
150 | public class OCharSerializer implements OBinarySerializer<Character> {
private static final OBinaryConverter BINARY_CONVERTER = OBinaryConverterFactory.getConverter();
/**
* size of char value in bytes
*/
public static final int CHAR_SIZE = 2;
public static OCharSerializer INSTANCE = new OCharSerializer();
public static final byte ID = 3;
public int getObjectSize(final Character object, Object... hints) {
return CHAR_SIZE;
}
public void serialize(final Character object, final byte[] stream, final int startPosition, Object... hints) {
stream[startPosition] = (byte) (object >>> 8);
stream[startPosition + 1] = (byte) (object.charValue());
}
public Character deserialize(final byte[] stream, final int startPosition) {
return (char) (((stream[startPosition] & 0xFF) << 8) + (stream[startPosition + 1] & 0xFF));
}
public int getObjectSize(final byte[] stream, final int startPosition) {
return CHAR_SIZE;
}
public byte getId() {
return ID;
}
public int getObjectSizeNative(byte[] stream, int startPosition) {
return CHAR_SIZE;
}
public void serializeNative(Character object, byte[] stream, int startPosition, Object... hints) {
BINARY_CONVERTER.putChar(stream, startPosition, object, ByteOrder.nativeOrder());
}
public Character deserializeNative(byte[] stream, int startPosition) {
return BINARY_CONVERTER.getChar(stream, startPosition, ByteOrder.nativeOrder());
}
@Override
public void serializeInDirectMemory(Character object, ODirectMemoryPointer pointer, long offset, Object... hints) {
pointer.setChar(offset, object);
}
@Override
public Character deserializeFromDirectMemory(ODirectMemoryPointer pointer, long offset) {
return pointer.getChar(offset);
}
@Override
public int getObjectSizeInDirectMemory(ODirectMemoryPointer pointer, long offset) {
return CHAR_SIZE;
}
public boolean isFixedLength() {
return true;
}
public int getFixedLength() {
return CHAR_SIZE;
}
@Override
public Character preprocess(Character value, Object... hints) {
return value;
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_serialization_types_OCharSerializer.java |
467 | public interface CachableStaticBuffer extends StaticBuffer {
public int getCacheMarker();
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_keycolumnvalue_cache_CachableStaticBuffer.java |
1,312 | class ApplySettings implements NodeSettingsService.Listener {
@Override
public void onRefreshSettings(Settings settings) {
TimeValue newUpdateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, null);
// ClusterInfoService is only enabled if the DiskThresholdDecider is enabled
Boolean newEnabled = settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, null);
if (newUpdateFrequency != null) {
if (newUpdateFrequency.getMillis() < TimeValue.timeValueSeconds(10).getMillis()) {
logger.warn("[{}] set too low [{}] (< 10s)", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, newUpdateFrequency);
throw new IllegalStateException("Unable to set " + INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL + " less than 10 seconds");
} else {
logger.info("updating [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, updateFrequency, newUpdateFrequency);
InternalClusterInfoService.this.updateFrequency = newUpdateFrequency;
}
}
// We don't log about enabling it here, because the DiskThresholdDecider will already be logging about enable/disable
if (newEnabled != null) {
InternalClusterInfoService.this.enabled = newEnabled;
}
}
} | 0true
| src_main_java_org_elasticsearch_cluster_InternalClusterInfoService.java |
2,046 | public final class ContainsValueOperationFactory implements OperationFactory {
private String name;
private Data value;
public ContainsValueOperationFactory() {
}
public ContainsValueOperationFactory(String name, Data value) {
this.name = name;
this.value = value;
}
@Override
public Operation createOperation() {
return new ContainsValueOperation(name, value);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(name);
out.writeObject(value);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
name = in.readUTF();
value = in.readObject();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_operation_ContainsValueOperationFactory.java |
380 | List<Entry> blocks = BackendOperation.execute(new BackendOperation.Transactional<List<Entry>>() {
@Override
public List<Entry> call(StoreTransaction txh) throws BackendException {
return idStore.getSlice(new KeySliceQuery(partitionKey, LOWER_SLICE, UPPER_SLICE).setLimit(5), txh);
}
},this,times); | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_idmanagement_ConsistentKeyIDAuthority.java |
2,096 | private class PartitionCallable implements Callable<Collection<QueryableEntry>> {
int partition;
SerializationService ss;
Comparator<Map.Entry> wrapperComparator;
private PartitionCallable(SerializationService ss, int partition, Comparator<Map.Entry> wrapperComparator) {
this.ss = ss;
this.partition = partition;
this.wrapperComparator = wrapperComparator;
}
public Collection<QueryableEntry> call() throws Exception {
final PartitionContainer container = mapService.getPartitionContainer(partition);
final RecordStore recordStore = container.getRecordStore(name);
LinkedList<QueryableEntry> partitionResult = new LinkedList<QueryableEntry>();
for (Record record : recordStore.getReadonlyRecordMap().values()) {
final Data key = record.getKey();
Object value = record.getCachedValue();
if (value == Record.NOT_CACHED) {
value = record.getValue();
if (value != null && value instanceof Data) {
value = ss.toObject(value);
}
} else {
value = ss.toObject(record.getValue());
record.setCachedValue(value);
}
if (value == null) {
continue;
}
final QueryEntry queryEntry = new QueryEntry(ss, key, key, value);
if (predicate.apply(queryEntry)) {
if (pagingPredicate != null) {
Map.Entry anchor = pagingPredicate.getAnchor();
final Comparator comparator = pagingPredicate.getComparator();
if (anchor != null &&
SortingUtil.compare(comparator, pagingPredicate.getIterationType(), anchor, queryEntry) >= 0) {
continue;
}
}
partitionResult.add(queryEntry);
}
}
if (pagingPredicate != null) {
Collections.sort(partitionResult, wrapperComparator);
if (partitionResult.size() > pagingPredicate.getPageSize()) {
return partitionResult.subList(0, pagingPredicate.getPageSize());
}
}
return partitionResult;
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_QueryOperation.java |
115 | public class ExpandTypeProposal extends CorrectionProposal {
private static final class FindTypeVisitor extends Visitor {
private final IRegion region;
Tree.Type result;
private FindTypeVisitor(IRegion region) {
this.region = region;
}
@Override
public void visit(Tree.Type that) {
super.visit(that);
Integer start = that.getStartIndex();
Integer stop = that.getStopIndex();
if (start!=null && stop!=null &&
region.getOffset()<=start &&
region.getOffset()+region.getLength()>=stop+1) {
result = that;
}
}
}
public ExpandTypeProposal(String name, Change change, Region selection) {
super(name, change, selection);
}
public static void addExpandTypeProposal(CeylonEditor editor,
Node node, IFile file, IDocument doc,
Collection<ICompletionProposal> proposals) {
if (node==null) return;
FindTypeVisitor ftv = new FindTypeVisitor(editor.getSelection());
node.visit(ftv);
Tree.Type result = ftv.result;
if (result!=null) {
ProducedType type = result.getTypeModel();
int start = result.getStartIndex();
int len = result.getStopIndex()-start+1;
String text;
try {
text = doc.get(start, len);
}
catch (Exception e) {
e.printStackTrace();
return;
}
String unabbreviated =
new ProducedTypeNamePrinter(false)
.getProducedTypeName(type, node.getUnit());
if (!unabbreviated.equals(text)) {
TextChange change = new TextFileChange("Expand Type", file);
change.setEdit(new ReplaceEdit(start, len, unabbreviated));
proposals.add(new ExpandTypeProposal("Expand type abbreviation",
change, new Region(start, unabbreviated.length())));
}
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ExpandTypeProposal.java |
53 | final class ReturnValueContextInfo implements IContextInformation {
@Override
public String getInformationDisplayString() {
if (declaration instanceof TypedDeclaration) {
return getType().getProducedTypeName(getUnit());
}
else {
return null;
}
}
@Override
public Image getImage() {
return getImageForDeclaration(declaration);
}
@Override
public String getContextDisplayString() {
return "Return value of '" + declaration.getName() + "'";
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_RefinementCompletionProposal.java |
1,103 | public static enum Data {
SINGLE_VALUES_DENSE_ENUM {
public int numValues() {
return 1;
}
@Override
public long nextValue() {
return RANDOM.nextInt(16);
}
},
SINGLE_VALUED_DENSE_DATE {
public int numValues() {
return 1;
}
@Override
public long nextValue() {
// somewhere in-between 2010 and 2012
return 1000L * (40L * SECONDS_PER_YEAR + RANDOM.nextInt(2 * SECONDS_PER_YEAR));
}
},
MULTI_VALUED_DATE {
public int numValues() {
return RANDOM.nextInt(3);
}
@Override
public long nextValue() {
// somewhere in-between 2010 and 2012
return 1000L * (40L * SECONDS_PER_YEAR + RANDOM.nextInt(2 * SECONDS_PER_YEAR));
}
},
MULTI_VALUED_ENUM {
public int numValues() {
return RANDOM.nextInt(3);
}
@Override
public long nextValue() {
return 3 + RANDOM.nextInt(8);
}
},
SINGLE_VALUED_SPARSE_RANDOM {
public int numValues() {
return RANDOM.nextFloat() < 0.1f ? 1 : 0;
}
@Override
public long nextValue() {
return RANDOM.nextLong();
}
},
MULTI_VALUED_SPARSE_RANDOM {
public int numValues() {
return RANDOM.nextFloat() < 0.1f ? 1 + RANDOM.nextInt(5) : 0;
}
@Override
public long nextValue() {
return RANDOM.nextLong();
}
},
MULTI_VALUED_DENSE_RANDOM {
public int numValues() {
return 1 + RANDOM.nextInt(3);
}
@Override
public long nextValue() {
return RANDOM.nextLong();
}
};
public abstract int numValues();
public abstract long nextValue();
} | 0true
| src_test_java_org_elasticsearch_benchmark_fielddata_LongFieldDataBenchmark.java |
2,103 | public class ReplaceIfSameOperation extends BasePutOperation {
private Data testValue;
private boolean successful = false;
public ReplaceIfSameOperation(String name, Data dataKey, Data testValue, Data value) {
super(name, dataKey, value);
this.testValue = testValue;
}
public ReplaceIfSameOperation() {
}
public void run() {
successful = recordStore.replace(dataKey, testValue, dataValue);
}
public void afterRun() {
if (successful)
super.afterRun();
}
public Object getResponse() {
return successful;
}
public boolean shouldBackup() {
return successful;
}
@Override
public void onWaitExpire() {
getResponseHandler().sendResponse(false);
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
IOUtil.writeNullableData(out, testValue);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
testValue = IOUtil.readNullableData(in);
}
@Override
public String toString() {
return "ReplaceIfSameOperation{" + name + "}";
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_ReplaceIfSameOperation.java |
678 | public static class Entry<K, V> {
public final K key;
public final V value;
public final long hashCode;
public Entry(K key, V value, long hashCode) {
this.key = key;
this.value = value;
this.hashCode = hashCode;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_index_hashindex_local_OHashIndexBucket.java |
1,248 | new OProfilerHookValue() {
public Object getValue() {
return metricOverlappedPageUsingChannel;
}
}); | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_fs_OMMapManagerOld.java |
73 | class AssignToTryProposal extends LocalProposal {
protected DocumentChange createChange(IDocument document, Node expanse,
Integer stopIndex) {
DocumentChange change =
new DocumentChange("Assign to Try", document);
change.setEdit(new MultiTextEdit());
change.addEdit(new InsertEdit(offset, "try (" + initialName + " = "));
String terminal = expanse.getEndToken().getText();
if (!terminal.equals(";")) {
change.addEdit(new InsertEdit(stopIndex+1, ") {}"));
exitPos = stopIndex+3;
}
else {
change.addEdit(new ReplaceEdit(stopIndex, 1, ") {}"));
exitPos = stopIndex+2;
}
return change;
}
public AssignToTryProposal(Tree.CompilationUnit cu,
Node node, int currentOffset) {
super(cu, node, currentOffset);
}
protected void addLinkedPositions(IDocument document, Unit unit)
throws BadLocationException {
// ProposalPosition typePosition =
// new ProposalPosition(document, offset, 5, 1,
// getSupertypeProposals(offset, unit,
// type, true, "value"));
ProposalPosition namePosition =
new ProposalPosition(document, offset+5, initialName.length(), 0,
getNameProposals(offset+5, 0, nameProposals));
// LinkedMode.addLinkedPosition(linkedModeModel, typePosition);
LinkedMode.addLinkedPosition(linkedModeModel, namePosition);
}
@Override
String[] computeNameProposals(Node expression) {
return super.computeNameProposals(expression);
}
@Override
public String getDisplayString() {
return "Assign expression to 'try'";
}
@Override
boolean isEnabled(ProducedType resultType) {
return resultType!=null &&
rootNode.getUnit().isUsableType(resultType);
}
static void addAssignToTryProposal(Tree.CompilationUnit cu,
Collection<ICompletionProposal> proposals,
Node node, int currentOffset) {
AssignToTryProposal prop =
new AssignToTryProposal(cu, node, currentOffset);
if (prop.isEnabled()) {
proposals.add(prop);
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AssignToTryProposal.java |
1,022 | public static class FieldOrder {
public static final int NAME = 1000;
public static final int CUSTOMER = 2000;
public static final int TOTAL = 3000;
public static final int STATUS = 4000;
public static final int SUBTOTAL = 5000;
public static final int ORDERNUMBER = 6000;
public static final int TOTALTAX = 7000;
public static final int TOTALFGCHARGES = 8000;
public static final int SUBMITDATE = 9000;
public static final int EMAILADDRESS = 10000;
public static final int ADJUSTMENTS = 1000;
public static final int OFFERCODES = 2000;
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderImpl.java |
5,865 | public class SourceLookup implements Map {
private AtomicReader reader;
private int docId = -1;
private BytesReference sourceAsBytes;
private Map<String, Object> source;
private XContentType sourceContentType;
public Map<String, Object> source() {
return source;
}
public XContentType sourceContentType() {
return sourceContentType;
}
private Map<String, Object> loadSourceIfNeeded() {
if (source != null) {
return source;
}
if (sourceAsBytes != null) {
Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(sourceAsBytes);
sourceContentType = tuple.v1();
source = tuple.v2();
return source;
}
try {
JustSourceFieldsVisitor sourceFieldVisitor = new JustSourceFieldsVisitor();
reader.document(docId, sourceFieldVisitor);
BytesReference source = sourceFieldVisitor.source();
if (source == null) {
this.source = ImmutableMap.of();
this.sourceContentType = null;
} else {
Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(source);
this.sourceContentType = tuple.v1();
this.source = tuple.v2();
}
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse / load source", e);
}
return this.source;
}
public static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(BytesReference source) throws ElasticsearchParseException {
return XContentHelper.convertToMap(source, false);
}
public static Map<String, Object> sourceAsMap(BytesReference source) throws ElasticsearchParseException {
return sourceAsMapAndType(source).v2();
}
public static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(byte[] bytes, int offset, int length) throws ElasticsearchParseException {
return XContentHelper.convertToMap(bytes, offset, length, false);
}
public static Map<String, Object> sourceAsMap(byte[] bytes, int offset, int length) throws ElasticsearchParseException {
return sourceAsMapAndType(bytes, offset, length).v2();
}
public void setNextReader(AtomicReaderContext context) {
if (this.reader == context.reader()) { // if we are called with the same reader, don't invalidate source
return;
}
this.reader = context.reader();
this.source = null;
this.sourceAsBytes = null;
this.docId = -1;
}
public void setNextDocId(int docId) {
if (this.docId == docId) { // if we are called with the same docId, don't invalidate source
return;
}
this.docId = docId;
this.sourceAsBytes = null;
this.source = null;
}
public void setNextSource(BytesReference source) {
this.sourceAsBytes = source;
}
public void setNextSource(Map<String, Object> source) {
this.source = source;
}
/**
* Internal source representation, might be compressed....
*/
public BytesReference internalSourceRef() {
return sourceAsBytes;
}
/**
* Returns the values associated with the path. Those are "low" level values, and it can
* handle path expression where an array/list is navigated within.
*/
public List<Object> extractRawValues(String path) {
return XContentMapValues.extractRawValues(path, loadSourceIfNeeded());
}
public Object filter(String[] includes, String[] excludes) {
return XContentMapValues.filter(loadSourceIfNeeded(), includes, excludes);
}
public Object extractValue(String path) {
return XContentMapValues.extractValue(path, loadSourceIfNeeded());
}
@Override
public Object get(Object key) {
return loadSourceIfNeeded().get(key);
}
@Override
public int size() {
return loadSourceIfNeeded().size();
}
@Override
public boolean isEmpty() {
return loadSourceIfNeeded().isEmpty();
}
@Override
public boolean containsKey(Object key) {
return loadSourceIfNeeded().containsKey(key);
}
@Override
public boolean containsValue(Object value) {
return loadSourceIfNeeded().containsValue(value);
}
@Override
public Set keySet() {
return loadSourceIfNeeded().keySet();
}
@Override
public Collection values() {
return loadSourceIfNeeded().values();
}
@Override
public Set entrySet() {
return loadSourceIfNeeded().entrySet();
}
@Override
public Object put(Object key, Object value) {
throw new UnsupportedOperationException();
}
@Override
public Object remove(Object key) {
throw new UnsupportedOperationException();
}
@Override
public void putAll(Map m) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
} | 1no label
| src_main_java_org_elasticsearch_search_lookup_SourceLookup.java |
1,432 | abstract public class BaseTagLibTest extends TestCase {
protected HttpServletRequest request;
protected PageContext pageContext;
protected CatalogService catalogService;
public BaseTagLibTest() {
pageContext = EasyMock.createMock(PageContext.class);
request = EasyMock.createMock(HttpServletRequest.class);
catalogService = EasyMock.createMock(CatalogService.class);
setup();
}
public void replayBaseMockObjects() {
EasyMock.replay(request, pageContext, catalogService);
}
public void replayAdditionalMockObjects(){
EasyMock.replay(request, pageContext, catalogService);
}
public void replayAdditionalMockObjects(Object o){
EasyMock.replay(request, pageContext, catalogService, o);
}
public void replayAdditionalMockObjects(Object o1, Object o2){
EasyMock.replay(request, pageContext, catalogService, o1, o2);
}
public void setPageContext(PageContext pageContext) {
this.pageContext = pageContext;
}
public void verifyBaseMockObjects() {
EasyMock.verify(request, pageContext, catalogService);
}
public void verifyBaseMockObjects(Object o) {
EasyMock.verify(request, pageContext, catalogService, o);
}
public void verifyBaseMockObjects(Object o1, Object o2) {
EasyMock.verify(request, pageContext, catalogService, o1, o2);
}
public abstract void setup();
} | 0true
| core_broadleaf-framework-web_src_test_java_org_broadleafcommerce_core_web_catalog_taglib_BaseTagLibTest.java |
1,307 | public class DiskUsage {
final String nodeId;
final long totalBytes;
final long freeBytes;
public DiskUsage(String nodeId, long totalBytes, long freeBytes) {
if ((totalBytes < freeBytes) || (totalBytes < 0)) {
throw new IllegalStateException("Free bytes [" + freeBytes +
"] cannot be less than 0 or greater than total bytes [" + totalBytes + "]");
}
this.nodeId = nodeId;
this.totalBytes = totalBytes;
this.freeBytes = freeBytes;
}
public double getFreeDiskAsPercentage() {
double freePct = 100.0 * ((double)freeBytes / totalBytes);
return freePct;
}
public long getFreeBytes() {
return freeBytes;
}
public long getTotalBytes() {
return totalBytes;
}
public long getUsedBytes() {
return getTotalBytes() - getFreeBytes();
}
public String toString() {
return "[" + nodeId + "] free: " + getFreeBytes() + "[" + getFreeDiskAsPercentage() + "]";
}
} | 0true
| src_main_java_org_elasticsearch_cluster_DiskUsage.java |
1,984 | TestEventBasedMapStore testMapStore = new TestEventBasedMapStore() {
@Override
public Set loadAllKeys() {
Set keys = new HashSet(super.loadAllKeys());
// Include an extra key that will *not* be returned by loadAll().
keys.add(keyWithNullValue);
return keys;
}
}; | 0true
| hazelcast_src_test_java_com_hazelcast_map_mapstore_MapStoreTest.java |
183 | futures.add(es.submit(new Callable<Void>() {
@Override
public Void call() {
try {
getBlock();
} catch (BackendException e) {
throw new RuntimeException(e);
}
return null;
}
private void getBlock() throws BackendException {
for (int i = 0; i < blocksPerThread; i++) {
IDBlock block = targetAuthority.getIDBlock(targetPartition,targetNamespace,
GET_ID_BLOCK_TIMEOUT);
Assert.assertNotNull(block);
blocks.add(block);
}
}
})); | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_IDAuthorityTest.java |
1,021 | class TransportHandler extends BaseTransportRequestHandler<Request> {
@Override
public Request newInstance() {
return newRequest();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void messageReceived(Request request, final TransportChannel channel) throws Exception {
// no need to have a threaded listener since we just send back a response
request.listenerThreaded(false);
execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send response for get", e1);
}
}
});
}
} | 0true
| src_main_java_org_elasticsearch_action_support_single_instance_TransportInstanceSingleOperationAction.java |
227 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientExecutorServiceExecuteTest {
static final int CLUSTER_SIZE = 3;
static HazelcastInstance instance1;
static HazelcastInstance instance2;
static HazelcastInstance instance3;
static HazelcastInstance client;
@BeforeClass
public static void init() {
instance1 = Hazelcast.newHazelcastInstance();
instance2 = Hazelcast.newHazelcastInstance();
instance3 = Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
}
@AfterClass
public static void destroy() {
client.shutdown();
Hazelcast.shutdownAll();
}
@Test
public void testExecute() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
service.execute( new MapPutRunnable(mapName));
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(1, map.size());
}
});
}
@Test
public void testExecute_withMemberSelector() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
final MemberSelector selector = new SelectAllMembers();
service.execute( new MapPutRunnable(mapName), selector);
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(1, map.size());
}
});
}
@Test(expected = NullPointerException.class)
public void testExecute_whenTaskNull() {
IExecutorService service = client.getExecutorService(randomString());
service.execute( null );
}
@Test
public void testExecuteOnKeyOwner() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
final Member member = instance1.getCluster().getLocalMember();
final String targetUuid = member.getUuid();
final String key = generateKeyOwnedBy(instance1);
service.executeOnKeyOwner(new MapPutRunnable(mapName), key);
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertTrue(map.containsKey(targetUuid));
}
});
}
@Test(expected = NullPointerException.class)
public void testExecuteOnKeyOwner_whenKeyNull() {
IExecutorService service = client.getExecutorService(randomString());
service.executeOnKeyOwner(new MapPutRunnable("map"), null);
}
@Test
public void testExecuteOnMember(){
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
final Member member = instance1.getCluster().getLocalMember();
final String targetUuid = member.getUuid();
service.executeOnMember(new MapPutRunnable(mapName), member);
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertTrue(map.containsKey(targetUuid));
}
});
}
@Test(expected = NullPointerException.class)
public void testExecuteOnMember_WhenMemberNull() {
IExecutorService service = client.getExecutorService(randomString());
service.executeOnMember(new MapPutRunnable("map"), null);
}
@Test
public void testExecuteOnMembers() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
final Collection collection = new ArrayList();
final Member member1 = instance1.getCluster().getLocalMember();
final Member member3 = instance3.getCluster().getLocalMember();
collection.add(member1);
collection.add(member3);
service.executeOnMembers(new MapPutRunnable(mapName), collection);
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertTrue(map.containsKey(member1.getUuid()));
assertTrue(map.containsKey(member3.getUuid()));
}
});
}
@Test
public void testExecuteOnMembers_withEmptyCollection() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
final Collection collection = new ArrayList();
service.executeOnMembers(new MapPutRunnable(mapName), collection);
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertTrue(map.isEmpty());
}
});
}
@Test(expected = NullPointerException.class)
public void testExecuteOnMembers_WhenCollectionNull() {
IExecutorService service = client.getExecutorService(randomString());
Collection collection = null;
service.executeOnMembers(new MapPutRunnable("task"), collection);
}
@Test
public void testExecuteOnMembers_withSelector() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
final MemberSelector selector = new SelectAllMembers();
service.executeOnMembers(new MapPutRunnable(mapName), selector);
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(CLUSTER_SIZE, map.size());
}
});
}
@Test(expected = IllegalArgumentException.class)
public void testExecuteOnMembers_whenSelectorNull() {
IExecutorService service = client.getExecutorService(randomString());
MemberSelector selector = null;
service.executeOnMembers(new MapPutRunnable("task"), selector);
}
@Test
public void testExecuteOnAllMembers() {
final IExecutorService service = client.getExecutorService(randomString());
final String mapName = randomString();
service.executeOnAllMembers(new MapPutRunnable(mapName));
final IMap map = client.getMap(mapName);
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(CLUSTER_SIZE, map.size());
}
});
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceExecuteTest.java |
138 | public interface IndexBuilder {
/**
* Adds the given key to the composite key of this index
*
* @param key
* @return this IndexBuilder
*/
public IndexBuilder addKey(PropertyKey key);
/**
* Adds the given key and associated parameters to the composite key of this index
* @param key
* @param parameters
* @return this IndexBuilder
*/
public IndexBuilder addKey(PropertyKey key, Parameter... parameters);
/**
* Restricts this index to only those elements that have the provided schemaType. If this graph index indexes
* vertices, then the argument is expected to be a vertex label and only vertices with that label will be indexed.
* Likewise, for edges and properties only those with the matching relation type will be indexed.
*
* @param schemaType
* @return this IndexBuilder
*/
public IndexBuilder indexOnly(TitanSchemaType schemaType);
/**
* Makes this a unique index for the configured element type,
* i.e. an index key can be associated with at most one element in the graph.
*
* @return this IndexBuilder
*/
public IndexBuilder unique();
/**
* Builds a composite index according to the specification
*
* @return the created composite {@link TitanGraphIndex}
*/
public TitanGraphIndex buildCompositeIndex();
/**
* Builds a mixed index according to the specification against the backend index with the given name (i.e.
* the name under which that index is configured in the graph configuration)
*
* @param backingIndex the name of the mixed index
* @return the created mixed {@link TitanGraphIndex}
*/
public TitanGraphIndex buildMixedIndex(String backingIndex);
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_schema_TitanManagement.java |
2,940 | public class StandardAnalyzerProvider extends AbstractIndexAnalyzerProvider<StandardAnalyzer> {
private final StandardAnalyzer standardAnalyzer;
private final Version esVersion;
@Inject
public StandardAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_Beta1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
} else {
defaultStopwords = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
}
CharArraySet stopWords = Analysis.parseStopWords(env, settings, defaultStopwords, version);
int maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
standardAnalyzer = new StandardAnalyzer(version, stopWords);
standardAnalyzer.setMaxTokenLength(maxTokenLength);
}
@Override
public StandardAnalyzer get() {
return this.standardAnalyzer;
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_StandardAnalyzerProvider.java |
1,223 | longPage = build(type, maxCount(limit, BigArrays.LONG_PAGE_SIZE, longsWeight, totalWeight), searchThreadPoolSize, availableProcessors, new Recycler.C<long[]>() {
@Override
public long[] newInstance(int sizing) {
return new long[BigArrays.LONG_PAGE_SIZE];
}
@Override
public void clear(long[] value) {}
}); | 0true
| src_main_java_org_elasticsearch_cache_recycler_PageCacheRecycler.java |
90 | public interface Duration extends Comparable<Duration> {
/**
* Returns the length of this duration in the given {@link TimeUnit}.
*
* @param unit
* @return
*/
public long getLength(TimeUnit unit);
/**
* Whether this duration is of zero length.
* @return
*/
public boolean isZeroLength();
/**
* Returns the native unit used by this duration. The actual time length is specified in this unit of time.
* </p>
* @return
*/
public TimeUnit getNativeUnit();
/**
* Returns a new duration that equals the length of this duration minus the length of the given duration
* in the unit of this duration.
*
* @param subtrahend
* @return
*/
public Duration sub(Duration subtrahend);
/**
* Returns a new duration that equals the combined length of this and the given duration in the
* unit of this duration.
*
* @param addend
* @return
*/
public Duration add(Duration addend);
/**
* Multiplies the length of this duration by the given multiplier. The multiplier must be a non-negative number.
*
* @param multiplier
* @return
*/
public Duration multiply(double multiplier);
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Duration.java |
280 | public class OCommandExecutorScript extends OCommandExecutorAbstract {
protected OCommandScript request;
public OCommandExecutorScript() {
}
@SuppressWarnings("unchecked")
public OCommandExecutorScript parse(final OCommandRequest iRequest) {
request = (OCommandScript) iRequest;
return this;
}
public Object execute(final Map<Object, Object> iArgs) {
return executeInContext(context, iArgs);
}
public Object executeInContext(final OCommandContext iContext, final Map<Object, Object> iArgs) {
final String language = request.getLanguage();
parserText = request.getText();
ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined();
if (db != null && !(db instanceof ODatabaseRecordTx))
db = db.getUnderlying();
final OScriptManager scriptManager = Orient.instance().getScriptManager();
CompiledScript compiledScript = request.getCompiledScript();
if (compiledScript == null) {
ScriptEngine scriptEngine = scriptManager.getEngine(language);
// COMPILE FUNCTION LIBRARY
String lib = scriptManager.getLibrary(db, language);
if (lib == null)
lib = "";
parserText = lib + parserText;
Compilable c = (Compilable) scriptEngine;
try {
compiledScript = c.compile(parserText);
} catch (ScriptException e) {
scriptManager.getErrorMessage(e, parserText);
}
request.setCompiledScript(compiledScript);
}
final Bindings binding = scriptManager.bind(compiledScript.getEngine().createBindings(), (ODatabaseRecordTx) db, iContext,
iArgs);
try {
return compiledScript.eval(binding);
} catch (ScriptException e) {
throw new OCommandScriptException("Error on execution of the script", request.getText(), e.getColumnNumber(), e);
} finally {
scriptManager.unbind(binding);
}
}
public boolean isIdempotent() {
return false;
}
@Override
protected void throwSyntaxErrorException(String iText) {
throw new OCommandScriptException("Error on execution of the script: " + iText, request.getText(), 0);
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_command_script_OCommandExecutorScript.java |
2,334 | public class BarTestClass {
} | 0true
| src_test_java_org_elasticsearch_common_settings_bar_BarTestClass.java |
1,515 | public class PreferPrimaryAllocationTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(PreferPrimaryAllocationTests.class);
@Test
public void testPreferPrimaryAllocationOverReplicas() {
logger.info("create an allocation with 1 initial recoveries");
AllocationService strategy = createAllocationService(settingsBuilder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 1)
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 1)
.build());
logger.info("create several indices with no replicas, and wait till all are allocated");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test1").numberOfShards(10).numberOfReplicas(0))
.put(IndexMetaData.builder("test2").numberOfShards(10).numberOfReplicas(0))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test1"))
.addAsNew(metaData.index("test2"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
logger.info("adding two nodes and performing rerouting till all are allocated");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
while (!clusterState.routingNodes().shardsWithState(INITIALIZING).isEmpty()) {
routingTable = strategy.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
}
logger.info("increasing the number of replicas to 1, and perform a reroute (to get the replicas allocation going)");
routingTable = RoutingTable.builder(routingTable).updateNumberOfReplicas(1).build();
metaData = MetaData.builder(clusterState.metaData()).updateNumberOfReplicas(1).build();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metaData(metaData).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("2 replicas should be initializing now for the existing indices (we throttle to 1)");
assertThat(clusterState.routingNodes().shardsWithState(INITIALIZING).size(), equalTo(2));
logger.info("create a new index");
metaData = MetaData.builder(clusterState.metaData())
.put(IndexMetaData.builder("new_index").numberOfShards(4).numberOfReplicas(0))
.build();
routingTable = RoutingTable.builder(clusterState.routingTable())
.addAsNew(metaData.index("new_index"))
.build();
clusterState = ClusterState.builder(clusterState).metaData(metaData).routingTable(routingTable).build();
logger.info("reroute, verify that primaries for the new index primary shards are allocated");
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(clusterState.routingTable().index("new_index").shardsWithState(INITIALIZING).size(), equalTo(2));
}
} | 0true
| src_test_java_org_elasticsearch_cluster_routing_allocation_PreferPrimaryAllocationTests.java |
2,443 | static class ExecutorScalingQueue<E> extends LinkedTransferQueue<E> {
ThreadPoolExecutor executor;
public ExecutorScalingQueue() {
}
@Override
public boolean offer(E e) {
if (!tryTransfer(e)) {
int left = executor.getMaximumPoolSize() - executor.getCorePoolSize();
if (left > 0) {
return false;
} else {
return super.offer(e);
}
} else {
return true;
}
}
} | 0true
| src_main_java_org_elasticsearch_common_util_concurrent_EsExecutors.java |
4,235 | static class CustomRAMDirectory extends RAMDirectory {
public synchronized void renameTo(String from, String to) throws IOException {
RAMFile fromFile = fileMap.get(from);
if (fromFile == null)
throw new FileNotFoundException(from);
RAMFile toFile = fileMap.get(to);
if (toFile != null) {
sizeInBytes.addAndGet(-fileLength(from));
fileMap.remove(from);
}
fileMap.put(to, fromFile);
}
@Override
public String toString() {
return "ram";
}
} | 1no label
| src_main_java_org_elasticsearch_index_store_ram_RamDirectoryService.java |
3,332 | public static class SingleFixedSet extends FloatArrayAtomicFieldData {
private final BigFloatArrayList values;
private final FixedBitSet set;
private final long numOrd;
public SingleFixedSet(BigFloatArrayList values, int numDocs, FixedBitSet set, long numOrd) {
super(numDocs);
this.values = values;
this.set = set;
this.numOrd = numOrd;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public boolean isValuesOrdered() {
return false;
}
@Override
public long getNumberUniqueValues() {
return numOrd;
}
@Override
public long getMemorySizeInBytes() {
if (size == -1) {
size = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes() + RamUsageEstimator.sizeOf(set.getBits());
}
return size;
}
@Override
public LongValues getLongValues() {
return new LongValues(values, set);
}
@Override
public DoubleValues getDoubleValues() {
return new DoubleValues(values, set);
}
static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final BigFloatArrayList values;
private final FixedBitSet set;
LongValues(BigFloatArrayList values, FixedBitSet set) {
super(false);
this.values = values;
this.set = set;
}
@Override
public int setDocument(int docId) {
this.docId = docId;
return set.get(docId) ? 1 : 0;
}
@Override
public long nextValue() {
return (long) values.get(docId);
}
}
static class DoubleValues extends org.elasticsearch.index.fielddata.DoubleValues {
private final BigFloatArrayList values;
private final FixedBitSet set;
DoubleValues(BigFloatArrayList values, FixedBitSet set) {
super(false);
this.values = values;
this.set = set;
}
@Override
public int setDocument(int docId) {
this.docId = docId;
return set.get(docId) ? 1 : 0;
}
@Override
public double nextValue() {
return values.get(docId);
}
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_FloatArrayAtomicFieldData.java |
294 | public interface ShardOperationFailedException extends Streamable, Serializable {
/**
* The index the operation failed on. Might return <tt>null</tt> if it can't be derived.
*/
String index();
/**
* The index the operation failed on. Might return <tt>-1</tt> if it can't be derived.
*/
int shardId();
/**
* The reason of the failure.
*/
String reason();
/**
* The status of the failure.
*/
RestStatus status();
} | 0true
| src_main_java_org_elasticsearch_action_ShardOperationFailedException.java |
174 | public class BroadleafProcessURLFilterTest extends TestCase {
public void testShouldProcessURL() throws Exception {
BroadleafProcessURLFilter cf = new BroadleafProcessURLFilter();
// Should fail
assertFalse("Image resource should not be processed by content filter.", cf.shouldProcessURL(null, "/path/subpath/test.jpg"));
assertFalse("URLs containing org.broadleafcommerce.admin should not be processed.", cf.shouldProcessURL(null, "/path/org.broadleafcommerce.admin/admintest"));
assertTrue("/about_us should be processed by the content filter", cf.shouldProcessURL(null, "/about_us"));
assertTrue("*.htm resources should be processed by the content filter", cf.shouldProcessURL(null, "/test.htm"));
}
} | 0true
| admin_broadleaf-contentmanagement-module_src_test_java_org_broadleafcommerce_cms_web_BroadleafProcessURLFilterTest.java |
641 | @Test
public class OPropertyListIndexDefinitionTest {
private OPropertyListIndexDefinition propertyIndex;
@BeforeMethod
public void beforeMethod() {
propertyIndex = new OPropertyListIndexDefinition("testClass", "fOne", OType.INTEGER);
}
public void testCreateValueSingleParameter() {
final Object result = propertyIndex.createValue(Collections.singletonList(Arrays.asList("12", "23")));
Assert.assertTrue(result instanceof Collection);
final Collection<?> collectionResult = (Collection<?>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(12));
Assert.assertTrue(collectionResult.contains(23));
}
public void testCreateValueTwoParameters() {
final Object result = propertyIndex.createValue(Arrays.asList(Arrays.asList("12", "23"), "25"));
Assert.assertTrue(result instanceof Collection);
final Collection<?> collectionResult = (Collection<?>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(12));
Assert.assertTrue(collectionResult.contains(23));
}
public void testCreateValueWrongParameter() {
Assert.assertNull(propertyIndex.createValue(Collections.singletonList("tt")));
}
public void testCreateValueSingleParameterArrayParams() {
final Object result = propertyIndex.createValue((Object) Arrays.asList("12", "23"));
Assert.assertTrue(result instanceof Collection);
final Collection<?> collectionResult = (Collection<?>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(12));
Assert.assertTrue(collectionResult.contains(23));
}
public void testCreateValueTwoParametersArrayParams() {
final Object result = propertyIndex.createValue(Arrays.asList("12", "23"), "25");
Assert.assertTrue(result instanceof Collection);
final Collection<?> collectionResult = (Collection<?>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(12));
Assert.assertTrue(collectionResult.contains(23));
}
public void testCreateValueWrongParameterArrayParams() {
Assert.assertNull(propertyIndex.createValue("tt"));
}
public void testGetDocumentValueToIndex() {
final ODocument document = new ODocument();
document.field("fOne", Arrays.asList("12", "23"));
document.field("fTwo", 10);
final Object result = propertyIndex.getDocumentValueToIndex(document);
Assert.assertTrue(result instanceof Collection);
final Collection<?> collectionResult = (Collection<?>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(12));
Assert.assertTrue(collectionResult.contains(23));
}
public void testCreateSingleValue() {
final Object result = propertyIndex.createSingleValue("12");
Assert.assertEquals(result, 12);
}
@Test(expectedExceptions = NumberFormatException.class)
public void testCreateSingleValueWrongParameter() {
propertyIndex.createSingleValue("tt");
}
public void testProcessChangeEventAddOnce() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEvent = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
propertyIndex.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddOnceWithConversion() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, String> multiValueChangeEvent = new OMultiValueChangeEvent<Integer, String>(
OMultiValueChangeEvent.OChangeType.ADD, 0, "42");
propertyIndex.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddTwoTimes() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 1, 42);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 2);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddTwoValues() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 1, 43);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 1);
addedKeys.put(43, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventRemoveOnce() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEvent = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
propertyIndex.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventRemoveOnceWithConversion() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, String> multiValueChangeEvent = new OMultiValueChangeEvent<Integer, String>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, "42");
propertyIndex.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventRemoveTwoTimes() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 1, null, 42);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 2);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddTwoTimesInvValue() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 1, 555);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 1);
addedKeys.put(555, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddRemove() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddRemoveInvValue() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 55);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(55, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddTwiceRemoveOnce() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 1, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventThree = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventThree, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(42, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventAddOnceRemoveTwice() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 0, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventThree = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventThree, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventRemoveTwoTimesAddOnce() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventOne = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 0, null, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventTwo = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.REMOVE, 1, null, 42);
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEventThree = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.ADD, 1, 42);
propertyIndex.processChangeEvent(multiValueChangeEventOne, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventTwo, keysToAdd, keysToRemove);
propertyIndex.processChangeEvent(multiValueChangeEventThree, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventUpdate() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, Integer> multiValueChangeEvent = new OMultiValueChangeEvent<Integer, Integer>(
OMultiValueChangeEvent.OChangeType.UPDATE, 0, 41, 42);
propertyIndex.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(41, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
public void testProcessChangeEventUpdateConvertValues() {
final Map<Object, Integer> keysToAdd = new HashMap<Object, Integer>();
final Map<Object, Integer> keysToRemove = new HashMap<Object, Integer>();
final OMultiValueChangeEvent<Integer, String> multiValueChangeEvent = new OMultiValueChangeEvent<Integer, String>(
OMultiValueChangeEvent.OChangeType.UPDATE, 0, "41", "42");
propertyIndex.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove);
final Map<Object, Integer> addedKeys = new HashMap<Object, Integer>();
addedKeys.put(41, 1);
final Map<Object, Integer> removedKeys = new HashMap<Object, Integer>();
removedKeys.put(42, 1);
Assert.assertEquals(keysToAdd, addedKeys);
Assert.assertEquals(keysToRemove, removedKeys);
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_index_OPropertyListIndexDefinitionTest.java |
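All of the tests above assert the same bookkeeping contract: an ADD event increments a per-key counter in keysToAdd, a REMOVE event first cancels a pending addition and only then counts towards keysToRemove, and an UPDATE behaves like a remove of the old value plus an add of the new one. A minimal, self-contained sketch of that counting logic follows — the helper class and its plain HashMap-based implementation are illustrative assumptions, not OrientDB's actual index-definition code:

import java.util.Map;

// Illustrative stand-in for the add/remove compensation the tests above assert.
final class KeyChangeAggregator {

    // One added occurrence of key: cancel a pending removal first, otherwise count the addition.
    static void onAdd(Object key, Map<Object, Integer> keysToAdd, Map<Object, Integer> keysToRemove) {
        if (!decrement(keysToRemove, key)) {
            keysToAdd.merge(key, 1, Integer::sum);
        }
    }

    // One removed occurrence of key: cancel a pending addition first, otherwise count the removal.
    static void onRemove(Object key, Map<Object, Integer> keysToAdd, Map<Object, Integer> keysToRemove) {
        if (!decrement(keysToAdd, key)) {
            keysToRemove.merge(key, 1, Integer::sum);
        }
    }

    // Decrements the counter for key, dropping the entry at zero; true when a pending change was cancelled.
    private static boolean decrement(Map<Object, Integer> counters, Object key) {
        Integer count = counters.get(key);
        if (count == null) {
            return false;
        }
        if (count == 1) {
            counters.remove(key);
        } else {
            counters.put(key, count - 1);
        }
        return true;
    }
}

Feeding two adds of 42 followed by one remove through these helpers leaves keysToAdd = {42=1} and keysToRemove = {}, which is exactly what testProcessChangeEventAddTwiceRemoveOnce expects.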
3,841 | public class GeoDistanceFilterParser implements FilterParser {
public static final String NAME = "geo_distance";
@Inject
public GeoDistanceFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME, "geoDistance"};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
boolean cache = false;
CacheKeyFilter.Key cacheKey = null;
String filterName = null;
String currentFieldName = null;
GeoPoint point = new GeoPoint();
String fieldName = null;
double distance = 0;
Object vDistance = null;
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
String optimizeBbox = "memory";
boolean normalizeLon = true;
boolean normalizeLat = true;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
fieldName = currentFieldName;
GeoPoint.parse(parser, point);
} else if (token == XContentParser.Token.START_OBJECT) {
// the json is in the format: field : { lat : 30, lon : 12 }
String currentName = parser.currentName();
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (token.isValue()) {
if (currentName.equals(GeoPointFieldMapper.Names.LAT)) {
point.resetLat(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.LON)) {
point.resetLon(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
GeoHashUtils.decode(parser.text(), point);
} else {
throw new QueryParsingException(parseContext.index(), "[geo_distance] filter does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (currentFieldName.equals("distance")) {
if (token == XContentParser.Token.VALUE_STRING) {
vDistance = parser.text(); // a String
} else {
vDistance = parser.numberValue(); // a Number
}
} else if (currentFieldName.equals("unit")) {
unit = DistanceUnit.fromString(parser.text());
} else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
point.resetLat(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
point.resetLon(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.GEOHASH_SUFFIX)) {
GeoHashUtils.decode(parser.text(), point);
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length());
} else if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
optimizeBbox = parser.textOrNull();
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else {
point.resetFromString(parser.text());
fieldName = currentFieldName;
}
}
}
if (vDistance == null) {
throw new QueryParsingException(parseContext.index(), "geo_distance requires 'distance' to be specified");
} else if (vDistance instanceof Number) {
distance = DistanceUnit.DEFAULT.convert(((Number) vDistance).doubleValue(), unit);
} else {
distance = DistanceUnit.parse((String) vDistance, unit, DistanceUnit.DEFAULT);
}
distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT);
if (normalizeLat || normalizeLon) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoDistanceFilter(point.lat(), point.lon(), distance, geoDistance, indexFieldData, geoMapper, optimizeBbox);
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
filter = wrapSmartNameFilter(filter, smartMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
}
} | 1no label
| src_main_java_org_elasticsearch_index_query_GeoDistanceFilterParser.java |
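The parser above accepts the center point either as an inner object (field : { lat, lon }), as an array, as a geohash string, or through .lat / .lon / .geohash suffixed field names, alongside the distance, unit, distance_type, optimize_bbox and caching options. Two equivalent filter bodies it would handle, written as plain Java strings for illustration (the field name pin.location is made up):

// Point supplied as a lat/lon object under the geo_point field name.
String asObject =
        "{ \"geo_distance\" : { \"distance\" : \"12km\", "
      + "\"pin.location\" : { \"lat\" : 40.73, \"lon\" : -74.1 } } }";

// Same filter with the point supplied through .lat / .lon suffixed fields.
String asSuffixedFields =
        "{ \"geo_distance\" : { \"distance\" : \"12km\", "
      + "\"pin.location.lat\" : 40.73, \"pin.location.lon\" : -74.1 } }";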
175 | static final class AdaptedCallable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Callable<? extends T> callable;
T result;
AdaptedCallable(Callable<? extends T> callable) {
if (callable == null) throw new NullPointerException();
this.callable = callable;
}
public final T getRawResult() { return result; }
public final void setRawResult(T v) { result = v; }
public final boolean exec() {
try {
result = callable.call();
return true;
} catch (Error err) {
throw err;
} catch (RuntimeException rex) {
throw rex;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public final void run() { invoke(); }
private static final long serialVersionUID = 2838392045355241008L;
} | 0true
| src_main_java_jsr166y_ForkJoinTask.java |
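AdaptedCallable is the wrapper behind ForkJoinTask.adapt(Callable): exec() runs call(), stores the result as the task's raw result, and rethrows checked exceptions wrapped in RuntimeException. A small usage sketch against the public adapt/invoke API (the pool and the computation are arbitrary):

import java.util.concurrent.Callable;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;

public final class AdaptDemo {
    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool();
        // adapt(...) hands back an AdaptedCallable-style task for the given Callable.
        ForkJoinTask<Integer> task = ForkJoinTask.adapt(new Callable<Integer>() {
            public Integer call() {
                return 6 * 7;
            }
        });
        System.out.println(pool.invoke(task)); // prints 42: invoke() runs exec() and returns getRawResult()
        pool.shutdown();
    }
}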
1,827 | public class MapEntrySimple<K,V> extends AbstractMap.SimpleEntry<K,V> {
private boolean modified = false;
public MapEntrySimple(K key, V value) {
super(key, value);
}
@Override
public V setValue(V value) {
modified = true;
return super.setValue(value);
}
public boolean isModified() {
return modified;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_MapEntrySimple.java |
2,589 | public class ElectMasterService extends AbstractComponent {
public static final String DISCOVERY_ZEN_MINIMUM_MASTER_NODES = "discovery.zen.minimum_master_nodes";
private final NodeComparator nodeComparator = new NodeComparator();
private volatile int minimumMasterNodes;
public ElectMasterService(Settings settings) {
super(settings);
this.minimumMasterNodes = settings.getAsInt(DISCOVERY_ZEN_MINIMUM_MASTER_NODES, -1);
logger.debug("using minimum_master_nodes [{}]", minimumMasterNodes);
}
public void minimumMasterNodes(int minimumMasterNodes) {
this.minimumMasterNodes = minimumMasterNodes;
}
public int minimumMasterNodes() {
return minimumMasterNodes;
}
public boolean hasEnoughMasterNodes(Iterable<DiscoveryNode> nodes) {
if (minimumMasterNodes < 1) {
return true;
}
int count = 0;
for (DiscoveryNode node : nodes) {
if (node.masterNode()) {
count++;
}
}
return count >= minimumMasterNodes;
}
/**
* Returns a list of the next possible masters.
*/
public DiscoveryNode[] nextPossibleMasters(ObjectContainer<DiscoveryNode> nodes, int numberOfPossibleMasters) {
List<DiscoveryNode> sortedNodes = sortedMasterNodes(Arrays.asList(nodes.toArray(DiscoveryNode.class)));
if (sortedNodes == null) {
return new DiscoveryNode[0];
}
List<DiscoveryNode> nextPossibleMasters = Lists.newArrayListWithCapacity(numberOfPossibleMasters);
int counter = 0;
for (DiscoveryNode nextPossibleMaster : sortedNodes) {
if (++counter >= numberOfPossibleMasters) {
break;
}
nextPossibleMasters.add(nextPossibleMaster);
}
return nextPossibleMasters.toArray(new DiscoveryNode[nextPossibleMasters.size()]);
}
/**
* Elects a new master out of the possible nodes, returning it. Returns <tt>null</tt>
* if no master has been elected.
*/
public DiscoveryNode electMaster(Iterable<DiscoveryNode> nodes) {
List<DiscoveryNode> sortedNodes = sortedMasterNodes(nodes);
if (sortedNodes == null || sortedNodes.isEmpty()) {
return null;
}
return sortedNodes.get(0);
}
private List<DiscoveryNode> sortedMasterNodes(Iterable<DiscoveryNode> nodes) {
List<DiscoveryNode> possibleNodes = Lists.newArrayList(nodes);
if (possibleNodes.isEmpty()) {
return null;
}
// clean non master nodes
for (Iterator<DiscoveryNode> it = possibleNodes.iterator(); it.hasNext(); ) {
DiscoveryNode node = it.next();
if (!node.masterNode()) {
it.remove();
}
}
CollectionUtil.introSort(possibleNodes, nodeComparator);
return possibleNodes;
}
private static class NodeComparator implements Comparator<DiscoveryNode> {
@Override
public int compare(DiscoveryNode o1, DiscoveryNode o2) {
return o1.id().compareTo(o2.id());
}
}
} | 1no label
| src_main_java_org_elasticsearch_discovery_zen_elect_ElectMasterService.java |
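Stripped of the DiscoveryNode plumbing, the election above reduces to: keep only master-eligible nodes, sort them by node id, and take the first — so the node with the lowest id wins deterministically. A toy illustration with strings standing in for node ids (the names are made up):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public final class ElectionSketch {
    public static void main(String[] args) {
        List<String> masterEligibleIds = Arrays.asList("node-c", "node-a", "node-b");
        Collections.sort(masterEligibleIds);          // mirrors NodeComparator ordering on DiscoveryNode.id()
        System.out.println(masterEligibleIds.get(0)); // node-a is elected
    }
}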
839 | LINKLIST("LinkList", 14, new Class<?>[] { List.class }, new Class<?>[] { List.class }) {
}, | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java |
1,569 | public abstract class AbstractServerClusterInsertTest extends AbstractServerClusterTest {
protected static final int delayWriter = 0;
protected static final int delayReader = 1000;
protected static final int writerCount = 5;
protected int count = 1000;
protected long beginInstances;
public String getDatabaseName() {
return "distributed";
}
protected abstract String getDatabaseURL(ServerRun server);
/**
* Event called right after the database has been created and right before to be replicated to the X servers
*
* @param db
* Current database
*/
protected void onAfterDatabaseCreation(final ODatabaseDocumentTx db) {
System.out.println("Creating database schema...");
// CREATE BASIC SCHEMA
OClass personClass = db.getMetadata().getSchema().createClass("Person");
personClass.createProperty("id", OType.STRING);
personClass.createProperty("name", OType.STRING);
personClass.createProperty("birthday", OType.DATE);
personClass.createProperty("children", OType.INTEGER);
final OSchema schema = db.getMetadata().getSchema();
OClass person = schema.getClass("Person");
person.createIndex("Person.name", INDEX_TYPE.UNIQUE, "name");
OClass customer = schema.createClass("Customer", person);
customer.createProperty("totalSold", OType.DECIMAL);
OClass provider = schema.createClass("Provider", person);
provider.createProperty("totalPurchased", OType.DECIMAL);
new ODocument("Customer").fields("name", "Jay", "surname", "Miner").save();
new ODocument("Customer").fields("name", "Luke", "surname", "Skywalker").save();
new ODocument("Provider").fields("name", "Yoda", "surname", "Nothing").save();
}
private void dropIndexNode1() {
ServerRun server = serverInstance.get(0);
ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(server), "admin", "admin");
try {
Object result = database.command(new OCommandSQL("drop index Person.name")).execute();
System.out.println("dropIndexNode1: Node1 drop index: " + result);
} finally {
database.close();
}
// CHECK ON NODE 2
server = serverInstance.get(1);
database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(server), "admin", "admin");
try {
database.getMetadata().getIndexManager().reload();
Assert.assertNull(database.getMetadata().getIndexManager().getIndex("Person.name"));
System.out.println("dropIndexNode1: Node2 hasn't the index too, ok");
} finally {
database.close();
}
}
private void recreateIndexNode2() {
// RE-CREATE INDEX ON NODE 2
ServerRun server = serverInstance.get(1);
ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(server), "admin", "admin");
try {
Object result = database.command(new OCommandSQL("create index Person.name on Person (name) unique")).execute();
System.out.println("recreateIndexNode2: Node2 created index: " + result);
Assert.assertEquals((long) (count * serverInstance.size()) + beginInstances, result);
} finally {
database.close();
}
// CHECK ON NODE 1
server = serverInstance.get(0);
database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(server), "admin", "admin");
try {
final long indexSize = database.getMetadata().getIndexManager().getIndex("Person.name").getSize();
Assert.assertEquals((long) (count * serverInstance.size()) + beginInstances, indexSize);
System.out.println("recreateIndexNode2: Node1 has the index too, ok");
} finally {
database.close();
}
}
public void executeTest() throws Exception {
ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(serverInstance.get(0)), "admin", "admin");
try {
List<ODocument> result = database.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from Person"));
beginInstances = result.get(0).field("count");
} finally {
database.close();
}
System.out.println("Creating Writers and Readers threads...");
final ExecutorService executor = Executors.newCachedThreadPool();
int i = 0;
List<Callable<Void>> workers = new ArrayList<Callable<Void>>();
for (ServerRun server : serverInstance) {
for (int j = 0; j < writerCount; j++) {
Writer writer = new Writer(i++, getDatabaseURL(server));
workers.add(writer);
}
Reader reader = new Reader(getDatabaseURL(server));
workers.add(reader);
}
List<Future<Void>> futures = executor.invokeAll(workers);
System.out.println("Threads started, waiting for the end");
executor.shutdown();
Assert.assertTrue(executor.awaitTermination(10, TimeUnit.MINUTES));
for (Future<Void> future : futures) {
future.get();
}
System.out.println("All threads have finished, shutting down server instances");
for (ServerRun server : serverInstance) {
printStats(getDatabaseURL(server));
}
checkInsertedEntries();
checkIndexedEntries();
dropIndexNode1();
recreateIndexNode2();
}
private void checkIndexedEntries() {
ODatabaseDocumentTx database;
for (ServerRun server : serverInstance) {
database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(server), "admin", "admin");
try {
final long indexSize = database.getMetadata().getIndexManager().getIndex("Person.name").getSize();
Assert.assertEquals((long) (count * serverInstance.size()) + beginInstances, indexSize);
System.out.println("From metadata: indexes " + indexSize + " items");
List<ODocument> result = database.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from index:Person.name"));
Assert.assertEquals((long) (count * serverInstance.size()) + beginInstances,
((Long) result.get(0).field("count")).longValue());
System.out.println("From sql: indexes " + indexSize + " items");
} finally {
database.close();
}
}
}
private void checkInsertedEntries() {
ODatabaseDocumentTx database;
int i;
for (ServerRun server : serverInstance) {
database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(server), "admin", "admin");
try {
List<ODocument> result = database.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from Person"));
final long total = result.get(0).field("count");
if (total != (long) (count * serverInstance.size()) + beginInstances) {
// ERROR: DUMP ALL THE RECORDS
result = database.query(new OSQLSynchQuery<OIdentifiable>("select from Person"));
i = 0;
for (ODocument d : result) {
System.out.println((i++) + ": " + d);
}
}
Assert.assertEquals((long) (count * serverInstance.size()) + beginInstances, total);
} finally {
database.close();
}
}
}
class Writer implements Callable<Void> {
private final String databaseUrl;
private int serverId;
public Writer(final int iServerId, final String db) {
serverId = iServerId;
databaseUrl = db;
}
@Override
public Void call() throws Exception {
String name = Integer.toString(serverId);
for (int i = 0; i < count; i++) {
final ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(databaseUrl, "admin", "admin");
try {
if ((i + 1) % 100 == 0)
System.out.println("\nWriter " + database.getURL() + " managed " + (i + 1) + "/" + count + " records so far");
createRecord(database, i);
updateRecord(database, i);
checkRecord(database, i);
Thread.sleep(delayWriter);
} catch (InterruptedException e) {
System.out.println("Writer received interrupt (db=" + database.getURL());
Thread.currentThread().interrupt();
break;
} catch (Exception e) {
System.out.println("Writer received exception (db=" + database.getURL());
e.printStackTrace();
break;
} finally {
database.close();
}
}
System.out.println("\nWriter " + name + " END");
return null;
}
private void createRecord(ODatabaseDocumentTx database, int i) {
final int uniqueId = count * serverId + i;
ODocument person = new ODocument("Person").fields("id", UUID.randomUUID().toString(), "name", "Billy" + uniqueId, "surname",
"Mayes" + uniqueId, "birthday", new Date(), "children", uniqueId);
database.save(person);
}
private void updateRecord(ODatabaseDocumentTx database, int i) {
ODocument doc = loadRecord(database, i);
doc.field("updated", true);
doc.save();
}
private void checkRecord(ODatabaseDocumentTx database, int i) {
ODocument doc = loadRecord(database, i);
Assert.assertEquals(doc.field("updated"), Boolean.TRUE);
}
private ODocument loadRecord(ODatabaseDocumentTx database, int i) {
final int uniqueId = count * serverId + i;
List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>("select from Person where name = 'Billy" + uniqueId
+ "'"));
if (result.size() == 0)
Assert.assertTrue("No record found with name = 'Billy" + uniqueId + "'!", false);
else if (result.size() > 1)
Assert.assertTrue(result.size() + " records found with name = 'Billy" + uniqueId + "'!", false);
return result.get(0);
}
}
class Reader implements Callable<Void> {
private final String databaseUrl;
public Reader(final String db) {
databaseUrl = db;
}
@Override
public Void call() throws Exception {
try {
while (!Thread.interrupted()) {
try {
printStats(databaseUrl);
Thread.sleep(delayReader);
} catch (Exception e) {
break;
}
}
} finally {
printStats(databaseUrl);
}
return null;
}
}
private void printStats(final String databaseUrl) {
final ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(databaseUrl, "admin", "admin");
try {
List<ODocument> result = database.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from Person"));
final String name = database.getURL();
System.out.println("\nReader " + name + " sql count: " + result.get(0) + " counting class: " + database.countClass("Person")
+ " counting cluster: " + database.countClusterElements("Person"));
if (database.getMetadata().getSchema().existsClass("ODistributedConflict"))
try {
List<ODocument> conflicts = database
.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from ODistributedConflict"));
long totalConflicts = conflicts.get(0).field("count");
Assert.assertEquals(0l, totalConflicts);
System.out.println("\nReader " + name + " conflicts: " + totalConflicts);
} catch (OQueryParsingException e) {
// IGNORE IT
}
} finally {
database.close();
}
}
} | 0true
| distributed_src_test_java_com_orientechnologies_orient_server_distributed_AbstractServerClusterInsertTest.java |
2,594 | class SocketPacketReader implements SocketReader {
private static final int CONST_BUFFER_NO = 4;
Packet packet;
final PacketReader packetReader;
final TcpIpConnection connection;
final IOService ioService;
final ILogger logger;
public SocketPacketReader(TcpIpConnection connection) {
this.connection = connection;
this.ioService = connection.getConnectionManager().ioService;
this.logger = ioService.getLogger(getClass().getName());
boolean symmetricEncryptionEnabled = CipherHelper.isSymmetricEncryptionEnabled(ioService);
if (symmetricEncryptionEnabled) {
packetReader = new SymmetricCipherPacketReader();
logger.info("Reader started with SymmetricEncryption");
} else {
packetReader = new DefaultPacketReader();
}
}
public void read(ByteBuffer inBuffer) throws Exception {
packetReader.readPacket(inBuffer);
}
private void enqueueFullPacket(final Packet p) {
p.setConn(connection);
ioService.handleMemberPacket(p);
}
private interface PacketReader {
void readPacket(ByteBuffer inBuffer) throws Exception;
}
private class DefaultPacketReader implements PacketReader {
public void readPacket(ByteBuffer inBuffer) {
while (inBuffer.hasRemaining()) {
if (packet == null) {
packet = obtainReadable();
}
boolean complete = packet.readFrom(inBuffer);
if (complete) {
enqueueFullPacket(packet);
packet = null;
} else {
break;
}
}
}
}
private final class SymmetricCipherPacketReader implements PacketReader {
int size = -1;
final Cipher cipher;
ByteBuffer cipherBuffer = ByteBuffer.allocate(ioService.getSocketReceiveBufferSize() * IOService.KILO_BYTE);
private SymmetricCipherPacketReader() {
cipher = init();
}
Cipher init() {
Cipher c;
try {
c = CipherHelper.createSymmetricReaderCipher(ioService.getSymmetricEncryptionConfig());
} catch (Exception e) {
logger.severe("Symmetric Cipher for ReadHandler cannot be initialized.", e);
CipherHelper.handleCipherException(e, connection);
throw ExceptionUtil.rethrow(e);
}
return c;
}
public void readPacket(ByteBuffer inBuffer) throws Exception {
while (inBuffer.hasRemaining()) {
try {
if (size == -1) {
if (inBuffer.remaining() < CONST_BUFFER_NO) {
return;
}
size = inBuffer.getInt();
if (cipherBuffer.capacity() < size) {
cipherBuffer = ByteBuffer.allocate(size);
}
}
int remaining = inBuffer.remaining();
if (remaining < size) {
cipher.update(inBuffer, cipherBuffer);
size -= remaining;
} else if (remaining == size) {
cipher.doFinal(inBuffer, cipherBuffer);
size = -1;
} else {
int oldLimit = inBuffer.limit();
int newLimit = inBuffer.position() + size;
inBuffer.limit(newLimit);
cipher.doFinal(inBuffer, cipherBuffer);
inBuffer.limit(oldLimit);
size = -1;
}
} catch (ShortBufferException e) {
logger.warning(e);
}
cipherBuffer.flip();
while (cipherBuffer.hasRemaining()) {
if (packet == null) {
packet = obtainReadable();
}
boolean complete = packet.readFrom(cipherBuffer);
if (complete) {
enqueueFullPacket(packet);
packet = null;
}
}
cipherBuffer.clear();
}
}
}
public Packet obtainReadable() {
return new Packet(ioService.getSerializationContext());
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_nio_SocketPacketReader.java |
37 | public class TitanSpecificBlueprintsTestSuite extends TestSuite {
public TitanSpecificBlueprintsTestSuite(final GraphTest graphTest) {
super(graphTest);
}
public void testVertexReattachment() {
TransactionalGraph graph = (TransactionalGraph) graphTest.generateGraph();
Vertex a = graph.addVertex(null);
Vertex b = graph.addVertex(null);
Edge e = graph.addEdge(null, a, b, "friend");
graph.commit();
a = graph.getVertex(a);
Assert.assertNotNull(a);
Assert.assertEquals(1, BaseTest.count(a.getVertices(Direction.OUT)));
graph.shutdown();
}
} | 0true
| titan-test_src_main_java_com_thinkaurelius_titan_blueprints_TitanSpecificBlueprintsTestSuite.java |
2,066 | public interface OverriddenModuleBuilder {
/**
* See the EDSL example at {@link Modules#override(Module[]) override()}.
*/
Module with(Module... overrides);
/**
* See the EDSL example at {@link Modules#override(Module[]) override()}.
*/
Module with(Iterable<? extends Module> overrides);
} | 0true
| src_main_java_org_elasticsearch_common_inject_util_Modules.java |
298 | @ResponseStatus(value= HttpStatus.FORBIDDEN, reason="Access is denied")
public class SecurityServiceException extends ServiceException {
public SecurityServiceException() {
super();
}
public SecurityServiceException(Throwable cause) {
super(cause);
}
public SecurityServiceException(String message) {
super(message);
}
public SecurityServiceException(String message, Throwable cause) {
super(message, cause);
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_exception_SecurityServiceException.java |
588 | class ShardRefreshResponse extends BroadcastShardOperationResponse {
ShardRefreshResponse() {
}
public ShardRefreshResponse(String index, int shardId) {
super(index, shardId);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_refresh_ShardRefreshResponse.java |
130 | static final class CreateAdder
implements ConcurrentHashMapV8.Fun<Object, LongAdder> {
public LongAdder apply(Object unused) { return new LongAdder(); }
} | 0true
| src_main_java_jsr166e_LongAdderTable.java |
128 | {
@Override
public void run()
{
db1.shutdown();
}
} ); | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestApplyTransactions.java |
618 | @Component("blTimeZoneResolver")
public class BroadleafTimeZoneResolverImpl implements BroadleafTimeZoneResolver {
private final Log LOG = LogFactory.getLog(BroadleafTimeZoneResolverImpl.class);
/**
* Parameter/Attribute name for the current time zone
*/
public static String TIMEZONE_VAR = "blTimeZone";
/**
* Parameter/Attribute name for the current time zone code
*/
public static String TIMEZONE_CODE_PARAM = "blTimeZoneCode";
@Override
public TimeZone resolveTimeZone(WebRequest request) {
TimeZone timeZone = null;
// First check for request attribute
timeZone = (TimeZone) request.getAttribute(TIMEZONE_VAR, WebRequest.SCOPE_REQUEST);
// Second, check for a request parameter
if (timeZone == null && BLCRequestUtils.getURLorHeaderParameter(request, TIMEZONE_CODE_PARAM) != null) {
String timeZoneCode = BLCRequestUtils.getURLorHeaderParameter(request, TIMEZONE_CODE_PARAM);
timeZone = TimeZone.getTimeZone(timeZoneCode);
if (LOG.isTraceEnabled()) {
LOG.trace("Attempt to find TimeZone by param " + timeZoneCode + " resulted in " + timeZone);
}
}
// Third, check the session
if (timeZone == null && BLCRequestUtils.isOKtoUseSession(request)) {
//@TODO verify if we should take this from global session
timeZone = (TimeZone) request.getAttribute(TIMEZONE_VAR, WebRequest.SCOPE_GLOBAL_SESSION);
if (LOG.isTraceEnabled()) {
LOG.trace("Attempt to find timezone from session resulted in " + timeZone);
}
}
// Finally, use the default
if (timeZone == null) {
timeZone = TimeZone.getDefault();
if (LOG.isTraceEnabled()) {
LOG.trace("timezone set to default timezone " + timeZone);
}
}
if (BLCRequestUtils.isOKtoUseSession(request)) {
request.setAttribute(TIMEZONE_VAR, timeZone, WebRequest.SCOPE_GLOBAL_SESSION);
}
return timeZone;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_web_BroadleafTimeZoneResolverImpl.java |
336 | runnables.add(new Runnable() {
public void run() {
Widget[] items= fViewer.testFindItems(element);
for (int i= 0; i < items.length; i++) {
Widget item= items[i];
if (item instanceof TreeItem && !item.isDisposed()) {
TreeItem parentItem= ((TreeItem) item).getParentItem();
if (parentItem != null && !parentItem.isDisposed() && parent.equals(parentItem.getData())) {
return; // no add, element already added (most likely by a refresh)
}
}
}
fViewer.add(parent, element);
}
}); | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_explorer_PackageExplorerContentProvider.java |
1,614 | public class MemberListenerImpl implements MembershipListener {
@Override
public void memberAdded(MembershipEvent membershipEvent) {
try {
Member member = membershipEvent.getMember();
if (member != null && instance.node.isMaster() && urlChanged) {
Operation operation = new UpdateManagementCenterUrlOperation(managementCenterUrl);
callOnMember(member, operation);
}
} catch (Exception e) {
logger.warning("Web server url cannot be send to the newly joined member", e);
}
}
@Override
public void memberRemoved(MembershipEvent membershipEvent) {
}
@Override
public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_management_ManagementCenterService.java |
39 | {
@Override
public HighAvailabilityMemberState getHighAvailabilityMemberState()
{
return memberStateMachine.getCurrentState();
}
} ); | 1no label
| enterprise_ha_src_main_java_org_neo4j_kernel_ha_HighlyAvailableGraphDatabase.java |
2,283 | abstract class FilterRecycler<T> implements Recycler<T> {
/** Get the delegate instance to forward calls to. */
protected abstract Recycler<T> getDelegate();
/** Wrap a recycled reference. */
protected Recycler.V<T> wrap(Recycler.V<T> delegate) {
return delegate;
}
@Override
public Recycler.V<T> obtain(int sizing) {
return wrap(getDelegate().obtain(sizing));
}
@Override
public Recycler.V<T> obtain() {
return wrap(getDelegate().obtain());
}
@Override
public void close() {
getDelegate().close();
}
} | 0true
| src_main_java_org_elasticsearch_common_recycler_FilterRecycler.java |
1,420 | public static class RemoveRequest {
final String name;
TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT;
public RemoveRequest(String name) {
this.name = name;
}
public RemoveRequest masterTimeout(TimeValue masterTimeout) {
this.masterTimeout = masterTimeout;
return this;
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_MetaDataIndexTemplateService.java |
2,177 | public class ApplyAcceptedDocsFilter extends Filter {
private final Filter filter;
public ApplyAcceptedDocsFilter(Filter filter) {
this.filter = filter;
}
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
DocIdSet docIdSet = filter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(docIdSet)) {
return null;
}
if (acceptDocs == null) {
return docIdSet;
}
if (acceptDocs == context.reader().getLiveDocs()) {
// optimized wrapper for not deleted cases
return new NotDeletedDocIdSet(docIdSet, acceptDocs);
}
return BitsFilteredDocIdSet.wrap(docIdSet, acceptDocs);
}
public Filter filter() {
return this.filter;
}
@Override
public String toString() {
return filter.toString();
}
static class NotDeletedDocIdSet extends DocIdSet {
private final DocIdSet innerSet;
private final Bits liveDocs;
NotDeletedDocIdSet(DocIdSet innerSet, Bits liveDocs) {
this.innerSet = innerSet;
this.liveDocs = liveDocs;
}
@Override
public boolean isCacheable() {
return innerSet.isCacheable();
}
@Override
public Bits bits() throws IOException {
Bits bits = innerSet.bits();
if (bits == null) {
return null;
}
return new NotDeleteBits(bits, liveDocs);
}
@Override
public DocIdSetIterator iterator() throws IOException {
if (!DocIdSets.isFastIterator(innerSet) && liveDocs instanceof FixedBitSet) {
// might as well iterate over the live docs..., since the iterator is not fast enough
// but we can only do that if we have Bits..., in short, we reverse the order...
Bits bits = innerSet.bits();
if (bits != null) {
return new NotDeletedDocIdSetIterator(((FixedBitSet) liveDocs).iterator(), bits);
}
}
DocIdSetIterator iterator = innerSet.iterator();
if (iterator == null) {
return null;
}
return new NotDeletedDocIdSetIterator(iterator, liveDocs);
}
}
static class NotDeleteBits implements Bits {
private final Bits bits;
private final Bits liveDocs;
NotDeleteBits(Bits bits, Bits liveDocs) {
this.bits = bits;
this.liveDocs = liveDocs;
}
@Override
public boolean get(int index) {
return liveDocs.get(index) && bits.get(index);
}
@Override
public int length() {
return bits.length();
}
}
static class NotDeletedDocIdSetIterator extends FilteredDocIdSetIterator {
private final Bits match;
NotDeletedDocIdSetIterator(DocIdSetIterator innerIter, Bits match) {
super(innerIter);
this.match = match;
}
@Override
protected boolean match(int doc) {
return match.get(doc);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((filter == null) ? 0 : filter.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ApplyAcceptedDocsFilter other = (ApplyAcceptedDocsFilter) obj;
if (filter == null) {
if (other.filter != null)
return false;
} else if (!filter.equals(other.filter))
return false;
return true;
}
} | 0true
| src_main_java_org_elasticsearch_common_lucene_search_ApplyAcceptedDocsFilter.java |
70 | public interface TitanTransaction extends TitanGraphTransaction {
/* ---------------------------------------------------------------
* Modifications
* ---------------------------------------------------------------
*/
/**
* Creates a new vertex in the graph with the given vertex id and the given vertex label.
* Note, that an exception is thrown if the vertex id is not a valid Titan vertex id or if a vertex with the given
* id already exists.
* <p/>
* Custom id setting must be enabled via the configuration option {@link com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration#ALLOW_SETTING_VERTEX_ID}.
* <p/>
* Use {@link com.thinkaurelius.titan.core.util.TitanId#toVertexId(long)} to construct a valid Titan vertex id from a user id.
*
* @param id vertex id of the vertex to be created
* @param vertexLabel vertex label for this vertex - can be null if no vertex label should be set.
* @return New vertex
*/
public TitanVertex addVertex(Long id, VertexLabel vertexLabel);
/**
* Creates a new edge connecting the specified vertices.
* <p/>
* Creates and returns a new {@link TitanEdge} with given label connecting the vertices in the order
* specified.
*
* @param label label of the edge to be created
* @param outVertex outgoing vertex of the edge
* @param inVertex incoming vertex of the edge
* @return new edge
*/
public TitanEdge addEdge(TitanVertex outVertex, TitanVertex inVertex, EdgeLabel label);
/**
* Creates a new edge connecting the specified vertices.
* <p/>
* Creates and returns a new {@link TitanEdge} with given label connecting the vertices in the order
* specified.
* <br />
* Automatically creates the edge label if it does not exist and automatic creation of types is enabled. Otherwise,
* this method will throw an {@link IllegalArgumentException}.
*
* @param label label of the edge to be created
* @param outVertex outgoing vertex of the edge
* @param inVertex incoming vertex of the edge
* @return new edge
*/
public TitanEdge addEdge(TitanVertex outVertex, TitanVertex inVertex, String label);
/**
* Creates a new property for the given vertex and key with the specified value.
* <p/>
* Creates and returns a new {@link TitanProperty} with specified property key and the given object being the value.
*
* @param key key of the property to be created
* @param vertex vertex for which to create the property
* @param value value of the property to be created
* @return new property
* @throws IllegalArgumentException if the value does not match the data type of the given property key.
*/
public TitanProperty addProperty(TitanVertex vertex, PropertyKey key, Object value);
/**
* Creates a new property for the given vertex and key with the specified value.
* <p/>
* Creates and returns a new {@link TitanProperty} with specified property key and the given object being the value.
* <br />
* Automatically creates the property key if it does not exist and automatic creation of types is enabled. Otherwise,
* this method will throw an {@link IllegalArgumentException}.
*
* @param key key of the property to be created
* @param vertex vertex for which to create the property
* @param value value of the property to be created
* @return new property
* @throws IllegalArgumentException if the value does not match the data type of the given property key.
*/
public TitanProperty addProperty(TitanVertex vertex, String key, Object value);
/**
* Retrieves all vertices which have a property of the given key with the specified value.
* <p/>
* For this operation to be efficient, please ensure that the given property key is indexed.
* Some storage backends may not support this method without a pre-configured index.
*
* @param key key
* @param value value
* @return All vertices which have a property of the given key with the specified value.
* @see com.thinkaurelius.titan.core.schema.TitanManagement#buildIndex(String, Class)
*/
public Iterable<TitanVertex> getVertices(PropertyKey key, Object value);
/**
* Retrieves all edges which have a property of the given key with the specified value.
* <p/>
* For this operation to be efficient, please ensure that the given property key is indexed.
* Some storage backends may not support this method without a pre-configured index.
*
* @param key key
* @param value value
* @return All edges which have a property of the given key with the specified value.
* @see com.thinkaurelius.titan.core.schema.TitanManagement#buildIndex(String, Class)
*/
public Iterable<TitanEdge> getEdges(PropertyKey key, Object value);
/* ---------------------------------------------------------------
* Closing and admin
* ---------------------------------------------------------------
*/
/**
* Commits and closes the transaction.
* <p/>
* Will attempt to persist all modifications which may result in exceptions in case of persistence failures or
* lock contention.
* <br />
* The call releases data structures if possible. All element references (e.g. vertex objects) retrieved
* through this transaction are stale after the transaction closes and should no longer be used.
*
* @throws com.thinkaurelius.titan.diskstorage.BackendException
* if an error arises during persistence
*/
public void commit();
/**
* Aborts and closes the transaction. Will discard all modifications.
* <p/>
* The call releases data structures if possible. All element references (e.g. vertex objects) retrieved
* through this transaction are stale after the transaction closes and should no longer be used.
*
* @throws com.thinkaurelius.titan.diskstorage.BackendException
* if an error arises when releasing the transaction handle
*/
public void rollback();
/**
* Checks whether the transaction is still open.
*
* @return true, when the transaction is open, else false
*/
public boolean isOpen();
/**
* Checks whether the transaction has been closed.
*
* @return true, if the transaction has been closed, else false
*/
public boolean isClosed();
/**
* Checks whether any changes to the graph database have been made in this transaction.
* <p/>
* A modification may be an edge or vertex update, addition, or deletion.
*
* @return true, if the transaction contains updates, else false.
*/
public boolean hasModifications();
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_TitanTransaction.java |
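A short end-to-end sketch of how the interface above is typically used: open a graph, start a transaction, create elements with the addVertex/addProperty/addEdge methods declared here, and commit. The in-memory backend string and the use of null for the custom vertex id are assumptions for illustration only (the javadoc above only guarantees that a null vertex label is allowed):

import com.thinkaurelius.titan.core.TitanFactory;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanTransaction;
import com.thinkaurelius.titan.core.TitanVertex;

public final class TitanTransactionSketch {
    public static void main(String[] args) {
        TitanGraph graph = TitanFactory.open("inmemory");    // backend choice is an assumption
        TitanTransaction tx = graph.newTransaction();

        // null vertex label per the javadoc above; passing null for the custom id is an assumption
        TitanVertex alice = tx.addVertex(null, null);
        TitanVertex bob = tx.addVertex(null, null);

        tx.addProperty(alice, "name", "alice");               // key auto-created if type creation is enabled
        tx.addProperty(bob, "name", "bob");
        tx.addEdge(alice, bob, "knows");                      // edge label auto-created likewise

        tx.commit();                                          // persists the vertices, properties and edge
        graph.shutdown();
    }
}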
340 | threadPool.schedule(request.delay, ThreadPool.Names.GENERIC, new Runnable() {
@Override
public void run() {
boolean restartWithWrapper = false;
if (System.getProperty("elasticsearch-service") != null) {
try {
Class wrapperManager = settings.getClassLoader().loadClass("org.tanukisoftware.wrapper.WrapperManager");
logger.info("Initiating requested restart (using service)");
wrapperManager.getMethod("restartAndReturn").invoke(null);
restartWithWrapper = true;
} catch (Throwable e) {
logger.error("failed to initial restart on service wrapper", e);
}
}
if (!restartWithWrapper) {
logger.info("Initiating requested restart");
try {
node.stop();
node.start();
} catch (Exception e) {
logger.warn("Failed to restart", e);
} finally {
restartRequested.set(false);
}
}
}
}); | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_node_restart_TransportNodesRestartAction.java |
544 | public enum RESULT {
RECORD_NOT_CHANGED, RECORD_CHANGED, SKIP
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_hook_ORecordHook.java |
1,963 | public class MapUnlockRequest extends AbstractUnlockRequest implements SecureRequest {
private String name;
public MapUnlockRequest() {
}
public MapUnlockRequest(String name, Data key, long threadId) {
super(key, threadId, false);
this.name = name;
}
public MapUnlockRequest(String name, Data key, long threadId, boolean force) {
super(key, threadId, force);
this.name = name;
}
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public int getClassId() {
return MapPortableHook.UNLOCK;
}
protected ObjectNamespace getNamespace() {
return new DefaultObjectNamespace(MapService.SERVICE_NAME, name);
}
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
super.write(writer);
}
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
super.read(reader);
}
public Permission getRequiredPermission() {
return new MapPermission(name, ActionConstants.ACTION_LOCK);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_client_MapUnlockRequest.java |
3,450 | public class LocalIndexGatewayModule extends AbstractModule {
@Override
protected void configure() {
bind(IndexGateway.class).to(LocalIndexGateway.class).asEagerSingleton();
}
} | 0true
| src_main_java_org_elasticsearch_index_gateway_local_LocalIndexGatewayModule.java |
131 | public static class CriteriaStructuredContentXrefPK implements Serializable {
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 1L;
@ManyToOne(targetEntity = StructuredContentImpl.class, optional=false)
@JoinColumn(name = "SC_ID")
protected StructuredContent structuredContent = new StructuredContentImpl();
@ManyToOne(targetEntity = StructuredContentItemCriteriaImpl.class, optional=false)
@JoinColumn(name = "SC_ITEM_CRITERIA_ID")
protected StructuredContentItemCriteria structuredContentItemCriteria = new StructuredContentItemCriteriaImpl();
public StructuredContent getStructuredContent() {
return structuredContent;
}
public void setStructuredContent(StructuredContent structuredContent) {
this.structuredContent = structuredContent;
}
public StructuredContentItemCriteria getStructuredContentItemCriteria() {
return structuredContentItemCriteria;
}
public void setStructuredContentItemCriteria(StructuredContentItemCriteria structuredContentItemCriteria) {
this.structuredContentItemCriteria = structuredContentItemCriteria;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((structuredContent == null) ? 0 : structuredContent.hashCode());
result = prime * result + ((structuredContentItemCriteria == null) ? 0 : structuredContentItemCriteria.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CriteriaStructuredContentXrefPK other = (CriteriaStructuredContentXrefPK) obj;
if (structuredContent == null) {
if (other.structuredContent != null)
return false;
} else if (!structuredContent.equals(other.structuredContent))
return false;
if (structuredContentItemCriteria == null) {
if (other.structuredContentItemCriteria != null)
return false;
} else if (!structuredContentItemCriteria.equals(other.structuredContentItemCriteria))
return false;
return true;
}
} | 1no label
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_CriteriaStructuredContentXref.java |
2,079 | public class PartitionWideEntryBackupOperation extends AbstractMapOperation implements BackupOperation, PartitionAwareOperation {
EntryBackupProcessor entryProcessor;
public PartitionWideEntryBackupOperation(String name, EntryBackupProcessor entryProcessor) {
super(name);
this.entryProcessor = entryProcessor;
}
public PartitionWideEntryBackupOperation() {
}
public void run() {
Map.Entry entry;
RecordStore recordStore = mapService.getRecordStore(getPartitionId(), name);
Map<Data, Record> records = recordStore.getReadonlyRecordMap();
for (Map.Entry<Data, Record> recordEntry : records.entrySet()) {
Data dataKey = recordEntry.getKey();
Record record = recordEntry.getValue();
Object objectKey = mapService.toObject(record.getKey());
Object valueBeforeProcess = mapService.toObject(record.getValue());
if (getPredicate() != null) {
QueryEntry queryEntry = new QueryEntry(getNodeEngine().getSerializationService(), dataKey, objectKey, valueBeforeProcess);
if (!getPredicate().apply(queryEntry)) {
continue;
}
}
entry = new AbstractMap.SimpleEntry(objectKey, valueBeforeProcess);
entryProcessor.processBackup(entry);
if (entry.getValue() == null){
recordStore.removeBackup(dataKey);
} else {
recordStore.putBackup(dataKey, entry.getValue());
}
}
}
@Override
public boolean returnsResponse() {
return true;
}
protected Predicate getPredicate() {
return null;
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
entryProcessor = in.readObject();
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeObject(entryProcessor);
}
@Override
public Object getResponse() {
return true;
}
@Override
public String toString() {
return "PartitionWideEntryBackupOperation{}";
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_map_operation_PartitionWideEntryBackupOperation.java |
3,416 | public interface IndexShardGateway extends IndexShardComponent, CloseableIndexComponent {
String type();
/**
* The last / ongoing recovery status.
*/
RecoveryStatus recoveryStatus();
/**
* The last snapshot status performed. Can be <tt>null</tt>.
*/
SnapshotStatus lastSnapshotStatus();
/**
* The current snapshot status being performed. Can be <tt>null</tt> indicating that no snapshot
* is being executed currently.
*/
SnapshotStatus currentSnapshotStatus();
/**
* Recovers the state of the shard from the gateway.
*/
void recover(boolean indexShouldExists, RecoveryStatus recoveryStatus) throws IndexShardGatewayRecoveryException;
/**
* Snapshots the given shard into the gateway.
*/
SnapshotStatus snapshot(Snapshot snapshot) throws IndexShardGatewaySnapshotFailedException;
/**
* Returns <tt>true</tt> if snapshot is even required on this gateway (i.e. mainly handles recovery).
*/
boolean requiresSnapshot();
/**
* Returns <tt>true</tt> if this gateway requires scheduling management for snapshot
* operations.
*/
boolean requiresSnapshotScheduling();
SnapshotLock obtainSnapshotLock() throws Exception;
public static interface SnapshotLock {
void release();
}
public static final SnapshotLock NO_SNAPSHOT_LOCK = new SnapshotLock() {
@Override
public void release() {
}
};
public static class Snapshot {
private final SnapshotIndexCommit indexCommit;
private final Translog.Snapshot translogSnapshot;
private final long lastIndexVersion;
private final long lastTranslogId;
private final long lastTranslogLength;
private final int lastTotalTranslogOperations;
public Snapshot(SnapshotIndexCommit indexCommit, Translog.Snapshot translogSnapshot, long lastIndexVersion, long lastTranslogId, long lastTranslogLength, int lastTotalTranslogOperations) {
this.indexCommit = indexCommit;
this.translogSnapshot = translogSnapshot;
this.lastIndexVersion = lastIndexVersion;
this.lastTranslogId = lastTranslogId;
this.lastTranslogLength = lastTranslogLength;
this.lastTotalTranslogOperations = lastTotalTranslogOperations;
}
/**
* Indicates that the index has changed from the latest snapshot.
*/
public boolean indexChanged() {
return lastIndexVersion != indexCommit.getGeneration();
}
/**
* Indicates that a new transaction log has been created. Note: check this <b>before</b> you
* check {@link #sameTranslogNewOperations()}.
*/
public boolean newTranslogCreated() {
return translogSnapshot.translogId() != lastTranslogId;
}
/**
* Indicates that the same translog exists, but new operations have been appended to it. Throws
* {@link org.elasticsearch.ElasticsearchIllegalStateException} if {@link #newTranslogCreated()} is <tt>true</tt>, so
* always check that first.
*/
public boolean sameTranslogNewOperations() {
if (newTranslogCreated()) {
throw new ElasticsearchIllegalStateException("Should not be called when there is a new translog");
}
return translogSnapshot.length() > lastTranslogLength;
}
public SnapshotIndexCommit indexCommit() {
return indexCommit;
}
public Translog.Snapshot translogSnapshot() {
return translogSnapshot;
}
public long lastIndexVersion() {
return lastIndexVersion;
}
public long lastTranslogId() {
return lastTranslogId;
}
public long lastTranslogLength() {
return lastTranslogLength;
}
public int lastTotalTranslogOperations() {
return this.lastTotalTranslogOperations;
}
}
} | 0true
| src_main_java_org_elasticsearch_index_gateway_IndexShardGateway.java |
139 | {
private int size;
@Override
public boolean reached( File file, long version, LogLoader source )
{
size += fileSystem.getFileSize( file );
return size >= maxSize;
}
}; | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogPruneStrategies.java |
1,177 | public interface ItemListener<E> extends EventListener {
/**
* Invoked when an item is added.
*
* @param item added item
*/
void itemAdded(ItemEvent<E> item);
/**
* Invoked when an item is removed.
*
* @param item removed item.
*/
void itemRemoved(ItemEvent<E> item);
} | 0true
| hazelcast_src_main_java_com_hazelcast_core_ItemListener.java |
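ItemListener is the collection-side counterpart to entry listeners: it fires when items are added to or removed from distributed queues, sets, and lists. A small registration sketch, assuming a locally started member and a queue named "tasks":

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IQueue;
import com.hazelcast.core.ItemEvent;
import com.hazelcast.core.ItemListener;

public final class QueueListenerDemo {
    public static void main(String[] args) throws InterruptedException {
        HazelcastInstance hz = Hazelcast.newHazelcastInstance();
        IQueue<String> queue = hz.getQueue("tasks");

        // includeValue = true so the event carries the item itself
        queue.addItemListener(new ItemListener<String>() {
            public void itemAdded(ItemEvent<String> event) {
                System.out.println("added: " + event.getItem());
            }

            public void itemRemoved(ItemEvent<String> event) {
                System.out.println("removed: " + event.getItem());
            }
        }, true);

        queue.put("first");   // triggers itemAdded
        queue.take();         // triggers itemRemoved
        hz.shutdown();
    }
}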
1,173 | LOCAL {
@Override
public Transport newTransport(Settings settings, ThreadPool threadPool) {
return new LocalTransport(settings, threadPool, Version.CURRENT);
}
}, | 0true
| src_test_java_org_elasticsearch_benchmark_transport_TransportBenchmark.java |
2,739 | static class NodeRequest extends NodeOperationRequest {
ShardId shardId;
NodeRequest() {
}
NodeRequest(String nodeId, TransportNodesListGatewayStartedShards.Request request) {
super(request, nodeId);
this.shardId = request.shardId();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = ShardId.readShardId(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
shardId.writeTo(out);
}
} | 0true
| src_main_java_org_elasticsearch_gateway_local_state_shards_TransportNodesListGatewayStartedShards.java |
358 | public class NullClassTransformer implements BroadleafClassTransformer {
@Override
public void compileJPAProperties(Properties props, Object key) throws Exception {
}
@Override
public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined,
ProtectionDomain protectionDomain, byte[] classfileBuffer) throws IllegalClassFormatException {
return null;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_copy_NullClassTransformer.java |
1,260 | public class OClusterLocalHole extends OSingleFileSegment {
private static final int DEF_START_SIZE = 262144;
private static final int RECORD_SIZE = 8;
private OClusterLocal owner;
public OClusterLocalHole(final OClusterLocal iClusterLocal, final OStorageLocal iStorage, final OStorageFileConfiguration iConfig)
throws IOException {
super(iStorage, iConfig);
owner = iClusterLocal;
}
/**
* TODO Check values removing dirty entries (equals to -1)
*/
public void defrag() throws IOException {
OLogManager.instance().debug(this, "Starting to defragment the segment %s of size=%d and filled=%d", file, file.getFileSize(),
file.getFilledUpTo());
OLogManager.instance().debug(this, "Defragmentation ended for segment %s. Current size=%d and filled=%d", file,
file.getFileSize(), file.getFilledUpTo());
}
public void create() throws IOException {
file.create(DEF_START_SIZE);
}
/**
* Append the hole to the end of segment
*
* @throws IOException
*/
public long pushPosition(final long iPosition) throws IOException {
final int position = getHoles() * RECORD_SIZE;
file.allocateSpace(RECORD_SIZE);
file.writeLong(position, iPosition);
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Pushed new hole %s/#%d -> #%d:%d", owner.getName(), position / RECORD_SIZE,
owner.getId(), iPosition);
return position;
}
/**
* Returns and remove the recycled position if any.
*
* @return the recycled position if found, otherwise -1 that usually means to request more space.
* @throws IOException
*/
public long popLastEntryPosition() throws IOException {
// BROWSE IN ASCENDING ORDER UNTIL A GOOD POSITION IS FOUND (!=-1)
for (int pos = getHoles() - 1; pos >= 0; --pos) {
final long recycledPosition = file.readLong(pos * RECORD_SIZE);
if (recycledPosition > -1) {
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Recycled hole %s/#%d -> #%d:%d", owner.getName(), pos, owner.getId(),
recycledPosition);
// SHRINK THE FILE
file.removeTail((getHoles() - pos) * RECORD_SIZE);
return recycledPosition;
}
}
return -1;
}
/**
* Returns the recycled position if any.
*
* @return the recycled position if found, otherwise -1 that usually means to request more space.
* @throws IOException
*/
public long getEntryPosition(final int iPosition) throws IOException {
return file.readLong(iPosition * RECORD_SIZE);
}
/**
* Removes a hole. Called on transaction recover to invalidate a delete for a record. Try to shrink the file if the invalidated
* entry is not in the middle of valid entries.
*
* @param iPosition
* Record position to find and invalidate
* @return
* @throws IOException
*/
public boolean removeEntryWithPosition(final long iPosition) throws IOException {
// BROWSE IN ASCENDING ORDER UNTIL THE REQUESTED POSITION IS FOUND
boolean canShrink = true;
for (int pos = getHoles() - 1; pos >= 0; --pos) {
final long recycledPosition = file.readLong(pos * RECORD_SIZE);
if (recycledPosition == iPosition) {
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Removing hole #%d containing the position #%d:%d", pos, owner.getId(),
recycledPosition);
file.writeLong(pos * RECORD_SIZE, -1);
if (canShrink)
// SHRINK THE FILE
file.removeTail((getHoles() - pos) * RECORD_SIZE);
return true;
} else if (iPosition != -1)
// NO NULL ENTRY: CAN'T SHRINK WITHOUT LOST OF ENTRIES
canShrink = false;
}
return false;
}
/**
* Computes the number of holes. Note that not all the holes could be valid.
*
* @return
*/
public int getHoles() {
return (int) file.getFilledUpTo() / RECORD_SIZE;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_OClusterLocalHole.java |
321 | private class ClientThread extends Thread {
public volatile int mapSize = 0;
public void run() {
HazelcastInstance client = HazelcastClient.newHazelcastClient();
IMap<String, String> map = client.getMap(ClientMapStoreTest.MAP_NAME);
mapSize = map.size();
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapStoreTest.java |
1,582 | public interface ODistributedPartitioningStrategy {
ODistributedPartition getPartition(final ODistributedServerManager iManager, final String iDatabaseName, final String iClusterName);
} | 0true
| server_src_main_java_com_orientechnologies_orient_server_distributed_ODistributedPartitioningStrategy.java |
1,042 | public interface OCommandExecutorSQLFactory {
/**
* @return Set of supported command names of this factory
*/
public Set<String> getCommandNames();
/**
* Create command for the given name. returned command may be a new instance each time or a constant.
*
* @param name
* @return OCommandExecutorSQLAbstract : created command
* @throws OCommandExecutionException
* : when command creation fail
*/
public OCommandExecutorSQLAbstract createCommand(String name) throws OCommandExecutionException;
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLFactory.java |
966 | public class OZIPCompressionUtil {
public static int compressDirectory(final String sourceFolderName, final OutputStream output) throws IOException {
final ZipOutputStream zos = new ZipOutputStream(output);
try {
zos.setLevel(9);
return addFolder(zos, sourceFolderName, sourceFolderName);
} finally {
zos.close();
}
}
/***
* Extract zipfile to outdir with complete directory structure
*
* @param zipfile
* Input .zip file
* @param outdir
* Output directory
* @throws IOException
*/
public static void uncompressDirectory(final InputStream in, final String out) throws IOException {
final File outdir = new File(out);
final ZipInputStream zin = new ZipInputStream(in);
try {
ZipEntry entry;
String name, dir;
while ((entry = zin.getNextEntry()) != null) {
name = entry.getName();
if (entry.isDirectory()) {
mkdirs(outdir, name);
continue;
}
/*
* this part is necessary because file entry can come before directory entry where is file located i.e.: /foo/foo.txt /foo/
*/
dir = getDirectoryPart(name);
if (dir != null)
mkdirs(outdir, dir);
extractFile(zin, outdir, name);
}
} finally {
zin.close();
}
}
private static void extractFile(final ZipInputStream in, final File outdir, final String name) throws IOException {
final BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(new File(outdir, name)));
try {
OIOUtils.copyStream(in, out, -1);
} finally {
out.close();
}
}
private static void mkdirs(final File outdir, final String path) {
final File d = new File(outdir, path);
if (!d.exists())
d.mkdirs();
}
private static String getDirectoryPart(final String name) {
final int s = name.lastIndexOf(File.separatorChar);
return s == -1 ? null : name.substring(0, s);
}
private static int addFolder(ZipOutputStream zos, String folderName, String baseFolderName) throws IOException {
int total = 0;
File f = new File(folderName);
if (f.exists()) {
if (f.isDirectory()) {
File f2[] = f.listFiles();
for (int i = 0; i < f2.length; i++) {
total += addFolder(zos, f2[i].getAbsolutePath(), baseFolderName);
}
} else {
// add file
// extract the relative name for entry purpose
String entryName = folderName.substring(baseFolderName.length() + 1, folderName.length());
ZipEntry ze = new ZipEntry(entryName);
zos.putNextEntry(ze);
try {
FileInputStream in = new FileInputStream(folderName);
try {
OIOUtils.copyStream(in, zos, -1);
} finally {
in.close();
}
} finally {
zos.closeEntry();
}
total++;
}
} else {
throw new IllegalArgumentException("Directory " + folderName + " not found");
}
return total;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_serialization_compression_impl_OZIPCompressionUtil.java |
192 | public interface AdminMainEntity {
public static final String MAIN_ENTITY_NAME_PROPERTY = "__adminMainEntity";
/**
* @return the display name of this entity for the admin screen
*/
public String getMainEntityName();
} | 0true
| common_src_main_java_org_broadleafcommerce_common_admin_domain_AdminMainEntity.java |
275 | public class ThriftIDAuthorityTest extends IDAuthorityTest {
public ThriftIDAuthorityTest(WriteConfiguration baseConfig) {
super(baseConfig);
}
@BeforeClass
public static void startCassandra() {
CassandraStorageSetup.startCleanEmbedded();
}
@Override
public KeyColumnValueStoreManager openStorageManager() throws BackendException {
return new CassandraThriftStoreManager(CassandraStorageSetup.getCassandraThriftConfiguration(this.getClass().getSimpleName()));
}
} | 0true
| titan-cassandra_src_test_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_ThriftIDAuthorityTest.java |
4,665 | private final PercolatorType queryPercolator = new PercolatorType() {
@Override
public byte id() {
return 0x04;
}
@Override
public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
return matchPercolator.reduce(shardResults);
}
@Override
public PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context) {
Engine.Searcher percolatorSearcher = context.indexShard().acquireSearcher("percolate");
try {
Match match = match(logger, context, highlightPhase);
queryBasedPercolating(percolatorSearcher, context, match);
List<BytesRef> matches = match.matches();
List<Map<String, HighlightField>> hls = match.hls();
long count = match.counter();
BytesRef[] finalMatches = matches.toArray(new BytesRef[matches.size()]);
return new PercolateShardResponse(finalMatches, hls, count, context, request.index(), request.shardId());
} catch (Throwable e) {
logger.debug("failed to execute", e);
throw new PercolateException(context.indexShard().shardId(), "failed to execute", e);
} finally {
percolatorSearcher.release();
}
}
}; | 1no label
| src_main_java_org_elasticsearch_percolator_PercolatorService.java |
3,524 | public interface RootMapper extends Mapper {
void preParse(ParseContext context) throws IOException;
void postParse(ParseContext context) throws IOException;
void validate(ParseContext context) throws MapperParsingException;
/**
* Should the mapper be included in the root {@link org.elasticsearch.index.mapper.object.ObjectMapper}.
*/
boolean includeInObject();
} | 0true
| src_main_java_org_elasticsearch_index_mapper_RootMapper.java |
3,439 | public static class Translog {
private long startTime;
private long time;
private int expectedNumberOfOperations;
public long startTime() {
return this.startTime;
}
public void startTime(long startTime) {
this.startTime = startTime;
}
public long time() {
return this.time;
}
public void time(long time) {
this.time = time;
}
public int expectedNumberOfOperations() {
return expectedNumberOfOperations;
}
public void expectedNumberOfOperations(int expectedNumberOfOperations) {
this.expectedNumberOfOperations = expectedNumberOfOperations;
}
} | 0true
| src_main_java_org_elasticsearch_index_gateway_SnapshotStatus.java |
1,642 | public class MemberConfigRequest implements ConsoleRequest {
public MemberConfigRequest() {
}
@Override
public int getType() {
return ConsoleRequestConstants.REQUEST_TYPE_MEMBER_CONFIG;
}
@Override
public Object readResponse(ObjectDataInput in) throws IOException {
return in.readUTF();
}
@Override
public void writeResponse(ManagementCenterService mcs, ObjectDataOutput dos) throws Exception {
ConfigXmlGenerator configXmlGenerator = new ConfigXmlGenerator(true);
Config config = mcs.getHazelcastInstance().getConfig();
String clusterXml = configXmlGenerator.generate(config);
dos.writeUTF(clusterXml);
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_management_request_MemberConfigRequest.java |
1,814 | = new FailableCache<TypeLiteral<?>, ConstructorInjector<?>>() {
@SuppressWarnings("unchecked")
protected ConstructorInjector<?> create(TypeLiteral<?> type, Errors errors)
throws ErrorsException {
return createConstructor(type, errors);
}
}; | 0true
| src_main_java_org_elasticsearch_common_inject_ConstructorInjectorStore.java |
668 | constructors[COLLECTION_TXN_ADD] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new CollectionTxnAddOperation();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java |
1,073 | private static final class Pair {
final OQueryOperator before;
final OQueryOperator after;
public Pair(final OQueryOperator before, final OQueryOperator after) {
this.before = before;
this.after = after;
}
@Override
public boolean equals(final Object obj) {
if (obj instanceof Pair) {
final Pair that = (Pair) obj;
return before == that.before && after == that.after;
}
return false;
}
@Override
public int hashCode() {
return System.identityHashCode(before) + 31 * System.identityHashCode(after);
}
@Override
public String toString() {
return before + " > " + after;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_sql_OSQLEngine.java |
1,872 | public interface Module {
/**
* Contributes bindings and other configurations for this module to {@code binder}.
* <p/>
* <p><strong>Do not invoke this method directly</strong> to install submodules. Instead use
* {@link Binder#install(Module)}, which ensures that {@link Provides provider methods} are
* discovered.
*/
void configure(Binder binder);
} | 0true
| src_main_java_org_elasticsearch_common_inject_Module.java |
584 | public class TaxException extends Exception {
private static final long serialVersionUID = 1L;
protected TaxResponse taxResponse;
public TaxException() {
super();
}
public TaxException(String message, Throwable cause) {
super(message, cause);
}
public TaxException(String message) {
super(message);
}
public TaxException(Throwable cause) {
super(cause);
}
public TaxResponse getTaxResponse() {
return taxResponse;
}
public void setTaxResponse(TaxResponse taxResponse) {
this.taxResponse = taxResponse;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_vendor_service_exception_TaxException.java |
3,388 | static final class FilterSettingFields {
static final String ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO = "acceptable_transient_overhead_ratio";
static final String FREQUENCY_MIN = "filter.frequency.min";
static final String FREQUENCY_MAX = "filter.frequency.max";
static final String FREQUENCY_MIN_SEGMENT_SIZE = "filter.frequency.min_segment_size";
static final String REGEX_PATTERN = "filter.regex.pattern";
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_PagedBytesIndexFieldData.java |
1,025 | public static class IterableNodeList implements Iterable<Node> {
private final NodeList parent;
private final int maximum;
private final short nodeType;
public IterableNodeList(final Node node) {
this(node.getChildNodes());
}
public IterableNodeList(final NodeList list) {
this(list, (short) 0);
}
public IterableNodeList(final Node node, short nodeType) {
this(node.getChildNodes(), nodeType);
}
public IterableNodeList(final NodeList parent, short nodeType) {
this.parent = parent;
this.nodeType = nodeType;
this.maximum = parent.getLength();
}
public Iterator<Node> iterator() {
return new Iterator<Node>() {
private int index = 0;
private Node next;
private boolean findNext() {
next = null;
for (; index < maximum; index++) {
final Node item = parent.item(index);
if (nodeType == 0 || item.getNodeType() == nodeType) {
next = item;
return true;
}
}
return false;
}
public boolean hasNext() {
return findNext();
}
public Node next() {
if (findNext()) {
index++;
return next;
}
throw new NoSuchElementException();
}
public void remove() {
throw new UnsupportedOperationException();
}
};
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_AbstractXmlConfigHelper.java |
2,635 | threadPool.schedule(timeout, ThreadPool.Names.GENERIC, new Runnable() {
@Override
public void run() {
ConcurrentMap<DiscoveryNode, PingResponse> responses = receivedResponses.remove(id);
listener.onPing(responses.values().toArray(new PingResponse[responses.size()]));
}
}); | 0true
| src_main_java_org_elasticsearch_discovery_zen_ping_multicast_MulticastZenPing.java |
164 | public class SpecifyTypeArgumentsProposal extends CorrectionProposal {
SpecifyTypeArgumentsProposal(String type, TextFileChange change) {
super("Specify explicit type arguments '" + type + "'", change, null, REVEAL);
}
static void addSpecifyTypeArgumentsProposal(Tree.CompilationUnit cu, Node node,
Collection<ICompletionProposal> proposals, IFile file) {
Tree.MemberOrTypeExpression ref = (Tree.MemberOrTypeExpression) node;
Tree.Identifier identifier;
Tree.TypeArguments typeArguments;
if (ref instanceof Tree.BaseMemberOrTypeExpression) {
identifier = ((Tree.BaseMemberOrTypeExpression) ref).getIdentifier();
typeArguments = ((Tree.BaseMemberOrTypeExpression) ref).getTypeArguments();
}
else if (ref instanceof Tree.QualifiedMemberOrTypeExpression) {
identifier = ((Tree.QualifiedMemberOrTypeExpression) ref).getIdentifier();
typeArguments = ((Tree.QualifiedMemberOrTypeExpression) ref).getTypeArguments();
}
else {
return;
}
if (typeArguments instanceof Tree.InferredTypeArguments &&
typeArguments.getTypeModels()!=null &&
!typeArguments.getTypeModels().isEmpty()) {
StringBuilder builder = new StringBuilder("<");
for (ProducedType arg: typeArguments.getTypeModels()) {
if (isTypeUnknown(arg)) {
return;
}
if (builder.length()!=1) {
builder.append(",");
}
builder.append(arg.getProducedTypeName(node.getUnit()));
}
builder.append(">");
TextFileChange change = new TextFileChange("Specify Explicit Type Arguments", file);
change.setEdit(new InsertEdit(identifier.getStopIndex()+1, builder.toString()));
proposals.add(new SpecifyTypeArgumentsProposal(builder.toString(), change));
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_SpecifyTypeArgumentsProposal.java |
3,562 | public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ByteFieldMapper.Builder builder = byteField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(nodeByteValue(propNode));
}
}
return builder;
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_core_ByteFieldMapper.java |
1,521 | public static final class Helper {
private Helper() {
}
public static void tryCloseConnections(HazelcastInstance hazelcastInstance) {
if (hazelcastInstance == null) {
return;
}
HazelcastInstanceImpl factory = (HazelcastInstanceImpl) hazelcastInstance;
closeSockets(factory);
}
private static void closeSockets(HazelcastInstanceImpl factory) {
if (factory.node.connectionManager != null) {
try {
factory.node.connectionManager.shutdown();
} catch (Throwable ignored) {
}
}
}
public static void tryShutdown(HazelcastInstance hazelcastInstance) {
if (hazelcastInstance == null) {
return;
}
HazelcastInstanceImpl factory = (HazelcastInstanceImpl) hazelcastInstance;
closeSockets(factory);
try {
factory.node.shutdown(true);
} catch (Throwable ignored) {
}
}
public static void inactivate(HazelcastInstance hazelcastInstance) {
if (hazelcastInstance == null) {
return;
}
final HazelcastInstanceImpl factory = (HazelcastInstanceImpl) hazelcastInstance;
factory.node.inactivate();
}
public static void tryStopThreads(HazelcastInstance hazelcastInstance) {
if (hazelcastInstance == null) {
return;
}
HazelcastInstanceImpl factory = (HazelcastInstanceImpl) hazelcastInstance;
try {
factory.node.threadGroup.interrupt();
} catch (Throwable ignored) {
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_instance_OutOfMemoryErrorDispatcher.java |
526 | public class DateUtil {
public static boolean isActive(Date startDate, Date endDate, boolean includeTime) {
Long date = SystemTime.asMillis(includeTime);
return !(startDate == null || startDate.getTime() > date || (endDate != null && endDate.getTime() < date));
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_util_DateUtil.java |
3,014 | public interface QueryParserCache extends IndexComponent, CloseableComponent {
Query get(QueryParserSettings queryString);
void put(QueryParserSettings queryString, Query query);
void clear();
} | 0true
| src_main_java_org_elasticsearch_index_cache_query_parser_QueryParserCache.java |
247 | @Test
public class ODefaultCacheCleanUpTest {
public void removesGivenAmountOfRecords() {
// Given filled cache backend
ODefaultCache.OLinkedHashMapCache sut = filledCacheBackend();
int originalSize = sut.size();
// When asked to remove eldest items
int amount = 10;
sut.removeEldest(amount);
// Then new cache size should be of original size minus amount of deleted items
assertEquals(sut.size(), originalSize - amount);
}
public void doesNotTakeDirtyRecordsIntoAccountWhenSkips() {
// Given filled cache backend
// With some dirty records in it
ODefaultCache.OLinkedHashMapCache sut = filledCacheBackendWithSomeDirtyRecords();
int originalSize = sut.size();
// When asked to remove eldest items
int amount = 10;
sut.removeEldest(amount);
// Then removes less then asked
assertTrue(amount > originalSize - sut.size());
}
public void clearsWholeCacheIfMemoryCriticallyLow() {
// Given running filled cache
ODefaultCache sut = runningFilledCache();
// When watchdog listener invoked with critically low memory
int freeMemoryPercentageBelowCriticalPoint = 8;
sut.lowMemoryListener.memoryUsageLow(1, freeMemoryPercentageBelowCriticalPoint);
// Then whole cache cleared
assertEquals(sut.size(), 0, "Cache has entries in it yet");
}
public void removesPartOfEntriesInCaseOfLowMemory() {
// Given running filled cache
ODefaultCache sut = runningFilledCache();
int originalSize = sut.size();
// When watchdog listener invoked with critically low memory
int freeMemoryPercentageBelowCriticalPoint = 20;
sut.lowMemoryListener.memoryUsageLow(1, freeMemoryPercentageBelowCriticalPoint);
// Then whole cache cleared
assertTrue(sut.size() < originalSize, "Cache was not cleaned");
assertTrue(sut.size() > 0, "Cache was cleared wholly");
}
private ODefaultCache.OLinkedHashMapCache filledCacheBackend() {
ODefaultCache.OLinkedHashMapCache cache = new ODefaultCache.OLinkedHashMapCache(100, 0.75f, 100);
for (int i = 100; i > 0; i--) {
ODocument entry = new ODocument(new ORecordId(i, OClusterPositionFactory.INSTANCE.valueOf(i)));
cache.put(entry.getIdentity(), entry);
}
return cache;
}
private ODefaultCache.OLinkedHashMapCache filledCacheBackendWithSomeDirtyRecords() {
ODefaultCache.OLinkedHashMapCache cache = filledCacheBackend();
int i = 0;
for (Map.Entry<ORID, ORecordInternal<?>> entry : cache.entrySet()) {
if (i++ % 3 == 0)
entry.getValue().setDirty();
}
return cache;
}
private ODefaultCache runningFilledCache() {
ODefaultCache cache = new ODefaultCache(null, 100);
cache.startup();
for (int i = 100; i > 0; i--)
cache.put(new ODocument(new ORecordId(i, OClusterPositionFactory.INSTANCE.valueOf(i))));
return cache;
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_cache_ODefaultCacheCleanUpTest.java |
63 | public class ONoLock extends OAbstractLock {
public void lock() {
}
public void unlock() {
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_lock_ONoLock.java |
4,491 | class FilesInfoRequestHandler extends BaseTransportRequestHandler<RecoveryFilesInfoRequest> {
@Override
public RecoveryFilesInfoRequest newInstance() {
return new RecoveryFilesInfoRequest();
}
@Override
public String executor() {
return ThreadPool.Names.GENERIC;
}
@Override
public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel) throws Exception {
RecoveryStatus onGoingRecovery = onGoingRecoveries.get(request.recoveryId());
if (onGoingRecovery == null) {
// shard is getting closed on us
throw new IndexShardClosedException(request.shardId());
}
if (onGoingRecovery.isCanceled()) {
onGoingRecovery.sentCanceledToSource = true;
throw new IndexShardClosedException(request.shardId());
}
onGoingRecovery.phase1FileNames = request.phase1FileNames;
onGoingRecovery.phase1FileSizes = request.phase1FileSizes;
onGoingRecovery.phase1ExistingFileNames = request.phase1ExistingFileNames;
onGoingRecovery.phase1ExistingFileSizes = request.phase1ExistingFileSizes;
onGoingRecovery.phase1TotalSize = request.phase1TotalSize;
onGoingRecovery.phase1ExistingTotalSize = request.phase1ExistingTotalSize;
onGoingRecovery.stage = RecoveryStatus.Stage.INDEX;
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
} | 1no label
| src_main_java_org_elasticsearch_indices_recovery_RecoveryTarget.java |
2,458 | public class KeyedLock<T> {
private final ConcurrentMap<T, KeyLock> map = new ConcurrentHashMap<T, KeyLock>();
private final ThreadLocal<KeyLock> threadLocal = new ThreadLocal<KeyedLock.KeyLock>();
public void acquire(T key) {
while (true) {
if (threadLocal.get() != null) {
// if we are here, the thread already has the lock
throw new ElasticsearchIllegalStateException("Lock already accquired in Thread" + Thread.currentThread().getId()
+ " for key " + key);
}
KeyLock perNodeLock = map.get(key);
if (perNodeLock == null) {
KeyLock newLock = new KeyLock();
perNodeLock = map.putIfAbsent(key, newLock);
if (perNodeLock == null) {
newLock.lock();
threadLocal.set(newLock);
return;
}
}
assert perNodeLock != null;
int i = perNodeLock.count.get();
if (i > 0 && perNodeLock.count.compareAndSet(i, i + 1)) {
perNodeLock.lock();
threadLocal.set(perNodeLock);
return;
}
}
}
public void release(T key) {
KeyLock lock = threadLocal.get();
if (lock == null) {
throw new ElasticsearchIllegalStateException("Lock not accquired");
}
assert lock.isHeldByCurrentThread();
assert lock == map.get(key);
lock.unlock();
threadLocal.set(null);
int decrementAndGet = lock.count.decrementAndGet();
if (decrementAndGet == 0) {
map.remove(key, lock);
}
}
@SuppressWarnings("serial")
private final static class KeyLock extends ReentrantLock {
private final AtomicInteger count = new AtomicInteger(1);
}
public boolean hasLockedKeys() {
return !map.isEmpty();
}
} | 0true
| src_main_java_org_elasticsearch_common_util_concurrent_KeyedLock.java |
3,293 | public class BinaryDVAtomicFieldData implements AtomicFieldData<ScriptDocValues.Strings> {
private final AtomicReader reader;
private final String field;
public BinaryDVAtomicFieldData(AtomicReader reader, String field) {
this.reader = reader;
this.field = field;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public boolean isValuesOrdered() {
return true; // single-valued
}
@Override
public int getNumDocs() {
return reader.maxDoc();
}
@Override
public long getNumberUniqueValues() {
// probably not accurate, but a good upper limit
return reader.maxDoc();
}
@Override
public long getMemorySizeInBytes() {
// TODO: Lucene doesn't expose it right now
return -1;
}
@Override
public BytesValues getBytesValues(boolean needsHashes) {
// if you want hashes to be cached, you should rather store them on disk alongside the values rather than loading them into memory
// here - not supported for now, and probably not useful since this field data only applies to _id and _uid?
final BinaryDocValues values;
final Bits docsWithField;
try {
final BinaryDocValues v = reader.getBinaryDocValues(field);
if (v == null) {
// segment has no value
values = BinaryDocValues.EMPTY;
docsWithField = new Bits.MatchNoBits(reader.maxDoc());
} else {
values = v;
final Bits b = reader.getDocsWithField(field);
docsWithField = b == null ? new Bits.MatchAllBits(reader.maxDoc()) : b;
}
} catch (IOException e) {
throw new ElasticsearchIllegalStateException("Cannot load doc values", e);
}
return new BytesValues(false) {
@Override
public int setDocument(int docId) {
this.docId = docId;
return docsWithField.get(docId) ? 1 : 0;
}
@Override
public BytesRef nextValue() {
values.get(docId, scratch);
return scratch;
}
};
}
@Override
public Strings getScriptValues() {
return new ScriptDocValues.Strings(getBytesValues(false));
}
@Override
public void close() {
// no-op
}
} | 0true
| src_main_java_org_elasticsearch_index_fielddata_plain_BinaryDVAtomicFieldData.java |
477 | public final class ProxyManager {
private static final ILogger LOGGER = Logger.getLogger(ProxyManager.class);
private static final Class[] CONSTRUCTOR_ARGUMENT_TYPES = new Class[]{String.class, String.class, String.class};
private final HazelcastClient client;
private final ConcurrentMap<String, ClientProxyFactory> proxyFactories = new ConcurrentHashMap<String, ClientProxyFactory>();
private final ConcurrentMap<ObjectNamespace, ClientProxyFuture> proxies = new ConcurrentHashMap<ObjectNamespace, ClientProxyFuture>();
public ProxyManager(HazelcastClient client) {
this.client = client;
final List<ListenerConfig> listenerConfigs = client.getClientConfig().getListenerConfigs();
if (listenerConfigs != null && !listenerConfigs.isEmpty()) {
for (ListenerConfig listenerConfig : listenerConfigs) {
if (listenerConfig.getImplementation() instanceof DistributedObjectListener) {
addDistributedObjectListener((DistributedObjectListener) listenerConfig.getImplementation());
}
}
}
}
public void init(ClientConfig config) {
// register defaults
register(MapService.SERVICE_NAME, ClientMapProxy.class);
register(QueueService.SERVICE_NAME, ClientQueueProxy.class);
register(MultiMapService.SERVICE_NAME, ClientMultiMapProxy.class);
register(ListService.SERVICE_NAME, ClientListProxy.class);
register(SetService.SERVICE_NAME, ClientSetProxy.class);
register(SemaphoreService.SERVICE_NAME, ClientSemaphoreProxy.class);
register(TopicService.SERVICE_NAME, ClientTopicProxy.class);
register(AtomicLongService.SERVICE_NAME, ClientAtomicLongProxy.class);
register(AtomicReferenceService.SERVICE_NAME, ClientAtomicReferenceProxy.class);
register(DistributedExecutorService.SERVICE_NAME, ClientExecutorServiceProxy.class);
register(LockServiceImpl.SERVICE_NAME, ClientLockProxy.class);
register(CountDownLatchService.SERVICE_NAME, ClientCountDownLatchProxy.class);
register(MapReduceService.SERVICE_NAME, ClientMapReduceProxy.class);
register(ReplicatedMapService.SERVICE_NAME, ClientReplicatedMapProxy.class);
register(IdGeneratorService.SERVICE_NAME, new ClientProxyFactory() {
public ClientProxy create(String id) {
String instanceName = client.getName();
IAtomicLong atomicLong = client.getAtomicLong(IdGeneratorService.ATOMIC_LONG_NAME + id);
return new ClientIdGeneratorProxy(instanceName, IdGeneratorService.SERVICE_NAME, id, atomicLong);
}
});
for (ProxyFactoryConfig proxyFactoryConfig : config.getProxyFactoryConfigs()) {
try {
ClassLoader classLoader = config.getClassLoader();
String className = proxyFactoryConfig.getClassName();
ClientProxyFactory clientProxyFactory = ClassLoaderUtil.newInstance(classLoader, className);
register(proxyFactoryConfig.getService(), clientProxyFactory);
} catch (Exception e) {
LOGGER.severe(e);
}
}
}
public void register(String serviceName, ClientProxyFactory factory) {
if (proxyFactories.putIfAbsent(serviceName, factory) != null) {
throw new IllegalArgumentException("Factory for service: " + serviceName + " is already registered!");
}
}
public void register(final String serviceName, final Class<? extends ClientProxy> proxyType) {
try {
register(serviceName, new ClientProxyFactory() {
@Override
public ClientProxy create(String id) {
String instanceName = client.getName();
return instantiateClientProxy(proxyType, instanceName, serviceName, id);
}
});
} catch (Exception e) {
throw new HazelcastException("Could not initialize Proxy", e);
}
}
public ClientProxy getProxy(String service, String id) {
final ObjectNamespace ns = new DefaultObjectNamespace(service, id);
ClientProxyFuture proxyFuture = proxies.get(ns);
if (proxyFuture != null) {
return proxyFuture.get();
}
final ClientProxyFactory factory = proxyFactories.get(service);
if (factory == null) {
throw new IllegalArgumentException("No factory registered for service: " + service);
}
final ClientProxy clientProxy = factory.create(id);
proxyFuture = new ClientProxyFuture();
final ClientProxyFuture current = proxies.putIfAbsent(ns, proxyFuture);
if (current != null) {
return current.get();
}
try {
initialize(clientProxy);
} catch (Exception e) {
proxies.remove(ns);
proxyFuture.set(e);
throw ExceptionUtil.rethrow(e);
}
proxyFuture.set(clientProxy);
return clientProxy;
}
public ClientProxy removeProxy(String service, String id) {
final ObjectNamespace ns = new DefaultObjectNamespace(service, id);
return proxies.remove(ns).get();
}
private void initialize(ClientProxy clientProxy) throws Exception {
ClientCreateRequest request = new ClientCreateRequest(clientProxy.getName(), clientProxy.getServiceName());
client.getInvocationService().invokeOnRandomTarget(request).get();
clientProxy.setContext(new ClientContext(client, this));
}
public Collection<? extends DistributedObject> getDistributedObjects() {
Collection<DistributedObject> objects = new LinkedList<DistributedObject>();
for (ClientProxyFuture future : proxies.values()) {
objects.add(future.get());
}
return objects;
}
public void destroy() {
for (ClientProxyFuture future : proxies.values()) {
future.get().onShutdown();
}
proxies.clear();
}
public String addDistributedObjectListener(final DistributedObjectListener listener) {
final DistributedObjectListenerRequest request = new DistributedObjectListenerRequest();
final EventHandler<PortableDistributedObjectEvent> eventHandler = new EventHandler<PortableDistributedObjectEvent>() {
public void handle(PortableDistributedObjectEvent e) {
final ObjectNamespace ns = new DefaultObjectNamespace(e.getServiceName(), e.getName());
ClientProxyFuture future = proxies.get(ns);
ClientProxy proxy = future == null ? null : future.get();
if (proxy == null) {
proxy = getProxy(e.getServiceName(), e.getName());
}
DistributedObjectEvent event = new DistributedObjectEvent(e.getEventType(), e.getServiceName(), proxy);
if (DistributedObjectEvent.EventType.CREATED.equals(e.getEventType())) {
listener.distributedObjectCreated(event);
} else if (DistributedObjectEvent.EventType.DESTROYED.equals(e.getEventType())) {
listener.distributedObjectDestroyed(event);
}
}
@Override
public void onListenerRegister() {
}
};
final ClientContext clientContext = new ClientContext(client, this);
return ListenerUtil.listen(clientContext, request, null, eventHandler);
}
public boolean removeDistributedObjectListener(String id) {
final RemoveDistributedObjectListenerRequest request = new RemoveDistributedObjectListenerRequest(id);
final ClientContext clientContext = new ClientContext(client, this);
return ListenerUtil.stopListening(clientContext, request, id);
}
private static class ClientProxyFuture {
volatile Object proxy;
ClientProxy get() {
if (proxy == null) {
boolean interrupted = false;
synchronized (this) {
while (proxy == null) {
try {
wait();
} catch (InterruptedException e) {
interrupted = true;
}
}
}
if (interrupted) {
Thread.currentThread().interrupt();
}
}
if (proxy instanceof Throwable) {
throw ExceptionUtil.rethrow((Throwable)proxy);
}
return (ClientProxy)proxy;
}
void set(Object o) {
if (o == null) {
throw new IllegalArgumentException();
}
synchronized (this) {
proxy = o;
notifyAll();
}
}
}
private <T> T instantiateClientProxy(Class<T> proxyType, String instanceName, String serviceName, String id) {
try {
final Constructor<T> constructor = proxyType.getConstructor(CONSTRUCTOR_ARGUMENT_TYPES);
return constructor.newInstance(instanceName, serviceName, id);
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
}
} | 1no label
| hazelcast-client_src_main_java_com_hazelcast_client_spi_ProxyManager.java |
3,699 | public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = TypeFieldMapper.NAME;
public static final String INDEX_NAME = TypeFieldMapper.NAME;
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
FIELD_TYPE.freeze();
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_internal_TypeFieldMapper.java |
6,019 | public final class DirectCandidateGenerator extends CandidateGenerator {
private final DirectSpellChecker spellchecker;
private final String field;
private final SuggestMode suggestMode;
private final TermsEnum termsEnum;
private final IndexReader reader;
private final long dictSize;
private final double logBase = 5;
private final long frequencyPlateau;
private final Analyzer preFilter;
private final Analyzer postFilter;
private final double nonErrorLikelihood;
private final boolean useTotalTermFrequency;
private final CharsRef spare = new CharsRef();
private final BytesRef byteSpare = new BytesRef();
private final int numCandidates;
public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader, double nonErrorLikelihood, int numCandidates) throws IOException {
this(spellchecker, field, suggestMode, reader, nonErrorLikelihood, numCandidates, null, null, MultiFields.getTerms(reader, field));
}
public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader, double nonErrorLikelihood, int numCandidates, Analyzer preFilter, Analyzer postFilter, Terms terms) throws IOException {
if (terms == null) {
throw new ElasticsearchIllegalArgumentException("generator field [" + field + "] doesn't exist");
}
this.spellchecker = spellchecker;
this.field = field;
this.numCandidates = numCandidates;
this.suggestMode = suggestMode;
this.reader = reader;
final long dictSize = terms.getSumTotalTermFreq();
this.useTotalTermFrequency = dictSize != -1;
this.dictSize = dictSize == -1 ? reader.maxDoc() : dictSize;
this.preFilter = preFilter;
this.postFilter = postFilter;
this.nonErrorLikelihood = nonErrorLikelihood;
float thresholdFrequency = spellchecker.getThresholdFrequency();
this.frequencyPlateau = thresholdFrequency >= 1.0f ? (int) thresholdFrequency: (int)(dictSize * thresholdFrequency);
termsEnum = terms.iterator(null);
}
/* (non-Javadoc)
* @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#isKnownWord(org.apache.lucene.util.BytesRef)
*/
@Override
public boolean isKnownWord(BytesRef term) throws IOException {
return frequency(term) > 0;
}
/* (non-Javadoc)
* @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#frequency(org.apache.lucene.util.BytesRef)
*/
@Override
public long frequency(BytesRef term) throws IOException {
term = preFilter(term, spare, byteSpare);
return internalFrequency(term);
}
public long internalFrequency(BytesRef term) throws IOException {
if (termsEnum.seekExact(term)) {
return useTotalTermFrequency ? termsEnum.totalTermFreq() : termsEnum.docFreq();
}
return 0;
}
public String getField() {
return field;
}
/* (non-Javadoc)
* @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#drawCandidates(org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet, int)
*/
@Override
public CandidateSet drawCandidates(CandidateSet set) throws IOException {
Candidate original = set.originalTerm;
BytesRef term = preFilter(original.term, spare, byteSpare);
final long frequency = original.frequency;
spellchecker.setThresholdFrequency(this.suggestMode == SuggestMode.SUGGEST_ALWAYS ? 0 : thresholdFrequency(frequency, dictSize));
SuggestWord[] suggestSimilar = spellchecker.suggestSimilar(new Term(field, term), numCandidates, reader, this.suggestMode);
List<Candidate> candidates = new ArrayList<Candidate>(suggestSimilar.length);
for (int i = 0; i < suggestSimilar.length; i++) {
SuggestWord suggestWord = suggestSimilar[i];
BytesRef candidate = new BytesRef(suggestWord.string);
postFilter(new Candidate(candidate, internalFrequency(candidate), suggestWord.score, score(suggestWord.freq, suggestWord.score, dictSize), false), spare, byteSpare, candidates);
}
set.addCandidates(candidates);
return set;
}
protected BytesRef preFilter(final BytesRef term, final CharsRef spare, final BytesRef byteSpare) throws IOException {
if (preFilter == null) {
return term;
}
final BytesRef result = byteSpare;
SuggestUtils.analyze(preFilter, term, field, new SuggestUtils.TokenConsumer() {
@Override
public void nextToken() throws IOException {
this.fillBytesRef(result);
}
}, spare);
return result;
}
protected void postFilter(final Candidate candidate, final CharsRef spare, BytesRef byteSpare, final List<Candidate> candidates) throws IOException {
if (postFilter == null) {
candidates.add(candidate);
} else {
final BytesRef result = byteSpare;
SuggestUtils.analyze(postFilter, candidate.term, field, new SuggestUtils.TokenConsumer() {
@Override
public void nextToken() throws IOException {
this.fillBytesRef(result);
if (posIncAttr.getPositionIncrement() > 0 && result.bytesEquals(candidate.term)) {
BytesRef term = BytesRef.deepCopyOf(result);
long freq = frequency(term);
candidates.add(new Candidate(BytesRef.deepCopyOf(term), freq, candidate.stringDistance, score(candidate.frequency, candidate.stringDistance, dictSize), false));
} else {
candidates.add(new Candidate(BytesRef.deepCopyOf(result), candidate.frequency, nonErrorLikelihood, score(candidate.frequency, candidate.stringDistance, dictSize), false));
}
}
}, spare);
}
}
private double score(long frequency, double errorScore, long dictionarySize) {
return errorScore * (((double)frequency + 1) / ((double)dictionarySize +1));
}
protected long thresholdFrequency(long termFrequency, long dictionarySize) {
if (termFrequency > 0) {
return (long) Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1));
}
return 0;
}
public static class CandidateSet {
public Candidate[] candidates;
public final Candidate originalTerm;
public CandidateSet(Candidate[] candidates, Candidate originalTerm) {
this.candidates = candidates;
this.originalTerm = originalTerm;
}
public void addCandidates(List<Candidate> candidates) {
final Set<Candidate> set = new HashSet<DirectCandidateGenerator.Candidate>(candidates);
for (int i = 0; i < this.candidates.length; i++) {
set.add(this.candidates[i]);
}
this.candidates = set.toArray(new Candidate[set.size()]);
}
public void addOneCandidate(Candidate candidate) {
Candidate[] candidates = new Candidate[this.candidates.length + 1];
System.arraycopy(this.candidates, 0, candidates, 0, this.candidates.length);
candidates[candidates.length-1] = candidate;
this.candidates = candidates;
}
}
public static class Candidate {
public static final Candidate[] EMPTY = new Candidate[0];
public final BytesRef term;
public final double stringDistance;
public final long frequency;
public final double score;
public final boolean userInput;
public Candidate(BytesRef term, long frequency, double stringDistance, double score, boolean userInput) {
this.frequency = frequency;
this.term = term;
this.stringDistance = stringDistance;
this.score = score;
this.userInput = userInput;
}
@Override
public String toString() {
return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", frequency=" + frequency +
(userInput ? ", userInput" : "" ) + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((term == null) ? 0 : term.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Candidate other = (Candidate) obj;
if (term == null) {
if (other.term != null)
return false;
} else if (!term.equals(other.term))
return false;
return true;
}
}
@Override
public Candidate createCandidate(BytesRef term, long frequency, double channelScore, boolean userInput) throws IOException {
return new Candidate(term, frequency, channelScore, score(frequency, channelScore, dictSize), userInput);
}
} | 1no label
| src_main_java_org_elasticsearch_search_suggest_phrase_DirectCandidateGenerator.java |