Unnamed: 0
int64 0
6.45k
| func
stringlengths 29
253k
| target
class label 2
classes | project
stringlengths 36
167
|
---|---|---|---|
3,480 | rootObjectMapper.toXContent(builder, params, new ToXContent() {
@Override
// Serializes the analyzer configuration and _meta of the root mapping.
// Analyzer names starting with "_" (internal) and the "default" analyzer are
// deliberately omitted from the output.
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
// When index and search analyzers are the same named analyzer, emit the
// combined "analyzer" field instead of two separate fields.
if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_")) {
if (!indexAnalyzer.name().equals("default")) {
// same analyzers, output it once
builder.field("analyzer", indexAnalyzer.name());
}
} else {
// Different analyzers: emit each one separately, with the same
// internal-name / default suppression rules.
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
if (!indexAnalyzer.name().equals("default")) {
builder.field("index_analyzer", indexAnalyzer.name());
}
}
if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
if (!searchAnalyzer.name().equals("default")) {
builder.field("search_analyzer", searchAnalyzer.name());
}
}
}
// Emit user-supplied mapping metadata only when present and non-empty.
if (meta != null && !meta.isEmpty()) {
builder.field("_meta", meta());
}
return builder;
}
// no need to pass here id and boost, since they are added to the root object mapper
// in the constructor
}, rootMappersNotIncludedInObject); | 0true
| src_main_java_org_elasticsearch_index_mapper_DocumentMapper.java |
735 | public class OSBTreeBucket<K, V> extends ODurablePage {
// --- On-page header layout -------------------------------------------------
// Each constant is the byte offset of a header field inside the page; every
// field is laid out immediately after the previous one, starting at the
// NEXT_FREE_POSITION inherited from ODurablePage.
// Pointer to the first free byte of the entry area (entries grow downward).
private static final int FREE_POINTER_OFFSET = NEXT_FREE_POSITION;
// Number of entries currently stored in this bucket.
private static final int SIZE_OFFSET = FREE_POINTER_OFFSET + OIntegerSerializer.INT_SIZE;
// One byte flag: >0 means this bucket is a leaf.
private static final int IS_LEAF_OFFSET = SIZE_OFFSET + OIntegerSerializer.INT_SIZE;
// Page indexes of the left/right sibling leaves (-1 when absent).
private static final int LEFT_SIBLING_OFFSET = IS_LEAF_OFFSET + OByteSerializer.BYTE_SIZE;
private static final int RIGHT_SIBLING_OFFSET = LEFT_SIBLING_OFFSET + OLongSerializer.LONG_SIZE;
// Total number of items in the whole tree (kept only on the root page).
private static final int TREE_SIZE_OFFSET = RIGHT_SIBLING_OFFSET + OLongSerializer.LONG_SIZE;
// Ids of the serializers used for keys/values (-1 until assigned).
private static final int KEY_SERIALIZER_OFFSET = TREE_SIZE_OFFSET + OLongSerializer.LONG_SIZE;
private static final int VALUE_SERIALIZER_OFFSET = KEY_SERIALIZER_OFFSET + OByteSerializer.BYTE_SIZE;
// Head of the free list of value pages (-1 when empty).
private static final int FREE_VALUES_LIST_OFFSET = VALUE_SERIALIZER_OFFSET + OByteSerializer.BYTE_SIZE;
// Start of the int array mapping entry index -> entry position in the page.
private static final int POSITIONS_ARRAY_OFFSET = FREE_VALUES_LIST_OFFSET + OLongSerializer.LONG_SIZE;
// Cached copy of the on-page leaf flag.
private final boolean isLeaf;
// Serializers used to (de)serialize keys and values stored in this page.
private final OBinarySerializer<K> keySerializer;
private final OBinarySerializer<V> valueSerializer;
// Optional per-key-component type hints passed to the key serializer.
private final OType[] keyTypes;
// Comparator used by find(); defaults to the project-wide default comparator.
private final Comparator<? super K> comparator = ODefaultComparator.INSTANCE;
/**
 * Creates a brand-new bucket and initializes its on-page header:
 * empty entry area, no siblings, empty free-values list and unset
 * (-1) serializer ids.
 *
 * @param cachePointer    direct-memory pointer to the backing page
 * @param isLeaf          whether this bucket is a leaf
 * @param keySerializer   serializer for keys
 * @param keyTypes        optional key component types for serialization
 * @param valueSerializer serializer for values
 * @param trackMode       change-tracking mode forwarded to ODurablePage
 */
public OSBTreeBucket(ODirectMemoryPointer cachePointer, boolean isLeaf, OBinarySerializer<K> keySerializer, OType[] keyTypes,
OBinarySerializer<V> valueSerializer, TrackMode trackMode) throws IOException {
super(cachePointer, trackMode);
this.isLeaf = isLeaf;
this.keySerializer = keySerializer;
this.keyTypes = keyTypes;
this.valueSerializer = valueSerializer;
// Entries grow downward from the end of the page, so the free pointer
// starts at the page size.
setIntValue(FREE_POINTER_OFFSET, MAX_PAGE_SIZE_BYTES);
setIntValue(SIZE_OFFSET, 0);
setByteValue(IS_LEAF_OFFSET, (byte) (isLeaf ? 1 : 0));
setLongValue(LEFT_SIBLING_OFFSET, -1);
setLongValue(RIGHT_SIBLING_OFFSET, -1);
setLongValue(TREE_SIZE_OFFSET, 0);
setLongValue(FREE_VALUES_LIST_OFFSET, -1);
// -1 marks "serializer id not assigned yet".
setByteValue(KEY_SERIALIZER_OFFSET, (byte) -1);
setByteValue(VALUE_SERIALIZER_OFFSET, (byte) -1);
}
/**
 * Wraps an existing, already-initialized page. Unlike the creating
 * constructor this one writes nothing; the leaf flag is read back from
 * the page header.
 */
public OSBTreeBucket(ODirectMemoryPointer cachePointer, OBinarySerializer<K> keySerializer, OType[] keyTypes,
OBinarySerializer<V> valueSerializer, TrackMode trackMode) {
super(cachePointer, trackMode);
this.keyTypes = keyTypes;
this.isLeaf = getByteValue(IS_LEAF_OFFSET) > 0;
this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer;
}
/** @return id of the key serializer recorded in the page header (-1 if unset). */
public byte getKeySerializerId() {
return getByteValue(KEY_SERIALIZER_OFFSET);
}
/** Records the key serializer id in the page header. */
public void setKeySerializerId(byte keySerializerId) {
setByteValue(KEY_SERIALIZER_OFFSET, keySerializerId);
}
/** @return id of the value serializer recorded in the page header (-1 if unset). */
public byte getValueSerializerId() {
return getByteValue(VALUE_SERIALIZER_OFFSET);
}
/** Records the value serializer id in the page header. */
public void setValueSerializerId(byte valueSerializerId) {
setByteValue(VALUE_SERIALIZER_OFFSET, valueSerializerId);
}
/** Stores the total tree item count in this page's header. */
public void setTreeSize(long size) throws IOException {
setLongValue(TREE_SIZE_OFFSET, size);
}
/** @return total tree item count stored in this page's header. */
public long getTreeSize() {
return getLongValue(TREE_SIZE_OFFSET);
}
/** @return true when this bucket currently stores no entries. */
public boolean isEmpty() {
return size() == 0;
}
/** @return page index of the first free values page, or -1 when the list is empty. */
public long getValuesFreeListFirstIndex() {
return getLongValue(FREE_VALUES_LIST_OFFSET);
}
/** Sets the head of the free values-page list (-1 clears it). */
public void setValuesFreeListFirstIndex(long pageIndex) throws IOException {
setLongValue(FREE_VALUES_LIST_OFFSET, pageIndex);
}
/**
 * Binary search for {@code key} among this bucket's entries.
 *
 * @return the entry index when the key is present; otherwise
 *         {@code -(insertionPoint + 1)}, mirroring the
 *         {@code Collections.binarySearch} convention.
 */
public int find(K key) {
int from = 0;
int to = size() - 1;
while (from <= to) {
// Unsigned shift keeps the midpoint correct even if (from + to) overflows.
final int middle = (from + to) >>> 1;
final int order = comparator.compare(getKey(middle), key);
if (order == 0)
return middle; // exact match
if (order < 0)
from = middle + 1;
else
to = middle - 1;
}
// Not found: encode the insertion point.
return -(from + 1);
}
/**
 * Removes the entry at {@code entryIndex} from a leaf bucket and compacts the page:
 * the positions array is shifted, the entry bytes are reclaimed by sliding the
 * entry area up, and all positions pointing below the removed entry are rebased.
 *
 * @param entryIndex index of the entry to remove
 * @return the linked value-page index when the removed entry stored its value
 *         out of page, otherwise -1
 * @throws IllegalStateException when invoked on a non-leaf bucket
 */
public long remove(int entryIndex) throws IOException {
int entryPosition = getIntValue(POSITIONS_ARRAY_OFFSET + entryIndex * OIntegerSerializer.INT_SIZE);
int keySize = keySerializer.getObjectSizeInDirectMemory(pagePointer, entryPosition);
int entrySize;
long linkValue = -1;
if (isLeaf) {
if (valueSerializer.isFixedLength()) {
entrySize = keySize + valueSerializer.getFixedLength() + OByteSerializer.BYTE_SIZE;
} else {
// Variable-length values carry a one-byte flag after the key: >0 means the
// value lives out of page and this entry stores only a link to it.
final boolean isLink = pagePointer.getByte(entryPosition + keySize) > 0;
if (!isLink)
entrySize = keySize
+ valueSerializer.getObjectSizeInDirectMemory(pagePointer, entryPosition + keySize + OByteSerializer.BYTE_SIZE)
+ OByteSerializer.BYTE_SIZE;
else {
entrySize = keySize + OByteSerializer.BYTE_SIZE + OLongSerializer.LONG_SIZE;
// Remember the link so the caller can free/reuse the value page.
linkValue = OLongSerializer.INSTANCE.deserializeFromDirectMemory(pagePointer, entryPosition + keySize
+ OByteSerializer.BYTE_SIZE);
}
}
} else {
// Fixed grammar of the original message ("is applies" -> "is applied").
throw new IllegalStateException("Remove is applied to leaf buckets only");
}
int size = size();
// Close the gap in the positions array, unless we removed the last entry.
if (entryIndex < size - 1) {
moveData(POSITIONS_ARRAY_OFFSET + (entryIndex + 1) * OIntegerSerializer.INT_SIZE, POSITIONS_ARRAY_OFFSET + entryIndex
* OIntegerSerializer.INT_SIZE, (size - entryIndex - 1) * OIntegerSerializer.INT_SIZE);
}
size--;
setIntValue(SIZE_OFFSET, size);
// Slide the packed entry area up over the removed entry's bytes.
int freePointer = getIntValue(FREE_POINTER_OFFSET);
if (size > 0 && entryPosition > freePointer) {
moveData(freePointer, freePointer + entrySize, entryPosition - freePointer);
}
setIntValue(FREE_POINTER_OFFSET, freePointer + entrySize);
// Every entry that was stored below the removed one moved up by entrySize;
// rebase its recorded position.
int currentPositionOffset = POSITIONS_ARRAY_OFFSET;
for (int i = 0; i < size; i++) {
int currentEntryPosition = getIntValue(currentPositionOffset);
if (currentEntryPosition < entryPosition)
setIntValue(currentPositionOffset, currentEntryPosition + entrySize);
currentPositionOffset += OIntegerSerializer.INT_SIZE;
}
return linkValue;
}
/** @return number of entries currently stored in this bucket. */
public int size() {
return getIntValue(SIZE_OFFSET);
}
/**
 * Deserializes the entry stored at {@code entryIndex}.
 * Leaf layout: [key][link flag byte][value-or-link]; the returned entry has
 * children set to -1 and carries an OSBTreeValue that is a link when the flag
 * byte is set. Non-leaf layout: [leftChild long][rightChild long][key] with a
 * null value.
 */
public SBTreeEntry<K, V> getEntry(int entryIndex) {
int entryPosition = getIntValue(entryIndex * OIntegerSerializer.INT_SIZE + POSITIONS_ARRAY_OFFSET);
if (isLeaf) {
K key = keySerializer.deserializeFromDirectMemory(pagePointer, entryPosition);
entryPosition += keySerializer.getObjectSizeInDirectMemory(pagePointer, entryPosition);
// Flag byte after the key: >0 means the value is stored out of page.
boolean isLinkValue = pagePointer.getByte(entryPosition) > 0;
long link = -1;
V value = null;
if (isLinkValue)
link = OLongSerializer.INSTANCE.deserializeFromDirectMemory(pagePointer, entryPosition + OByteSerializer.BYTE_SIZE);
else
value = valueSerializer.deserializeFromDirectMemory(pagePointer, entryPosition + OByteSerializer.BYTE_SIZE);
return new SBTreeEntry<K, V>(-1, -1, key, new OSBTreeValue<V>(link >= 0, link, value));
} else {
long leftChild = getLongValue(entryPosition);
entryPosition += OLongSerializer.LONG_SIZE;
long rightChild = getLongValue(entryPosition);
entryPosition += OLongSerializer.LONG_SIZE;
K key = keySerializer.deserializeFromDirectMemory(pagePointer, entryPosition);
return new SBTreeEntry<K, V>(leftChild, rightChild, key, null);
}
}
/**
 * Deserializes only the key of the entry at {@code index}.
 * In non-leaf buckets the key is preceded by the two child pointers,
 * so those 16 bytes are skipped first.
 */
public K getKey(int index) {
int entryPosition = getIntValue(index * OIntegerSerializer.INT_SIZE + POSITIONS_ARRAY_OFFSET);
if (!isLeaf)
entryPosition += 2 * OLongSerializer.LONG_SIZE;
return keySerializer.deserializeFromDirectMemory(pagePointer, entryPosition);
}
/** @return true when this bucket is a leaf. */
public boolean isLeaf() {
return isLeaf;
}
/**
 * Appends every entry of {@code entries} in order, starting at index 0.
 * Neighbor pointers are not updated (bulk-load path).
 */
public void addAll(List<SBTreeEntry<K, V>> entries) throws IOException {
int position = 0;
for (SBTreeEntry<K, V> entry : entries)
addEntry(position++, entry, false);
}
/**
 * Truncates the bucket to its first {@code newSize} entries by snapshotting
 * them, resetting the page (free pointer and size), and re-inserting the
 * snapshot. Rewriting compacts the entry area as a side effect.
 */
public void shrink(int newSize) throws IOException {
final List<SBTreeEntry<K, V>> retained = new ArrayList<SBTreeEntry<K, V>>(newSize);
for (int i = 0; i < newSize; i++)
retained.add(getEntry(i));
// Reset the page to its empty state before re-adding the kept entries.
setIntValue(FREE_POINTER_OFFSET, MAX_PAGE_SIZE_BYTES);
setIntValue(SIZE_OFFSET, 0);
int position = 0;
for (SBTreeEntry<K, V> entry : retained)
addEntry(position++, entry, false);
}
/**
 * Inserts {@code treeEntry} at {@code index}, shifting subsequent positions right.
 * Entry bytes are written at the top of the (downward-growing) entry area.
 *
 * @param index           position in this bucket's ordered entry list
 * @param treeEntry       entry to store
 * @param updateNeighbors for non-leaf buckets, whether to patch the adjacent
 *                        entries' child pointers to stay consistent with this one
 * @return false when the page has not enough free space for the entry, true otherwise
 */
public boolean addEntry(int index, SBTreeEntry<K, V> treeEntry, boolean updateNeighbors) throws IOException {
final int keySize = keySerializer.getObjectSize(treeEntry.key, (Object[]) keyTypes);
int valueSize = 0;
int entrySize = keySize;
// Compute the total on-page footprint of the new entry.
if (isLeaf) {
if (valueSerializer.isFixedLength())
valueSize = valueSerializer.getFixedLength();
else {
if (treeEntry.value.isLink())
valueSize = OLongSerializer.LONG_SIZE;
else
valueSize = valueSerializer.getObjectSize(treeEntry.value.getValue());
}
// +1 byte for the link flag preceding the value.
entrySize += valueSize + OByteSerializer.BYTE_SIZE;
} else
// Non-leaf entries store both child page pointers before the key.
entrySize += 2 * OLongSerializer.LONG_SIZE;
int size = size();
int freePointer = getIntValue(FREE_POINTER_OFFSET);
// Reject the insert if the entry area would collide with the positions array.
if (freePointer - entrySize < (size + 1) * OIntegerSerializer.INT_SIZE + POSITIONS_ARRAY_OFFSET)
return false;
// Make room in the positions array when inserting in the middle.
if (index <= size - 1) {
moveData(POSITIONS_ARRAY_OFFSET + index * OIntegerSerializer.INT_SIZE, POSITIONS_ARRAY_OFFSET + (index + 1)
* OIntegerSerializer.INT_SIZE, (size - index) * OIntegerSerializer.INT_SIZE);
}
freePointer -= entrySize;
setIntValue(FREE_POINTER_OFFSET, freePointer);
setIntValue(POSITIONS_ARRAY_OFFSET + index * OIntegerSerializer.INT_SIZE, freePointer);
setIntValue(SIZE_OFFSET, size + 1);
if (isLeaf) {
// Leaf layout: [key][link flag][value-or-link].
byte[] serializedKey = new byte[keySize];
keySerializer.serializeNative(treeEntry.key, serializedKey, 0, (Object[]) keyTypes);
setBinaryValue(freePointer, serializedKey);
freePointer += keySize;
setByteValue(freePointer, treeEntry.value.isLink() ? (byte) 1 : (byte) 0);
freePointer += OByteSerializer.BYTE_SIZE;
byte[] serializedValue = new byte[valueSize];
if (treeEntry.value.isLink())
OLongSerializer.INSTANCE.serializeNative(treeEntry.value.getLink(), serializedValue, 0);
else
valueSerializer.serializeNative(treeEntry.value.getValue(), serializedValue, 0);
setBinaryValue(freePointer, serializedValue);
} else {
// Non-leaf layout: [leftChild][rightChild][key].
setLongValue(freePointer, treeEntry.leftChild);
freePointer += OLongSerializer.LONG_SIZE;
setLongValue(freePointer, treeEntry.rightChild);
freePointer += OLongSerializer.LONG_SIZE;
byte[] serializedKey = new byte[keySize];
keySerializer.serializeNative(treeEntry.key, serializedKey, 0, (Object[]) keyTypes);
setBinaryValue(freePointer, serializedKey);
size++;
if (updateNeighbors && size > 1) {
// The next entry's left child must equal this entry's right child...
if (index < size - 1) {
final int nextEntryPosition = getIntValue(POSITIONS_ARRAY_OFFSET + (index + 1) * OIntegerSerializer.INT_SIZE);
setLongValue(nextEntryPosition, treeEntry.rightChild);
}
// ...and the previous entry's right child must equal this entry's left child.
if (index > 0) {
final int prevEntryPosition = getIntValue(POSITIONS_ARRAY_OFFSET + (index - 1) * OIntegerSerializer.INT_SIZE);
setLongValue(prevEntryPosition + OLongSerializer.LONG_SIZE, treeEntry.leftChild);
}
}
}
return true;
}
/**
 * Replaces, in place, the embedded value of the leaf entry at {@code index}.
 * Only same-size replacements are supported (the page is never re-compacted here).
 * NOTE(review): assumes the entry stores its value inline, not as a link —
 * the flag byte is skipped without being checked; confirm against callers.
 *
 * @return -1 when the new serialized size differs from the old one (no write),
 *         0 when the serialized bytes are identical (no write needed),
 *         1 when the value was overwritten
 */
public int updateValue(int index, OSBTreeValue<V> value) throws IOException {
int entryPosition = getIntValue(index * OIntegerSerializer.INT_SIZE + POSITIONS_ARRAY_OFFSET);
// Skip the key and the one-byte link flag to land on the value bytes.
entryPosition += keySerializer.getObjectSizeInDirectMemory(pagePointer, entryPosition) + OByteSerializer.BYTE_SIZE;
final int newSize = valueSerializer.getObjectSize(value.getValue());
final int oldSize = valueSerializer.getObjectSizeInDirectMemory(pagePointer, entryPosition);
if (newSize != oldSize)
return -1;
byte[] serializedValue = new byte[newSize];
valueSerializer.serializeNative(value.getValue(), serializedValue, 0);
// Avoid a page write (and WAL record) when nothing actually changed.
byte[] oldSerializedValue = pagePointer.get(entryPosition, oldSize);
if (ODefaultComparator.INSTANCE.compare(oldSerializedValue, serializedValue) == 0)
return 0;
setBinaryValue(entryPosition, serializedValue);
return 1;
}
/** Sets the page index of the left sibling leaf (-1 when there is none). */
public void setLeftSibling(long pageIndex) throws IOException {
setLongValue(LEFT_SIBLING_OFFSET, pageIndex);
}
/** @return page index of the left sibling leaf, or -1 when absent. */
public long getLeftSibling() {
return getLongValue(LEFT_SIBLING_OFFSET);
}
/** Sets the page index of the right sibling leaf (-1 when there is none). */
public void setRightSibling(long pageIndex) throws IOException {
setLongValue(RIGHT_SIBLING_OFFSET, pageIndex);
}
/** @return page index of the right sibling leaf, or -1 when absent. */
public long getRightSibling() {
return getLongValue(RIGHT_SIBLING_OFFSET);
}
/**
 * Immutable value object describing one bucket entry: the child page pointers
 * (non-leaf entries only; -1 otherwise), the key, and the value wrapper
 * (null for non-leaf entries). Ordering is by key via the default comparator.
 */
public static final class SBTreeEntry<K, V> implements Comparable<SBTreeEntry<K, V>> {
private final Comparator<? super K> comparator = ODefaultComparator.INSTANCE;
public final long leftChild;
public final long rightChild;
public final K key;
public final OSBTreeValue<V> value;
public SBTreeEntry(long leftChild, long rightChild, K key, OSBTreeValue<V> value) {
this.leftChild = leftChild;
this.rightChild = rightChild;
this.key = key;
this.value = value;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
final SBTreeEntry<?, ?> other = (SBTreeEntry<?, ?>) o;
// Field-by-field comparison; value may legitimately be null (non-leaf entries).
return leftChild == other.leftChild
&& rightChild == other.rightChild
&& key.equals(other.key)
&& (value == null ? other.value == null : value.equals(other.value));
}
@Override
public int hashCode() {
// Same accumulation as the classic IDE-generated form, kept bit-identical.
int h = (int) (leftChild ^ (leftChild >>> 32));
h = 31 * h + (int) (rightChild ^ (rightChild >>> 32));
h = 31 * h + key.hashCode();
return 31 * h + (value != null ? value.hashCode() : 0);
}
@Override
public String toString() {
return new StringBuilder("SBTreeEntry{")
.append("leftChild=").append(leftChild)
.append(", rightChild=").append(rightChild)
.append(", key=").append(key)
.append(", value=").append(value)
.append('}').toString();
}
@Override
public int compareTo(SBTreeEntry<K, V> other) {
return comparator.compare(key, other.key);
}
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_sbtree_local_OSBTreeBucket.java |
3,201 | public class IndexFieldDataServiceTests extends ElasticsearchTestCase {
// Shared field-data settings forcing the doc_values format; never reassigned,
// so declared final (constants should be immutable references).
private static final Settings DOC_VALUES_SETTINGS = ImmutableSettings.builder().put(FieldDataType.FORMAT_KEY, FieldDataType.DOC_VALUES_FORMAT_VALUE).build();
@SuppressWarnings("unchecked")
public void testGetForFieldDefaults() {
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
for (boolean docValues : Arrays.asList(true, false)) {
final BuilderContext ctx = new BuilderContext(null, new ContentPath(1));
final StringFieldMapper stringMapper = new StringFieldMapper.Builder("string").tokenized(false).fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx);
ifdService.clear();
IndexFieldData<?> fd = ifdService.getForField(stringMapper);
if (docValues) {
assertTrue(fd instanceof SortedSetDVBytesIndexFieldData);
} else {
assertTrue(fd instanceof PagedBytesIndexFieldData);
}
for (FieldMapper<?> mapper : Arrays.asList(
new ByteFieldMapper.Builder("int").fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx),
new ShortFieldMapper.Builder("int").fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx),
new IntegerFieldMapper.Builder("int").fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx),
new LongFieldMapper.Builder("long").fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx)
)) {
ifdService.clear();
fd = ifdService.getForField(mapper);
if (docValues) {
assertTrue(fd instanceof BinaryDVNumericIndexFieldData);
} else {
assertTrue(fd instanceof PackedArrayIndexFieldData);
}
}
final FloatFieldMapper floatMapper = new FloatFieldMapper.Builder("float").fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx);
ifdService.clear();
fd = ifdService.getForField(floatMapper);
if (docValues) {
assertTrue(fd instanceof BinaryDVNumericIndexFieldData);
} else {
assertTrue(fd instanceof FloatArrayIndexFieldData);
}
final DoubleFieldMapper doubleMapper = new DoubleFieldMapper.Builder("double").fieldDataSettings(docValues ? DOC_VALUES_SETTINGS : ImmutableSettings.EMPTY).build(ctx);
ifdService.clear();
fd = ifdService.getForField(doubleMapper);
if (docValues) {
assertTrue(fd instanceof BinaryDVNumericIndexFieldData);
} else {
assertTrue(fd instanceof DoubleArrayIndexFieldData);
}
}
}
@SuppressWarnings("unchecked")
public void testByPassDocValues() {
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
final BuilderContext ctx = new BuilderContext(null, new ContentPath(1));
final StringFieldMapper stringMapper = MapperBuilders.stringField("string").tokenized(false).fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(ImmutableSettings.builder().put("format", "fst").build()).build(ctx);
ifdService.clear();
IndexFieldData<?> fd = ifdService.getForField(stringMapper);
assertTrue(fd instanceof FSTBytesIndexFieldData);
final Settings fdSettings = ImmutableSettings.builder().put("format", "array").build();
for (FieldMapper<?> mapper : Arrays.asList(
new ByteFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx),
new ShortFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx),
new IntegerFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx),
new LongFieldMapper.Builder("long").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx)
)) {
ifdService.clear();
fd = ifdService.getForField(mapper);
assertTrue(fd instanceof PackedArrayIndexFieldData);
}
final FloatFieldMapper floatMapper = MapperBuilders.floatField("float").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx);
ifdService.clear();
fd = ifdService.getForField(floatMapper);
assertTrue(fd instanceof FloatArrayIndexFieldData);
final DoubleFieldMapper doubleMapper = MapperBuilders.doubleField("double").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx);
ifdService.clear();
fd = ifdService.getForField(doubleMapper);
assertTrue(fd instanceof DoubleArrayIndexFieldData);
}
// Verifies that changing a field's field-data format applies only to newly
// loaded segments: segments loaded under the old format keep their old
// atomic field data, while new segments use the new implementation.
public void testChangeFieldDataFormat() throws Exception {
final IndexFieldDataService ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
final BuilderContext ctx = new BuilderContext(null, new ContentPath(1));
// Start with the paged_bytes format for field "s".
final StringFieldMapper mapper1 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(ImmutableSettings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx);
final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
Document doc = new Document();
doc.add(new StringField("s", "thisisastring", Store.NO));
writer.addDocument(doc);
final IndexReader reader1 = DirectoryReader.open(writer, true);
IndexFieldData<?> ifd = ifdService.getForField(mapper1);
assertThat(ifd, instanceOf(PagedBytesIndexFieldData.class));
// Remember the segments loaded under the old format (identity-based set,
// since segment readers have no useful equals()).
Set<AtomicReader> oldSegments = Collections.newSetFromMap(new IdentityHashMap<AtomicReader, Boolean>());
for (AtomicReaderContext arc : reader1.leaves()) {
oldSegments.add(arc.reader());
AtomicFieldData<?> afd = ifd.load(arc);
assertThat(afd, instanceOf(PagedBytesAtomicFieldData.class));
}
// write new segment
writer.addDocument(doc);
final IndexReader reader2 = DirectoryReader.open(writer, true);
// Switch the same field to the fst format and notify the service.
final StringFieldMapper mapper2 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(ImmutableSettings.builder().put(FieldDataType.FORMAT_KEY, "fst").build()).build(ctx);
ifdService.onMappingUpdate();
ifd = ifdService.getForField(mapper2);
assertThat(ifd, instanceOf(FSTBytesIndexFieldData.class));
// Old segments keep the old per-segment data; only new segments use fst.
for (AtomicReaderContext arc : reader2.leaves()) {
AtomicFieldData<?> afd = ifd.load(arc);
if (oldSegments.contains(arc.reader())) {
assertThat(afd, instanceOf(PagedBytesAtomicFieldData.class));
} else {
assertThat(afd, instanceOf(FSTBytesAtomicFieldData.class));
}
}
reader1.close();
reader2.close();
writer.close();
writer.getDirectory().close();
}
} | 0true
| src_test_java_org_elasticsearch_index_fielddata_IndexFieldDataServiceTests.java |
212 | protected class NavigatePreviousSubWordAction extends PreviousSubWordAction {
/**
 * Creates a new navigate previous sub-word action.
 */
public NavigatePreviousSubWordAction() {
super(ST.WORD_PREVIOUS);
}
@Override
// Moves the caret to the given document (model) offset, translating it to a
// widget offset first so the move is correct in a projected/folded viewer.
protected void setCaretPosition(final int position) {
getTextWidget().setCaretOffset(modelOffset2WidgetOffset(getSourceViewer(), position));
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonEditor.java |
3,572 | public static class TypeParser implements Mapper.TypeParser {
@Override
// Builds a DateFieldMapper.Builder from the mapping definition node.
// Common numeric properties are parsed first; then the date-specific ones:
// null_value, format (date pattern), numeric_resolution (time unit) and locale.
// Unknown properties are silently ignored here (handled/validated elsewhere).
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
DateFieldMapper.Builder builder = dateField(name);
parseNumberField(builder, name, node, parserContext);
for (Map.Entry<String, Object> entry : node.entrySet()) {
// Normalize camelCase property names to snake_case before matching.
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("null_value")) {
builder.nullValue(propNode.toString());
} else if (propName.equals("format")) {
builder.dateTimeFormatter(parseDateTimeFormatter(propName, propNode));
} else if (propName.equals("numeric_resolution")) {
// Locale.ROOT keeps the enum lookup locale-independent.
builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT)));
} else if (propName.equals("locale")) {
builder.locale(parseLocale(propNode.toString()));
}
}
return builder;
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_core_DateFieldMapper.java |
108 | public class OIOUtils {
public static final long SECOND = 1000;
public static final long MINUTE = SECOND * 60;
public static final long HOUR = MINUTE * 60;
public static final long DAY = HOUR * 24;
public static final long YEAR = DAY * 365;
public static final long WEEK = DAY * 7;
public static byte[] toStream(Externalizable iSource) throws IOException {
final ByteArrayOutputStream stream = new ByteArrayOutputStream();
final ObjectOutputStream oos = new ObjectOutputStream(stream);
iSource.writeExternal(oos);
oos.flush();
stream.flush();
return stream.toByteArray();
}
public static long getTimeAsMillisecs(final Object iSize) {
if (iSize == null)
throw new IllegalArgumentException("Time is null");
if (iSize instanceof Number)
// MILLISECS
return ((Number) iSize).longValue();
String time = iSize.toString();
boolean number = true;
for (int i = time.length() - 1; i >= 0; --i) {
if (!Character.isDigit(time.charAt(i))) {
number = false;
break;
}
}
if (number)
// MILLISECS
return Long.parseLong(time);
else {
time = time.toUpperCase(Locale.ENGLISH);
int pos = time.indexOf("MS");
final String timeAsNumber = time.replaceAll("[^\\d]", "");
if (pos > -1)
return Long.parseLong(timeAsNumber);
pos = time.indexOf("S");
if (pos > -1)
return Long.parseLong(timeAsNumber) * SECOND;
pos = time.indexOf("M");
if (pos > -1)
return Long.parseLong(timeAsNumber) * MINUTE;
pos = time.indexOf("H");
if (pos > -1)
return Long.parseLong(timeAsNumber) * HOUR;
pos = time.indexOf("D");
if (pos > -1)
return Long.parseLong(timeAsNumber) * DAY;
pos = time.indexOf('W');
if (pos > -1)
return Long.parseLong(timeAsNumber) * WEEK;
pos = time.indexOf('Y');
if (pos > -1)
return Long.parseLong(timeAsNumber) * YEAR;
// RE-THROW THE EXCEPTION
throw new IllegalArgumentException("Time '" + time + "' has a unrecognizable format");
}
}
public static String getTimeAsString(final long iTime) {
if (iTime > YEAR && iTime % YEAR == 0)
return String.format("%dy", iTime / YEAR);
if (iTime > WEEK && iTime % WEEK == 0)
return String.format("%dw", iTime / WEEK);
if (iTime > DAY && iTime % DAY == 0)
return String.format("%dd", iTime / DAY);
if (iTime > HOUR && iTime % HOUR == 0)
return String.format("%dh", iTime / HOUR);
if (iTime > MINUTE && iTime % MINUTE == 0)
return String.format("%dm", iTime / MINUTE);
if (iTime > SECOND && iTime % SECOND == 0)
return String.format("%ds", iTime / SECOND);
// MILLISECONDS
return String.format("%dms", iTime);
}
public static Date getTodayWithTime(final String iTime) throws ParseException {
final SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
final long today = System.currentTimeMillis();
final Date rslt = new Date();
rslt.setTime(today - (today % DAY) + df.parse(iTime).getTime());
return rslt;
}
public static String readFileAsString(final File iFile) throws java.io.IOException {
return readStreamAsString(new FileInputStream(iFile));
}
public static String readStreamAsString(final InputStream iStream) throws java.io.IOException {
final StringBuffer fileData = new StringBuffer(1000);
final BufferedReader reader = new BufferedReader(new InputStreamReader(iStream));
try {
final char[] buf = new char[1024];
int numRead = 0;
while ((numRead = reader.read(buf)) != -1) {
String readData = String.valueOf(buf, 0, numRead);
fileData.append(readData);
}
} finally {
reader.close();
}
return fileData.toString();
}
public static int copyStream(final InputStream in, final OutputStream out, int iMax) throws java.io.IOException {
if (iMax < 0)
iMax = Integer.MAX_VALUE;
final byte[] buf = new byte[8192];
int byteRead = 0;
int byteTotal = 0;
while ((byteRead = in.read(buf, 0, Math.min(buf.length, iMax - byteTotal))) > 0) {
out.write(buf, 0, byteRead);
byteTotal += byteRead;
}
return byteTotal;
}
/**
* Returns the Unix file name format converting backslashes (\) to slasles (/)
*/
public static String getUnixFileName(final String iFileName) {
return iFileName != null ? iFileName.replace('\\', '/') : null;
}
public static String getRelativePathIfAny(final String iDatabaseURL, final String iBasePath) {
if (iBasePath == null) {
final int pos = iDatabaseURL.lastIndexOf('/');
if (pos > -1)
return iDatabaseURL.substring(pos + 1);
} else {
final int pos = iDatabaseURL.indexOf(iBasePath);
if (pos > -1)
return iDatabaseURL.substring(pos + iBasePath.length() + 1);
}
return iDatabaseURL;
}
public static String getDatabaseNameFromPath(final String iPath) {
return iPath.replace('/', '$');
}
public static String getPathFromDatabaseName(final String iPath) {
return iPath.replace('$', '/');
}
public static String getStringMaxLength(final String iText, final int iMax) {
return getStringMaxLength(iText, iMax, "");
}
public static String getStringMaxLength(final String iText, final int iMax, final String iOther) {
if (iText == null)
return null;
if (iMax > iText.length())
return iText;
return iText.substring(0, iMax) + iOther;
}
public static Object encode(final Object iValue) {
if (iValue instanceof String) {
return java2unicode(((String) iValue).replace("\\", "\\\\").replace("\"", "\\\""));
} else
return iValue;
}
public static String java2unicode(final String iInput) {
final StringBuilder result = new StringBuilder();
final int inputSize = iInput.length();
char ch;
String hex;
for (int i = 0; i < inputSize; i++) {
ch = iInput.charAt(i);
if (ch >= 0x0020 && ch <= 0x007e) // Does the char need to be converted to unicode?
result.append(ch); // No.
else // Yes.
{
result.append("\\u"); // standard unicode format.
hex = Integer.toHexString(ch & 0xFFFF); // Get hex value of the char.
for (int j = 0; j < 4 - hex.length(); j++)
// Prepend zeros because unicode requires 4 digits
result.append('0');
result.append(hex.toLowerCase()); // standard unicode format.
// ostr.append(hex.toLowerCase(Locale.ENGLISH));
}
}
return result.toString();
}
public static boolean isStringContent(final Object iValue) {
if (iValue == null)
return false;
final String s = iValue.toString();
if (s == null)
return false;
return s.length() > 1
&& (s.charAt(0) == '\'' && s.charAt(s.length() - 1) == '\'' || s.charAt(0) == '"' && s.charAt(s.length() - 1) == '"');
}
public static String getStringContent(final Object iValue) {
if (iValue == null)
return null;
final String s = iValue.toString();
if (s == null)
return null;
if (s.length() > 1
&& (s.charAt(0) == '\'' && s.charAt(s.length() - 1) == '\'' || s.charAt(0) == '"' && s.charAt(s.length() - 1) == '"'))
return s.substring(1, s.length() - 1);
return s;
}
public static boolean equals(final byte[] buffer, final byte[] buffer2) {
if (buffer == null || buffer2 == null || buffer.length != buffer2.length)
return false;
for (int i = 0; i < buffer.length; ++i)
if (buffer[i] != buffer2[i])
return false;
return true;
}
} | 1no label
| commons_src_main_java_com_orientechnologies_common_io_OIOUtils.java |
27 | class InvocationCompletionProposal extends CompletionProposal {
// Adds a plain reference proposal (no arguments, no type arguments) for a
// program element: the inserted text is just the declaration's escaped name.
static void addProgramElementReferenceProposal(int offset, String prefix,
CeylonParseController cpc, List<ICompletionProposal> result,
Declaration dec, Scope scope, boolean isMember) {
Unit unit = cpc.getRootNode().getUnit();
result.add(new InvocationCompletionProposal(offset, prefix,
dec.getName(unit), escapeName(dec, unit),
dec, dec.getReference(), scope, cpc,
true, false, false, isMember, null));
}
// Adds bare-reference proposals for a declaration: one variant including
// explicit type arguments (for generic declarations) and one without.
// Abstract classes and interfaces are only proposed in positions where a
// type reference makes sense (extends/satisfies/alias occurrences).
static void addReferenceProposal(int offset, String prefix,
final CeylonParseController cpc, List<ICompletionProposal> result,
Declaration dec, Scope scope, boolean isMember,
ProducedReference pr, OccurrenceLocation ol) {
Unit unit = cpc.getRootNode().getUnit();
//proposal with type args
if (dec instanceof Generic) {
result.add(new InvocationCompletionProposal(offset, prefix,
getDescriptionFor(dec, unit), getTextFor(dec, unit),
dec, pr, scope, cpc, true, false, false, isMember, null));
if (((Generic) dec).getTypeParameters().isEmpty()) {
//don't add another proposal below!
return;
}
}
//proposal without type args
boolean isAbstract =
dec instanceof Class && ((Class) dec).isAbstract() ||
dec instanceof Interface;
// Abstract types cannot be instantiated, so skip the plain proposal except
// in type-reference positions.
if ((!isAbstract &&
ol!=EXTENDS && ol!=SATISFIES &&
ol!=CLASS_ALIAS && ol!=TYPE_ALIAS)) {
result.add(new InvocationCompletionProposal(offset, prefix,
dec.getName(unit), escapeName(dec, unit),
dec, pr, scope, cpc, true, false, false, isMember, null));
}
}
// Adds "second level" proposals of the form dec.member(...) — for a value
// declaration, proposes invoking each typed member of its type whose result
// type satisfies the required type (when one is known).
static void addSecondLevelProposal(int offset, String prefix,
final CeylonParseController cpc, List<ICompletionProposal> result,
Declaration dec, Scope scope, boolean isMember, ProducedReference pr,
ProducedType requiredType, OccurrenceLocation ol) {
// Only values qualify as the receiver here — not functions or types.
if (!(dec instanceof Functional) &&
!(dec instanceof TypeDeclaration)) {
//add qualified member proposals
Unit unit = cpc.getRootNode().getUnit();
ProducedType type = pr.getType();
// Can't enumerate members of an unknown type.
if (isTypeUnknown(type)) return;
Collection<DeclarationWithProximity> members =
type.getDeclaration().getMatchingMemberDeclarations(unit, scope, "", 0).values();
for (DeclarationWithProximity ndwp: members) {
final Declaration m = ndwp.getDeclaration();
if (m instanceof TypedDeclaration) { //TODO: member Class would also be useful!
final ProducedTypedReference ptr =
type.getTypedMember((TypedDeclaration) m,
Collections.<ProducedType>emptyList());
ProducedType mt = ptr.getType();
// Filter by the expected type when the context imposes one.
if (mt!=null &&
(requiredType==null || mt.isSubtypeOf(requiredType))) {
result.add(new InvocationCompletionProposal(offset, prefix,
dec.getName() + "." + getPositionalInvocationDescriptionFor(m, ol, ptr, unit, false, null),
dec.getName() + "." + getPositionalInvocationTextFor(m, ol, ptr, unit, false, null),
m, ptr, scope, cpc, true, true, false, true, dec));
}
}
}
}
}
// Adds positional ( ... ) and/or named { ... } invocation proposals for a
// functional declaration, both in a "no defaulted args" and an "all args"
// variant. Which variants appear is controlled by the INEXACT_MATCHES
// preference, except that an exact (case-insensitive) prefix match always
// proposes both forms.
static void addInvocationProposals(int offset, String prefix,
CeylonParseController cpc, List<ICompletionProposal> result,
Declaration dec, ProducedReference pr, Scope scope,
OccurrenceLocation ol, String typeArgs, boolean isMember) {
if (dec instanceof Functional) {
Unit unit = cpc.getRootNode().getUnit();
boolean isAbstractClass =
dec instanceof Class && ((Class) dec).isAbstract();
Functional fd = (Functional) dec;
List<ParameterList> pls = fd.getParameterLists();
if (!pls.isEmpty()) {
ParameterList parameterList = pls.get(0);
List<Parameter> ps = parameterList.getParameters();
String inexactMatches = EditorsUI.getPreferenceStore().getString(INEXACT_MATCHES);
// strip trailing type arguments before comparing against the name
boolean exact = (typeArgs==null ? prefix : prefix.substring(0,prefix.length()-typeArgs.length()))
.equalsIgnoreCase(dec.getName(unit));
boolean positional = exact ||
"both".equals(inexactMatches) ||
"positional".equals(inexactMatches);
boolean named = exact ||
"both".equals(inexactMatches);
// abstract classes may only be "invoked" in extends / class alias position
if (positional &&
(!isAbstractClass || ol==EXTENDS || ol==CLASS_ALIAS)) {
// only offer the defaulted-args-omitted variant when it differs
if (ps.size()!=getParameters(parameterList, false, false).size()) {
result.add(new InvocationCompletionProposal(offset, prefix,
getPositionalInvocationDescriptionFor(dec, ol, pr, unit, false, typeArgs),
getPositionalInvocationTextFor(dec, ol, pr, unit, false, typeArgs), dec,
pr, scope, cpc, false, true, false, isMember, null));
}
result.add(new InvocationCompletionProposal(offset, prefix,
getPositionalInvocationDescriptionFor(dec, ol, pr, unit, true, typeArgs),
getPositionalInvocationTextFor(dec, ol, pr, unit, true, typeArgs), dec,
pr, scope, cpc, true, true, false, isMember, null));
}
// named-argument form never applies to abstract classes, extends
// clauses, class aliases, or overloaded declarations
if (named &&
(!isAbstractClass && ol!=EXTENDS && ol!=CLASS_ALIAS &&
!fd.isOverloaded())) {
//if there is at least one parameter,
//suggest a named argument invocation
if (ps.size()!=getParameters(parameterList, false, true).size()) {
result.add(new InvocationCompletionProposal(offset, prefix,
getNamedInvocationDescriptionFor(dec, pr, unit, false, typeArgs),
getNamedInvocationTextFor(dec, pr, unit, false, typeArgs), dec,
pr, scope, cpc, false, false, true, isMember, null));
}
if (!ps.isEmpty()) {
result.add(new InvocationCompletionProposal(offset, prefix,
getNamedInvocationDescriptionFor(dec, pr, unit, true, typeArgs),
getNamedInvocationTextFor(dec, pr, unit, true, typeArgs), dec,
pr, scope, cpc, true, false, true, isMember, null));
}
}
}
}
}
// A proposal offered *inside* linked mode for a single argument position of
// an invocation already inserted by the outer InvocationCompletionProposal.
// It proposes a declaration (optionally prefixed by a spread operator "*")
// as the value of the argument at 'index' in the argument list that starts
// at document offset 'loc'.
final class NestedCompletionProposal implements ICompletionProposal,
ICompletionProposalExtension2 {
// operator text prepended to the name ("*" for spread args, else "")
private final String op;
// document offset where the invocation text was inserted
private final int loc;
// zero-based argument position this proposal fills
private final int index;
// when true, only the bare name is inserted, never "name(...)"
private final boolean basic;
private final Declaration dec;
NestedCompletionProposal(Declaration dec, int loc,
int index, boolean basic, String op) {
this.op = op;
this.loc = loc;
this.index = index;
this.basic = basic;
this.dec = dec;
}
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument document) {
//the following awfulness is necessary because the
//insertion point may have changed (and even its
//text may have changed, since the proposal was
//instantiated).
try {
// re-derive the argument's current start/end offsets by counting
// ','/';' separators from the start of the argument list
IRegion li = document.getLineInformationOfOffset(loc);
int endOfLine = li.getOffset() + li.getLength();
int startOfArgs = getFirstPosition();
int offset = findCharCount(index, document,
loc+startOfArgs, endOfLine,
",;", "", true)+1;
if (offset>0 && document.getChar(offset)==' ') {
offset++;
}
int nextOffset = findCharCount(index+1, document,
loc+startOfArgs, endOfLine,
",;", "", true);
// NOTE(review): nextOffset may still be -1 here (only checked
// further below), in which case this scan runs over an inverted
// range — presumably findCharCount then returns -1 so
// middleOffset stays 0; confirm against findCharCount.
// for named arguments skip past "name =" / "name =>" to the value
int middleOffset = findCharCount(1, document,
offset, nextOffset,
"=", "", true)+1;
if (middleOffset>0 &&
document.getChar(middleOffset)=='>') {
middleOffset++;
}
while (middleOffset>0 &&
document.getChar(middleOffset)==' ') {
middleOffset++;
}
if (middleOffset>offset &&
middleOffset<nextOffset) {
offset = middleOffset;
}
String str = getText(false);
if (nextOffset==-1) {
nextOffset = offset;
}
// keep a space before a closing brace of a named argument list
if (document.getChar(nextOffset)=='}') {
str += " ";
}
document.replace(offset, nextOffset-offset, str);
}
catch (BadLocationException e) {
e.printStackTrace();
}
//adding imports drops us out of linked mode :(
/*try {
DocumentChange tc = new DocumentChange("imports", document);
tc.setEdit(new MultiTextEdit());
HashSet<Declaration> decs = new HashSet<Declaration>();
CompilationUnit cu = cpc.getRootNode();
importDeclaration(decs, d, cu);
if (d instanceof Functional) {
List<ParameterList> pls = ((Functional) d).getParameterLists();
if (!pls.isEmpty()) {
for (Parameter p: pls.get(0).getParameters()) {
MethodOrValue pm = p.getModel();
if (pm instanceof Method) {
for (ParameterList ppl: ((Method) pm).getParameterLists()) {
for (Parameter pp: ppl.getParameters()) {
importSignatureTypes(pp.getModel(), cu, decs);
}
}
}
}
}
}
applyImports(tc, decs, cu, document);
tc.perform(new NullProgressMonitor());
}
catch (Exception e) {
e.printStackTrace();
}*/
}
// Builds the inserted/displayed text: op + name, plus a positional
// argument list for functional declarations (unless 'basic').
private String getText(boolean description) {
StringBuilder sb = new StringBuilder()
.append(op).append(dec.getName(getUnit()));
if (dec instanceof Functional && !basic) {
appendPositionalArgs(dec, getUnit(), sb,
false, description);
}
return sb.toString();
}
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public String getDisplayString() {
return getText(true);
}
@Override
public Image getImage() {
return getImageForDeclaration(dec);
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public void apply(ITextViewer viewer, char trigger,
int stateMask, int offset) {
apply(viewer.getDocument());
}
@Override
public void selected(ITextViewer viewer, boolean smartToggle) {}
@Override
public void unselected(ITextViewer viewer) {}
// Keeps the proposal visible while the text typed so far (after any
// "name =" prefix of a named argument) is a prefix of the proposed name.
@Override
public boolean validate(IDocument document, int currentOffset,
DocumentEvent event) {
if (event==null) {
return true;
}
else {
try {
IRegion li = document.getLineInformationOfOffset(loc);
int endOfLine = li.getOffset() + li.getLength();
int startOfArgs = getFirstPosition();
int offset = findCharCount(index, document,
loc+startOfArgs, endOfLine,
",;", "", true)+1;
String content = document.get(offset, currentOffset - offset);
int eq = content.indexOf("=");
if (eq>0) {
content = content.substring(eq+1);
}
String filter = content.trim().toLowerCase();
String decName = dec.getName(getUnit());
if ((op+decName).toLowerCase().startsWith(filter) ||
decName.toLowerCase().startsWith(filter)) {
return true;
}
}
catch (BadLocationException e) {
// ignore concurrently modified document
}
return false;
}
}
}
// Like NestedCompletionProposal, but proposes a fixed literal string
// (e.g. "0", "\"\"", "[]") as the value of the argument at 'index' in the
// argument list inserted at document offset 'loc'.
final class NestedLiteralCompletionProposal implements ICompletionProposal,
ICompletionProposalExtension2 {
// document offset where the invocation text was inserted
private final int loc;
// zero-based argument position this proposal fills
private final int index;
// the literal text to insert, verbatim
private final String value;
NestedLiteralCompletionProposal(String value, int loc,
int index) {
this.value = value;
this.loc = loc;
this.index = index;
}
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument document) {
//the following awfulness is necessary because the
//insertion point may have changed (and even its
//text may have changed, since the proposal was
//instantiated).
try {
// re-derive the argument's current start/end offsets by counting
// ','/';' separators from the start of the argument list
IRegion li = document.getLineInformationOfOffset(loc);
int endOfLine = li.getOffset() + li.getLength();
int startOfArgs = getFirstPosition();
int offset = findCharCount(index, document,
loc+startOfArgs, endOfLine,
",;", "", true)+1;
if (offset>0 && document.getChar(offset)==' ') {
offset++;
}
int nextOffset = findCharCount(index+1, document,
loc+startOfArgs, endOfLine,
",;", "", true);
// NOTE(review): nextOffset may still be -1 here (only checked
// further below) — same caveat as in NestedCompletionProposal.
// for named arguments skip past "name =" / "name =>" to the value
int middleOffset = findCharCount(1, document,
offset, nextOffset,
"=", "", true)+1;
if (middleOffset>0 &&
document.getChar(middleOffset)=='>') {
middleOffset++;
}
while (middleOffset>0 &&
document.getChar(middleOffset)==' ') {
middleOffset++;
}
if (middleOffset>offset &&
middleOffset<nextOffset) {
offset = middleOffset;
}
String str = value;
if (nextOffset==-1) {
nextOffset = offset;
}
// keep a space before a closing brace of a named argument list
if (document.getChar(nextOffset)=='}') {
str += " ";
}
document.replace(offset, nextOffset-offset, str);
}
catch (BadLocationException e) {
e.printStackTrace();
}
//adding imports drops us out of linked mode :(
/*try {
DocumentChange tc = new DocumentChange("imports", document);
tc.setEdit(new MultiTextEdit());
HashSet<Declaration> decs = new HashSet<Declaration>();
CompilationUnit cu = cpc.getRootNode();
importDeclaration(decs, d, cu);
if (d instanceof Functional) {
List<ParameterList> pls = ((Functional) d).getParameterLists();
if (!pls.isEmpty()) {
for (Parameter p: pls.get(0).getParameters()) {
MethodOrValue pm = p.getModel();
if (pm instanceof Method) {
for (ParameterList ppl: ((Method) pm).getParameterLists()) {
for (Parameter pp: ppl.getParameters()) {
importSignatureTypes(pp.getModel(), cu, decs);
}
}
}
}
}
}
applyImports(tc, decs, cu, document);
tc.perform(new NullProgressMonitor());
}
catch (Exception e) {
e.printStackTrace();
}*/
}
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public String getDisplayString() {
return value;
}
@Override
public Image getImage() {
return getDecoratedImage(CEYLON_LITERAL, 0, false);
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public void apply(ITextViewer viewer, char trigger,
int stateMask, int offset) {
apply(viewer.getDocument());
}
@Override
public void selected(ITextViewer viewer, boolean smartToggle) {}
@Override
public void unselected(ITextViewer viewer) {}
// Keeps the proposal visible while the text typed so far (after any
// "name =" prefix of a named argument) is a prefix of the literal.
@Override
public boolean validate(IDocument document, int currentOffset,
DocumentEvent event) {
if (event==null) {
return true;
}
else {
try {
IRegion li = document.getLineInformationOfOffset(loc);
int endOfLine = li.getOffset() + li.getLength();
int startOfArgs = getFirstPosition();
int offset = findCharCount(index, document,
loc+startOfArgs, endOfLine,
",;", "", true)+1;
String content = document.get(offset, currentOffset - offset);
int eq = content.indexOf("=");
if (eq>0) {
content = content.substring(eq+1);
}
String filter = content.trim().toLowerCase();
if (value.toLowerCase().startsWith(filter)) {
return true;
}
}
catch (BadLocationException e) {
// ignore concurrently modified document
}
return false;
}
}
}
// Parse state used to resolve documentation and imports when applying
private final CeylonParseController cpc;
// the declaration being invoked
private final Declaration declaration;
// the declaration's reference with type arguments substituted
private final ProducedReference producedReference;
// scope at the completion point, used to rank/filter nested proposals
private final Scope scope;
// whether defaulted parameters are included in the inserted argument list
private final boolean includeDefaulted;
// at most one of these is true: named uses { ... }, positional uses ( ... )
private final boolean namedInvocation;
private final boolean positionalInvocation;
// true when this completes a qualified reference (x.foo)
private final boolean qualified;
// the qualifying declaration for second-level proposals, or null
private Declaration qualifyingValue;
// Creates an invocation proposal. 'desc' is the label shown in the popup,
// 'text' is the text actually inserted at offset/prefix; the remaining
// flags are stored verbatim (see field comments).
private InvocationCompletionProposal(int offset, String prefix,
String desc, String text, Declaration dec,
ProducedReference producedReference, Scope scope,
CeylonParseController cpc, boolean includeDefaulted,
boolean positionalInvocation, boolean namedInvocation,
boolean qualified, Declaration qualifyingValue) {
super(offset, prefix, getImageForDeclaration(dec),
desc, text);
this.cpc = cpc;
this.declaration = dec;
this.producedReference = producedReference;
this.scope = scope;
this.includeDefaulted = includeDefaulted;
this.namedInvocation = namedInvocation;
this.positionalInvocation = positionalInvocation;
this.qualified = qualified;
this.qualifyingValue = qualifyingValue;
}
// Convenience accessor for the compilation unit of the current parse.
private Unit getUnit() {
    Tree.CompilationUnit rootNode = cpc.getRootNode();
    return rootNode.getUnit();
}
// Builds the complete text change for this proposal: any imports needed
// for the qualifying value, the invoked declaration (unless the reference
// is qualified), and the parameter types of callable parameters, followed
// by the completion edit itself.
// Side effect: shifts this.offset by the length of the inserted import
// text so later offset computations line up with the edited document.
private DocumentChange createChange(IDocument document)
throws BadLocationException {
DocumentChange change =
new DocumentChange("Complete Invocation", document);
change.setEdit(new MultiTextEdit());
HashSet<Declaration> decs = new HashSet<Declaration>();
Tree.CompilationUnit cu = cpc.getRootNode();
if (qualifyingValue!=null) {
importDeclaration(decs, qualifyingValue, cu);
}
if (!qualified) {
importDeclaration(decs, declaration, cu);
}
if (positionalInvocation||namedInvocation) {
importCallableParameterParamTypes(declaration, decs, cu);
}
// il = number of characters inserted by the import edits
int il=applyImports(change, decs, cu, document);
change.addEdit(createEdit(document));
offset+=il;
return change;
}
// Applies the completion to the document and, when the user has enabled
// the linked-mode preference, starts linked-mode editing of the arguments.
@Override
public void apply(IDocument document) {
    try {
        DocumentChange change = createChange(document);
        change.perform(new NullProgressMonitor());
    }
    catch (Exception e) {
        // best effort: a failed edit should not break the editor
        e.printStackTrace();
    }
    boolean linkedModeEnabled =
            EditorsUI.getPreferenceStore().getBoolean(LINKED_MODE);
    if (linkedModeEnabled) {
        activeLinkedMode(document);
    }
}
// Starts linked-mode editing after the completion is applied: prefers
// cycling through value arguments when the declaration has a non-empty
// first parameter list, otherwise falls back to its type parameters.
private void activeLinkedMode(IDocument document) {
    if (!(declaration instanceof Generic)) {
        return;
    }
    ParameterList firstParameterList = null;
    if (declaration instanceof Functional
            && (positionalInvocation || namedInvocation)) {
        List<ParameterList> lists =
                ((Functional) declaration).getParameterLists();
        if (!lists.isEmpty()
                && !lists.get(0).getParameters().isEmpty()) {
            firstParameterList = lists.get(0);
        }
    }
    if (firstParameterList != null) {
        List<Parameter> args = getParameters(firstParameterList,
                includeDefaulted, namedInvocation);
        if (!args.isEmpty()) {
            enterLinkedMode(document, args, null);
            return; // value arguments win over type arguments
        }
    }
    List<TypeParameter> typeParams =
            ((Generic) declaration).getTypeParameters();
    if (!typeParams.isEmpty()) {
        enterLinkedMode(document, null, typeParams);
    }
}
// Selects the first argument of the inserted invocation so the user can
// immediately type over it; falls back to the default (caret after the
// insertion) when there is no argument list or it is empty.
@Override
public Point getSelection(IDocument document) {
int first = getFirstPosition();
if (first<=0) {
//no arg list
return super.getSelection(document);
}
int next = getNextPosition(document, first);
if (next<=0) {
//an empty arg list
return super.getSelection(document);
}
int middle = getCompletionPosition(first, next);
int start = offset-prefix.length()+first+middle;
int len = next-middle;
try {
// for an empty named argument list "{}", place the caret between
// the braces instead of selecting them
if (document.get(start, len).trim().equals("{}")) {
start++;
len=0;
}
} catch (BadLocationException e) {}
return new Point(start, len);
}
// Offset, within the current argument span of the inserted text, at which
// the editable value starts (i.e. just after the last space, skipping a
// "name =" prefix of a named argument).
protected int getCompletionPosition(int first, int next) {
    String argumentSpan = text.substring(first, first + next - 1);
    return argumentSpan.lastIndexOf(' ') + 1;
}
// Position, within the inserted text, just after the opening bracket of
// the argument list: '{' for named, '(' for positional, '<' for type
// arguments. Returns 0 when the text has no such bracket.
protected int getFirstPosition() {
    char opener;
    if (namedInvocation) {
        opener = '{';
    }
    else if (positionalInvocation) {
        opener = '(';
    }
    else {
        opener = '<';
    }
    return text.indexOf(opener) + 1;
}
// Returns the length, from lastOffset, of the current argument in the
// inserted text: up to the next ','/';' separator, or up to the closing
// '}'/')'/'>' when there is no further separator.
public int getNextPosition(IDocument document,
int lastOffset) {
int loc = offset-prefix.length();
int comma = -1;
try {
int start = loc+lastOffset;
int end = loc+text.length()-1;
// ignore a trailing statement terminator
if (text.endsWith(";")) {
end--;
}
comma = findCharCount(1, document, start, end,
",;", "", true) - start;
}
catch (BadLocationException e) {
e.printStackTrace();
}
if (comma<0) {
// last argument: measure up to the closing bracket instead
int index;
if (namedInvocation) {
index = text.lastIndexOf('}');
}
else if (positionalInvocation) {
index = text.lastIndexOf(')');
}
else {
index = text.lastIndexOf('>');
}
return index - lastOffset;
}
return comma;
}
// Hover/extra info for the popup: the declaration's documentation.
public String getAdditionalProposalInfo() {
return getDocumentationFor(cpc, declaration,
producedReference);
}
// Sets up linked-mode editing over the inserted argument list: one linked
// position per value argument (params != null) or per type argument
// (params == null, typeParams used), each populated with nested proposals.
public void enterLinkedMode(IDocument document,
List<Parameter> params,
List<TypeParameter> typeParams) {
// exactly one of params/typeParams is non-null
boolean proposeTypeArguments = params==null;
int paramCount = proposeTypeArguments ?
typeParams.size() : params.size();
if (paramCount==0) return;
try {
final int loc = offset-prefix.length();
int first = getFirstPosition();
if (first<=0) return; //no arg list
int next = getNextPosition(document, first);
if (next<=0) return; //empty arg list
LinkedModeModel linkedModeModel = new LinkedModeModel();
int seq=0, param=0;
while (next>0 && param<paramCount) {
boolean voidParam = !proposeTypeArguments &&
params.get(param).isDeclaredVoid();
if (proposeTypeArguments || positionalInvocation ||
//don't create linked positions for
//void callable parameters in named
//argument lists
!voidParam) {
List<ICompletionProposal> props =
new ArrayList<ICompletionProposal>();
if (proposeTypeArguments) {
addTypeArgumentProposals(typeParams.get(seq),
loc, first, props, seq);
}
else if (!voidParam) {
addValueArgumentProposals(params.get(param),
loc, first, props, seq,
param==params.size()-1);
}
int middle = getCompletionPosition(first, next);
int start = loc+first+middle;
int len = next-middle;
if (voidParam) {
start++;
len=0;
}
ProposalPosition linkedPosition =
new ProposalPosition(document, start, len, seq,
props.toArray(NO_COMPLETIONS));
LinkedMode.addLinkedPosition(linkedModeModel, linkedPosition);
// NOTE(review): first/next/seq only advance inside this if, so
// a skipped void parameter in a named argument list does not
// advance the scan position — presumably the inserted named-arg
// text contains no separate slot for it; confirm against
// getNamedInvocationTextFor.
first = first+next+1;
next = getNextPosition(document, first);
seq++;
}
param++;
}
if (seq>0) {
LinkedMode.installLinkedMode((CeylonEditor) EditorUtil.getCurrentEditor(),
document, linkedModeModel, this, new LinkedMode.NullExitPolicy(),
seq, loc+text.length());
}
}
catch (Exception e) {
e.printStackTrace();
}
}
// Collects nested proposals for the value of parameter p. The order is
// deliberate and controls popup ranking: declarations close to the scope
// (proximity<=1) first, then literal values, then the remaining
// declarations.
private void addValueArgumentProposals(Parameter p, final int loc,
int first, List<ICompletionProposal> props, int index,
boolean last) {
// no sensible proposals for dynamically typed parameters
if (p.getModel().isDynamicallyTyped()) {
return;
}
ProducedType type = producedReference.getTypedParameter(p)
.getType();
if (type==null) return;
Unit unit = getUnit();
List<DeclarationWithProximity> proposals =
getSortedProposedValues(scope, unit);
for (DeclarationWithProximity dwp: proposals) {
if (dwp.getProximity()<=1) {
addValueArgumentProposal(p, loc, props, index, last,
type, unit, dwp);
}
}
addLiteralProposals(loc, props, index, type, unit);
for (DeclarationWithProximity dwp: proposals) {
if (dwp.getProximity()>1) {
addValueArgumentProposal(p, loc, props, index, last,
type, unit, dwp);
}
}
}
/**
 * Adds a nested proposal for declaration {@code dwp} as the value of
 * parameter {@code p}, when the declaration is a value, a non-annotation
 * method, or a concrete non-annotation class whose type fits the
 * parameter type (or the bounds of a type-parameter parameter type).
 * Unimported declarations and certain language-module declarations are
 * skipped.
 */
private void addValueArgumentProposal(Parameter p, final int loc,
        List<ICompletionProposal> props, int index, boolean last,
        ProducedType type, Unit unit, DeclarationWithProximity dwp) {
    if (dwp.isUnimported()) {
        // don't propose names that would require adding an import
        return;
    }
    Declaration d = dwp.getDeclaration();
    if (d instanceof Value) {
        Value value = (Value) d;
        if (isInLanguageModule(d) &&
                isIgnoredLanguageModuleValue(value)) {
            return;
        }
        if (isProposableArgumentType(value.getType(), type, false)) {
            addNestedArgumentProposal(d, loc, props, index, last, type, unit, p);
        }
    }
    if (d instanceof Method && !d.isAnnotation()) {
        Method method = (Method) d;
        if (isInLanguageModule(d) &&
                isIgnoredLanguageModuleMethod(method)) {
            return;
        }
        if (isProposableArgumentType(method.getType(), type, false)) {
            addNestedArgumentProposal(d, loc, props, index, last, type, unit, p);
        }
    }
    if (d instanceof Class) {
        Class clazz = (Class) d;
        if (!clazz.isAbstract() && !d.isAnnotation()) {
            if (isInLanguageModule(d) &&
                    isIgnoredLanguageModuleClass(clazz)) {
                return;
            }
            // for classes, a raw match on the declaration itself is also
            // accepted (e.g. proposing the class for its own type)
            if (isProposableArgumentType(clazz.getType(), type, true)) {
                addNestedArgumentProposal(d, loc, props, index, last, type, unit, p);
            }
        }
    }
}

// True when the declaration comes from the Ceylon language module.
private boolean isInLanguageModule(Declaration d) {
    return d.getUnit().getPackage().getNameAsString()
            .equals(Module.LANGUAGE_MODULE_NAME);
}

/**
 * Whether {@code candidate} is an acceptable argument type for the
 * expected {@code type}: non-null, not Nothing, and either within the
 * bounds of a type-parameter expected type, a subtype of the expected
 * type, or (when {@code allowSameDeclaration}) declared by the same
 * type declaration as the expected type.
 */
private boolean isProposableArgumentType(ProducedType candidate,
        ProducedType type, boolean allowSameDeclaration) {
    if (candidate == null || candidate.isNothing()) {
        return false;
    }
    TypeDeclaration td = type.getDeclaration();
    if (td instanceof TypeParameter &&
            isInBounds(((TypeParameter) td).getSatisfiedTypes(), candidate)) {
        return true;
    }
    if (allowSameDeclaration &&
            candidate.getDeclaration().equals(td)) {
        return true;
    }
    return candidate.isSubtypeOf(type);
}

// Adds the NestedCompletionProposal, prefixing "*" when the argument is
// the final listed value of an iterable named argument, or a positional
// argument for a sequenced (variadic) parameter.
private void addNestedArgumentProposal(Declaration d, final int loc,
        List<ICompletionProposal> props, int index, boolean last,
        ProducedType type, Unit unit, Parameter p) {
    boolean isIterArg = namedInvocation && last &&
            unit.isIterableParameterType(type);
    boolean isVarArg = p.isSequenced() && positionalInvocation;
    props.add(new NestedCompletionProposal(d,
            loc, index, false, isIterArg || isVarArg ? "*" : ""));
}
// Proposes simple literal values ("0", "\"\"", "[]", ...) for parameters
// whose (definite) type is one of the well-known language-module types.
private void addLiteralProposals(final int loc,
        List<ICompletionProposal> props, int index, ProducedType type,
        Unit unit) {
    TypeDeclaration definite =
            unit.getDefiniteType(type).getDeclaration();
    if (definite instanceof Class) {
        if (definite.equals(unit.getIntegerDeclaration())) {
            props.add(new NestedLiteralCompletionProposal("0", loc, index));
            props.add(new NestedLiteralCompletionProposal("1", loc, index));
        }
        else if (definite.equals(unit.getFloatDeclaration())) {
            props.add(new NestedLiteralCompletionProposal("0.0", loc, index));
            props.add(new NestedLiteralCompletionProposal("1.0", loc, index));
        }
        else if (definite.equals(unit.getStringDeclaration())) {
            props.add(new NestedLiteralCompletionProposal("\"\"", loc, index));
        }
        else if (definite.equals(unit.getCharacterDeclaration())) {
            props.add(new NestedLiteralCompletionProposal("' '", loc, index));
            props.add(new NestedLiteralCompletionProposal("'\\n'", loc, index));
            props.add(new NestedLiteralCompletionProposal("'\\t'", loc, index));
        }
    }
    else if (definite instanceof Interface) {
        if (definite.equals(unit.getIterableDeclaration())) {
            props.add(new NestedLiteralCompletionProposal("{}", loc, index));
        }
        else if (definite.equals(unit.getSequentialDeclaration())
                || definite.equals(unit.getEmptyDeclaration())) {
            props.add(new NestedLiteralCompletionProposal("[]", loc, index));
        }
    }
}
// Collects nested proposals for a type argument of type parameter tp:
// imported, non-generic, non-annotation type declarations (excluding
// Nothing and Exception subtypes) that satisfy the parameter's bounds.
// NOTE(review): the 'first' parameter is unused here — presumably kept
// for signature symmetry with addValueArgumentProposals; confirm.
private void addTypeArgumentProposals(TypeParameter tp,
final int loc, int first, List<ICompletionProposal> props,
final int index) {
for (DeclarationWithProximity dwp:
getSortedProposedValues(scope, getUnit())) {
Declaration d = dwp.getDeclaration();
if (d instanceof TypeDeclaration && !dwp.isUnimported()) {
TypeDeclaration td = (TypeDeclaration) d;
ProducedType t = td.getType();
if (td.getTypeParameters().isEmpty() &&
!td.isAnnotation() &&
!(td instanceof NothingType) &&
!td.inherits(td.getUnit().getExceptionDeclaration())) {
if (td.getUnit().getPackage().getNameAsString()
.equals(Module.LANGUAGE_MODULE_NAME)) {
if (isIgnoredLanguageModuleType(td)) {
continue;
}
}
if (isInBounds(tp.getSatisfiedTypes(), t)) {
props.add(new NestedCompletionProposal(d, loc, index,
true, ""));
}
}
}
}
}
// Context information (the parameter hint popup) for this proposal, or
// null when the proposal has no value-argument list.
@Override
public IContextInformation getContextInformation() {
if (namedInvocation||positionalInvocation) { //TODO: context info for type arg lists!
if (declaration instanceof Functional) {
List<ParameterList> pls = ((Functional) declaration).getParameterLists();
if (!pls.isEmpty()) {
// for the fake "show parameters" proposal the stored offset
// already points at the argument list
int argListOffset = isParameterInfo() ?
this.offset :
offset-prefix.length() +
text.indexOf(namedInvocation?'{':'(');
return new ParameterContextInformation(declaration,
producedReference, getUnit(),
pls.get(0), argListOffset, includeDefaulted,
namedInvocation /*!isParameterInfo()*/);
}
}
}
return null;
}
// Whether this is the fake "show parameters" proposal; overridden by
// ParameterInfo to return true.
boolean isParameterInfo() {
return false;
}
// A fake proposal that only exists to surface parameter context
// information (the parameter hint popup) at an existing argument list;
// applying it is a no-op.
static final class ParameterInfo
extends InvocationCompletionProposal {
private ParameterInfo(int offset, Declaration dec,
ProducedReference producedReference,
Scope scope, CeylonParseController cpc,
boolean namedInvocation) {
super(offset, "", "show parameters", "", dec,
producedReference, scope, cpc, true,
true, namedInvocation, false, null);
}
@Override
boolean isParameterInfo() {
return true;
}
@Override
public Point getSelection(IDocument document) {
return null;
}
// intentionally inserts nothing
@Override
public void apply(IDocument document) {}
}
// Walks the AST looking for invocation expressions whose argument list
// surrounds 'offset' and produces parameter context information for the
// innermost one found.
static List<IContextInformation> computeParameterContextInformation(final int offset,
final Tree.CompilationUnit rootNode, final ITextViewer viewer) {
final List<IContextInformation> infos =
new ArrayList<IContextInformation>();
rootNode.visit(new Visitor() {
@Override
public void visit(Tree.InvocationExpression that) {
Tree.ArgumentList al = that.getPositionalArgumentList();
if (al==null) {
al = that.getNamedArgumentList();
}
if (al!=null) {
//TODO: should reuse logic for adjusting tokens
//      from CeylonContentProposer!!
Integer start = al.getStartIndex();
Integer stop = al.getStopIndex();
if (start!=null && stop!=null && offset>start) {
String string = "";
if (offset>stop) {
try {
string = viewer.getDocument()
.get(stop+1, offset-stop-1);
}
catch (BadLocationException e) {}
}
// only offer info while the caret is still "inside" the
// argument list (nothing but whitespace after it)
if (string.trim().isEmpty()) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) that.getPrimary();
Declaration declaration = mte.getDeclaration();
if (declaration instanceof Functional) {
List<ParameterList> pls =
((Functional) declaration).getParameterLists();
if (!pls.isEmpty()) {
//Note: This line suppresses the little menu
//      that gives me a choice of context infos.
//      Delete it to get a choice of all surrounding
//      argument lists.
infos.clear();
infos.add(new ParameterContextInformation(declaration,
mte.getTarget(), rootNode.getUnit(),
pls.get(0), al.getStartIndex(),
true, al instanceof Tree.NamedArgumentList /*false*/));
}
}
}
}
}
super.visit(that);
}
});
return infos;
}
// Adds a ParameterInfo pseudo-proposal for the invocation whose argument
// list starts exactly at the given node, so Ctrl+Space inside an existing
// argument list pops up the parameter hints.
static void addFakeShowParametersCompletion(final Node node,
final CeylonParseController cpc,
final List<ICompletionProposal> result) {
new Visitor() {
@Override
public void visit(Tree.InvocationExpression that) {
Tree.ArgumentList al = that.getPositionalArgumentList();
if (al==null) {
al = that.getNamedArgumentList();
}
if (al!=null) {
Integer startIndex = al.getStartIndex();
Integer startIndex2 = node.getStartIndex();
// match only the argument list that begins at the node itself
if (startIndex!=null && startIndex2!=null &&
startIndex.intValue()==startIndex2.intValue()) {
Tree.Primary primary = that.getPrimary();
if (primary instanceof Tree.MemberOrTypeExpression) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) primary;
if (mte.getDeclaration()!=null && mte.getTarget()!=null) {
result.add(new ParameterInfo(al.getStartIndex(),
mte.getDeclaration(), mte.getTarget(),
node.getScope(), cpc,
al instanceof Tree.NamedArgumentList));
}
}
}
}
super.visit(that);
}
}.visit(cpc.getRootNode());
}
/**
 * JFace context information describing the parameters of an invoked
 * declaration: a display string listing each (included) parameter, an
 * icon, and the offset of the argument list it belongs to.
 */
static final class ParameterContextInformation
        implements IContextInformation {

    private final Declaration declaration;
    private final ProducedReference producedReference;
    private final ParameterList parameterList;
    private final int argumentListOffset;
    private final Unit unit;
    private final boolean includeDefaulted;
//    private final boolean inLinkedMode;
    private final boolean namedInvocation;

    private ParameterContextInformation(Declaration declaration,
            ProducedReference producedReference, Unit unit,
            ParameterList parameterList, int argumentListOffset,
            boolean includeDefaulted, boolean namedInvocation) {
//            boolean inLinkedMode
        this.declaration = declaration;
        this.producedReference = producedReference;
        this.unit = unit;
        this.parameterList = parameterList;
        this.argumentListOffset = argumentListOffset;
        this.includeDefaulted = includeDefaulted;
//        this.inLinkedMode = inLinkedMode;
        this.namedInvocation = namedInvocation;
    }

    @Override
    public String getContextDisplayString() {
        return "Parameters of '" + declaration.getName() + "'";
    }

    @Override
    public Image getImage() {
        return getImageForDeclaration(declaration);
    }

    /**
     * Renders the parameter list: parameters are separated by "; " for
     * named invocations and ", " for positional ones; defaulted
     * parameters are skipped unless included, except a trailing listed
     * iterable value parameter which is always shown.
     */
    @Override
    public String getInformationDisplayString() {
        List<Parameter> ps = getParameters(parameterList,
                includeDefaulted, namedInvocation);
        if (ps.isEmpty()) {
            return "no parameters";
        }
        StringBuilder result = new StringBuilder();
        for (Parameter p: ps) {
            boolean isListedValues = namedInvocation &&
                    p==ps.get(ps.size()-1) &&
                    p.getModel() instanceof Value &&
                    p.getType()!=null &&
                    unit.isIterableParameterType(p.getType());
            if (includeDefaulted || !p.isDefaulted() ||
                    isListedValues) {
                if (producedReference==null) {
                    result.append(p.getName());
                }
                else {
                    ProducedTypedReference pr =
                            producedReference.getTypedParameter(p);
                    appendParameterContextInfo(result, pr, p, unit,
                            namedInvocation, isListedValues);
                }
                if (!isListedValues) {
                    result.append(namedInvocation ? "; " : ", ");
                }
            }
        }
        // drop the trailing ", " of a positional list
        if (!namedInvocation && result.length()>0) {
            result.setLength(result.length()-2);
        }
        return result.toString();
    }

    @Override
    public boolean equals(Object that) {
        if (that instanceof ParameterContextInformation) {
            return ((ParameterContextInformation) that).declaration
                    .equals(declaration);
        }
        else {
            return false;
        }
    }

    // equals() is based solely on the declaration, so hashCode() must be
    // too — previously missing, which broke the equals/hashCode contract
    // (JFace may use context information objects as hash keys).
    @Override
    public int hashCode() {
        return declaration.hashCode();
    }

    int getArgumentListOffset() {
        return argumentListOffset;
    }

}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_InvocationCompletionProposal.java |
609 | @RunWith(HazelcastSerialClassRunner.class)
@Category(NightlyTest.class)
public class MemberListTest {
// Runs both before and after every test: tears down all Hazelcast
// instances so each test starts from (and leaves behind) a clean JVM.
@Before
@After
public void killAllHazelcastInstances() throws IOException {
Hazelcast.shutdownAll();
}
/*
* Sets up a situation where node3 removes the master and sets node2 as the
* master but none of the other nodes do. This means that node3 thinks node2
* is master but node2 thinks node1 is master.
*/
// Verifies the cluster self-heals when one node (node3) drops the master
// from its member list while the others keep it: after the merge/heartbeat
// machinery runs, all three nodes must again agree on a 3-member cluster.
// NOTE(review): timing-based (fixed 30s sleep) — inherently flaky on slow
// machines; also the worker's InterruptedException handler neither
// re-interrupts the thread nor stops the loop.
@Test
public void testOutOfSyncMemberList() throws Exception {
Config c1 = buildConfig(false);
Config c2 = buildConfig(false);
Config c3 = buildConfig(false);
c1.getNetworkConfig().setPort(25701);
c2.getNetworkConfig().setPort(25702);
c3.getNetworkConfig().setPort(25703);
List<String> allMembers = Arrays.asList("127.0.0.1:25701, 127.0.0.1:25702, 127.0.0.1:25703");
c1.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c2.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c3.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
final HazelcastInstance h1 = Hazelcast.newHazelcastInstance(c1);
final HazelcastInstance h2 = Hazelcast.newHazelcastInstance(c2);
final HazelcastInstance h3 = Hazelcast.newHazelcastInstance(c3);
// All three nodes join into one cluster
assertEquals(3, h1.getCluster().getMembers().size());
assertEquals(3, h2.getCluster().getMembers().size());
assertEquals(3, h3.getCluster().getMembers().size());
// This simulates each node reading from the other nodes in the list at regular intervals
// This prevents the heart beat code from timing out
final HazelcastInstance[] instances = new HazelcastInstance[]{h1, h2, h3};
final AtomicBoolean doingWork = new AtomicBoolean(true);
Thread[] workThreads = new Thread[instances.length];
for (int i = 0; i < instances.length; i++) {
final int threadNum = i;
workThreads[threadNum] = new Thread(new Runnable() {
public void run() {
while (doingWork.get()) {
final HazelcastInstance hz = instances[threadNum];
// ping every member except ourselves
Set<Member> members = new HashSet<Member>(hz.getCluster().getMembers());
members.remove(hz.getCluster().getLocalMember());
final Map<Member, Future<String>> futures = hz.getExecutorService("test")
.submitToMembers(new PingCallable(), members);
for (Future<String> f : futures.values()) {
try {
f.get();
} catch (MemberLeftException ignored) {
} catch (Exception e) {
e.printStackTrace();
}
}
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
});
workThreads[threadNum].start();
}
// node3 unilaterally drops the master (node1) from its member list
final Node n3 = TestUtil.getNode(h3);
n3.clusterService.removeAddress(((MemberImpl) h1.getCluster().getLocalMember()).getAddress());
// Give the cluster some time to figure things out. The merge and heartbeat code should have kicked in by this point
Thread.sleep(30 * 1000);
doingWork.set(false);
for (Thread t : workThreads) {
t.join();
}
assertEquals(3, h1.getCluster().getMembers().size());
assertEquals(3, h2.getCluster().getMembers().size());
assertEquals(3, h3.getCluster().getMembers().size());
}
/**
 * Trivial distributed task used by the tests to keep cluster members
 * talking to each other; always returns the same response string.
 */
private static class PingCallable implements Callable<String>, Serializable {

    // Instances are Java-serialized and shipped between cluster members;
    // declare an explicit version id instead of relying on the
    // JVM-computed default (a Serializable class without one is fragile).
    private static final long serialVersionUID = 1L;

    @Override
    public String call() throws Exception {
        return "ping response";
    }
}
/*
* Sets up a situation where the member list is out of order on node2. Both
* node2 and node1 think they are masters and both think each other are in
* their clusters.
*/
// Verifies recovery from a split-brain-like state: node2 is fed a member
// list reordered so that it believes itself to be master, while node1
// still believes it is master. After the merge/heartbeat machinery runs,
// all nodes must agree that node1 (m1) is first in the member list.
// NOTE(review): timing-based (fixed 30s sleep) — inherently flaky.
@Test
public void testOutOfSyncMemberListTwoMasters() throws Exception {
Config c1 = buildConfig(false);
Config c2 = buildConfig(false);
Config c3 = buildConfig(false);
c1.getNetworkConfig().setPort(35701);
c2.getNetworkConfig().setPort(35702);
c3.getNetworkConfig().setPort(35703);
List<String> allMembers = Arrays.asList("127.0.0.1:35701, 127.0.0.1:35702, 127.0.0.1:35703");
c1.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c2.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c3.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
final HazelcastInstance h1 = Hazelcast.newHazelcastInstance(c1);
final HazelcastInstance h2 = Hazelcast.newHazelcastInstance(c2);
final HazelcastInstance h3 = Hazelcast.newHazelcastInstance(c3);
final MemberImpl m1 = (MemberImpl) h1.getCluster().getLocalMember();
final MemberImpl m2 = (MemberImpl) h2.getCluster().getLocalMember();
final MemberImpl m3 = (MemberImpl) h3.getCluster().getLocalMember();
// All three nodes join into one cluster
assertEquals(3, h1.getCluster().getMembers().size());
assertEquals(3, h2.getCluster().getMembers().size());
assertEquals(3, h3.getCluster().getMembers().size());
final Node n2 = TestUtil.getNode(h2);
// Simulates node2 getting an out of order member list. That causes node2 to think it's the master.
List<MemberInfo> members = new ArrayList<MemberInfo>();
members.add(new MemberInfo(m2.getAddress(), m2.getUuid(), Collections. <String, Object> emptyMap()));
members.add(new MemberInfo(m3.getAddress(), m3.getUuid(), Collections. <String, Object> emptyMap()));
members.add(new MemberInfo(m1.getAddress(), m1.getUuid(), Collections. <String, Object> emptyMap()));
n2.clusterService.updateMembers(members);
n2.setMasterAddress(m2.getAddress());
// Give the cluster some time to figure things out. The merge and heartbeat code should have kicked in by this point
Thread.sleep(30 * 1000);
// the first member in the list is the master — all must agree it's m1
assertEquals(m1, h1.getCluster().getMembers().iterator().next());
assertEquals(m1, h2.getCluster().getMembers().iterator().next());
assertEquals(m1, h3.getCluster().getMembers().iterator().next());
assertEquals(3, h1.getCluster().getMembers().size());
assertEquals(3, h2.getCluster().getMembers().size());
assertEquals(3, h3.getCluster().getMembers().size());
}
/*
* Sets up situation where all nodes have the same master, but node 2's list
* doesn't contain node 3.
*/
@Test
public void testSameMasterDifferentMemberList() throws Exception {
Config c1 = buildConfig(false);
Config c2 = buildConfig(false);
Config c3 = buildConfig(false);
c1.getNetworkConfig().setPort(45701);
c2.getNetworkConfig().setPort(45702);
c3.getNetworkConfig().setPort(45703);
List<String> allMembers = Arrays.asList("127.0.0.1:45701, 127.0.0.1:45702, 127.0.0.1:45703");
c1.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c2.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c3.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
final HazelcastInstance h1 = Hazelcast.newHazelcastInstance(c1);
final HazelcastInstance h2 = Hazelcast.newHazelcastInstance(c2);
final HazelcastInstance h3 = Hazelcast.newHazelcastInstance(c3);
final MemberImpl m1 = (MemberImpl) h1.getCluster().getLocalMember();
final MemberImpl m2 = (MemberImpl) h2.getCluster().getLocalMember();
// All three nodes join into one cluster
assertEquals(3, h1.getCluster().getMembers().size());
assertEquals(3, h2.getCluster().getMembers().size());
assertEquals(3, h3.getCluster().getMembers().size());
final Node n2 = TestUtil.getNode(h2);
// Simulates node2 getting an out of order member list. That causes node2 to think it's the master.
List<MemberInfo> members = new ArrayList<MemberInfo>();
members.add(new MemberInfo(m1.getAddress(), m1.getUuid(), Collections. <String, Object> emptyMap()));
members.add(new MemberInfo(m2.getAddress(), m2.getUuid(), Collections. <String, Object> emptyMap()));
n2.clusterService.updateMembers(members);
// Give the cluster some time to figure things out. The merge and heartbeat code should have kicked in by this point
Thread.sleep(30 * 1000);
assertEquals(m1, h1.getCluster().getMembers().iterator().next());
assertEquals(m1, h2.getCluster().getMembers().iterator().next());
assertEquals(m1, h3.getCluster().getMembers().iterator().next());
assertEquals(3, h1.getCluster().getMembers().size());
assertEquals(3, h2.getCluster().getMembers().size());
assertEquals(3, h3.getCluster().getMembers().size());
}
@Test
public void testSwitchingMasters() throws Exception {
Config c1 = buildConfig(false);
Config c2 = buildConfig(false);
Config c3 = buildConfig(false);
Config c4 = buildConfig(false);
Config c5 = buildConfig(false);
c1.getNetworkConfig().setPort(55701);
c2.getNetworkConfig().setPort(55702);
c3.getNetworkConfig().setPort(55703);
c4.getNetworkConfig().setPort(55704);
c5.getNetworkConfig().setPort(55705);
List<String> allMembers = Arrays.asList("127.0.0.1:55701", "127.0.0.1:55702",
"127.0.0.1:55703", "127.0.0.1:55704", "127.0.0.1:55705");
c1.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c2.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c3.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c4.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
c5.getNetworkConfig().getJoin().getTcpIpConfig().setMembers(allMembers);
final HazelcastInstance h1 = Hazelcast.newHazelcastInstance(c1);
final HazelcastInstance h2 = Hazelcast.newHazelcastInstance(c2);
final HazelcastInstance h3 = Hazelcast.newHazelcastInstance(c3);
final HazelcastInstance h4 = Hazelcast.newHazelcastInstance(c4);
final HazelcastInstance h5 = Hazelcast.newHazelcastInstance(c5);
assertEquals(5, h1.getCluster().getMembers().size());
assertEquals(5, h2.getCluster().getMembers().size());
assertEquals(5, h3.getCluster().getMembers().size());
assertEquals(5, h4.getCluster().getMembers().size());
assertEquals(5, h5.getCluster().getMembers().size());
// Need to wait for at least as long as PROP_MAX_NO_MASTER_CONFIRMATION_SECONDS
Thread.sleep(15 * 1000);
Member master = h1.getCluster().getLocalMember();
assertEquals(master, h2.getCluster().getMembers().iterator().next());
assertEquals(master, h3.getCluster().getMembers().iterator().next());
assertEquals(master, h4.getCluster().getMembers().iterator().next());
assertEquals(master, h5.getCluster().getMembers().iterator().next());
h1.shutdown();
assertEquals(4, h2.getCluster().getMembers().size());
assertEquals(4, h3.getCluster().getMembers().size());
assertEquals(4, h4.getCluster().getMembers().size());
assertEquals(4, h5.getCluster().getMembers().size());
master = h2.getCluster().getLocalMember();
assertEquals(master, h2.getCluster().getMembers().iterator().next());
assertEquals(master, h3.getCluster().getMembers().iterator().next());
assertEquals(master, h4.getCluster().getMembers().iterator().next());
assertEquals(master, h5.getCluster().getMembers().iterator().next());
Thread.sleep(10 * 1000);
assertEquals(4, h2.getCluster().getMembers().size());
assertEquals(4, h3.getCluster().getMembers().size());
assertEquals(4, h4.getCluster().getMembers().size());
assertEquals(4, h5.getCluster().getMembers().size());
assertEquals(master, h2.getCluster().getMembers().iterator().next());
assertEquals(master, h3.getCluster().getMembers().iterator().next());
assertEquals(master, h4.getCluster().getMembers().iterator().next());
assertEquals(master, h5.getCluster().getMembers().iterator().next());
}
private static Config buildConfig(boolean multicastEnabled) {
Config c = new Config();
c.getGroupConfig().setName("group").setPassword("pass");
c.setProperty(GroupProperties.PROP_MERGE_FIRST_RUN_DELAY_SECONDS, "10");
c.setProperty(GroupProperties.PROP_MERGE_NEXT_RUN_DELAY_SECONDS, "5");
c.setProperty(GroupProperties.PROP_MAX_NO_HEARTBEAT_SECONDS, "10");
c.setProperty(GroupProperties.PROP_MASTER_CONFIRMATION_INTERVAL_SECONDS, "2");
c.setProperty(GroupProperties.PROP_MAX_NO_MASTER_CONFIRMATION_SECONDS, "10");
c.setProperty(GroupProperties.PROP_MEMBER_LIST_PUBLISH_INTERVAL_SECONDS, "10");
final NetworkConfig networkConfig = c.getNetworkConfig();
networkConfig.getJoin().getMulticastConfig().setEnabled(multicastEnabled);
networkConfig.getJoin().getTcpIpConfig().setEnabled(!multicastEnabled);
networkConfig.setPortAutoIncrement(false);
return c;
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_cluster_MemberListTest.java |
1,450 | public class BillingInfoForm implements Serializable {
private static final long serialVersionUID = 7408792703984771616L;
private Address address = new AddressImpl();
private String paymentMethod;
private String creditCardName;
private String creditCardNumber;
private String creditCardCvvCode;
private String creditCardExpMonth;
private String creditCardExpYear;
private String selectedCreditCardType;
private boolean useShippingAddress;
public BillingInfoForm() {
address.setPhonePrimary(new PhoneImpl());
}
public String getPaymentMethod() {
return paymentMethod;
}
public void setPaymentMethod(String paymentMethod) {
this.paymentMethod = paymentMethod;
}
public Address getAddress() {
return address;
}
public void setAddress(Address address) {
this.address = address;
}
public String getCreditCardName() {
return creditCardName;
}
public void setCreditCardName(String creditCardName) {
this.creditCardName = creditCardName;
}
public String getCreditCardNumber() {
return creditCardNumber;
}
public void setCreditCardNumber(String creditCardNumber) {
this.creditCardNumber = creditCardNumber;
}
public String getCreditCardCvvCode() {
return creditCardCvvCode;
}
public void setCreditCardCvvCode(String creditCardCvvCode) {
this.creditCardCvvCode = creditCardCvvCode;
}
public String getCreditCardExpMonth() {
return creditCardExpMonth;
}
public void setCreditCardExpMonth(String creditCardExpMonth) {
this.creditCardExpMonth = creditCardExpMonth;
}
public String getCreditCardExpYear() {
return creditCardExpYear;
}
public void setCreditCardExpYear(String creditCardExpYear) {
this.creditCardExpYear = creditCardExpYear;
}
public String getSelectedCreditCardType() {
return selectedCreditCardType;
}
public void setSelectedCreditCardType(String selectedCreditCardType) {
this.selectedCreditCardType = selectedCreditCardType;
}
public boolean isUseShippingAddress() {
return useShippingAddress;
}
public void setUseShippingAddress(boolean useShippingAddress) {
this.useShippingAddress = useShippingAddress;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_checkout_model_BillingInfoForm.java |
1,133 | public class CartOperationContext implements ProcessContext {
public final static long serialVersionUID = 1L;
protected boolean stopEntireProcess = false;
protected CartOperationRequest seedData;
public void setSeedData(Object seedObject) {
seedData = (CartOperationRequest) seedObject;
}
public boolean stopProcess() {
this.stopEntireProcess = true;
return stopEntireProcess;
}
public boolean isStopped() {
return stopEntireProcess;
}
public CartOperationRequest getSeedData(){
return seedData;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_workflow_CartOperationContext.java |
1,108 | @Test
public class SQLFunctionDifferenceTest {
@Test
public void testOperator() {
final OSQLFunctionDifference differenceFunction = new OSQLFunctionDifference() {
@Override
protected boolean returnDistributedResult() {
return false;
}
};
final List<Object> income = Arrays.<Object> asList(1, 2, 3, 1, 4, 5, 2, 2, 1, 1);
final Set<Object> expectedResult = new HashSet<Object>(Arrays.asList(3, 4, 5));
for (Object i : income) {
differenceFunction.execute(null, null, new Object[] { i }, null);
}
final Set<Object> actualResult = differenceFunction.getResult();
assertSetEquals(actualResult, expectedResult);
}
@Test
public void testOperatorMerge() {
final OSQLFunctionDifference merger = new OSQLFunctionDifference();
final List<OSQLFunctionDifference> differences = new ArrayList<OSQLFunctionDifference>(3);
for (int i = 0; i < 3; i++) {
differences.add(new OSQLFunctionDifference() {
@Override
protected boolean returnDistributedResult() {
return true;
}
});
}
final List<List<Object>> incomes = Arrays.asList(Arrays.<Object> asList(1, 2, 3, 4, 5, 1),
Arrays.<Object> asList(3, 5, 6, 7, 0, 1, 3, 3, 6), Arrays.<Object> asList(2, 2, 8, 9));
final Set<Object> expectedResult = new HashSet<Object>(Arrays.<Object> asList(4, 7, 8, 9, 0));
for (int j = 0; j < 3; j++) {
for (Object i : incomes.get(j)) {
differences.get(j).execute(null, null, new Object[] { i }, null);
}
}
final Set<Object> actualResult = (Set<Object>) merger.mergeDistributedResult(Arrays.asList((Object) differences.get(0)
.getResult(), differences.get(1).getResult(), differences.get(2).getResult()));
assertSetEquals(actualResult, expectedResult);
}
private void assertSetEquals(Set<Object> actualResult, Set<Object> expectedResult) {
assertEquals(actualResult.size(), expectedResult.size());
for (Object o : actualResult) {
assertTrue(expectedResult.contains(o));
}
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_sql_functions_coll_SQLFunctionDifferenceTest.java |
1,143 | static class IndexThread implements Runnable {
private final Client client;
private volatile boolean run = true;
IndexThread(Client client) {
this.client = client;
}
@Override
public void run() {
while (run) {
int childIdLimit = PARENT_COUNT * NUM_CHILDREN_PER_PARENT;
for (int childId = 1; run && childId < childIdLimit;) {
try {
for (int j = 0; j < 8; j++) {
GetResponse getResponse = client
.prepareGet(indexName, "child", String.valueOf(++childId))
.setFields("_source", "_parent")
.setRouting("1") // Doesn't matter what value, since there is only one shard
.get();
client.prepareIndex(indexName, "child", Integer.toString(childId) + "_" + j)
.setParent(getResponse.getField("_parent").getValue().toString())
.setSource(getResponse.getSource())
.get();
}
client.admin().indices().prepareRefresh(indexName).execute().actionGet();
Thread.sleep(1000);
if (childId % 500 == 0) {
NodesStatsResponse statsResponse = client.admin().cluster().prepareNodesStats()
.clear().setIndices(true).execute().actionGet();
System.out.println("Deleted docs: " + statsResponse.getAt(0).getIndices().getDocs().getDeleted());
}
} catch (Throwable e) {
e.printStackTrace();
}
}
}
}
public void stop() {
run = false;
}
} | 0true
| src_test_java_org_elasticsearch_benchmark_search_child_ChildSearchAndIndexingBenchmark.java |
905 | public class OfferMaxUseExceededException extends CheckoutException {
private static final long serialVersionUID = 1L;
public OfferMaxUseExceededException() {
super();
}
public OfferMaxUseExceededException(String message) {
super(message, null);
}
public OfferMaxUseExceededException(String message, Throwable cause, CheckoutSeed seed) {
super(message, cause, seed);
}
public OfferMaxUseExceededException(String message, CheckoutSeed seed) {
super(message, seed);
}
public OfferMaxUseExceededException(Throwable cause, CheckoutSeed seed) {
super(cause, seed);
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_exception_OfferMaxUseExceededException.java |
314 | public abstract class AbstractMergeBeanPostProcessor implements BeanPostProcessor, ApplicationContextAware {
protected static final Log LOG = LogFactory.getLog(AbstractMergeBeanPostProcessor.class);
protected String collectionRef;
protected String targetRef;
protected Placement placement = Placement.APPEND;
protected int position;
protected ApplicationContext applicationContext;
protected MergeBeanStatusProvider statusProvider;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
return bean;
}
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
if (statusProvider != null && !statusProvider.isProcessingEnabled(bean, beanName, applicationContext)) {
if (LOG.isTraceEnabled()) {
LOG.trace(String.format("Not performing post-processing on targetRef [%s] because the registered " +
"status provider [%s] returned false", targetRef, statusProvider.getClass().getSimpleName()));
}
return bean;
}
if (beanName.equals(targetRef)) {
Object mergeCollection = applicationContext.getBean(collectionRef);
if (bean instanceof ListFactoryBean || bean instanceof List) {
try {
List mergeList = (List) mergeCollection;
List sourceList;
if (bean instanceof ListFactoryBean) {
Field field = ListFactoryBean.class.getDeclaredField("sourceList");
field.setAccessible(true);
sourceList = (List) field.get(bean);
} else {
sourceList = (List) bean;
}
switch (placement) {
case APPEND:
sourceList.addAll(mergeList);
break;
case PREPEND:
sourceList.addAll(0, mergeList);
break;
case SPECIFIC:
sourceList.addAll(position, mergeList);
break;
}
} catch (Exception e) {
throw new BeanCreationException(e.getMessage());
}
} else if (bean instanceof SetFactoryBean || bean instanceof Set) {
try {
Set mergeSet = (Set) mergeCollection;
Set sourceSet;
if (bean instanceof SetFactoryBean) {
Field field = SetFactoryBean.class.getDeclaredField("sourceSet");
field.setAccessible(true);
sourceSet = (Set) field.get(bean);
} else {
sourceSet = (Set)bean;
}
List tempList = new ArrayList(sourceSet);
switch (placement) {
case APPEND:
tempList.addAll(mergeSet);
break;
case PREPEND:
tempList.addAll(0, mergeSet);
break;
case SPECIFIC:
tempList.addAll(position, mergeSet);
break;
}
sourceSet.clear();
sourceSet.addAll(tempList);
} catch (Exception e) {
throw new BeanCreationException(e.getMessage());
}
} else if (bean instanceof MapFactoryBean || bean instanceof Map) {
try {
Map mergeMap = (Map) mergeCollection;
Map sourceMap;
if (bean instanceof MapFactoryBean) {
Field field = MapFactoryBean.class.getDeclaredField("sourceMap");
field.setAccessible(true);
sourceMap = (Map) field.get(bean);
} else {
sourceMap = (Map) bean;
}
LinkedHashMap tempMap = new LinkedHashMap();
switch (placement) {
case APPEND:
tempMap.putAll(sourceMap);
tempMap.putAll(mergeMap);
break;
case PREPEND:
tempMap.putAll(mergeMap);
tempMap.putAll(sourceMap);
break;
case SPECIFIC:
boolean added = false;
int j = 0;
for (Object key : sourceMap.keySet()) {
if (j == position) {
tempMap.putAll(mergeMap);
added = true;
}
tempMap.put(key, sourceMap.get(key));
j++;
}
if (!added) {
tempMap.putAll(mergeMap);
}
break;
}
sourceMap.clear();
sourceMap.putAll(tempMap);
} catch (Exception e) {
throw new BeanCreationException(e.getMessage());
}
} else {
throw new IllegalArgumentException("Bean (" + beanName + ") is specified as a merge target, " +
"but is not" +
" of type ListFactoryBean, SetFactoryBean or MapFactoryBean");
}
}
return bean;
}
/**
* Retrieve the id of the collection to be merged
*
* @return the id of the collection to be merged
*/
public String getCollectionRef() {
return collectionRef;
}
/**
* Set the id of the collection to be merged
*
* @param collectionRef the id of the collection to be merged
*/
public void setCollectionRef(String collectionRef) {
this.collectionRef = collectionRef;
}
/**
* Retrieve the id of the collection to receive the merge
*
* @return the id of the collection receiving the merge
*/
public String getTargetRef() {
return targetRef;
}
/**
* Set the id of the collection to receive the merge
*
* @param targetRef the id of the collection receiving the merge
*/
public void setTargetRef(String targetRef) {
this.targetRef = targetRef;
}
/**
* The position in the target collection to place the merge. This can be at the beginning,
* end or at an explicit position.
*
* @return the position in the target collection to place the merge
*/
public Placement getPlacement() {
return placement;
}
/**
* The position in the target collection to place the merge. This can be at the beginning,
* end or at an explicit position.
*
* @param placement the position in the target collection to place the merge
*/
public void setPlacement(Placement placement) {
this.placement = placement;
}
/**
* If a placement of type Placement.SPECIFIC is used, then this is the integer position in the target
* target collection at which the merge will be performed.
*
* @return the specific position in the target collection
*/
public int getPosition() {
return position;
}
/**
* If a placement of type Placement.SPECIFIC is used, then this is the integer position in the target
* target collection at which the merge will be performed.
*
* @param position the specific position in the target collection
*/
public void setPosition(int position) {
this.position = position;
}
/**
* Gets the status provider that is configured for this post processor
*
* @return the MergeStatusBeanProvider
*/
public MergeBeanStatusProvider getStatusProvider() {
return statusProvider;
}
/**
* Sets the MergeBeanStatusProvider, which controls whether or not this post processor is activated.
* If no statusProvider is set, then we will always execute.
*
* @param statusProvider
*/
public void setStatusProvider(MergeBeanStatusProvider statusProvider) {
this.statusProvider = statusProvider;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_AbstractMergeBeanPostProcessor.java |
1,421 | public class OChannelBinaryAsynchClient extends OChannelBinary {
private final Condition readCondition = lockRead.getUnderlying().newCondition();
private volatile boolean channelRead = false;
private byte currentStatus;
private int currentSessionId;
private final int maxUnreadResponses;
protected final int socketTimeout; // IN MS
protected final short srvProtocolVersion;
private final String serverURL;
private OAsynchChannelServiceThread serviceThread;
public OChannelBinaryAsynchClient(final String remoteHost, final int remotePort, final OContextConfiguration iConfig,
final int iProtocolVersion) throws IOException {
this(remoteHost, remotePort, iConfig, iProtocolVersion, null);
}
public OChannelBinaryAsynchClient(final String remoteHost, final int remotePort, final OContextConfiguration iConfig,
final int protocolVersion, final ORemoteServerEventListener asynchEventListener) throws IOException {
super(new Socket(), iConfig);
maxUnreadResponses = OGlobalConfiguration.NETWORK_BINARY_READ_RESPONSE_MAX_TIMES.getValueAsInteger();
serverURL = remoteHost + ":" + remotePort;
socketTimeout = iConfig.getValueAsInteger(OGlobalConfiguration.NETWORK_SOCKET_TIMEOUT);
socket.setPerformancePreferences(0, 2, 1);
socket.setKeepAlive(true);
socket.setSendBufferSize(socketBufferSize);
socket.setReceiveBufferSize(socketBufferSize);
try {
socket.connect(new InetSocketAddress(remoteHost, remotePort), socketTimeout);
connected();
} catch (java.net.SocketTimeoutException e) {
throw new IOException("Cannot connect to host " + remoteHost + ":" + remotePort, e);
}
inStream = new BufferedInputStream(socket.getInputStream(), socketBufferSize);
outStream = new BufferedOutputStream(socket.getOutputStream(), socketBufferSize);
in = new DataInputStream(inStream);
out = new DataOutputStream(outStream);
try {
srvProtocolVersion = readShort();
} catch (IOException e) {
throw new ONetworkProtocolException("Cannot read protocol version from remote server " + socket.getRemoteSocketAddress()
+ ": " + e);
}
if (srvProtocolVersion != protocolVersion) {
OLogManager.instance().warn(
this,
"The Client driver version is different than Server version: client=" + protocolVersion + ", server="
+ srvProtocolVersion
+ ". You could not use the full features of the newer version. Assure to have the same versions on both");
}
if (asynchEventListener != null)
serviceThread = new OAsynchChannelServiceThread(asynchEventListener, this);
}
public void beginRequest() {
acquireWriteLock();
}
public void endRequest() throws IOException {
flush();
releaseWriteLock();
}
public void beginResponse(final int iRequesterId) throws IOException {
beginResponse(iRequesterId, timeout);
}
public void beginResponse(final int iRequesterId, final long iTimeout) throws IOException {
try {
int unreadResponse = 0;
final long startClock = iTimeout > 0 ? System.currentTimeMillis() : 0;
// WAIT FOR THE RESPONSE
do {
if (iTimeout <= 0)
acquireReadLock();
else if (!lockRead.tryAcquireLock(iTimeout, TimeUnit.MILLISECONDS))
throw new OTimeoutException("Cannot acquire read lock against channel: " + this);
if (!channelRead) {
channelRead = true;
try {
currentStatus = readByte();
currentSessionId = readInt();
if (debug)
OLogManager.instance().debug(this, "%s - Read response: %d-%d", socket.getLocalAddress(), (int) currentStatus,
currentSessionId);
} catch (IOException e) {
// UNLOCK THE RESOURCE AND PROPAGATES THE EXCEPTION
channelRead = false;
readCondition.signalAll();
releaseReadLock();
throw e;
}
}
if (currentSessionId == iRequesterId)
// IT'S FOR ME
break;
try {
if (debug)
OLogManager.instance().debug(this, "%s - Session %d skip response, it is for %d", socket.getLocalAddress(),
iRequesterId, currentSessionId);
if (iTimeout > 0 && (System.currentTimeMillis() - startClock) > iTimeout) {
// CLOSE THE SOCKET TO CHANNEL TO AVOID FURTHER DIRTY DATA
close();
throw new OTimeoutException("Timeout on reading response from the server "
+ (socket != null ? socket.getRemoteSocketAddress() : "") + " for the request " + iRequesterId);
}
if (unreadResponse > maxUnreadResponses) {
if (debug)
OLogManager.instance().info(this, "Unread responses %d > %d, consider the buffer as dirty: clean it", unreadResponse,
maxUnreadResponses);
close();
throw new IOException("Timeout on reading response");
}
readCondition.signalAll();
if (debug)
OLogManager.instance().debug(this, "Session %d is going to sleep...", iRequesterId);
final long start = System.currentTimeMillis();
// WAIT 1 SECOND AND RETRY
readCondition.await(1, TimeUnit.SECONDS);
final long now = System.currentTimeMillis();
if (debug)
OLogManager.instance().debug(this, "Waked up: slept %dms, checking again from %s for session %d", (now - start),
socket.getLocalAddress(), iRequesterId);
if (now - start >= 1000)
unreadResponse++;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} finally {
releaseReadLock();
}
} while (true);
if (debug)
OLogManager.instance().debug(this, "%s - Session %d handle response", socket.getLocalAddress(), iRequesterId);
handleStatus(currentStatus, currentSessionId);
} catch (OLockException e) {
Thread.currentThread().interrupt();
// NEVER HAPPENS?
e.printStackTrace();
}
}
protected int handleStatus(final byte iResult, final int iClientTxId) throws IOException {
if (iResult == OChannelBinaryProtocol.RESPONSE_STATUS_OK || iResult == OChannelBinaryProtocol.PUSH_DATA) {
} else if (iResult == OChannelBinaryProtocol.RESPONSE_STATUS_ERROR) {
StringBuilder buffer = new StringBuilder();
final List<OPair<String, String>> exceptions = new ArrayList<OPair<String, String>>();
// EXCEPTION
while (readByte() == 1) {
final String excClassName = readString();
final String excMessage = readString();
exceptions.add(new OPair<String, String>(excClassName, excMessage));
}
byte[] serializedException = null;
if (srvProtocolVersion >= 19)
serializedException = readBytes();
Exception previous = null;
if (serializedException != null && serializedException.length > 0)
throwSerializedException(serializedException);
for (int i = exceptions.size() - 1; i > -1; --i) {
previous = createException(exceptions.get(i).getKey(), exceptions.get(i).getValue(), previous);
}
if (previous != null) {
throw new RuntimeException(previous);
} else
throw new ONetworkProtocolException("Network response error: " + buffer.toString());
} else {
// PROTOCOL ERROR
// close();
throw new ONetworkProtocolException("Error on reading response from the server");
}
return iClientTxId;
}
private void throwSerializedException(byte[] serializedException) throws IOException {
final OMemoryInputStream inputStream = new OMemoryInputStream(serializedException);
final ObjectInputStream objectInputStream = new ObjectInputStream(inputStream);
Object throwable = null;
try {
throwable = objectInputStream.readObject();
} catch (ClassNotFoundException e) {
OLogManager.instance().error(this, "Error during exception serialization.", e);
}
objectInputStream.close();
if (throwable instanceof Throwable)
throw new OResponseProcessingException("Exception during response processing.", (Throwable) throwable);
else
OLogManager.instance().error(
this,
"Error during exception serialization, serialized exception is not Throwable, exception type is "
+ (throwable != null ? throwable.getClass().getName() : "null"));
}
@SuppressWarnings("unchecked")
private static RuntimeException createException(final String iClassName, final String iMessage, final Exception iPrevious) {
RuntimeException rootException = null;
Constructor<?> c = null;
try {
final Class<RuntimeException> excClass = (Class<RuntimeException>) Class.forName(iClassName);
if (iPrevious != null) {
try {
c = excClass.getConstructor(String.class, Throwable.class);
} catch (NoSuchMethodException e) {
c = excClass.getConstructor(String.class, Exception.class);
}
}
if (c == null)
c = excClass.getConstructor(String.class);
} catch (Exception e) {
// UNABLE TO REPRODUCE THE SAME SERVER-SIZE EXCEPTION: THROW A STORAGE EXCEPTION
rootException = new OStorageException(iMessage, iPrevious);
}
if (c != null)
try {
final Throwable e;
if (c.getParameterTypes().length > 1)
e = (Throwable) c.newInstance(iMessage, iPrevious);
else
e = (Throwable) c.newInstance(iMessage);
if (e instanceof RuntimeException)
rootException = (RuntimeException) e;
else
rootException = new OException(e);
} catch (InstantiationException e) {
} catch (IllegalAccessException e) {
} catch (InvocationTargetException e) {
}
return rootException;
}
public void endResponse() {
channelRead = false;
// WAKE UP ALL THE WAITING THREADS
try {
readCondition.signalAll();
} catch (IllegalMonitorStateException e) {
// IGNORE IT
OLogManager.instance().debug(this, "Error on signaling waiting clients after reading response");
}
try {
releaseReadLock();
} catch (IllegalMonitorStateException e) {
// IGNORE IT
OLogManager.instance().debug(this, "Error on unlocking network channel after reading response");
}
}
@Override
public void close() {
if (lockRead.tryAcquireLock())
try {
readCondition.signalAll();
} finally {
releaseReadLock();
}
super.close();
if (serviceThread != null) {
final OAsynchChannelServiceThread s = serviceThread;
serviceThread = null;
s.sendShutdown();
}
}
@Override
public void clearInput() throws IOException {
acquireReadLock();
try {
super.clearInput();
} finally {
releaseReadLock();
}
}
/**
* Tells if the channel is connected.
*
* @return true if it's connected, otherwise false.
*/
public boolean isConnected() {
if (socket != null && socket.isConnected() && !socket.isInputShutdown() && !socket.isOutputShutdown())
return true;
return false;
}
/**
* Gets the major supported protocol version
*
*/
public short getSrvProtocolVersion() {
return srvProtocolVersion;
}
public OAdaptiveLock getLockRead() {
return lockRead;
}
public OAdaptiveLock getLockWrite() {
return lockWrite;
}
public String getServerURL() {
return serverURL;
}
} | 1no label
| enterprise_src_main_java_com_orientechnologies_orient_enterprise_channel_binary_OChannelBinaryAsynchClient.java |
336 | public class CommonsConfiguration implements WriteConfiguration {
private final Configuration config;
private static final Logger log =
LoggerFactory.getLogger(CommonsConfiguration.class);
public CommonsConfiguration() {
this(new BaseConfiguration());
}
public CommonsConfiguration(Configuration config) {
Preconditions.checkArgument(config!=null);
this.config = config;
}
public Configuration getCommonConfiguration() {
return config;
}
@Override
/**
 * Reads the value stored under {@code key} and coerces it to {@code datatype}.
 * Returns {@code null} when the key is absent. Supported targets: String[],
 * Number subclasses, String, Boolean, enums, Object (raw), and Duration.
 *
 * @param key      configuration key to look up
 * @param datatype requested result type
 * @return the coerced value, or null if the key is not present
 * @throws IllegalArgumentException for unsupported datatypes, unmatched enum
 *         strings, or unparseable duration values
 */
public <O> O get(String key, Class<O> datatype) {
    if (!config.containsKey(key)) return null;

    if (datatype.isArray()) {
        // Only String[] is supported; generic array element types are erased
        // and cannot be verified at runtime.
        Preconditions.checkArgument(datatype.getComponentType()==String.class,"Only string arrays are supported: %s",datatype);
        return (O)config.getStringArray(key);
    } else if (Number.class.isAssignableFrom(datatype)) {
        // A properties file configuration returns Strings even for numeric
        // values small enough to fit inside Integer (e.g. 5000). In-memory
        // configuration impls seem to be able to store and return actual
        // numeric types rather than String.
        //
        // We try to handle either case here.
        Object o = config.getProperty(key);
        if (datatype.isInstance(o)) {
            return (O)o;
        } else {
            // Fall back to the target type's String constructor, e.g. Integer(String).
            return constructFromStringArgument(datatype, o.toString());
        }
    } else if (datatype==String.class) {
        return (O)config.getString(key);
    } else if (datatype==Boolean.class) {
        // Boolean.valueOf reuses the cached TRUE/FALSE instances; the
        // Boolean(boolean) constructor is deprecated and allocates needlessly.
        return (O) Boolean.valueOf(config.getBoolean(key));
    } else if (datatype.isEnum()) {
        Enum[] constants = (Enum[])datatype.getEnumConstants();
        Preconditions.checkState(null != constants && 0 < constants.length, "Zero-length or undefined enum");
        String estr = config.getProperty(key).toString();
        // Match on toString() so enums with customized string forms still resolve.
        for (Enum ec : constants)
            if (ec.toString().equals(estr))
                return (O)ec;
        throw new IllegalArgumentException("No match for string \"" + estr + "\" in enum " + datatype);
    } else if (datatype==Object.class) {
        return (O)config.getProperty(key);
    } else if (Duration.class.isAssignableFrom(datatype)) {
        // This is a conceptual leak; the config layer should ideally only handle standard library types
        Object o = config.getProperty(key);
        if (Duration.class.isInstance(o)) {
            return (O) o;
        } else {
            // Parse "<number> [unit]"; a bare number defaults to milliseconds.
            String[] comps = o.toString().split("\\s");
            TimeUnit unit = null;
            if (comps.length == 1) {
                //By default, times are in milli seconds
                unit = TimeUnit.MILLISECONDS;
            } else if (comps.length == 2) {
                unit = Durations.parse(comps[1]);
            } else {
                throw new IllegalArgumentException("Cannot parse time duration from: " + o.toString());
            }
            return (O) new StandardDuration(Long.valueOf(comps[0]), unit);
        }
        // Lists are deliberately not supported. List's generic parameter
        // is subject to erasure and can't be checked at runtime. Someone
        // could create a ConfigOption<List<Number>>; we would instead return
        // a List<String> like we always do at runtime, and it wouldn't break
        // until the client tried to use the contents of the list.
        //
        // We could theoretically get around this by adding a type token to
        // every declaration of a List-typed ConfigOption, but it's just
        // not worth doing since we only actually use String[] anyway.
        // } else if (List.class.isAssignableFrom(datatype)) {
        //     return (O) config.getProperty(key);
    } else throw new IllegalArgumentException("Unsupported data type: " + datatype);
}
/**
 * Instantiates {@code datatype} via its single-String-argument constructor,
 * e.g. Integer(String) or Long(String).
 *
 * @param datatype target type; must declare a public String constructor
 * @param arg      raw string value to parse
 * @return a new instance of {@code datatype} built from {@code arg}
 * @throws RuntimeException wrapping any reflection or parse failure
 */
private <O> O constructFromStringArgument(Class<O> datatype, String arg) {
try {
Constructor<O> ctor = datatype.getConstructor(String.class);
return ctor.newInstance(arg);
// ReflectiveOperationException is narrower and more appropriate than Exception, but only @since 1.7
//} catch (ReflectiveOperationException e) {
} catch (Exception e) {
// Log with full context before rethrowing; the original exception is preserved as the cause.
log.error("Failed to parse configuration string \"{}\" into type {} due to the following reflection exception", arg, datatype, e);
throw new RuntimeException(e);
}
}
@Override
/**
 * Returns a snapshot of the configuration keys, optionally restricted to
 * those under {@code prefix}. A blank prefix means all keys.
 */
public Iterable<String> getKeys(String prefix) {
    // Pick the appropriate iterator, then drain it into a list so callers
    // can iterate repeatedly without touching the live configuration.
    Iterator<String> it = StringUtils.isNotBlank(prefix)
            ? config.getKeys(prefix)
            : config.getKeys();
    List<String> snapshot = Lists.newArrayList();
    while (it.hasNext()) {
        snapshot.add(it.next());
    }
    return snapshot;
}
@Override
public void close() {
//Do nothing
}
@Override
// Stores value under key; a null value is treated as a removal.
public <O> void set(String key, O value) {
if (value==null) config.clearProperty(key);
else config.setProperty(key,value);
}
@Override
public void remove(String key) {
config.clearProperty(key);
}
@Override
// Returns an independent copy backed by a fresh BaseConfiguration so that
// mutations of the copy do not affect this instance.
public WriteConfiguration copy() {
BaseConfiguration copy = new BaseConfiguration();
copy.copy(config);
return new CommonsConfiguration(copy);
}
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_configuration_backend_CommonsConfiguration.java |
288 | @Repository("blDataDrivenEnumerationDao")
// JPA-backed DAO for looking up data-driven enumerations and their values by key.
public class DataDrivenEnumerationDaoImpl implements DataDrivenEnumerationDao {
@PersistenceContext(unitName = "blPU")
protected EntityManager em;
@Resource(name = "blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
// Looks up a single enumeration by its unique key.
// NOTE(review): getSingleResult throws if no row (or more than one) matches
// the key — confirm callers expect an exception rather than null here.
public DataDrivenEnumeration readEnumByKey(String enumKey) {
TypedQuery<DataDrivenEnumeration> query = new TypedQueryBuilder<DataDrivenEnumeration>(DataDrivenEnumeration.class, "dde")
.addRestriction("dde.key", "=", enumKey)
.toQuery(em);
return query.getSingleResult();
}
@Override
// Looks up a single enumeration value by the enumeration key plus value key.
public DataDrivenEnumerationValue readEnumValueByKey(String enumKey, String enumValueKey) {
TypedQuery<DataDrivenEnumerationValue> query =
new TypedQueryBuilder<DataDrivenEnumerationValue>(DataDrivenEnumerationValue.class, "ddev")
.addRestriction("ddev.type.key", "=", enumKey)
.addRestriction("ddev.key", "=", enumValueKey)
.toQuery(em);
return query.getSingleResult();
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_enumeration_dao_DataDrivenEnumerationDaoImpl.java |
2,627 | // Lightweight placeholder for a class definition known only by its serialized
// binary form. All field-level queries are unsupported; callers must resolve it
// to a real ClassDefinition via toReal(...) before inspecting it.
public final class BinaryClassDefinitionProxy extends BinaryClassDefinition implements ClassDefinition {
public BinaryClassDefinitionProxy(int factoryId, int classId, int version, byte[] binary) {
// Fields assigned here are inherited from BinaryClassDefinition.
this.classId = classId;
this.version = version;
this.factoryId = factoryId;
setBinary(binary);
}
// Resolves this proxy: reuse a definition already registered in the context,
// otherwise have the context build one from the stored binary.
public ClassDefinition toReal(SerializationContext context) throws IOException {
final ClassDefinition cd = context.lookup(factoryId, classId, version);
return cd != null ? cd : context.createClassDefinition(factoryId, getBinary());
}
public FieldDefinition get(String name) {
throw new UnsupportedOperationException();
}
public FieldDefinition get(int fieldIndex) {
throw new UnsupportedOperationException();
}
public boolean hasField(String fieldName) {
throw new UnsupportedOperationException();
}
public Set<String> getFieldNames() {
throw new UnsupportedOperationException();
}
public FieldType getFieldType(String fieldName) {
throw new UnsupportedOperationException();
}
public int getFieldClassId(String fieldName) {
throw new UnsupportedOperationException();
}
public int getFieldCount() {
throw new UnsupportedOperationException();
}
public void writeData(ObjectDataOutput out) throws IOException {
throw new UnsupportedOperationException();
}
public void readData(ObjectDataInput in) throws IOException {
throw new UnsupportedOperationException();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_nio_serialization_BinaryClassDefinitionProxy.java |
347 | field(TTL_FIELD, new HashMap<String, Object>() {{
put("enabled", true);
}}). | 0true
| titan-es_src_main_java_com_thinkaurelius_titan_diskstorage_es_ElasticSearchIndex.java |
219 | // Base class for OrientDB console applications; customizes error reporting
// and the before/after lifecycle hooks of OConsoleApplication.
public abstract class OrientConsole extends OConsoleApplication {
public OrientConsole(String[] args) {
super(args);
}
@Override
// Prints the exception message followed by every cause in the chain,
// walking getCause() until exhausted.
protected void onException(Throwable e) {
Throwable current = e;
while (current != null) {
err.print("\nError: " + current.toString());
current = current.getCause();
}
}
@Override
protected void onBefore() {
printApplicationInfo();
}
// Hook for subclasses to print a banner; intentionally empty here.
protected void printApplicationInfo() {
}
@Override
protected void onAfter() {
out.println();
}
@Override
public void help() {
super.help();
}
/**
 * Truncates {@code iValue} so the result is at most {@code iMaxSize}
 * characters long, appending "..." when truncation occurs.
 *
 * @param iValue   the text to shorten; may be null
 * @param iMaxSize maximum length of the returned string
 * @return the possibly truncated value, or null when {@code iValue} is null
 */
protected String format(final String iValue, final int iMaxSize) {
    if (iValue == null)
        return null;
    if (iValue.length() <= iMaxSize)
        return iValue;
    // Guard against sizes too small to hold the "..." suffix; the original
    // substring(0, iMaxSize - 3) would throw for iMaxSize < 3.
    if (iMaxSize <= 3)
        return iValue.substring(0, iMaxSize);
    return iValue.substring(0, iMaxSize - 3) + "...";
}
} | 0true
| tools_src_main_java_com_orientechnologies_orient_console_OrientConsole.java |
928 | // Base response for broadcast operations: carries per-shard success/failure
// totals plus the individual shard failures, with custom wire serialization.
public abstract class BroadcastOperationResponse extends ActionResponse {
// Shared sentinel so responses without failures avoid allocating an array.
private static final ShardOperationFailedException[] EMPTY = new ShardOperationFailedException[0];
private int totalShards;
private int successfulShards;
private int failedShards;
private ShardOperationFailedException[] shardFailures = EMPTY;
protected BroadcastOperationResponse() {
}
protected BroadcastOperationResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
this.totalShards = totalShards;
this.successfulShards = successfulShards;
this.failedShards = failedShards;
// A null failure list is normalized to the shared EMPTY array.
this.shardFailures = shardFailures == null ? EMPTY : shardFailures.toArray(new ShardOperationFailedException[shardFailures.size()]);
}
/**
 * The total shards this request ran against.
 */
public int getTotalShards() {
return totalShards;
}
/**
 * The successful shards this request was executed on.
 */
public int getSuccessfulShards() {
return successfulShards;
}
/**
 * The failed shards this request was executed on.
 */
public int getFailedShards() {
return failedShards;
}
/**
 * The list of shard failures exception.
 */
// NOTE(review): returns the internal array without copying; callers could
// mutate it — confirm this is intentional before changing.
public ShardOperationFailedException[] getShardFailures() {
return shardFailures;
}
@Override
// Deserializes counts then the variable-length failure list; when the count
// is zero, shardFailures keeps the EMPTY sentinel assigned at construction.
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
totalShards = in.readVInt();
successfulShards = in.readVInt();
failedShards = in.readVInt();
int size = in.readVInt();
if (size > 0) {
shardFailures = new ShardOperationFailedException[size];
for (int i = 0; i < size; i++) {
shardFailures[i] = readShardOperationFailed(in);
}
}
}
@Override
// Mirrors readFrom: counts first, then each failure in order.
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(totalShards);
out.writeVInt(successfulShards);
out.writeVInt(failedShards);
out.writeVInt(shardFailures.length);
for (ShardOperationFailedException exp : shardFailures) {
exp.writeTo(out);
}
}
} | 0true
| src_main_java_org_elasticsearch_action_support_broadcast_BroadcastOperationResponse.java |
90 | // Manual reproduction harness (not a JUnit test): exercises client entry
// listeners across member terminations/restarts and checks that evictions
// never deliver null values and that all adds are observed.
public class ClientEntryListenerDisconnectTest {
// Counters mutated from listener callbacks; the final sleeps act as a crude
// barrier before the values are inspected.
private static int adds = 0;
private static int evictionsNull = 0;
private ClientEntryListenerDisconnectTest() {
}
public static void main(String[] args) throws InterruptedException {
Config config = new Config();
config.setGroupConfig(new GroupConfig("test", "test"));
config.getNetworkConfig().setPort(6701);
HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(config);
IMap<Integer, GenericEvent> map = hazelcastInstance.getMap("test");
map.addIndex("userId", false);
// Second member so the cluster survives terminating the first one below.
Hazelcast.newHazelcastInstance(config);
ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().addAddress("localhost:6701", "localhost:6702");
clientConfig.setGroupConfig(new GroupConfig("test", "test"));
clientConfig.getNetworkConfig().setConnectionAttemptLimit(100);
clientConfig.getNetworkConfig().setSmartRouting(false);
HazelcastInstance client = HazelcastClient.newHazelcastClient(clientConfig);
IMap<Integer, GenericEvent> mapClient = client.getMap("test");
// Listener with includeValue=true: an eviction carrying a null value is a bug.
mapClient.addEntryListener(new EntryAdapter<Integer, GenericEvent>() {
public void entryAdded(EntryEvent<Integer, GenericEvent> event) {
adds++;
}
public void entryEvicted(EntryEvent<Integer, GenericEvent> event) {
if (event.getValue() == null) evictionsNull++;
}
}, true);
HazelcastInstance client2 = HazelcastClient.newHazelcastClient(clientConfig);
IMap<Integer, GenericEvent> mapClient2 = client2.getMap("test");
map.put(1, new GenericEvent(1), 5, TimeUnit.SECONDS);
Thread.sleep(20);
mapClient.remove(1);
// Kill the member the non-smart client is attached to; it must fail over.
hazelcastInstance.getLifecycleService().terminate();
Thread.sleep(15000);
mapClient2.put(2, new GenericEvent(2), 1, TimeUnit.SECONDS);
Thread.sleep(20);
mapClient2.remove(2);
mapClient2.put(3, new GenericEvent(3), 1, TimeUnit.SECONDS);
Thread.sleep(15000);
// Restart a member and generate a mix of short/long TTL entries so several
// evictions fire while listeners are re-registered.
hazelcastInstance = Hazelcast.newHazelcastInstance(config);
map = hazelcastInstance.getMap("test");
map.put(4, new GenericEvent(4), 1, TimeUnit.SECONDS);
map.put(5, new GenericEvent(5), 5, TimeUnit.SECONDS);
map.put(6, new GenericEvent(6), 1, TimeUnit.SECONDS);
map.put(7, new GenericEvent(7), 1, TimeUnit.SECONDS);
Thread.sleep(10000);
if (evictionsNull != 0) {
System.out.println("ERROR: got " + evictionsNull + " evictions with null values");
} else {
System.out.println("OK");
}
mapClient.put(8, new GenericEvent(8), 1, TimeUnit.SECONDS);
Thread.sleep(5000);
// Eight puts happened in total; every one should have reached the listener.
if (adds != 8) {
System.out.println("ERROR: got " + adds + " instead of 8");
} else {
System.out.println("OK");
}
System.exit(0);
}
// Minimal serializable value type; userId exists so the map index above works.
private static class GenericEvent implements Serializable {
private static final long serialVersionUID = -933111044641052844L;
private int userId;
public GenericEvent(int userId) {
this.setUserId(userId);
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_ClientEntryListenerDisconnectTest.java |
271 | final class PeekDefinitionPopup extends PopupDialog
implements IInformationControl, IInformationControlExtension2,
IInformationControlExtension3 {
private final class GotoListener implements KeyListener {
@Override
public void keyReleased(KeyEvent e) {}
@Override
public void keyPressed(KeyEvent e) {
if (e.character == 0x1B) // ESC
dispose();
if (EditorUtil.triggersBinding(e, getCommandBinding())) {
e.doit=false;
dispose();
gotoNode(referencedNode, pc.getProject());
}
}
}
private ISourceViewer viewer;
private final CeylonEditor editor;
private Node referencedNode;
private CeylonParseController pc = new CeylonParseController();
public ISourceViewer getViewer() {
return viewer;
}
private StyledText titleLabel;
private TriggerSequence commandBinding;
protected TriggerSequence getCommandBinding() {
return commandBinding;
}
PeekDefinitionPopup(Shell parent, int shellStyle, CeylonEditor editor) {
super(parent, shellStyle, true, true, false, true,
true, null, null);
this.editor = editor;
commandBinding = EditorUtil.getCommandBinding(PLUGIN_ID +
".editor.code");
if (commandBinding!=null) {
setInfoText(commandBinding.format() + " to open editor");
}
create();
Color bg = parent.getDisplay().getSystemColor(SWT.COLOR_INFO_BACKGROUND);
getShell().setBackground(bg);
setBackgroundColor(bg);
//setBackgroundColor(getEditorWidget(editor).getBackground());
setForegroundColor(getEditorWidget(editor).getForeground());
}
private StyledText getEditorWidget(CeylonEditor editor) {
return editor.getCeylonSourceViewer().getTextWidget();
}
protected Control createContents(Composite parent) {
Composite composite = (Composite) super.createContents(parent);
Control[] children = composite.getChildren();
GridLayout layout = (GridLayout) composite.getLayout();
layout.verticalSpacing=8;
layout.marginLeft=8;
layout.marginRight=8;
layout.marginTop=8;
layout.marginBottom=8;
children[children.length-2].setVisible(false);
return composite;
}
@Override
protected Control createDialogArea(Composite parent) {
int styles= SWT.V_SCROLL | SWT.H_SCROLL | SWT.MULTI | SWT.FULL_SELECTION;
viewer = new CeylonSourceViewer(editor, parent, null, null, false, styles);
viewer.setEditable(false);
StyledText textWidget = viewer.getTextWidget();
textWidget.setFont(getEditorWidget(editor).getFont());
textWidget.setBackground(getEditorWidget(editor).getBackground());
textWidget.addKeyListener(new GotoListener());
return textWidget;
}
private static GridLayoutFactory popupLayoutFactory;
protected static GridLayoutFactory getPopupLayout() {
if (popupLayoutFactory == null) {
popupLayoutFactory = GridLayoutFactory.fillDefaults()
.margins(POPUP_MARGINWIDTH, POPUP_MARGINHEIGHT)
.spacing(POPUP_HORIZONTALSPACING, POPUP_VERTICALSPACING);
}
return popupLayoutFactory;
}
// Builds the styled popup title from the raw widget text, which is expected
// to have the form "<description> - <element name>"; the description is
// rendered bold and the element name with proposal highlighting.
// NOTE(review): assumes the text contains a "-"; a title without one would
// make the second nextToken() throw — confirm against setTitleText callers.
protected StyledString styleTitle(final StyledText title) {
StyledString result = new StyledString();
StringTokenizer tokens =
new StringTokenizer(title.getText(), "-", false);
styleDescription(title, result, tokens.nextToken());
result.append("-");
Highlights.styleProposal(result, tokens.nextToken(), false);
return result;
}
// Appends desc in a bold variant of the title's font.
protected void styleDescription(final StyledText title, StyledString result,
String desc) {
final FontData[] fontDatas = title.getFont().getFontData();
for (int i = 0; i < fontDatas.length; i++) {
fontDatas[i].setStyle(SWT.BOLD);
}
result.append(desc, new Styler() {
@Override
public void applyStyles(TextStyle textStyle) {
// NOTE(review): this Font appears to be created on every applyStyles
// call and never disposed — possible SWT resource leak, TODO confirm.
textStyle.font=new Font(title.getDisplay(), fontDatas);
}
});
}
@Override
protected Control createTitleControl(Composite parent) {
getPopupLayout().copy().numColumns(3).spacing(6, 6).applyTo(parent);
Label iconLabel = new Label(parent, SWT.NONE);
iconLabel.setImage(CeylonPlugin.getInstance().getImageRegistry().get(CEYLON_SOURCE));
getShell().addKeyListener(new GotoListener());
titleLabel = new StyledText(parent, SWT.NONE);
titleLabel.addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
titleLabel.setStyleRanges(styleTitle(titleLabel).getStyleRanges());
}
});
titleLabel.setEditable(false);
GridDataFactory.fillDefaults().align(SWT.FILL, SWT.CENTER)
.grab(true,false).span(1, 1).applyTo(titleLabel);
return null;
}
@Override
protected void setTitleText(String text) {
if (titleLabel!=null)
titleLabel.setText(text);
}
/*@Override
protected void adjustBounds() {
Rectangle bounds = getShell().getBounds();
int h = bounds.height;
if (h>400) {
bounds.height=400;
bounds.y = bounds.y + (h-400)/3;
getShell().setBounds(bounds);
}
int w = bounds.width;
if (w<600) {
bounds.width=600;
getShell().setBounds(bounds);
}
}*/
public void setInformation(String information) {
// this method is ignored, see IInformationControlExtension2
}
public void setSize(int width, int height) {
getShell().setSize(width, height);
}
public void addDisposeListener(DisposeListener listener) {
getShell().addDisposeListener(listener);
}
public void removeDisposeListener(DisposeListener listener) {
getShell().removeDisposeListener(listener);
}
public void setForegroundColor(Color foreground) {
applyForegroundColor(foreground, getContents());
}
public void setBackgroundColor(Color background) {
applyBackgroundColor(background, getContents());
}
public boolean isFocusControl() {
return getShell().getDisplay().getActiveShell() == getShell();
}
public void setFocus() {
getShell().forceFocus();
}
public void addFocusListener(FocusListener listener) {
getShell().addFocusListener(listener);
}
public void removeFocusListener(FocusListener listener) {
getShell().removeFocusListener(listener);
}
public void setSizeConstraints(int maxWidth, int maxHeight) {
// ignore
}
public void setLocation(Point location) {
/*
* If the location is persisted, it gets managed by PopupDialog - fine. Otherwise, the location is
* computed in Window#getInitialLocation, which will center it in the parent shell / main
* monitor, which is wrong for two reasons:
* - we want to center over the editor / subject control, not the parent shell
* - the center is computed via the initalSize, which may be also wrong since the size may
* have been updated since via min/max sizing of AbstractInformationControlManager.
* In that case, override the location with the one computed by the manager. Note that
* the call to constrainShellSize in PopupDialog.open will still ensure that the shell is
* entirely visible.
*/
if (!getPersistLocation() || getDialogSettings() == null)
getShell().setLocation(location);
}
public Point computeSizeHint() {
// return the shell's size - note that it already has the persisted size if persisting
// is enabled.
return getShell().getSize();
}
public void setVisible(boolean visible) {
if (visible) {
open();
} else {
saveDialogBounds(getShell());
getShell().setVisible(false);
}
}
public final void dispose() {
docProvider.disconnect(ei);
ei = null;
close();
}
IDocumentProvider docProvider = new SourceArchiveDocumentProvider();
IEditorInput ei;
@Override
// Resolves the declaration referenced at the editor's current selection,
// loads the document containing it (current document, workspace file, or
// source archive), restricts the viewer to the declaration's lines, and
// re-parses so the popup gets syntax highlighting. The input argument is
// ignored; all state comes from the hosting editor.
public void setInput(Object input) {
CeylonParseController epc = editor.getParseController();
IRegion r = editor.getSelection();
Node node = Nodes.findNode(epc.getRootNode(), r.getOffset(),
r.getOffset()+r.getLength());
referencedNode = Nodes.getReferencedNode(node, epc);
// Nothing under the caret resolves to a declaration: leave popup empty.
if (referencedNode==null) return;
IProject project = epc.getProject();
IPath path = getNodePath(referencedNode, project);
//CeylonParseController treats files with full paths subtly
//differently to files with relative paths, so make the
//path relative
IPath pathToCompare = path;
if (project!=null &&
project.getLocation().isPrefixOf(path)) {
pathToCompare = path.makeRelativeTo(project.getLocation());
}
IDocument doc;
if (pathToCompare.equals(epc.getPath())) {
// Target lives in the file already open in the editor: reuse its document.
doc = epc.getDocument();
}
else {
// Otherwise connect to the target's document via the provider; ei is
// retained so dispose() can disconnect it later.
ei = getEditorInput(referencedNode.getUnit());
if (ei == null) {
ei = getEditorInput(path);
}
try {
docProvider.connect(ei);
doc = docProvider.getDocument(ei);
}
catch (CoreException e) {
e.printStackTrace();
return;
}
}
viewer.setDocument(doc);
try {
// Show only the lines spanned by the referenced declaration.
IRegion firstLine = doc.getLineInformationOfOffset(referencedNode.getStartIndex());
IRegion lastLine = doc.getLineInformationOfOffset(referencedNode.getStopIndex());
viewer.setVisibleRegion(firstLine.getOffset(),
lastLine.getOffset()+lastLine.getLength()-firstLine.getOffset());
}
catch (BadLocationException e) {
e.printStackTrace();
}
pc.initialize(path, project, null);
pc.parse(doc, new NullProgressMonitor(), null);
/*try {
int lines = doc.getLineOfOffset(refDec.getStopIndex())-
doc.getLineOfOffset(refDec.getStartIndex())+1;
setSize(getShell().getBounds().width,
viewer.getTextWidget().getLineHeight()*lines);
}
catch (BadLocationException e) {
e.printStackTrace();
}*/
if (referencedNode instanceof Tree.Declaration) {
Declaration model = ((Tree.Declaration) referencedNode).getDeclarationModel();
setTitleText("Peek Definition - " + getLabelDescriptionFor(model));
}
}
@Override
public boolean restoresLocation() {
return false;
}
@Override
public boolean restoresSize() {
return true;
}
@Override
public Rectangle getBounds() {
return getShell().getBounds();
}
@Override
public Rectangle computeTrim() {
return getShell().computeTrim(0, 0, 0, 0);
}
public CeylonParseController getParseController() {
return pc;
}
@Override
protected IDialogSettings getDialogSettings() {
String sectionName= "com.redhat.ceylon.eclipse.ui.PeekDefinition";
IDialogSettings dialogSettings = CeylonPlugin.getInstance()
.getDialogSettings();
IDialogSettings settings= dialogSettings.getSection(sectionName);
if (settings == null)
settings= dialogSettings.addNewSection(sectionName);
return settings;
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_PeekDefinitionPopup.java |
2,969 | public class CommonGramsTokenFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
@Test
// The common_grams filter requires either common_words or common_words_path;
// building an analysis service without them must fail with an
// ElasticsearchIllegalArgumentException wrapped as the cause.
public void testDefault() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams").build();
try {
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Assert.fail("[common_words] or [common_words_path] is set");
} catch (Exception e) {
assertThat(e.getCause(), instanceOf(ElasticsearchIllegalArgumentException.class));
}
}
@Test
public void testWithoutCommonWordsMatch() throws IOException {
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
{
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_default");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the", "quick", "brown", "is", "a", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
}
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.put("index.analysis.filter.common_grams_default.query_mode", false)
.putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
{
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_default");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the", "quick", "brown", "is", "a", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
}
}
@Test
public void testSettings() throws IOException {
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1");
String source = "the quick brown is a fox or noT";
String[] expected = new String[] { "the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a", "a_fox", "fox", "fox_or", "or", "or_noT", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2");
String source = "the quick brown is a fox or why noT";
String[] expected = new String[] { "the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a", "a_fox", "fox", "or", "why", "why_noT", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a", "a_fox", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
}
@Test
public void testCommonGramsAnalysis() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams.json").build();
{
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick", "quick_brown", "brown", "brown_is", "is", "a", "a_fox", "fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
}
{
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer_file").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick", "quick_brown", "brown", "brown_is", "is", "a", "a_fox", "fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
}
}
@Test
public void testQueryModeSettings() throws IOException {
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.query_mode", true)
.putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1");
String source = "the quick brown is a fox or noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox_or", "or_noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.query_mode", true)
.putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2");
String source = "the quick brown is a fox or why noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox", "or", "why_noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.put("index.analysis.filter.common_grams_3.query_mode", true)
.putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3");
String source = "the quick brown is a fox or why noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox", "or", "why_noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
{
Settings settings = ImmutableSettings.settingsBuilder().put("index.analysis.filter.common_grams_4.type", "common_grams")
.put("index.analysis.filter.common_grams_4.query_mode", true)
.putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.build();
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4");
String source = "the quick brown is a fox Or noT";
String[] expected = new String[] { "the_quick", "quick", "brown_is", "is_a", "a_fox", "fox", "Or", "noT" };
Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
}
@Test
public void testQueryModeCommonGramsAnalysis() throws IOException {
Settings settings = ImmutableSettings.settingsBuilder().loadFromClasspath("org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json").build();
{
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick_brown", "brown_is", "is", "a_fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
}
{
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
Analyzer analyzer = analysisService.analyzer("commongramsAnalyzer_file").analyzer();
String source = "the quick brown is a fox or not";
String[] expected = new String[] { "the", "quick_brown", "brown_is", "is", "a_fox", "fox_or", "or", "not" };
assertTokenStreamContents(analyzer.tokenStream("test", source), expected);
}
}
} | 0true
| src_test_java_org_elasticsearch_index_analysis_commongrams_CommonGramsTokenFilterFactoryTests.java |
959 | // Client protocol request for Condition.await(): routed by lock name so it
// executes on the partition owning the lock, with Portable serialization.
public class AwaitRequest extends KeyBasedClientRequest implements Portable, SecureRequest {
private ObjectNamespace namespace;
private String name;
// Await timeout in milliseconds per the protocol fields below.
private long timeout;
// Identifies the caller thread that holds the lock.
private long threadId;
private String conditionId;
public AwaitRequest() {
}
public AwaitRequest(ObjectNamespace namespace, String name, long timeout, long threadId, String conditionId) {
this.namespace = namespace;
this.name = name;
this.timeout = timeout;
this.threadId = threadId;
this.conditionId = conditionId;
}
@Override
// Partition routing key: the lock's name.
protected Object getKey() {
return name;
}
@Override
protected Operation prepareOperation() {
final Data key = getClientEngine().toData(name);
return new AwaitOperation(namespace, key, threadId, timeout, conditionId);
}
@Override
public String getServiceName() {
return LockService.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return LockPortableHook.FACTORY_ID;
}
@Override
public int getClassId() {
return LockPortableHook.CONDITION_AWAIT;
}
@Override
// Portable fields first, then the namespace on the raw stream; read() below
// must mirror this exact order.
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
writer.writeLong("tout", timeout);
writer.writeLong("tid", threadId);
writer.writeUTF("cid", conditionId);
ObjectDataOutput out = writer.getRawDataOutput();
namespace.writeData(out);
}
@Override
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
timeout = reader.readLong("tout");
threadId = reader.readLong("tid");
conditionId = reader.readUTF("cid");
ObjectDataInput in = reader.getRawDataInput();
namespace = new InternalLockNamespace();
namespace.readData(in);
}
@Override
// Awaiting requires the lock permission on the underlying lock object.
public Permission getRequiredPermission() {
return new LockPermission(namespace.getObjectName(), ActionConstants.ACTION_LOCK);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_client_AwaitRequest.java |
370 | public class GetRepositoriesRequestBuilder extends MasterNodeReadOperationRequestBuilder<GetRepositoriesRequest, GetRepositoriesResponse, GetRepositoriesRequestBuilder> {
/**
* Creates new get repository request builder
*
* @param clusterAdminClient cluster admin client
*/
public GetRepositoriesRequestBuilder(ClusterAdminClient clusterAdminClient) {
super((InternalClusterAdminClient) clusterAdminClient, new GetRepositoriesRequest());
}
/**
* Creates new get repository request builder
*
* @param clusterAdminClient cluster admin client
* @param repositories list of repositories to get
*/
public GetRepositoriesRequestBuilder(ClusterAdminClient clusterAdminClient, String... repositories) {
super((InternalClusterAdminClient) clusterAdminClient, new GetRepositoriesRequest(repositories));
}
/**
* Sets list of repositories to get
*
* @param repositories list of repositories
* @return builder
*/
public GetRepositoriesRequestBuilder setRepositories(String... repositories) {
request.repositories(repositories);
return this;
}
/**
* Adds repositories to the list of repositories to get
*
* @param repositories list of repositories
* @return builder
*/
public GetRepositoriesRequestBuilder addRepositories(String... repositories) {
request.repositories(ObjectArrays.concat(request.repositories(), repositories, String.class));
return this;
}
@Override
protected void doExecute(ActionListener<GetRepositoriesResponse> listener) {
((ClusterAdminClient) client).getRepositories(request, listener);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_repositories_get_GetRepositoriesRequestBuilder.java |
602 | public class MasterConfirmationOperation extends AbstractClusterOperation {
@Override
public void run() {
final Address endpoint = getCallerAddress();
if (endpoint == null) {
return;
}
final ClusterServiceImpl clusterService = getService();
final ILogger logger = getNodeEngine().getLogger(MasterConfirmationOperation.class.getName());
final MemberImpl member = clusterService.getMember(endpoint);
if (member == null) {
logger.warning("MasterConfirmation has been received from " + endpoint
+ ", but it is not a member of this cluster!");
OperationService operationService = getNodeEngine().getOperationService();
operationService.send(new MemberRemoveOperation(clusterService.getThisAddress()), endpoint);
} else {
if (clusterService.isMaster()) {
clusterService.acceptMasterConfirmation(member);
} else {
logger.warning(endpoint + " has sent MasterConfirmation, but this node is not master!");
}
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_cluster_MasterConfirmationOperation.java |
2,078 | public class CachedStreams {
public static void clear() {
CachedStreamInput.clear();
}
} | 0true
| src_main_java_org_elasticsearch_common_io_CachedStreams.java |
2,402 | final class BigLongArray extends AbstractBigArray implements LongArray {
private long[][] pages;
/** Constructor. */
public BigLongArray(long size, PageCacheRecycler recycler, boolean clearOnResize) {
super(LONG_PAGE_SIZE, recycler, clearOnResize);
this.size = size;
pages = new long[numPages(size)][];
for (int i = 0; i < pages.length; ++i) {
pages[i] = newLongPage(i);
}
}
@Override
public long get(long index) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage];
}
@Override
public long set(long index, long value) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
final long[] page = pages[pageIndex];
final long ret = page[indexInPage];
page[indexInPage] = value;
return ret;
}
@Override
public long increment(long index, long inc) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage] += inc;
}
@Override
protected int numBytesPerElement() {
return RamUsageEstimator.NUM_BYTES_LONG;
}
/** Change the size of this array. Content between indexes <code>0</code> and <code>min(size(), newSize)</code> will be preserved. */
public void resize(long newSize) {
final int numPages = numPages(newSize);
if (numPages > pages.length) {
pages = Arrays.copyOf(pages, ArrayUtil.oversize(numPages, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
}
for (int i = numPages - 1; i >= 0 && pages[i] == null; --i) {
pages[i] = newLongPage(i);
}
for (int i = numPages; i < pages.length && pages[i] != null; ++i) {
pages[i] = null;
releasePage(i);
}
this.size = newSize;
}
@Override
public void fill(long fromIndex, long toIndex, long value) {
Preconditions.checkArgument(fromIndex <= toIndex);
final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1);
if (fromPage == toPage) {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value);
} else {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), pages[fromPage].length, value);
for (int i = fromPage + 1; i < toPage; ++i) {
Arrays.fill(pages[i], value);
}
Arrays.fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, value);
}
}
} | 0true
| src_main_java_org_elasticsearch_common_util_BigLongArray.java |
1,543 | public class ProcessorUtils {
/**
* Gets a UTF-8 URL encoded URL based on the current URL as well as the specified map
* of query string parameters
*
* @param baseUrl
* @param parameters
* @return the built URL
*/
public static String getUrl(String baseUrl, Map<String, String[]> parameters) {
if (baseUrl.contains("?")) {
throw new IllegalArgumentException("baseUrl contained a ? indicating it is not a base url");
}
StringBuilder sb = new StringBuilder();
sb.append(baseUrl);
boolean atLeastOneParam = false;
if (parameters != null && parameters.size() > 0) {
for (Entry<String, String[]> entry : parameters.entrySet()) {
if (entry.getValue().length > 0) {
atLeastOneParam = true;
}
}
}
if (atLeastOneParam) {
sb.append("?");
} else {
return sb.toString();
}
for (Entry<String, String[]> entry : parameters.entrySet()) {
String key = entry.getKey();
for (String value : entry.getValue()) {
StringBuilder parameter = new StringBuilder();
try {
parameter.append(URLEncoder.encode(key, "UTF-8"));
parameter.append("=");
parameter.append(URLEncoder.encode(value, "UTF-8"));
parameter.append("&");
} catch (UnsupportedEncodingException e) {
parameter = null;
}
sb.append(parameter);
}
}
String url = sb.toString();
if (url.charAt(url.length() - 1) == '&') {
url = url.substring(0, url.length() - 1);
}
return url;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_util_ProcessorUtils.java |
640 | public class ShardStatus extends BroadcastShardOperationResponse {
private ShardRouting shardRouting;
IndexShardState state;
ByteSizeValue storeSize;
long translogId = -1;
long translogOperations = -1;
DocsStatus docs;
MergeStats mergeStats;
RefreshStats refreshStats;
FlushStats flushStats;
PeerRecoveryStatus peerRecoveryStatus;
GatewayRecoveryStatus gatewayRecoveryStatus;
GatewaySnapshotStatus gatewaySnapshotStatus;
ShardStatus() {
}
ShardStatus(ShardRouting shardRouting) {
super(shardRouting.index(), shardRouting.id());
this.shardRouting = shardRouting;
}
/**
* The shard routing information (cluster wide shard state).
*/
public ShardRouting getShardRouting() {
return this.shardRouting;
}
/**
* The shard state (index/local state).
*/
public IndexShardState getState() {
return state;
}
/**
* The current size of the shard index storage.
*/
public ByteSizeValue getStoreSize() {
return storeSize;
}
/**
* The transaction log id.
*/
public long getTranslogId() {
return translogId;
}
/**
* The number of transaction operations in the transaction log.
*/
public long getTranslogOperations() {
return translogOperations;
}
/**
* Docs level information for the shard index, <tt>null</tt> if not applicable.
*/
public DocsStatus getDocs() {
return docs;
}
/**
* Index merge statistics.
*/
public MergeStats getMergeStats() {
return this.mergeStats;
}
/**
* Refresh stats.
*/
public RefreshStats getRefreshStats() {
return this.refreshStats;
}
public FlushStats getFlushStats() {
return this.flushStats;
}
/**
* Peer recovery status (<tt>null</tt> if not applicable). Both real time if an on going recovery
* is in progress and summary once it is done.
*/
public PeerRecoveryStatus getPeerRecoveryStatus() {
return peerRecoveryStatus;
}
/**
* Gateway recovery status (<tt>null</tt> if not applicable). Both real time if an on going recovery
* is in progress adn summary once it is done.
*/
public GatewayRecoveryStatus getGatewayRecoveryStatus() {
return gatewayRecoveryStatus;
}
/**
* The current on going snapshot to the gateway or the last one if none is on going.
*/
public GatewaySnapshotStatus getGatewaySnapshotStatus() {
return gatewaySnapshotStatus;
}
public static ShardStatus readIndexShardStatus(StreamInput in) throws IOException {
ShardStatus shardStatus = new ShardStatus();
shardStatus.readFrom(in);
return shardStatus;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
shardRouting.writeTo(out);
out.writeByte(state.id());
if (storeSize == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
storeSize.writeTo(out);
}
out.writeLong(translogId);
out.writeLong(translogOperations);
if (docs == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeLong(docs.getNumDocs());
out.writeLong(docs.getMaxDoc());
out.writeLong(docs.getDeletedDocs());
}
if (peerRecoveryStatus == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeByte(peerRecoveryStatus.stage.value());
out.writeVLong(peerRecoveryStatus.startTime);
out.writeVLong(peerRecoveryStatus.time);
out.writeVLong(peerRecoveryStatus.indexSize);
out.writeVLong(peerRecoveryStatus.reusedIndexSize);
out.writeVLong(peerRecoveryStatus.recoveredIndexSize);
out.writeVLong(peerRecoveryStatus.recoveredTranslogOperations);
}
if (gatewayRecoveryStatus == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeByte(gatewayRecoveryStatus.stage.value());
out.writeVLong(gatewayRecoveryStatus.startTime);
out.writeVLong(gatewayRecoveryStatus.time);
out.writeVLong(gatewayRecoveryStatus.indexSize);
out.writeVLong(gatewayRecoveryStatus.reusedIndexSize);
out.writeVLong(gatewayRecoveryStatus.recoveredIndexSize);
out.writeVLong(gatewayRecoveryStatus.recoveredTranslogOperations);
}
if (gatewaySnapshotStatus == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeByte(gatewaySnapshotStatus.stage.value());
out.writeVLong(gatewaySnapshotStatus.startTime);
out.writeVLong(gatewaySnapshotStatus.time);
out.writeVLong(gatewaySnapshotStatus.indexSize);
out.writeVInt(gatewaySnapshotStatus.getExpectedNumberOfOperations());
}
if (mergeStats == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
mergeStats.writeTo(out);
}
if (refreshStats == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
refreshStats.writeTo(out);
}
if (flushStats == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
flushStats.writeTo(out);
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardRouting = readShardRoutingEntry(in);
state = IndexShardState.fromId(in.readByte());
if (in.readBoolean()) {
storeSize = readBytesSizeValue(in);
}
translogId = in.readLong();
translogOperations = in.readLong();
if (in.readBoolean()) {
docs = new DocsStatus();
docs.numDocs = in.readLong();
docs.maxDoc = in.readLong();
docs.deletedDocs = in.readLong();
}
if (in.readBoolean()) {
peerRecoveryStatus = new PeerRecoveryStatus(PeerRecoveryStatus.Stage.fromValue(in.readByte()),
in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong());
}
if (in.readBoolean()) {
gatewayRecoveryStatus = new GatewayRecoveryStatus(GatewayRecoveryStatus.Stage.fromValue(in.readByte()),
in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong());
}
if (in.readBoolean()) {
gatewaySnapshotStatus = new GatewaySnapshotStatus(GatewaySnapshotStatus.Stage.fromValue(in.readByte()),
in.readVLong(), in.readVLong(), in.readVLong(), in.readVInt());
}
if (in.readBoolean()) {
mergeStats = MergeStats.readMergeStats(in);
}
if (in.readBoolean()) {
refreshStats = RefreshStats.readRefreshStats(in);
}
if (in.readBoolean()) {
flushStats = FlushStats.readFlushStats(in);
}
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_status_ShardStatus.java |
804 | return new PortableFactory() {
@Override
public Portable create(int classId) {
switch (classId) {
case ADD_AND_GET:
return new AddAndGetRequest();
case COMPARE_AND_SET:
return new CompareAndSetRequest();
case GET_AND_ADD:
return new GetAndAddRequest();
case GET_AND_SET:
return new GetAndSetRequest();
case SET:
return new SetRequest();
case APPLY:
return new ApplyRequest();
case ALTER:
return new AlterRequest();
case ALTER_AND_GET:
return new AlterAndGetRequest();
case GET_AND_ALTER:
return new GetAndAlterRequest();
default:
return null;
}
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_client_AtomicLongPortableHook.java |
1,687 | public class OAsyncCommandResultListener extends OAbstractCommandResultListener {
private final ONetworkProtocolBinary protocol;
private final AtomicBoolean empty = new AtomicBoolean(true);
private final int txId;
public OAsyncCommandResultListener(final ONetworkProtocolBinary iNetworkProtocolBinary, final int txId) {
this.protocol = iNetworkProtocolBinary;
this.txId = txId;
}
@Override
public boolean result(final Object iRecord) {
if (empty.compareAndSet(true, false))
try {
protocol.sendOk(txId);
} catch (IOException e1) {
}
try {
protocol.channel.writeByte((byte) 1); // ONE MORE RECORD
protocol.writeIdentifiable((ORecordInternal<?>) ((OIdentifiable) iRecord).getRecord());
fetchRecord(iRecord);
} catch (IOException e) {
return false;
}
return true;
}
public boolean isEmpty() {
return empty.get();
}
} | 1no label
| server_src_main_java_com_orientechnologies_orient_server_network_protocol_binary_OAsyncCommandResultListener.java |
1,659 | public final class Preconditions {
private Preconditions() {
}
/**
* Ensures the truth of an expression involving one or more parameters to the
* calling method.
*
* @param expression a boolean expression
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code expression} is false
*/
public static void checkArgument(boolean expression) {
if (!expression) {
throw new ElasticsearchIllegalArgumentException();
}
}
/**
* Ensures the truth of an expression involving one or more parameters to the
* calling method.
*
* @param expression a boolean expression
* @param errorMessage the exception message to use if the check fails; will
* be converted to a string using {@link String#valueOf(Object)}
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code expression} is false
*/
public static void checkArgument(boolean expression, Object errorMessage) {
if (!expression) {
throw new ElasticsearchIllegalArgumentException(String.valueOf(errorMessage));
}
}
/**
* Ensures the truth of an expression involving one or more parameters to the
* calling method.
*
* @param expression a boolean expression
* @param errorMessageTemplate a template for the exception message should the
* check fail. The message is formed by replacing each {@code %s}
* placeholder in the template with an argument. These are matched by
* position - the first {@code %s} gets {@code errorMessageArgs[0]}, etc.
* Unmatched arguments will be appended to the formatted message in square
* braces. Unmatched placeholders will be left as-is.
* @param errorMessageArgs the arguments to be substituted into the message
* template. Arguments are converted to strings using
* {@link String#valueOf(Object)}.
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code expression} is false
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if the check fails and either {@code
* errorMessageTemplate} or {@code errorMessageArgs} is null (don't let
* this happen)
*/
public static void checkArgument(boolean expression,
String errorMessageTemplate, Object... errorMessageArgs) {
if (!expression) {
throw new ElasticsearchIllegalArgumentException(
format(errorMessageTemplate, errorMessageArgs));
}
}
/**
* Ensures the truth of an expression involving the state of the calling
* instance, but not involving any parameters to the calling method.
*
* @param expression a boolean expression
* @throws org.elasticsearch.ElasticsearchIllegalStateException
* if {@code expression} is false
*/
public static void checkState(boolean expression) {
if (!expression) {
throw new ElasticsearchIllegalStateException();
}
}
/**
* Ensures the truth of an expression involving the state of the calling
* instance, but not involving any parameters to the calling method.
*
* @param expression a boolean expression
* @param errorMessage the exception message to use if the check fails; will
* be converted to a string using {@link String#valueOf(Object)}
* @throws org.elasticsearch.ElasticsearchIllegalStateException
* if {@code expression} is false
*/
public static void checkState(boolean expression, Object errorMessage) {
if (!expression) {
throw new ElasticsearchIllegalStateException(String.valueOf(errorMessage));
}
}
/**
* Ensures the truth of an expression involving the state of the calling
* instance, but not involving any parameters to the calling method.
*
* @param expression a boolean expression
* @param errorMessageTemplate a template for the exception message should the
* check fail. The message is formed by replacing each {@code %s}
* placeholder in the template with an argument. These are matched by
* position - the first {@code %s} gets {@code errorMessageArgs[0]}, etc.
* Unmatched arguments will be appended to the formatted message in square
* braces. Unmatched placeholders will be left as-is.
* @param errorMessageArgs the arguments to be substituted into the message
* template. Arguments are converted to strings using
* {@link String#valueOf(Object)}.
* @throws org.elasticsearch.ElasticsearchIllegalStateException
* if {@code expression} is false
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if the check fails and either {@code
* errorMessageTemplate} or {@code errorMessageArgs} is null (don't let
* this happen)
*/
public static void checkState(boolean expression,
String errorMessageTemplate, Object... errorMessageArgs) {
if (!expression) {
throw new ElasticsearchIllegalStateException(
format(errorMessageTemplate, errorMessageArgs));
}
}
/**
* Ensures that an object reference passed as a parameter to the calling
* method is not null.
*
* @param reference an object reference
* @return the non-null reference that was validated
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if {@code reference} is null
*/
public static <T> T checkNotNull(T reference) {
if (reference == null) {
throw new ElasticsearchNullPointerException();
}
return reference;
}
/**
* Ensures that an object reference passed as a parameter to the calling
* method is not null.
*
* @param reference an object reference
* @param errorMessage the exception message to use if the check fails; will
* be converted to a string using {@link String#valueOf(Object)}
* @return the non-null reference that was validated
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if {@code reference} is null
*/
public static <T> T checkNotNull(T reference, Object errorMessage) {
if (reference == null) {
throw new ElasticsearchNullPointerException(String.valueOf(errorMessage));
}
return reference;
}
/**
* Ensures that an object reference passed as a parameter to the calling
* method is not null.
*
* @param reference an object reference
* @param errorMessageTemplate a template for the exception message should the
* check fail. The message is formed by replacing each {@code %s}
* placeholder in the template with an argument. These are matched by
* position - the first {@code %s} gets {@code errorMessageArgs[0]}, etc.
* Unmatched arguments will be appended to the formatted message in square
* braces. Unmatched placeholders will be left as-is.
* @param errorMessageArgs the arguments to be substituted into the message
* template. Arguments are converted to strings using
* {@link String#valueOf(Object)}.
* @return the non-null reference that was validated
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if {@code reference} is null
*/
public static <T> T checkNotNull(T reference, String errorMessageTemplate,
Object... errorMessageArgs) {
if (reference == null) {
// If either of these parameters is null, the right thing happens anyway
throw new ElasticsearchNullPointerException(
format(errorMessageTemplate, errorMessageArgs));
}
return reference;
}
/**
* Ensures that an {@code Iterable} object passed as a parameter to the
* calling method is not null and contains no null elements.
*
* @param iterable the iterable to check the contents of
* @return the non-null {@code iterable} reference just validated
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if {@code iterable} is null or contains at
* least one null element
*/
public static <T extends Iterable<?>> T checkContentsNotNull(T iterable) {
if (containsOrIsNull(iterable)) {
throw new ElasticsearchNullPointerException();
}
return iterable;
}
/**
* Ensures that an {@code Iterable} object passed as a parameter to the
* calling method is not null and contains no null elements.
*
* @param iterable the iterable to check the contents of
* @param errorMessage the exception message to use if the check fails; will
* be converted to a string using {@link String#valueOf(Object)}
* @return the non-null {@code iterable} reference just validated
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if {@code iterable} is null or contains at
* least one null element
*/
public static <T extends Iterable<?>> T checkContentsNotNull(
T iterable, Object errorMessage) {
if (containsOrIsNull(iterable)) {
throw new ElasticsearchNullPointerException(String.valueOf(errorMessage));
}
return iterable;
}
/**
* Ensures that an {@code Iterable} object passed as a parameter to the
* calling method is not null and contains no null elements.
*
* @param iterable the iterable to check the contents of
* @param errorMessageTemplate a template for the exception message should the
* check fail. The message is formed by replacing each {@code %s}
* placeholder in the template with an argument. These are matched by
* position - the first {@code %s} gets {@code errorMessageArgs[0]}, etc.
* Unmatched arguments will be appended to the formatted message in square
* braces. Unmatched placeholders will be left as-is.
* @param errorMessageArgs the arguments to be substituted into the message
* template. Arguments are converted to strings using
* {@link String#valueOf(Object)}.
* @return the non-null {@code iterable} reference just validated
* @throws org.elasticsearch.ElasticsearchNullPointerException
* if {@code iterable} is null or contains at
* least one null element
*/
public static <T extends Iterable<?>> T checkContentsNotNull(T iterable,
String errorMessageTemplate, Object... errorMessageArgs) {
if (containsOrIsNull(iterable)) {
throw new ElasticsearchNullPointerException(
format(errorMessageTemplate, errorMessageArgs));
}
return iterable;
}
private static boolean containsOrIsNull(Iterable<?> iterable) {
if (iterable == null) {
return true;
}
if (iterable instanceof Collection) {
Collection<?> collection = (Collection<?>) iterable;
try {
return collection.contains(null);
} catch (ElasticsearchNullPointerException e) {
// A NPE implies that the collection doesn't contain null.
return false;
}
} else {
for (Object element : iterable) {
if (element == null) {
return true;
}
}
return false;
}
}
/**
* Ensures that {@code index} specifies a valid <i>element</i> in an array,
* list or string of size {@code size}. An element index may range from zero,
* inclusive, to {@code size}, exclusive.
*
* @param index a user-supplied index identifying an element of an array, list
* or string
* @param size the size of that array, list or string
* @throws IndexOutOfBoundsException if {@code index} is negative or is not
* less than {@code size}
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code size} is negative
*/
public static void checkElementIndex(int index, int size) {
checkElementIndex(index, size, "index");
}
/**
* Ensures that {@code index} specifies a valid <i>element</i> in an array,
* list or string of size {@code size}. An element index may range from zero,
* inclusive, to {@code size}, exclusive.
*
* @param index a user-supplied index identifying an element of an array, list
* or string
* @param size the size of that array, list or string
* @param desc the text to use to describe this index in an error message
* @throws IndexOutOfBoundsException if {@code index} is negative or is not
* less than {@code size}
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code size} is negative
*/
public static void checkElementIndex(int index, int size, String desc) {
checkArgument(size >= 0, "negative size: %s", size);
if (index < 0) {
throw new IndexOutOfBoundsException(
format("%s (%s) must not be negative", desc, index));
}
if (index >= size) {
throw new IndexOutOfBoundsException(
format("%s (%s) must be less than size (%s)", desc, index, size));
}
}
/**
* Ensures that {@code index} specifies a valid <i>position</i> in an array,
* list or string of size {@code size}. A position index may range from zero
* to {@code size}, inclusive.
*
* @param index a user-supplied index identifying a position in an array, list
* or string
* @param size the size of that array, list or string
* @throws IndexOutOfBoundsException if {@code index} is negative or is
* greater than {@code size}
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code size} is negative
*/
public static void checkPositionIndex(int index, int size) {
checkPositionIndex(index, size, "index");
}
/**
* Ensures that {@code index} specifies a valid <i>position</i> in an array,
* list or string of size {@code size}. A position index may range from zero
* to {@code size}, inclusive.
*
* @param index a user-supplied index identifying a position in an array, list
* or string
* @param size the size of that array, list or string
* @param desc the text to use to describe this index in an error message
* @throws IndexOutOfBoundsException if {@code index} is negative or is
* greater than {@code size}
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code size} is negative
*/
public static void checkPositionIndex(int index, int size, String desc) {
checkArgument(size >= 0, "negative size: %s", size);
if (index < 0) {
throw new IndexOutOfBoundsException(format(
"%s (%s) must not be negative", desc, index));
}
if (index > size) {
throw new IndexOutOfBoundsException(format(
"%s (%s) must not be greater than size (%s)", desc, index, size));
}
}
/**
* Ensures that {@code start} and {@code end} specify a valid <i>positions</i>
* in an array, list or string of size {@code size}, and are in order. A
* position index may range from zero to {@code size}, inclusive.
*
* @param start a user-supplied index identifying a starting position in an
* array, list or string
* @param end a user-supplied index identifying a ending position in an array,
* list or string
* @param size the size of that array, list or string
* @throws IndexOutOfBoundsException if either index is negative or is
* greater than {@code size}, or if {@code end} is less than {@code start}
* @throws org.elasticsearch.ElasticsearchIllegalArgumentException
* if {@code size} is negative
*/
public static void checkPositionIndexes(int start, int end, int size) {
checkPositionIndex(start, size, "start index");
checkPositionIndex(end, size, "end index");
if (end < start) {
throw new IndexOutOfBoundsException(format(
"end index (%s) must not be less than start index (%s)", end, start));
}
}
/**
* Substitutes each {@code %s} in {@code template} with an argument. These
* are matched by position - the first {@code %s} gets {@code args[0]}, etc.
* If there are more arguments than placeholders, the unmatched arguments will
* be appended to the end of the formatted message in square braces.
*
* @param template a non-null string containing 0 or more {@code %s}
* placeholders.
* @param args the arguments to be substituted into the message
* template. Arguments are converted to strings using
* {@link String#valueOf(Object)}. Arguments can be null.
*/
// VisibleForTesting
static String format(String template, Object... args) {
// start substituting the arguments into the '%s' placeholders
StringBuilder builder = new StringBuilder(
template.length() + 16 * args.length);
int templateStart = 0;
int i = 0;
while (i < args.length) {
int placeholderStart = template.indexOf("%s", templateStart);
if (placeholderStart == -1) {
break;
}
builder.append(template.substring(templateStart, placeholderStart));
builder.append(args[i++]);
templateStart = placeholderStart + 2;
}
builder.append(template.substring(templateStart));
// if we run out of placeholders, append the extra args in square braces
if (i < args.length) {
builder.append(" [");
builder.append(args[i++]);
while (i < args.length) {
builder.append(", ");
builder.append(args[i++]);
}
builder.append("]");
}
return builder.toString();
}
} | 0true
| src_main_java_org_elasticsearch_common_Preconditions.java |
2,329 | interface Builder {
/**
* Builds the settings.
*/
Settings build();
} | 0true
| src_main_java_org_elasticsearch_common_settings_Settings.java |
4,722 | public class RepositoriesService extends AbstractComponent implements ClusterStateListener {
private final RepositoryTypesRegistry typesRegistry;
private final Injector injector;
private final ClusterService clusterService;
private volatile ImmutableMap<String, RepositoryHolder> repositories = ImmutableMap.of();
@Inject
public RepositoriesService(Settings settings, ClusterService clusterService, RepositoryTypesRegistry typesRegistry, Injector injector) {
super(settings);
this.typesRegistry = typesRegistry;
this.injector = injector;
this.clusterService = clusterService;
// Doesn't make sense to maintain repositories on non-master and non-data nodes
// Nothing happens there anyway
if (DiscoveryNode.dataNode(settings) || DiscoveryNode.masterNode(settings)) {
clusterService.add(this);
}
}
/**
* Registers new repository in the cluster
* <p/>
* This method can be only called on the master node. It tries to create a new repository on the master
* and if it was successful it adds new repository to cluster metadata.
*
* @param request register repository request
* @param listener register repository listener
*/
public void registerRepository(final RegisterRepositoryRequest request, final ActionListener<RegisterRepositoryResponse> listener) {
final RepositoryMetaData newRepositoryMetaData = new RepositoryMetaData(request.name, request.type, request.settings);
clusterService.submitStateUpdateTask(request.cause, new AckedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
ensureRepositoryNotInUse(currentState, request.name);
// Trying to create the new repository on master to make sure it works
if (!registerRepository(newRepositoryMetaData)) {
// The new repository has the same settings as the old one - ignore
return currentState;
}
MetaData metaData = currentState.metaData();
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
if (repositories == null) {
logger.info("put repository [{}]", request.name);
repositories = new RepositoriesMetaData(new RepositoryMetaData(request.name, request.type, request.settings));
} else {
boolean found = false;
List<RepositoryMetaData> repositoriesMetaData = new ArrayList<RepositoryMetaData>(repositories.repositories().size() + 1);
for (RepositoryMetaData repositoryMetaData : repositories.repositories()) {
if (repositoryMetaData.name().equals(newRepositoryMetaData.name())) {
found = true;
repositoriesMetaData.add(newRepositoryMetaData);
} else {
repositoriesMetaData.add(repositoryMetaData);
}
}
if (!found) {
logger.info("put repository [{}]", request.name);
repositoriesMetaData.add(new RepositoryMetaData(request.name, request.type, request.settings));
} else {
logger.info("update repository [{}]", request.name);
}
repositories = new RepositoriesMetaData(repositoriesMetaData.toArray(new RepositoryMetaData[repositoriesMetaData.size()]));
}
mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories);
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.warn("failed to create repository [{}]", t, request.name);
listener.onFailure(t);
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
return discoveryNode.masterNode();
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
listener.onResponse(new RegisterRepositoryResponse(true));
}
@Override
public void onAckTimeout() {
listener.onResponse(new RegisterRepositoryResponse(false));
}
@Override
public TimeValue ackTimeout() {
return request.ackTimeout();
}
});
}
/**
* Unregisters repository in the cluster
* <p/>
* This method can be only called on the master node. It removes repository information from cluster metadata.
*
* @param request unregister repository request
* @param listener unregister repository listener
*/
public void unregisterRepository(final UnregisterRepositoryRequest request, final ActionListener<UnregisterRepositoryResponse> listener) {
clusterService.submitStateUpdateTask(request.cause, new AckedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
ensureRepositoryNotInUse(currentState, request.name);
MetaData metaData = currentState.metaData();
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
if (repositories != null && repositories.repositories().size() > 0) {
List<RepositoryMetaData> repositoriesMetaData = new ArrayList<RepositoryMetaData>(repositories.repositories().size());
boolean changed = false;
for (RepositoryMetaData repositoryMetaData : repositories.repositories()) {
if (Regex.simpleMatch(request.name, repositoryMetaData.name())) {
logger.info("delete repository [{}]", repositoryMetaData.name());
changed = true;
} else {
repositoriesMetaData.add(repositoryMetaData);
}
}
if (changed) {
repositories = new RepositoriesMetaData(repositoriesMetaData.toArray(new RepositoryMetaData[repositoriesMetaData.size()]));
mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories);
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
}
throw new RepositoryMissingException(request.name);
}
@Override
public void onFailure(String source, Throwable t) {
listener.onFailure(t);
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
// Since operation occurs only on masters, it's enough that only master-eligible nodes acked
return discoveryNode.masterNode();
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
listener.onResponse(new UnregisterRepositoryResponse(true));
}
@Override
public void onAckTimeout() {
listener.onResponse(new UnregisterRepositoryResponse(false));
}
@Override
public TimeValue ackTimeout() {
return request.ackTimeout();
}
});
}
/**
* Checks if new repositories appeared in or disappeared from cluster metadata and updates current list of
* repositories accordingly.
*
* @param event cluster changed event
*/
@Override
public void clusterChanged(ClusterChangedEvent event) {
try {
RepositoriesMetaData oldMetaData = event.previousState().getMetaData().custom(RepositoriesMetaData.TYPE);
RepositoriesMetaData newMetaData = event.state().getMetaData().custom(RepositoriesMetaData.TYPE);
// Check if repositories got changed
if ((oldMetaData == null && newMetaData == null) || (oldMetaData != null && oldMetaData.equals(newMetaData))) {
return;
}
Map<String, RepositoryHolder> survivors = newHashMap();
// First, remove repositories that are no longer there
for (Map.Entry<String, RepositoryHolder> entry : repositories.entrySet()) {
if (newMetaData == null || newMetaData.repository(entry.getKey()) == null) {
closeRepository(entry.getKey(), entry.getValue());
} else {
survivors.put(entry.getKey(), entry.getValue());
}
}
ImmutableMap.Builder<String, RepositoryHolder> builder = ImmutableMap.builder();
if (newMetaData != null) {
// Now go through all repositories and update existing or create missing
for (RepositoryMetaData repositoryMetaData : newMetaData.repositories()) {
RepositoryHolder holder = survivors.get(repositoryMetaData.name());
if (holder != null) {
// Found previous version of this repository
if (!holder.type.equals(repositoryMetaData.type()) || !holder.settings.equals(repositoryMetaData.settings())) {
// Previous version is different from the version in settings
closeRepository(repositoryMetaData.name(), holder);
holder = createRepositoryHolder(repositoryMetaData);
}
} else {
holder = createRepositoryHolder(repositoryMetaData);
}
if (holder != null) {
builder.put(repositoryMetaData.name(), holder);
}
}
}
repositories = builder.build();
} catch (Throwable ex) {
logger.warn("failure updating cluster state ", ex);
}
}
/**
* Returns registered repository
* <p/>
* This method is called only on the master node
*
* @param repository repository name
* @return registered repository
* @throws RepositoryMissingException if repository with such name isn't registered
*/
public Repository repository(String repository) {
RepositoryHolder holder = repositories.get(repository);
if (holder != null) {
return holder.repository;
}
throw new RepositoryMissingException(repository);
}
/**
* Returns registered index shard repository
* <p/>
* This method is called only on data nodes
*
* @param repository repository name
* @return registered repository
* @throws RepositoryMissingException if repository with such name isn't registered
*/
public IndexShardRepository indexShardRepository(String repository) {
RepositoryHolder holder = repositories.get(repository);
if (holder != null) {
return holder.indexShardRepository;
}
throw new RepositoryMissingException(repository);
}
/**
* Creates a new repository and adds it to the list of registered repositories.
* <p/>
* If a repository with the same name but different types or settings already exists, it will be closed and
* replaced with the new repository. If a repository with the same name exists but it has the same type and settings
* the new repository is ignored.
*
* @param repositoryMetaData new repository metadata
* @return {@code true} if new repository was added or {@code false} if it was ignored
*/
private boolean registerRepository(RepositoryMetaData repositoryMetaData) {
RepositoryHolder previous = repositories.get(repositoryMetaData.name());
if (previous != null) {
if (!previous.type.equals(repositoryMetaData.type()) && previous.settings.equals(repositoryMetaData.settings())) {
// Previous version is the same as this one - ignore it
return false;
}
}
RepositoryHolder holder = createRepositoryHolder(repositoryMetaData);
if (previous != null) {
// Closing previous version
closeRepository(repositoryMetaData.name(), previous);
}
Map<String, RepositoryHolder> newRepositories = newHashMap(repositories);
newRepositories.put(repositoryMetaData.name(), holder);
repositories = ImmutableMap.copyOf(newRepositories);
return true;
}
/**
* Closes the repository
*
* @param name repository name
* @param holder repository holder
*/
private void closeRepository(String name, RepositoryHolder holder) {
logger.debug("closing repository [{}][{}]", holder.type, name);
if (holder.injector != null) {
Injectors.close(holder.injector);
}
if (holder.repository != null) {
holder.repository.close();
}
}
/**
* Creates repository holder
*/
private RepositoryHolder createRepositoryHolder(RepositoryMetaData repositoryMetaData) {
logger.debug("creating repository [{}][{}]", repositoryMetaData.type(), repositoryMetaData.name());
Injector repositoryInjector = null;
try {
ModulesBuilder modules = new ModulesBuilder();
RepositoryName name = new RepositoryName(repositoryMetaData.type(), repositoryMetaData.name());
modules.add(new RepositoryNameModule(name));
modules.add(new RepositoryModule(name, repositoryMetaData.settings(), this.settings, typesRegistry));
repositoryInjector = modules.createChildInjector(injector);
Repository repository = repositoryInjector.getInstance(Repository.class);
IndexShardRepository indexShardRepository = repositoryInjector.getInstance(IndexShardRepository.class);
repository.start();
return new RepositoryHolder(repositoryMetaData.type(), repositoryMetaData.settings(), repositoryInjector, repository, indexShardRepository);
} catch (Throwable t) {
if (repositoryInjector != null) {
Injectors.close(repositoryInjector);
}
logger.warn("failed to create repository [{}][{}]", t, repositoryMetaData.type(), repositoryMetaData.name());
throw new RepositoryException(repositoryMetaData.name(), "failed to create repository", t);
}
}
private void ensureRepositoryNotInUse(ClusterState clusterState, String repository) {
if (SnapshotsService.isRepositoryInUse(clusterState, repository) || RestoreService.isRepositoryInUse(clusterState, repository)) {
throw new ElasticsearchIllegalStateException("trying to modify or unregister repository that is currently used ");
}
}
/**
* Internal data structure for holding repository with its configuration information and injector
*/
private static class RepositoryHolder {
private final String type;
private final Settings settings;
private final Injector injector;
private final Repository repository;
private final IndexShardRepository indexShardRepository;
public RepositoryHolder(String type, Settings settings, Injector injector, Repository repository, IndexShardRepository indexShardRepository) {
this.type = type;
this.settings = settings;
this.repository = repository;
this.indexShardRepository = indexShardRepository;
this.injector = injector;
}
}
/**
* Register repository request
*/
public static class RegisterRepositoryRequest extends ClusterStateUpdateRequest<RegisterRepositoryRequest> {
final String cause;
final String name;
final String type;
Settings settings = EMPTY_SETTINGS;
/**
* Constructs new register repository request
*
* @param cause repository registration cause
* @param name repository name
* @param type repository type
*/
public RegisterRepositoryRequest(String cause, String name, String type) {
this.cause = cause;
this.name = name;
this.type = type;
}
/**
* Sets repository settings
*
* @param settings repository settings
* @return this request
*/
public RegisterRepositoryRequest settings(Settings settings) {
this.settings = settings;
return this;
}
}
/**
* Register repository response
*/
public static class RegisterRepositoryResponse extends ClusterStateUpdateResponse {
RegisterRepositoryResponse(boolean acknowledged) {
super(acknowledged);
}
}
/**
* Unregister repository request
*/
public static class UnregisterRepositoryRequest extends ClusterStateUpdateRequest<UnregisterRepositoryRequest> {
final String cause;
final String name;
/**
* Creates a new unregister repository request
*
* @param cause repository unregistration cause
* @param name repository name
*/
public UnregisterRepositoryRequest(String cause, String name) {
this.cause = cause;
this.name = name;
}
}
/**
* Unregister repository response
*/
public static class UnregisterRepositoryResponse extends ClusterStateUpdateResponse {
UnregisterRepositoryResponse(boolean acknowledged) {
super(acknowledged);
}
}
} | 1no label
| src_main_java_org_elasticsearch_repositories_RepositoriesService.java |
940 | Thread thread = new Thread(new Runnable() {
public void run() {
try {
lock2.lock();
error.set(true);
} catch (Throwable ignored) {
}
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_concurrent_lock_LockTest.java |
10 | CollectionUtils.collect(values, new Transformer() {
@Override
public Object transform(Object input) {
return ((ProductOptionValue) input).getAttributeValue();
}
}, stringValues); | 0true
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_SkuCustomPersistenceHandler.java |
2,598 | private static class MasterPingResponseResponse extends TransportResponse {
private boolean connectedToMaster;
private MasterPingResponseResponse() {
}
private MasterPingResponseResponse(boolean connectedToMaster) {
this.connectedToMaster = connectedToMaster;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
connectedToMaster = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(connectedToMaster);
}
} | 0true
| src_main_java_org_elasticsearch_discovery_zen_fd_MasterFaultDetection.java |
427 | public class ClusterStateRequest extends MasterNodeReadOperationRequest<ClusterStateRequest> {
private boolean routingTable = true;
private boolean nodes = true;
private boolean metaData = true;
private boolean blocks = true;
private String[] indices = Strings.EMPTY_ARRAY;
private String[] indexTemplates = Strings.EMPTY_ARRAY;
public ClusterStateRequest() {
}
@Override
public ActionRequestValidationException validate() {
return null;
}
public ClusterStateRequest all() {
routingTable = true;
nodes = true;
metaData = true;
blocks = true;
indices = Strings.EMPTY_ARRAY;
indexTemplates = Strings.EMPTY_ARRAY;
return this;
}
public ClusterStateRequest clear() {
routingTable = false;
nodes = false;
metaData = false;
blocks = false;
indices = Strings.EMPTY_ARRAY;
indexTemplates = Strings.EMPTY_ARRAY;
return this;
}
public boolean routingTable() {
return routingTable;
}
public ClusterStateRequest routingTable(boolean routingTable) {
this.routingTable = routingTable;
return this;
}
public boolean nodes() {
return nodes;
}
public ClusterStateRequest nodes(boolean nodes) {
this.nodes = nodes;
return this;
}
public boolean metaData() {
return metaData;
}
public ClusterStateRequest metaData(boolean metaData) {
this.metaData = metaData;
return this;
}
public boolean blocks() {
return blocks;
}
public ClusterStateRequest blocks(boolean blocks) {
this.blocks = blocks;
return this;
}
public String[] indices() {
return indices;
}
public ClusterStateRequest indices(String... indices) {
this.indices = indices;
return this;
}
public String[] indexTemplates() {
return this.indexTemplates;
}
public ClusterStateRequest indexTemplates(String... indexTemplates) {
this.indexTemplates = indexTemplates;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
routingTable = in.readBoolean();
nodes = in.readBoolean();
metaData = in.readBoolean();
blocks = in.readBoolean();
indices = in.readStringArray();
indexTemplates = in.readStringArray();
readLocal(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(routingTable);
out.writeBoolean(nodes);
out.writeBoolean(metaData);
out.writeBoolean(blocks);
out.writeStringArray(indices);
out.writeStringArray(indexTemplates);
writeLocal(out);
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_cluster_state_ClusterStateRequest.java |
326 | ExecutionCallback executionCallback = new ExecutionCallback() {
@Override
public void onResponse(Object response) {
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
}
}; | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTest.java |
352 | public class ODefaultDataSegmentStrategy implements ODataSegmentStrategy {
public int assignDataSegmentId(final ODatabase iDatabase, final ORecord<?> iRecord) {
// GET THE DATASEGMENT SPECIFIED IN THE RECORD IF ANY
final String dsName = iRecord.getDataSegmentName();
if (dsName != null)
return iDatabase.getDataSegmentIdByName(dsName);
// GET THE DATA SEGMENT CONFIGURED IN THE CLUSTER IF ANY
final int clusterId = iRecord.getIdentity().getClusterId();
if (clusterId >= 0)
return iDatabase.getStorage().getClusterById(clusterId).getDataSegmentId();
// RETURN 0 AS DEFAULT ONE
return 0;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_ODefaultDataSegmentStrategy.java |
922 | new ConstructorFunction<ObjectNamespace, EntryTaskScheduler>() {
@Override
public EntryTaskScheduler createNew(ObjectNamespace namespace) {
LockEvictionProcessor entryProcessor = new LockEvictionProcessor(nodeEngine, namespace);
ScheduledExecutorService scheduledExecutor =
nodeEngine.getExecutionService().getDefaultScheduledExecutor();
return EntryTaskSchedulerFactory
.newScheduler(scheduledExecutor, entryProcessor, ScheduleType.POSTPONE);
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_LockServiceImpl.java |
1,747 | private static class IncrementorEntryProcessor extends AbstractEntryProcessor implements DataSerializable {
IncrementorEntryProcessor() {
super(true);
}
public Object process(Map.Entry entry) {
Integer value = (Integer) entry.getValue();
if (value == null) {
value = 0;
}
if (value == -1) {
entry.setValue(null);
return null;
}
value++;
entry.setValue(value);
return value;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
}
@Override
public void readData(ObjectDataInput in) throws IOException {
}
public void processBackup(Map.Entry entry) {
entry.setValue((Integer) entry.getValue() + 1);
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_EntryProcessorTest.java |
577 | public class ClusterProxy implements Cluster {
private final ClusterServiceImpl clusterService;
public ClusterProxy(final ClusterServiceImpl clusterService) {
this.clusterService = clusterService;
}
@Override
public Member getLocalMember() {
return clusterService.getLocalMember();
}
@Override
public Set<Member> getMembers() {
return clusterService.getMembers();
}
@Override
public long getClusterTime() {
return clusterService.getClusterTime();
}
@Override
public String addMembershipListener(MembershipListener listener) {
return clusterService.addMembershipListener(listener);
}
@Override
public boolean removeMembershipListener(final String registrationId) {
return clusterService.removeMembershipListener(registrationId);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_cluster_ClusterProxy.java |
3,461 | static final class Fields {
static final XContentBuilderString GET = new XContentBuilderString("get");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString TIME = new XContentBuilderString("getTime");
static final XContentBuilderString TIME_IN_MILLIS = new XContentBuilderString("time_in_millis");
static final XContentBuilderString EXISTS_TOTAL = new XContentBuilderString("exists_total");
static final XContentBuilderString EXISTS_TIME = new XContentBuilderString("exists_time");
static final XContentBuilderString EXISTS_TIME_IN_MILLIS = new XContentBuilderString("exists_time_in_millis");
static final XContentBuilderString MISSING_TOTAL = new XContentBuilderString("missing_total");
static final XContentBuilderString MISSING_TIME = new XContentBuilderString("missing_time");
static final XContentBuilderString MISSING_TIME_IN_MILLIS = new XContentBuilderString("missing_time_in_millis");
static final XContentBuilderString CURRENT = new XContentBuilderString("current");
} | 0true
| src_main_java_org_elasticsearch_index_get_GetStats.java |
883 | threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
Tuple<String, Long> target = context1[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
executePhase(i, node, target.v2());
}
}
}
}); | 0true
| src_main_java_org_elasticsearch_action_search_type_TransportSearchScrollQueryAndFetchAction.java |
3,241 | class PerSegmentComparator extends NestedWrappableComparator<BytesRef> {
final Ordinals.Docs readerOrds;
final BytesValues.WithOrdinals termsIndex;
public PerSegmentComparator(BytesValues.WithOrdinals termsIndex) {
this.readerOrds = termsIndex.ordinals();
this.termsIndex = termsIndex;
if (readerOrds.getNumOrds() > Long.MAX_VALUE / 4) {
throw new IllegalStateException("Current terms index pretends it has more than " + (Long.MAX_VALUE / 4) + " ordinals, which is unsupported by this impl");
}
}
@Override
public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException {
return BytesRefOrdValComparator.this.setNextReader(context);
}
@Override
public int compare(int slot1, int slot2) {
return BytesRefOrdValComparator.this.compare(slot1, slot2);
}
@Override
public void setBottom(final int bottom) {
BytesRefOrdValComparator.this.setBottom(bottom);
}
@Override
public BytesRef value(int slot) {
return BytesRefOrdValComparator.this.value(slot);
}
@Override
public int compareValues(BytesRef val1, BytesRef val2) {
if (val1 == null) {
if (val2 == null) {
return 0;
}
return -1;
} else if (val2 == null) {
return 1;
}
return val1.compareTo(val2);
}
@Override
public int compareDocToValue(int doc, BytesRef value) {
final long ord = getOrd(doc);
final BytesRef docValue = ord == Ordinals.MISSING_ORDINAL ? missingValue : termsIndex.getValueByOrd(ord);
return compareValues(docValue, value);
}
protected long getOrd(int doc) {
return readerOrds.getOrd(doc);
}
@Override
public int compareBottom(int doc) {
assert bottomSlot != -1;
final long docOrd = getOrd(doc);
final long comparableOrd = docOrd == Ordinals.MISSING_ORDINAL ? missingOrd : docOrd << 2;
return LongValuesComparator.compare(bottomOrd, comparableOrd);
}
@Override
public int compareBottomMissing() {
assert bottomSlot != -1;
return LongValuesComparator.compare(bottomOrd, missingOrd);
}
@Override
public void copy(int slot, int doc) {
final long ord = getOrd(doc);
if (ord == Ordinals.MISSING_ORDINAL) {
ords[slot] = missingOrd;
values[slot] = missingValue;
} else {
assert ord > 0;
ords[slot] = ord << 2;
if (values[slot] == null || values[slot] == missingValue) {
values[slot] = new BytesRef();
}
values[slot].copyBytes(termsIndex.getValueByOrd(ord));
}
readerGen[slot] = currentReaderGen;
}
@Override
public void missing(int slot) {
ords[slot] = missingOrd;
values[slot] = missingValue;
}
} | 1no label
| src_main_java_org_elasticsearch_index_fielddata_fieldcomparator_BytesRefOrdValComparator.java |
1,898 | public class CacheRecord implements Comparable<CacheRecord> {
final Data key;
final Object value;
final long creationTime;
final AtomicInteger hit;
volatile long lastAccessTime;
CacheRecord(Data key, Object value) {
this.key = key;
this.value = value;
long time = Clock.currentTimeMillis();
this.lastAccessTime = time;
this.creationTime = time;
this.hit = new AtomicInteger(0);
}
void access() {
hit.incrementAndGet();
nearCacheStats.incrementHits();
lastAccessTime = Clock.currentTimeMillis();
}
boolean expired() {
long time = Clock.currentTimeMillis();
return (maxIdleMillis > 0 && time > lastAccessTime + maxIdleMillis)
|| (timeToLiveMillis > 0 && time > creationTime + timeToLiveMillis);
}
public int compareTo(CacheRecord o) {
if (EvictionPolicy.LRU.equals(evictionPolicy)) {
return ((Long) this.lastAccessTime).compareTo((o.lastAccessTime));
} else if (EvictionPolicy.LFU.equals(evictionPolicy)) {
return ((Integer) this.hit.get()).compareTo((o.hit.get()));
}
return 0;
}
public boolean equals(Object o) {
if (o != null && o instanceof CacheRecord) {
return this.compareTo((CacheRecord) o) == 0;
}
return false;
}
// If you don't think instances of this class will ever be inserted into a HashMap/HashTable,
// the recommended hashCode implementation to use is:
public int hashCode() {
assert false : "hashCode not designed";
// any arbitrary constant will do.
return 42;
}
public long getCost() {
// todo find object size if not a Data instance.
if (!(value instanceof Data)) {
return 0;
}
final int numberOfLongs = 2;
final int numberOfIntegers = 3;
// value is Data
return key.getHeapCost()
+ ((Data) value).getHeapCost()
+ numberOfLongs * (Long.SIZE / Byte.SIZE)
// sizeof atomic integer
+ (Integer.SIZE / Byte.SIZE)
// object references (key, value, hit)
+ numberOfIntegers * (Integer.SIZE / Byte.SIZE);
}
public Data getKey() {
return key;
}
public Object getValue() {
return value;
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_NearCache.java |
1,251 | public class ClientTransportModule extends AbstractModule {
@Override
protected void configure() {
bind(InternalTransportClient.class).asEagerSingleton();
bind(InternalTransportAdminClient.class).asEagerSingleton();
bind(InternalTransportIndicesAdminClient.class).asEagerSingleton();
bind(InternalTransportClusterAdminClient.class).asEagerSingleton();
bind(TransportClientNodesService.class).asEagerSingleton();
}
} | 0true
| src_main_java_org_elasticsearch_client_transport_ClientTransportModule.java |
1,061 | public class ManagementCenterConfig {
private boolean enabled = false;
private String url;
private int updateInterval = 5;
private String clusterId;
private String securityToken;
public ManagementCenterConfig() {
}
public ManagementCenterConfig(final String url, final int dataUpdateInterval) {
this.url = url;
this.updateInterval = dataUpdateInterval;
}
public boolean isEnabled() {
return enabled;
}
public ManagementCenterConfig setEnabled(final boolean enabled) {
this.enabled = enabled;
return this;
}
public String getUrl() {
return url;
}
public ManagementCenterConfig setUrl(final String url) {
this.url = url;
return this;
}
public int getUpdateInterval() {
return updateInterval;
}
public ManagementCenterConfig setUpdateInterval(final int updateInterval) {
this.updateInterval = updateInterval;
return this;
}
public String getClusterId() {
return clusterId;
}
public void setClusterId(String clusterId) {
this.clusterId = clusterId;
}
public String getSecurityToken() {
return securityToken;
}
public void setSecurityToken(String securityToken) {
this.securityToken = securityToken;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("ManagementCenterConfig");
sb.append("{enabled=").append(enabled);
sb.append(", url='").append(url).append('\'');
sb.append(", clusterId='").append(clusterId).append('\'');
sb.append(", securityToken='").append(securityToken).append('\'');
sb.append(", url='").append(url).append('\'');
sb.append(", updateInterval=").append(updateInterval);
sb.append('}');
return sb.toString();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_ManagementCenterConfig.java |
597 | public class ServiceStatusType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, ServiceStatusType> TYPES = new LinkedHashMap<String, ServiceStatusType>();
public static final ServiceStatusType UP = new ServiceStatusType("UP", "Up");
public static final ServiceStatusType DOWN = new ServiceStatusType("DOWN", "Down");
public static final ServiceStatusType PAUSED = new ServiceStatusType("PAUSED", "Paused");
public static ServiceStatusType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public ServiceStatusType() {
//do nothing
}
public ServiceStatusType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)){
TYPES.put(type, this);
} else {
throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName());
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ServiceStatusType other = (ServiceStatusType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_vendor_service_type_ServiceStatusType.java |
848 | new Visitor() {
private boolean needsParens = false;
@Override
public void visit(Tree.Variable that) {
if (that.getType() instanceof Tree.SyntheticVariable) {
TypedDeclaration od =
that.getDeclarationModel().getOriginalDeclaration();
if (od!=null && od.equals(declaration) && delete) {
Integer startIndex =
that.getSpecifierExpression().getStartIndex();
tfc.addEdit(new InsertEdit(startIndex,
that.getIdentifier().getText()+" = "));
}
}
super.visit(that);
}
@Override
public void visit(Tree.MemberOrTypeExpression that) {
super.visit(that);
inlineDefinition(tokens, declarationTokens, term,
tfc, null, that, needsParens);
}
@Override
public void visit(Tree.OperatorExpression that) {
boolean onp = needsParens;
needsParens=true;
super.visit(that);
needsParens = onp;
}
@Override
public void visit(Tree.StatementOrArgument that) {
boolean onp = needsParens;
needsParens = false;
super.visit(that);
needsParens = onp;
}
@Override
public void visit(Tree.Expression that) {
boolean onp = needsParens;
needsParens = false;
super.visit(that);
needsParens = onp;
}
}.visit(pu); | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_refactor_InlineRefactoring.java |
94 | @Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
public @interface ConsoleParameter {
String name() default "";
String description() default "";
boolean optional() default false;
} | 0true
| commons_src_main_java_com_orientechnologies_common_console_annotation_ConsoleParameter.java |
442 | static final class Fields {
static final XContentBuilderString VERSIONS = new XContentBuilderString("versions");
static final XContentBuilderString VERSION = new XContentBuilderString("version");
static final XContentBuilderString VM_NAME = new XContentBuilderString("vm_name");
static final XContentBuilderString VM_VERSION = new XContentBuilderString("vm_version");
static final XContentBuilderString VM_VENDOR = new XContentBuilderString("vm_vendor");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString THREADS = new XContentBuilderString("threads");
static final XContentBuilderString MAX_UPTIME = new XContentBuilderString("max_uptime");
static final XContentBuilderString MAX_UPTIME_IN_MILLIS = new XContentBuilderString("max_uptime_in_millis");
static final XContentBuilderString MEM = new XContentBuilderString("mem");
static final XContentBuilderString HEAP_USED = new XContentBuilderString("heap_used");
static final XContentBuilderString HEAP_USED_IN_BYTES = new XContentBuilderString("heap_used_in_bytes");
static final XContentBuilderString HEAP_MAX = new XContentBuilderString("heap_max");
static final XContentBuilderString HEAP_MAX_IN_BYTES = new XContentBuilderString("heap_max_in_bytes");
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java |
1,039 | public static class Tab {
public static class Name {
public static final String Advanced = "OrderImpl_Advanced";
}
public static class Order {
public static final int Advanced = 2000;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderItemImpl.java |
329 | @SuppressWarnings("serial")
public class OStorageTxConfiguration extends OStorageFileConfiguration {
private static final String DEF_EXTENSION = ".otd";
private static final String DEF_MAX_SIZE = "512mb";
private static final String DEF_INCREMENT_SIZE = "50%";
private boolean synchRecord = false;
private boolean synchTx = true;
public OStorageTxConfiguration() {
maxSize = DEF_MAX_SIZE;
}
public OStorageTxConfiguration(final String iPath, final String iType, final String iMaxSize, final String iSynchRecord,
final String iSynchTx) {
super(null, iPath + DEF_EXTENSION, iType, iMaxSize != null ? iMaxSize : DEF_MAX_SIZE, DEF_INCREMENT_SIZE);
synchRecord = Boolean.parseBoolean(iSynchRecord);
synchTx = Boolean.parseBoolean(iSynchTx);
}
public boolean isSynchRecord() {
return synchRecord;
}
public boolean isSynchTx() {
return synchTx;
}
public void setSynchRecord(boolean synchRecord) {
this.synchRecord = synchRecord;
}
public void setSynchTx(boolean synchTx) {
this.synchTx = synchTx;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_config_OStorageTxConfiguration.java |
803 | @Entity
@Table(name = "BLC_CUSTOMER_OFFER_XREF")
@Inheritance(strategy=InheritanceType.JOINED)
public class CustomerOfferImpl implements CustomerOffer {
public static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator= "CustomerOfferId")
@GenericGenerator(
name="CustomerOfferId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="CustomerOfferImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.offer.domain.CustomerOfferImpl")
}
)
@Column(name = "CUSTOMER_OFFER_ID")
protected Long id;
@ManyToOne(targetEntity = CustomerImpl.class, optional=false)
@JoinColumn(name = "CUSTOMER_ID")
@Index(name="CUSTOFFER_CUSTOMER_INDEX", columnNames={"CUSTOMER_ID"})
protected Customer customer;
@ManyToOne(targetEntity = OfferImpl.class, optional=false)
@JoinColumn(name = "OFFER_ID")
@Index(name="CUSTOFFER_OFFER_INDEX", columnNames={"OFFER_ID"})
protected Offer offer;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Offer getOffer() {
return offer;
}
@Override
public void setOffer(Offer offer) {
this.offer = offer;
}
@Override
public Customer getCustomer() {
return customer;
}
@Override
public void setCustomer(Customer customer) {
this.customer = customer;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((customer == null) ? 0 : customer.hashCode());
result = prime * result + ((offer == null) ? 0 : offer.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CustomerOfferImpl other = (CustomerOfferImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (customer == null) {
if (other.customer != null)
return false;
} else if (!customer.equals(other.customer))
return false;
if (offer == null) {
if (other.offer != null)
return false;
} else if (!offer.equals(other.offer))
return false;
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_CustomerOfferImpl.java |
304 | {
@Override
protected NodePropertyUpdate read( NodeRecord node )
{
long[] labels = parseLabelsField( node ).get( nodeStore );
if ( !containsLabel( soughtLabelId, labels ) )
{
return null;
}
for ( PropertyBlock property : properties( node ) )
{
int propertyKeyId = property.getKeyIndexId();
if ( soughtPropertyKeyId == propertyKeyId )
{
return NodePropertyUpdate.add( node.getId(), propertyKeyId, valueOf( property ), labels );
}
}
return null;
}
@Override
protected void process( NodePropertyUpdate update ) throws FAILURE
{
visitor.visit( update );
}
}; | 0true
| community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_NeoStoreIndexStoreView.java |
906 | Thread t = new Thread(new Runnable() {
public void run() {
try {
lock.lock();
if (lock.isLockedByCurrentThread()) {
count.incrementAndGet();
}
condition.await();
if (lock.isLockedByCurrentThread()) {
count.incrementAndGet();
}
} catch (InterruptedException ignored) {
} finally {
lock.unlock();
}
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_concurrent_lock_ConditionTest.java |
1,349 | @Repository("blZipCodeDao")
public class ZipCodeDaoImpl implements ZipCodeDao {
@PersistenceContext(unitName="blPU")
private EntityManager em;
@SuppressWarnings("unchecked")
public ZipCode findZipCodeByZipCode(Integer zipCode) {
Query query = em.createNamedQuery("BC_FIND_ZIP_CODE_BY_ZIP_CODE");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("zipCode", zipCode);
List<ZipCode> result = query.getResultList();
return (result.size() > 0) ? result.get(0) : null;
}
@SuppressWarnings("unchecked")
public ZipCode findBestZipCode(String pCity, String pCounty, String pState, Integer pZipCode, Long pZipGeo) {
// If we have a zip geo, use it
if ( pZipGeo != null ) {
Query query = em.createNamedQuery("FIND_ZIP_WITH_GEO");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("geo", pZipGeo);
query.setParameter("city", pCity);
query.setParameter("zipCode", pZipCode);
query.setParameter("state", pState);
List<ZipCode> result = query.getResultList();
if (result.size() > 0) {
return result.get(0);
}
}
// If we have a county, try and find a match
if ( pCounty != null && !"".equals(pCounty.trim()) ) {
Query query = em.createNamedQuery("FIND_ZIP_WITH_COUNTY");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("county", pCounty);
query.setParameter("city", pCity);
query.setParameter("zipCode", pZipCode);
query.setParameter("state", pState);
List<ZipCode> result = query.getResultList();
if (result.size() > 0) {
return result.get(0);
}
}
{
// first try for exact match with city, state, zip
Query query = em.createNamedQuery("FIND_ZIP_WITH_CITY_STATE_ZIP");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("city", pCity);
query.setParameter("zipCode", pZipCode);
query.setParameter("state", pState);
List<ZipCode> result = query.getResultList();
if (result.size() > 0) {
return result.get(0);
}
}
{
// now try soundex match with soundex(city),state,zip
Query query = em.createNamedQuery("FIND_ZIP_WITH_SOUNDEX");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("city", pCity);
query.setParameter("zipCode", pZipCode);
query.setParameter("state", pState);
List<ZipCode> result = query.getResultList();
if (result.size() > 0) {
return result.get(0);
}
}
{
// now try state and zip
Query query = em.createNamedQuery("FIND_ZIP_WITH_STATE_ZIP");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("zipCode", pZipCode);
query.setParameter("state", pState);
List<ZipCode> result = query.getResultList();
if (result.size() > 0) {
return result.get(0);
}
}
{
// finally just try state
Query query = em.createNamedQuery("FIND_ZIP_WITH_STATE");
query.setHint("org.hibernate.cacheable", true);
query.setParameter("state", pState);
List<ZipCode> result = query.getResultList();
if (result.size() > 0) {
return result.get(0);
}
}
return null;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_store_dao_ZipCodeDaoImpl.java |
1,208 | public interface OClusterFactory {
OCluster createCluster(String iType);
OCluster createCluster(OStorageClusterConfiguration iConfig);
String[] getSupported();
boolean isSupported(String clusterType);
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_OClusterFactory.java |
1,487 | public class BroadleafSearchController extends AbstractCatalogController {
@Resource(name = "blSearchService")
protected SearchService searchService;
@Resource(name = "blExploitProtectionService")
protected ExploitProtectionService exploitProtectionService;
@Resource(name = "blSearchFacetDTOService")
protected SearchFacetDTOService facetService;
@Resource(name = "blSearchRedirectService")
private SearchRedirectService searchRedirectService;
protected static String searchView = "catalog/search";
protected static String PRODUCTS_ATTRIBUTE_NAME = "products";
protected static String FACETS_ATTRIBUTE_NAME = "facets";
protected static String PRODUCT_SEARCH_RESULT_ATTRIBUTE_NAME = "result";
protected static String ACTIVE_FACETS_ATTRIBUTE_NAME = "activeFacets";
protected static String ORIGINAL_QUERY_ATTRIBUTE_NAME = "originalQuery";
public String search(Model model, HttpServletRequest request, HttpServletResponse response,String query) throws ServletException, IOException, ServiceException {
try {
if (StringUtils.isNotEmpty(query)) {
query = StringUtils.trim(query);
query = exploitProtectionService.cleanString(query);
}
} catch (ServiceException e) {
query = null;
}
if (query == null || query.length() == 0) {
return "redirect:/";
}
if (request.getParameterMap().containsKey("facetField")) {
// If we receive a facetField parameter, we need to convert the field to the
// product search criteria expected format. This is used in multi-facet selection. We
// will send a redirect to the appropriate URL to maintain canonical URLs
String fieldName = request.getParameter("facetField");
List<String> activeFieldFilters = new ArrayList<String>();
Map<String, String[]> parameters = new HashMap<String, String[]>(request.getParameterMap());
for (Iterator<Entry<String,String[]>> iter = parameters.entrySet().iterator(); iter.hasNext();){
Map.Entry<String, String[]> entry = iter.next();
String key = entry.getKey();
if (key.startsWith(fieldName + "-")) {
activeFieldFilters.add(key.substring(key.indexOf('-') + 1));
iter.remove();
}
}
parameters.remove(ProductSearchCriteria.PAGE_NUMBER);
parameters.put(fieldName, activeFieldFilters.toArray(new String[activeFieldFilters.size()]));
parameters.remove("facetField");
String newUrl = ProcessorUtils.getUrl(request.getRequestURL().toString(), parameters);
return "redirect:" + newUrl;
} else {
// Else, if we received a GET to the category URL (either the user performed a search or we redirected
// from the POST method, we can actually process the results
SearchRedirect handler = searchRedirectService.findSearchRedirectBySearchTerm(query);
if (handler != null) {
String contextPath = request.getContextPath();
String url = UrlUtil.fixRedirectUrl(contextPath, handler.getUrl());
response.sendRedirect(url);
return null;
}
if (StringUtils.isNotEmpty(query)) {
List<SearchFacetDTO> availableFacets = searchService.getSearchFacets();
ProductSearchCriteria searchCriteria = facetService.buildSearchCriteria(request, availableFacets);
ProductSearchResult result = searchService.findProductsByQuery(query, searchCriteria);
facetService.setActiveFacetResults(result.getFacets(), request);
model.addAttribute(PRODUCTS_ATTRIBUTE_NAME, result.getProducts());
model.addAttribute(FACETS_ATTRIBUTE_NAME, result.getFacets());
model.addAttribute(PRODUCT_SEARCH_RESULT_ATTRIBUTE_NAME, result);
model.addAttribute(ORIGINAL_QUERY_ATTRIBUTE_NAME, query);
}
}
return getSearchView();
}
public String getSearchView() {
return searchView;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_controller_catalog_BroadleafSearchController.java |
550 | public class ClientTxnListProxy<E> extends AbstractClientTxnCollectionProxy<E> implements TransactionalList<E> {
public ClientTxnListProxy(String name, TransactionContextProxy proxy) {
super(name, proxy);
}
public String getServiceName() {
return ListService.SERVICE_NAME;
}
public boolean add(E e) {
throwExceptionIfNull(e);
final Data value = toData(e);
final TxnListAddRequest request = new TxnListAddRequest(getName(), value);
final Boolean result = invoke(request);
return result;
}
public boolean remove(E e) {
throwExceptionIfNull(e);
final Data value = toData(e);
final TxnListRemoveRequest request = new TxnListRemoveRequest(getName(), value);
final Boolean result = invoke(request);
return result;
}
public int size() {
final TxnListSizeRequest request = new TxnListSizeRequest(getName());
final Integer result = invoke(request);
return result;
}
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_txn_proxy_ClientTxnListProxy.java |
1,646 | tasks.add(distributedQueryExecutors.submit(new Runnable() {
@Override
public void run() {
try {
Object result = node.command(wrapped.getName(), iDistributedCommand, false);
if (result != null && !node.isLocal()) {
// generally we need thread local database for record deserializing, but not hear
// select resultset will be routed thought OHazelcastResultListener, so it will never reach this block
// other commands return primitive types so that thread local database instance is not required for deserializing
result = OCommandResultSerializationHelper.readFromStream((byte[]) result);
}
addResult(result);
} catch (IOException e) {
OLogManager.instance().error(this, "Error deserializing result from node " + node.getNodeId(), e);
}
}
})); | 0true
| distributed_src_main_java_com_orientechnologies_orient_server_hazelcast_oldsharding_OAbstractDistributedQueryExecutor.java |
1,530 | public class SingleShardOneReplicaRoutingTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(SingleShardOneReplicaRoutingTests.class);
@Test
public void testSingleIndexFirstStartPrimaryThenBackups() {
AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").numberOfShards(1).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
assertThat(routingTable.index("test").shards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(0).shards().get(1).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(0).shards().get(0).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(0).shards().get(1).currentNodeId(), nullValue());
logger.info("Adding one node and performing rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1"))).build();
RoutingTable prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(INITIALIZING));
assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo("node1"));
assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), nullValue());
logger.info("Add another node and perform rerouting, nothing will happen since primary shards not started");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node2"))).build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable == routingTable, equalTo(true));
logger.info("Start the primary shard (on node1)");
RoutingNodes routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.node("node1").shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo("node1"));
assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1));
// backup shards are initializing as well, we make sure that they recover from primary *started* shards in the IndicesClusterStateService
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(INITIALIZING));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), equalTo("node2"));
logger.info("Reroute, nothing should change");
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
assertThat(prevRoutingTable == routingTable, equalTo(true));
logger.info("Start the backup shard");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.node("node2").shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo("node1"));
assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), equalTo("node2"));
logger.info("Kill node1, backup shard should become primary");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node1")).build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo("node2"));
assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1));
// backup shards are initializing as well, we make sure that they recover from primary *started* shards in the IndicesClusterStateService
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), nullValue());
logger.info("Start another node, backup shard should start initializing");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).put(newNode("node3"))).build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(0).size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(0).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(0).primaryShard().currentNodeId(), equalTo("node2"));
assertThat(routingTable.index("test").shard(0).replicaShards().size(), equalTo(1));
// backup shards are initializing as well, we make sure that they recover from primary *started* shards in the IndicesClusterStateService
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).state(), equalTo(INITIALIZING));
assertThat(routingTable.index("test").shard(0).replicaShards().get(0).currentNodeId(), equalTo("node3"));
}
} | 0true
| src_test_java_org_elasticsearch_cluster_routing_allocation_SingleShardOneReplicaRoutingTests.java |
695 | constructors[LIST_GET] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListGetRequest();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java |
1,939 | public class MapExecuteWithPredicateRequest extends AllPartitionsClientRequest {
private String name;
private EntryProcessor processor;
private Predicate predicate;
public MapExecuteWithPredicateRequest() {
}
public MapExecuteWithPredicateRequest(String name, EntryProcessor processor, Predicate predicate) {
this.name = name;
this.processor = processor;
this.predicate = predicate;
}
@Override
protected OperationFactory createOperationFactory() {
return new PartitionWideEntryWithPredicateOperationFactory(name, processor, predicate);
}
@Override
protected Object reduce(Map<Integer, Object> map) {
MapEntrySet result = new MapEntrySet();
MapService mapService = getService();
for (Object o : map.values()) {
if (o != null) {
MapEntrySet entrySet = (MapEntrySet)mapService.toObject(o);
Set<Map.Entry<Data,Data>> entries = entrySet.getEntrySet();
for (Map.Entry<Data, Data> entry : entries) {
result.add(entry);
}
}
}
return result;
}
public String getServiceName() {
return MapService.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public int getClassId() {
return MapPortableHook.EXECUTE_WITH_PREDICATE;
}
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
final ObjectDataOutput out = writer.getRawDataOutput();
out.writeObject(processor);
out.writeObject(predicate);
}
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
final ObjectDataInput in = reader.getRawDataInput();
processor = in.readObject();
predicate = in.readObject();
}
public Permission getRequiredPermission() {
return new MapPermission(name, ActionConstants.ACTION_PUT, ActionConstants.ACTION_REMOVE);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_client_MapExecuteWithPredicateRequest.java |
864 | public enum STATUSES {
SUSPENDED, ACTIVE
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_metadata_security_OUser.java |
2,010 | public static class TestMapStore extends MapStoreAdapter implements MapLoaderLifecycleSupport, MapStore {
final Map store = new ConcurrentHashMap();
final CountDownLatch latchStore;
final CountDownLatch latchStoreAll;
final CountDownLatch latchDelete;
final CountDownLatch latchDeleteAll;
final CountDownLatch latchLoad;
final CountDownLatch latchLoadAllKeys;
final CountDownLatch latchLoadAll;
CountDownLatch latchStoreOpCount;
CountDownLatch latchStoreAllOpCount;
final AtomicInteger callCount = new AtomicInteger();
final AtomicInteger initCount = new AtomicInteger();
final AtomicInteger destroyCount = new AtomicInteger();
private HazelcastInstance hazelcastInstance;
private Properties properties;
private String mapName;
private boolean loadAllKeys = true;
public TestMapStore() {
this(0, 0, 0, 0, 0, 0);
}
public TestMapStore(int expectedStore, int expectedDelete, int expectedLoad) {
this(expectedStore, 0, expectedDelete, 0, expectedLoad, 0);
}
public TestMapStore(int expectedStore, int expectedStoreAll, int expectedDelete,
int expectedDeleteAll, int expectedLoad, int expectedLoadAll) {
this(expectedStore, expectedStoreAll, expectedDelete, expectedDeleteAll,
expectedLoad, expectedLoadAll, 0);
}
public TestMapStore(int expectedStore, int expectedStoreAll, int expectedDelete,
int expectedDeleteAll, int expectedLoad, int expectedLoadAll,
int expectedLoadAllKeys) {
latchStore = new CountDownLatch(expectedStore);
latchStoreAll = new CountDownLatch(expectedStoreAll);
latchDelete = new CountDownLatch(expectedDelete);
latchDeleteAll = new CountDownLatch(expectedDeleteAll);
latchLoad = new CountDownLatch(expectedLoad);
latchLoadAll = new CountDownLatch(expectedLoadAll);
latchLoadAllKeys = new CountDownLatch(expectedLoadAllKeys);
}
public void init(HazelcastInstance hazelcastInstance, Properties properties, String mapName) {
this.hazelcastInstance = hazelcastInstance;
this.properties = properties;
this.mapName = mapName;
initCount.incrementAndGet();
}
public boolean isLoadAllKeys() {
return loadAllKeys;
}
public void setLoadAllKeys(boolean loadAllKeys) {
this.loadAllKeys = loadAllKeys;
}
public void destroy() {
destroyCount.incrementAndGet();
}
public int getInitCount() {
return initCount.get();
}
public int getDestroyCount() {
return destroyCount.get();
}
public HazelcastInstance getHazelcastInstance() {
return hazelcastInstance;
}
public String getMapName() {
return mapName;
}
public Properties getProperties() {
return properties;
}
public void assertAwait(int seconds) throws InterruptedException {
assertTrue("Store remaining: " + latchStore.getCount(), latchStore.await(seconds, TimeUnit.SECONDS));
assertTrue("Store-all remaining: " + latchStoreAll.getCount(), latchStoreAll.await(seconds, TimeUnit.SECONDS));
assertTrue("Delete remaining: " + latchDelete.getCount(), latchDelete.await(seconds, TimeUnit.SECONDS));
assertTrue("Delete-all remaining: " + latchDeleteAll.getCount(), latchDeleteAll.await(seconds, TimeUnit.SECONDS));
assertTrue("Load remaining: " + latchLoad.getCount(), latchLoad.await(seconds, TimeUnit.SECONDS));
assertTrue("Load-al remaining: " + latchLoadAll.getCount(), latchLoadAll.await(seconds, TimeUnit.SECONDS));
}
public Map getStore() {
return store;
}
public void insert(Object key, Object value) {
store.put(key, value);
}
public void store(Object key, Object value) {
store.put(key, value);
callCount.incrementAndGet();
latchStore.countDown();
if (latchStoreOpCount != null) {
latchStoreOpCount.countDown();
}
}
public Set loadAllKeys() {
callCount.incrementAndGet();
latchLoadAllKeys.countDown();
if (!loadAllKeys) return null;
return store.keySet();
}
public Object load(Object key) {
callCount.incrementAndGet();
latchLoad.countDown();
return store.get(key);
}
public void storeAll(Map map) {
store.putAll(map);
callCount.incrementAndGet();
latchStoreAll.countDown();
if (latchStoreAllOpCount != null) {
for (int i = 0; i < map.size(); i++) {
latchStoreAllOpCount.countDown();
}
}
}
public void delete(Object key) {
store.remove(key);
callCount.incrementAndGet();
latchDelete.countDown();
}
public Map loadAll(Collection keys) {
Map map = new HashMap(keys.size());
for (Object key : keys) {
Object value = store.get(key);
if (value != null) {
map.put(key, value);
}
}
callCount.incrementAndGet();
latchLoadAll.countDown();
return map;
}
public void deleteAll(Collection keys) {
for (Object key : keys) {
store.remove(key);
}
callCount.incrementAndGet();
latchDeleteAll.countDown();
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_mapstore_MapStoreTest.java |
540 | public class OHookThreadLocal extends ThreadLocal<Set<OIdentifiable>> {
public static OHookThreadLocal INSTANCE = new OHookThreadLocal();
@Override
protected Set<OIdentifiable> initialValue() {
return new HashSet<OIdentifiable>();
}
public boolean push(final OIdentifiable iRecord) {
final Set<OIdentifiable> set = get();
if (set.contains(iRecord))
return false;
set.add(iRecord);
return true;
}
public boolean pop(final OIdentifiable iRecord) {
return get().remove(iRecord);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_hook_OHookThreadLocal.java |
447 | static final class Fields {
static final XContentBuilderString CPU = new XContentBuilderString("cpu");
static final XContentBuilderString PERCENT = new XContentBuilderString("percent");
static final XContentBuilderString OPEN_FILE_DESCRIPTORS = new XContentBuilderString("open_file_descriptors");
static final XContentBuilderString MIN = new XContentBuilderString("min");
static final XContentBuilderString MAX = new XContentBuilderString("max");
static final XContentBuilderString AVG = new XContentBuilderString("avg");
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java |
555 | public class ClientTxnSetProxy<E> extends AbstractClientTxnCollectionProxy<E> implements TransactionalSet<E> {
public ClientTxnSetProxy(String name, TransactionContextProxy proxy) {
super(name, proxy);
}
public boolean add(E e) {
throwExceptionIfNull(e);
final Data value = toData(e);
final TxnSetAddRequest request = new TxnSetAddRequest(getName(), value);
final Boolean result = invoke(request);
return result;
}
public boolean remove(E e) {
throwExceptionIfNull(e);
final Data value = toData(e);
final TxnSetRemoveRequest request = new TxnSetRemoveRequest(getName(), value);
final Boolean result = invoke(request);
return result;
}
public int size() {
final TxnSetSizeRequest request = new TxnSetSizeRequest(getName());
final Integer result = invoke(request);
return result;
}
public String getServiceName() {
return SetService.SERVICE_NAME;
}
} | 0true
| hazelcast-client_src_main_java_com_hazelcast_client_txn_proxy_ClientTxnSetProxy.java |
536 | class ShardGatewaySnapshotResponse extends BroadcastShardOperationResponse {
ShardGatewaySnapshotResponse() {
}
public ShardGatewaySnapshotResponse(String index, int shardId) {
super(index, shardId);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_gateway_snapshot_ShardGatewaySnapshotResponse.java |
170 | es.submit(new Runnable() {
public void run() {
IMap<String, byte[]> map = client.getMap("default");
while (true) {
int key = (int) (Math.random() * ENTRY_COUNT);
int operation = ((int) (Math.random() * 100));
if (operation < GET_PERCENTAGE) {
map.get(String.valueOf(key));
stats.gets.incrementAndGet();
} else if (operation < GET_PERCENTAGE + PUT_PERCENTAGE) {
map.put(String.valueOf(key), new byte[VALUE_SIZE]);
stats.puts.incrementAndGet();
} else {
map.remove(String.valueOf(key));
stats.removes.incrementAndGet();
}
}
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_SimpleMapTestFromClient.java |
2,953 | @AnalysisSettingsRequired
public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
private final SynonymMap synonymMap;
private final boolean ignoreCase;
@Inject
public SynonymTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, Environment env, IndicesAnalysisService indicesAnalysisService, Map<String, TokenizerFactoryFactory> tokenizerFactories,
@Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
Reader rulesReader = null;
if (settings.getAsArray("synonyms", null) != null) {
List<String> rules = Analysis.getWordList(env, settings, "synonyms");
StringBuilder sb = new StringBuilder();
for (String line : rules) {
sb.append(line).append(System.getProperty("line.separator"));
}
rulesReader = new FastStringReader(sb.toString());
} else if (settings.get("synonyms_path") != null) {
rulesReader = Analysis.getReaderFromFile(env, settings, "synonyms_path");
} else {
throw new ElasticsearchIllegalArgumentException("synonym requires either `synonyms` or `synonyms_path` to be configured");
}
this.ignoreCase = settings.getAsBoolean("ignore_case", false);
boolean expand = settings.getAsBoolean("expand", true);
String tokenizerName = settings.get("tokenizer", "whitespace");
TokenizerFactoryFactory tokenizerFactoryFactory = tokenizerFactories.get(tokenizerName);
if (tokenizerFactoryFactory == null) {
tokenizerFactoryFactory = indicesAnalysisService.tokenizerFactoryFactory(tokenizerName);
}
if (tokenizerFactoryFactory == null) {
throw new ElasticsearchIllegalArgumentException("failed to find tokenizer [" + tokenizerName + "] for synonym token filter");
}
final TokenizerFactory tokenizerFactory = tokenizerFactoryFactory.create(tokenizerName, settings);
Analyzer analyzer = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = tokenizerFactory == null ? new WhitespaceTokenizer(Lucene.ANALYZER_VERSION, reader) : tokenizerFactory.create(reader);
TokenStream stream = ignoreCase ? new LowerCaseFilter(Lucene.ANALYZER_VERSION, tokenizer) : tokenizer;
return new TokenStreamComponents(tokenizer, stream);
}
};
try {
SynonymMap.Builder parser = null;
if ("wordnet".equalsIgnoreCase(settings.get("format"))) {
parser = new WordnetSynonymParser(true, expand, analyzer);
((WordnetSynonymParser) parser).parse(rulesReader);
} else {
parser = new SolrSynonymParser(true, expand, analyzer);
((SolrSynonymParser) parser).parse(rulesReader);
}
synonymMap = parser.build();
} catch (Exception e) {
throw new ElasticsearchIllegalArgumentException("failed to build synonyms", e);
}
}
@Override
public TokenStream create(TokenStream tokenStream) {
// fst is null means no synonyms
return synonymMap.fst == null ? tokenStream : new SynonymFilter(tokenStream, synonymMap, ignoreCase);
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_SynonymTokenFilterFactory.java |
/**
 * Immutable holder for a map of string key/value pairs.
 * The backing map is stored and returned as-is (no defensive copy),
 * so callers share the same instance.
 */
final class DataValue {
    private final Map<String, String> entries;

    DataValue(Map<String, String> entries) {
        this.entries = entries;
    }

    Map<String, String> getData() {
        return entries;
    }
}
| timeSequenceFeedAggregator_src_main_java_gov_nasa_arc_mct_buffer_disk_internal_DataValue.java |
/**
 * Aggregated node-role counts for cluster stats: total nodes plus a
 * breakdown into master-only, data-only, master+data and client nodes.
 * Serialized both in binary form (Streamable) and as XContent.
 */
public static class Counts implements Streamable, ToXContent {
    int total;
    int masterOnly;
    int dataOnly;
    int masterData;
    int client;
    // Classifies one node by its roles and bumps exactly one role counter
    // (plus the total). Nodes with none of the three roles only count in total.
    public void addNodeInfo(NodeInfo nodeInfo) {
        total++;
        DiscoveryNode node = nodeInfo.getNode();
        if (node.masterNode()) {
            if (node.dataNode()) {
                masterData++;
            } else {
                masterOnly++;
            }
        } else if (node.dataNode()) {
            dataOnly++;
        } else if (node.clientNode()) {
            client++;
        }
    }
    public int getTotal() {
        return total;
    }
    public int getMasterOnly() {
        return masterOnly;
    }
    public int getDataOnly() {
        return dataOnly;
    }
    public int getMasterData() {
        return masterData;
    }
    public int getClient() {
        return client;
    }
    // Deserialization factory used by the transport layer.
    public static Counts readCounts(StreamInput in) throws IOException {
        Counts c = new Counts();
        c.readFrom(in);
        return c;
    }
    @Override
    public void readFrom(StreamInput in) throws IOException {
        // NOTE: field order must mirror writeTo() exactly (wire format).
        total = in.readVInt();
        masterOnly = in.readVInt();
        dataOnly = in.readVInt();
        masterData = in.readVInt();
        client = in.readVInt();
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(total);
        out.writeVInt(masterOnly);
        out.writeVInt(dataOnly);
        out.writeVInt(masterData);
        out.writeVInt(client);
    }
    // JSON field names for toXContent().
    static final class Fields {
        static final XContentBuilderString TOTAL = new XContentBuilderString("total");
        static final XContentBuilderString MASTER_ONLY = new XContentBuilderString("master_only");
        static final XContentBuilderString DATA_ONLY = new XContentBuilderString("data_only");
        static final XContentBuilderString MASTER_DATA = new XContentBuilderString("master_data");
        static final XContentBuilderString CLIENT = new XContentBuilderString("client");
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(Fields.TOTAL, total);
        builder.field(Fields.MASTER_ONLY, masterOnly);
        builder.field(Fields.DATA_ONLY, dataOnly);
        builder.field(Fields.MASTER_DATA, masterData);
        builder.field(Fields.CLIENT, client);
        return builder;
    }
}
| src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java |
801 | public static class DocBuilder implements ToXContent {
private BytesReference doc;
public DocBuilder setDoc(BytesReference doc) {
this.doc = doc;
return this;
}
public DocBuilder setDoc(String field, Object value) {
Map<String, Object> values = new HashMap<String, Object>(2);
values.put(field, value);
setDoc(values);
return this;
}
public DocBuilder setDoc(String doc) {
this.doc = new BytesArray(doc);
return this;
}
public DocBuilder setDoc(XContentBuilder doc) {
this.doc = doc.bytes();
return this;
}
public DocBuilder setDoc(Map doc) {
return setDoc(doc, PercolateRequest.contentType);
}
public DocBuilder setDoc(Map doc, XContentType contentType) {
try {
return setDoc(XContentFactory.contentBuilder(contentType).map(doc));
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + doc + "]", e);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
XContentType contentType = XContentFactory.xContentType(doc);
if (contentType == builder.contentType()) {
builder.rawField("doc", doc);
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(doc);
try {
parser.nextToken();
builder.field("doc");
builder.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
return builder;
}
} | 0true
| src_main_java_org_elasticsearch_action_percolate_PercolateSourceBuilder.java |
1,374 | public static enum Type {
ADD((byte) 0),
REMOVE((byte) 1);
private final byte value;
Type(byte value) {
this.value = value;
}
public byte value() {
return value;
}
public static Type fromValue(byte value) {
if (value == 0) {
return ADD;
} else if (value == 1) {
return REMOVE;
} else {
throw new ElasticsearchIllegalArgumentException("No type for action [" + value + "]");
}
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_AliasAction.java |
/**
 * Unit tests for ByteSizeValue: unit-to-bytes conversion, toString
 * rendering (scaling up to the next unit), and string parsing including
 * failure cases.
 */
public class ByteSizeValueTests extends ElasticsearchTestCase {
    @Test
    public void testActualPeta() {
        MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.PB).bytes(), equalTo(4503599627370496l));
    }
    @Test
    public void testActualTera() {
        MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.TB).bytes(), equalTo(4398046511104l));
    }
    @Test
    public void testActual() {
        MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.GB).bytes(), equalTo(4294967296l));
    }
    // Round-trips each unit through ByteSizeValue and back.
    @Test
    public void testSimple() {
        assertThat(ByteSizeUnit.BYTES.toBytes(10), is(new ByteSizeValue(10, ByteSizeUnit.BYTES).bytes()));
        assertThat(ByteSizeUnit.KB.toKB(10), is(new ByteSizeValue(10, ByteSizeUnit.KB).kb()));
        assertThat(ByteSizeUnit.MB.toMB(10), is(new ByteSizeValue(10, ByteSizeUnit.MB).mb()));
        assertThat(ByteSizeUnit.GB.toGB(10), is(new ByteSizeValue(10, ByteSizeUnit.GB).gb()));
        assertThat(ByteSizeUnit.TB.toTB(10), is(new ByteSizeValue(10, ByteSizeUnit.TB).tb()));
        assertThat(ByteSizeUnit.PB.toPB(10), is(new ByteSizeValue(10, ByteSizeUnit.PB).pb()));
    }
    // 1.5x a unit renders in the next-larger unit; beyond PB there is no
    // larger unit, so the last case stays in pb.
    @Test
    public void testToString() {
        assertThat("10b", is(new ByteSizeValue(10, ByteSizeUnit.BYTES).toString()));
        assertThat("1.5kb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.BYTES).toString()));
        assertThat("1.5mb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.KB).toString()));
        assertThat("1.5gb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.MB).toString()));
        assertThat("1.5tb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.GB).toString()));
        assertThat("1.5pb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.TB).toString()));
        assertThat("1536pb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.PB).toString()));
    }
    // Parsing accepts lower/upper-case suffixes and a bare number (bytes).
    @Test
    public void testParsing() {
        assertThat(ByteSizeValue.parseBytesSizeValue("42pb").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42P").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("42PB").toString(), is("42pb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54tb").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54T").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("54TB").toString(), is("54tb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12gb").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12G").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12GB").toString(), is("12gb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("12M").toString(), is("12mb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("1b").toString(), is("1b"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23kb").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23k").toString(), is("23kb"));
        assertThat(ByteSizeValue.parseBytesSizeValue("23").toString(), is("23b"));
    }
    // The assertion body is never reached: parsing "" must throw.
    @Test(expected = ElasticsearchParseException.class)
    public void testFailOnEmptyParsing() {
        assertThat(ByteSizeValue.parseBytesSizeValue("").toString(), is("23kb"));
    }
    // A bare unit with no number must also throw.
    @Test(expected = ElasticsearchParseException.class)
    public void testFailOnEmptyNumberParsing() {
        assertThat(ByteSizeValue.parseBytesSizeValue("g").toString(), is("23b"));
    }
}
| src_test_java_org_elasticsearch_common_unit_ByteSizeValueTests.java |
        // Maps portable class IDs to concrete lock/condition client request objects.
        return new PortableFactory() {
            public Portable create(int classId) {
                switch (classId) {
                    case LOCK:
                        return new LockRequest();
                    case UNLOCK:
                        return new UnlockRequest();
                    case IS_LOCKED:
                        return new IsLockedRequest();
                    case GET_LOCK_COUNT:
                        return new GetLockCountRequest();
                    case GET_REMAINING_LEASE:
                        return new GetRemainingLeaseRequest();
                    case CONDITION_BEFORE_AWAIT:
                        return new BeforeAwaitRequest();
                    case CONDITION_AWAIT:
                        return new AwaitRequest();
                    case CONDITION_SIGNAL:
                        return new SignalRequest();
                    default:
                        // Unknown ID: null tells the caller this factory does not handle it.
                        return null;
                }
            }
        };
| hazelcast_src_main_java_com_hazelcast_concurrent_lock_client_LockPortableHook.java |
1,376 | public class TitanCassandraHadoopGraph extends TitanHadoopGraph {
public TitanCassandraHadoopGraph(TitanHadoopSetup setup) {
super(setup);
}
public FaunusVertex readHadoopVertex(final Configuration configuration, final ByteBuffer key, final SortedMap<ByteBuffer, Column> value) {
return super.readHadoopVertex(configuration, StaticArrayBuffer.of(key), new CassandraMapIterable(value));
}
private static class CassandraMapIterable implements Iterable<Entry> {
private final SortedMap<ByteBuffer, Column> columnValues;
public CassandraMapIterable(final SortedMap<ByteBuffer, Column> columnValues) {
Preconditions.checkNotNull(columnValues);
this.columnValues = columnValues;
}
@Override
public Iterator<Entry> iterator() {
return new CassandraMapIterator(columnValues.entrySet().iterator());
}
}
private static class CassandraMapIterator implements Iterator<Entry> {
private final Iterator<Map.Entry<ByteBuffer, Column>> iterator;
public CassandraMapIterator(final Iterator<Map.Entry<ByteBuffer, Column>> iterator) {
this.iterator = iterator;
}
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public Entry next() {
final Map.Entry<ByteBuffer, Column> entry = iterator.next();
ByteBuffer col = entry.getKey();
ByteBuffer val = entry.getValue().value();
return StaticArrayEntry.of(StaticArrayBuffer.of(col), StaticArrayBuffer.of(val));
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_cassandra_TitanCassandraHadoopGraph.java |
    /**
     * Special node placed in a bin while the table is being transferred;
     * it redirects reads that land on it to {@code nextTable}. Constructed
     * with the sentinel hash MOVED (no key/value of its own).
     */
    static final class ForwardingNode<K,V> extends Node<K,V> {
        final Node<K,V>[] nextTable;
        ForwardingNode(Node<K,V>[] tab) {
            super(MOVED, null, null, null);
            this.nextTable = tab;
        }
        Node<K,V> find(int h, Object k) {
            // loop to avoid arbitrarily deep recursion on forwarding nodes
            outer: for (Node<K,V>[] tab = nextTable;;) {
                Node<K,V> e; int n;
                if (k == null || tab == null || (n = tab.length) == 0 ||
                    (e = tabAt(tab, (n - 1) & h)) == null)
                    return null;
                for (;;) {
                    int eh; K ek;
                    if ((eh = e.hash) == h &&
                        ((ek = e.key) == k || (ek != null && k.equals(ek))))
                        return e;
                    if (eh < 0) {
                        // Negative hash marks a special node: another forwarder
                        // means the target table is itself mid-transfer, so restart
                        // the outer loop against its nextTable; otherwise delegate.
                        if (e instanceof ForwardingNode) {
                            tab = ((ForwardingNode<K,V>)e).nextTable;
                            continue outer;
                        }
                        else
                            return e.find(h, k);
                    }
                    if ((e = e.next) == null)
                        return null;
                }
            }
        }
    }
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
Collections.sort(indexesToLock, new Comparator<OIndex<?>>() {
    public int compare(final OIndex<?> first, final OIndex<?> second) {
        // Deterministic, name-based ordering of the indexes to lock.
        return first.getName().compareTo(second.getName());
    }
});
| core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionOptimistic.java |
/**
 * Marker for index definitions in the Titan schema; adds no members of its
 * own beyond the common {@code TitanSchemaElement} contract.
 */
public interface TitanIndex extends TitanSchemaElement {
}
| titan-core_src_main_java_com_thinkaurelius_titan_core_schema_TitanIndex.java |
1,311 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_SEARCH_FACET")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "blStandardElements")
public class SearchFacetImpl implements SearchFacet, Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "SearchFacetId")
@GenericGenerator(
name="SearchFacetId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="SearchFacetImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.search.domain.SearchFacetImpl")
}
)
@Column(name = "SEARCH_FACET_ID")
@AdminPresentation(friendlyName = "SearchFacetImpl_ID", order = 1, group = "SearchFacetImpl_description", groupOrder = 1, visibility = VisibilityEnum.HIDDEN_ALL)
protected Long id;
@ManyToOne(optional=false, targetEntity = FieldImpl.class)
@JoinColumn(name = "FIELD_ID")
@AdminPresentation(friendlyName = "SearchFacetImpl_field", order = 2, group = "SearchFacetImpl_description", prominent = true, gridOrder = 1)
@AdminPresentationToOneLookup(lookupDisplayProperty = "propertyName")
protected Field field;
@Column(name = "LABEL")
@AdminPresentation(friendlyName = "SearchFacetImpl_label", order = 3, group = "SearchFacetImpl_description",
groupOrder = 1, prominent = true, translatable = true, gridOrder = 2)
protected String label;
@Column(name = "SHOW_ON_SEARCH")
@AdminPresentation(friendlyName = "SearchFacetImpl_showOnSearch", order = 4, group = "SearchFacetImpl_description", groupOrder = 1,prominent=false)
protected Boolean showOnSearch = false;
@Column(name = "SEARCH_DISPLAY_PRIORITY")
@AdminPresentation(friendlyName = "SearchFacetImpl_searchPriority", order = 5, group = "SearchFacetImpl_description", groupOrder = 1, prominent=true)
protected Integer searchDisplayPriority = 1;
@Column(name = "MULTISELECT")
@AdminPresentation(friendlyName = "SearchFacetImpl_multiselect", order = 6, group = "SearchFacetImpl_description", groupOrder = 1)
protected Boolean canMultiselect = true;
@OneToMany(mappedBy = "searchFacet", targetEntity = SearchFacetRangeImpl.class, cascade = {CascadeType.ALL})
@Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationCollection(addType = AddMethodType.PERSIST, friendlyName = "newRangeTitle")
protected List<SearchFacetRange> searchFacetRanges = new ArrayList<SearchFacetRange>();
@OneToMany(mappedBy = "searchFacet", targetEntity = RequiredFacetImpl.class, cascade = {CascadeType.ALL})
@Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationAdornedTargetCollection(targetObjectProperty = "requiredFacet", friendlyName = "requiredFacetTitle", gridVisibleFields = { "label", "searchDisplayPriority", "canMultiselect", "requiresAllDependentFacets" })
protected List<RequiredFacet> requiredFacets = new ArrayList<RequiredFacet>();
@Column(name = "REQUIRES_ALL_DEPENDENT")
@AdminPresentation(friendlyName = "SearchFacetImpl_requiresAllDependentFacets", order = 6, group = "SearchFacetImpl_description", groupOrder = 1)
protected Boolean requiresAllDependentFacets = false;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Field getField() {
return field;
}
@Override
public void setField(Field field) {
this.field = field;
}
@Override
public String getLabel() {
return DynamicTranslationProvider.getValue(this, "label", label);
}
@Override
public void setLabel(String label) {
this.label = label;
}
@Override
public Boolean getShowOnSearch() {
return showOnSearch;
}
@Override
public void setShowOnSearch(Boolean showOnSearch) {
this.showOnSearch = showOnSearch;
}
@Override
public Integer getSearchDisplayPriority() {
return searchDisplayPriority;
}
@Override
public void setSearchDisplayPriority(Integer searchDisplayPriority) {
this.searchDisplayPriority = searchDisplayPriority;
}
@Override
public Boolean getCanMultiselect() {
return canMultiselect;
}
@Override
public void setCanMultiselect(Boolean canMultiselect) {
this.canMultiselect = canMultiselect;
}
@Override
public List<RequiredFacet> getRequiredFacets() {
return requiredFacets;
}
@Override
public void setRequiredFacets(List<RequiredFacet> requiredFacets) {
this.requiredFacets = requiredFacets;
}
@Override
public Boolean getRequiresAllDependentFacets() {
return requiresAllDependentFacets == null ? false : requiresAllDependentFacets;
}
@Override
public void setRequiresAllDependentFacets(Boolean requiresAllDependentFacets) {
this.requiresAllDependentFacets = requiresAllDependentFacets;
}
@Override
public List<SearchFacetRange> getSearchFacetRanges() {
return searchFacetRanges;
}
@Override
public void setSearchFacetRanges(List<SearchFacetRange> searchFacetRanges) {
this.searchFacetRanges = searchFacetRanges;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
SearchFacet other = (SearchFacet) obj;
return getField().equals(other.getField());
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_domain_SearchFacetImpl.java |
/**
 * TestNG integration tests for OrderItemDao covering the create/read/delete
 * lifecycle of discrete and gift-wrap order items. Methods are chained via
 * dependsOnGroups and share state through the id fields; @Rollback(false)
 * keeps the persisted rows visible to later groups.
 */
public class OrderItemDaoTest extends BaseTest {
    // Ids handed from the create groups to the read/delete groups.
    private Long orderItemId;
    private Long giftWrapItemId;
    @Resource
    private OrderItemDao orderItemDao;
    @Resource
    private SkuDao skuDao;
    @Test(groups = { "createDiscreteOrderItem" }, dataProvider = "basicDiscreteOrderItem", dataProviderClass = OrderItemDataProvider.class, dependsOnGroups = { "createOrder", "createSku" })
    @Rollback(false)
    @Transactional
    public void createDiscreteOrderItem(DiscreteOrderItem orderItem) {
        Sku si = skuDao.readFirstSku();
        assert si.getId() != null;
        orderItem.setSku(si);
        assert orderItem.getId() == null;
        orderItem = (DiscreteOrderItem) orderItemDao.save(orderItem);
        assert orderItem.getId() != null;
        orderItemId = orderItem.getId();
    }
    @Test(groups = { "createGiftWrapOrderItem" }, dataProvider = "basicGiftWrapOrderItem", dataProviderClass = OrderItemDataProvider.class, dependsOnGroups = { "readOrderItemsById" })
    @Rollback(false)
    @Transactional
    public void createGiftWrapOrderItem(GiftWrapOrderItem orderItem) {
        Sku si = skuDao.readFirstSku();
        assert si.getId() != null;
        orderItem.setSku(si);
        assert orderItem.getId() == null;
        // Wire both sides of the wrap relationship before saving.
        OrderItem discreteItem = orderItemDao.readOrderItemById(orderItemId);
        orderItem.getWrappedItems().add(discreteItem);
        discreteItem.setGiftWrapOrderItem(orderItem);
        orderItem = (GiftWrapOrderItem) orderItemDao.save(orderItem);
        assert orderItem.getId() != null;
        giftWrapItemId = orderItem.getId();
    }
    @Test(groups = { "readGiftWrapOrderItemsById" }, dependsOnGroups = { "createGiftWrapOrderItem" })
    @Transactional
    public void readGiftWrapOrderItemsById() {
        assert giftWrapItemId != null;
        OrderItem result = orderItemDao.readOrderItemById(giftWrapItemId);
        assert result != null;
        assert result.getId().equals(giftWrapItemId);
        assert ((GiftWrapOrderItem) result).getWrappedItems().get(0).getId().equals(orderItemId);
    }
    @Test(groups = { "deleteGiftWrapOrderItemsById" }, dependsOnGroups = { "readGiftWrapOrderItemsById" })
    @Rollback(false)
    public void deleteGiftWrapOrderItemsById() {
        OrderItem result = orderItemDao.readOrderItemById(giftWrapItemId);
        orderItemDao.delete(result);
        assert orderItemDao.readOrderItemById(giftWrapItemId) == null;
    }
    @Test(groups = { "readOrderItemsById" }, dependsOnGroups = { "createDiscreteOrderItem" })
    public void readOrderItemsById() {
        assert orderItemId != null;
        OrderItem result = orderItemDao.readOrderItemById(orderItemId);
        assert result != null;
        assert result.getId().equals(orderItemId);
    }
    // Deleting the gift wrap must leave the wrapped discrete item intact
    // but detached from the (now removed) wrap item.
    @Test(groups = { "readOrderItemsByIdAfterGiftWrapDeletion" }, dependsOnGroups = { "deleteGiftWrapOrderItemsById" })
    public void readOrderItemsByIdAfterGiftWrapDeletion() {
        assert orderItemId != null;
        OrderItem result = orderItemDao.readOrderItemById(orderItemId);
        assert result != null;
        assert result.getId().equals(orderItemId);
        assert result.getGiftWrapOrderItem() == null;
    }
}
| integration_src_test_java_org_broadleafcommerce_core_order_dao_OrderItemDaoTest.java |
                // No-op serializer for DummyValue: writes nothing on serialize
                // and reconstructs a fresh instance on deserialize.
                .setImplementation(new StreamSerializer() {
                    public void write(ObjectDataOutput out, Object object) throws IOException {
                    }
                    public Object read(ObjectDataInput in) throws IOException {
                        return new DummyValue();
                    }
                    public int getTypeId() {
                        // Arbitrary user-range type id for this test serializer.
                        return 123;
                    }
                    public void destroy() {
                    }
                }));
| hazelcast_src_test_java_com_hazelcast_map_IssuesTest.java |
    FIELD_NAME {
        @Override
        public boolean isValue() {
            // Field-name tokens are structural, not value tokens.
            return false;
        }
    },
| src_main_java_org_elasticsearch_common_xcontent_XContentParser.java |
1,302 | public class TestApp implements EntryListener, ItemListener, MessageListener {
    // Number of pre-touched executor services ("e1".."e16"); see start().
    private static final int LOAD_EXECUTORS_COUNT = 16;
    private static final int ONE_KB = 1024;
    private static final int ONE_THOUSAND = 1000;
    private static final int ONE_HUNDRED = 100;
    // Seconds (used with TimeUnit.SECONDS for the 1h awaitTermination).
    private static final int ONE_HOUR = 3600;
    // Cached distributed-object proxies for the current namespace; all are
    // cleared by setHazelcast() and re-resolved by the get*() accessors.
    private IQueue<Object> queue;
    private ITopic<Object> topic;
    private IMap<Object, Object> map;
    private MultiMap<Object, Object> multiMap;
    private ISet<Object> set;
    private IList<Object> list;
    private IAtomicLong atomicNumber;
    // Active namespace prefix; commands of the form "ns__cmd" switch it.
    private String namespace = "default";
    private boolean silent;
    private boolean echo;
    private volatile HazelcastInstance hazelcast;
    private volatile LineReader lineReader;
    // Controls the read-eval loop in start(); cleared by stop().
    private volatile boolean running;
    // Binds this console to the given Hazelcast instance.
    public TestApp(HazelcastInstance hazelcast) {
        this.hazelcast = hazelcast;
    }
public IQueue<Object> getQueue() {
queue = hazelcast.getQueue(namespace);
return queue;
}
public ITopic<Object> getTopic() {
topic = hazelcast.getTopic(namespace);
return topic;
}
public IMap<Object, Object> getMap() {
map = hazelcast.getMap(namespace);
return map;
}
public MultiMap<Object, Object> getMultiMap() {
multiMap = hazelcast.getMultiMap(namespace);
return multiMap;
}
public IAtomicLong getAtomicNumber() {
atomicNumber = hazelcast.getAtomicLong(namespace);
return atomicNumber;
}
public ISet<Object> getSet() {
set = hazelcast.getSet(namespace);
return set;
}
public IList<Object> getList() {
list = hazelcast.getList(namespace);
return list;
}
    // Swaps the backing instance and drops cached proxies so they are
    // lazily re-resolved against the new instance on next access.
    public void setHazelcast(HazelcastInstance hazelcast) {
        this.hazelcast = hazelcast;
        map = null;
        list = null;
        set = null;
        queue = null;
        topic = null;
    }
    // Signals the read-eval loop in start() to exit after the current command.
    public void stop() {
        running = false;
    }
    /**
     * Initializes the demo structures, then runs the interactive
     * read-eval loop until {@link #stop()} is called.
     */
    public void start(String[] args) throws Exception {
        // Touch every distributed object and its stats up front so the
        // proxies exist before the first command (presumably for warm-up
        // / monitoring — TODO(review): confirm intent).
        getMap().size();
        getList().size();
        getSet().size();
        getQueue().size();
        getTopic().getLocalTopicStats();
        getMultiMap().size();
        hazelcast.getExecutorService("default").getLocalExecutorStats();
        for (int k = 1; k <= LOAD_EXECUTORS_COUNT; k++) {
            hazelcast.getExecutorService("e" + k).getLocalExecutorStats();
        }
        if (lineReader == null) {
            lineReader = new DefaultLineReader();
        }
        running = true;
        while (running) {
            print("hazelcast[" + namespace + "] > ");
            try {
                final String command = lineReader.readLine();
                handleCommand(command);
            } catch (Throwable e) {
                // Keep the console alive on any command failure.
                e.printStackTrace();
            }
        }
    }
    /**
     * Line reader backed by {@code System.in}, decoded as UTF-8.
     */
    static class DefaultLineReader implements LineReader {
        private BufferedReader in;
        public DefaultLineReader() throws UnsupportedEncodingException {
            in = new BufferedReader(new InputStreamReader(System.in, "UTF-8"));
        }
        public String readLine() throws Exception {
            return in.readLine();
        }
    }
    //CHECKSTYLE:OFF
    /**
     * Parses and dispatches one console command. Supports a "namespace__cmd"
     * prefix, "#N cmd" repetition, "&N cmd" forked execution across N threads,
     * plus "@..." and ";"-separated forms (see handleAt/handleColon). All other
     * commands are dispatched to their handle* method by prefix matching.
     *
     * @param commandInputted raw console line (may be null after echo handling)
     */
    @edu.umd.cs.findbugs.annotations.SuppressWarnings("DM_EXIT")
    protected void handleCommand(String commandInputted) {
        String command = commandInputted;
        if (command.contains("__")) { // "ns__cmd" selects a namespace for this command
            namespace = command.split("__")[0];
            command = command.substring(command.indexOf("__") + 2);
        }
        if (echo) {
            handleEcho(command);
        }
        if (command == null || command.startsWith("//")) {
            return;
        }
        command = command.trim();
        if (command == null || command.length() == 0) {
            return;
        }
        String first = command;
        int spaceIndex = command.indexOf(' ');
        String[] argsSplit = command.split(" ");
        String[] args = new String[argsSplit.length];
        for (int i = 0; i < argsSplit.length; i++) {
            args[i] = argsSplit[i].trim();
        }
        if (spaceIndex != -1) {
            first = args[0];
        }
        if (command.startsWith("help")) {
            handleHelp(command);
        } else if (first.startsWith("#") && first.length() > 1) { // "#N cmd": run cmd N times, report throughput
            int repeat = Integer.parseInt(first.substring(1));
            long t0 = Clock.currentTimeMillis();
            for (int i = 0; i < repeat; i++) {
                handleCommand(command.substring(first.length()).replaceAll("\\$i", "" + i));
            }
            println("ops/s = " + repeat * ONE_THOUSAND / (Clock.currentTimeMillis() - t0));
        } else if (first.startsWith("&") && first.length() > 1) { // "&N cmd": run cmd on N threads ($t = thread id)
            final int fork = Integer.parseInt(first.substring(1));
            ExecutorService pool = Executors.newFixedThreadPool(fork);
            final String threadCommand = command.substring(first.length());
            for (int i = 0; i < fork; i++) {
                final int threadID = i;
                pool.submit(new Runnable() {
                    public void run() {
                        String command = threadCommand;
                        String[] threadArgs = command.replaceAll("\\$t", "" + threadID).trim()
                                .split(" ");
                        // TODO &t #4 m.putmany x k
                        if ("m.putmany".equals(threadArgs[0])
                                || "m.removemany".equals(threadArgs[0])) {
                            if (threadArgs.length < 4) {
                                command += " " + Integer.parseInt(threadArgs[1]) * threadID;
                            }
                        }
                        handleCommand(command);
                    }
                });
            }
            pool.shutdown();
            try {
                // wait 1h
                pool.awaitTermination(ONE_HOUR, TimeUnit.SECONDS);
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else if (first.startsWith("@")) {
            handleAt(first);
        } else if (command.indexOf(';') != -1) {
            handleColon(command);
        } else if ("silent".equals(first)) {
            silent = Boolean.parseBoolean(args[1]);
        } else if ("shutdown".equals(first)) {
            hazelcast.getLifecycleService().shutdown();
        } else if ("echo".equals(first)) {
            echo = Boolean.parseBoolean(args[1]);
            println("echo: " + echo);
        } else if ("ns".equals(first)) {
            handleNamespace(args);
        } else if ("whoami".equals(first)) {
            handleWhoami();
        } else if ("who".equals(first)) {
            handleWho();
        } else if ("jvm".equals(first)) {
            handleJvm();
        } else if (first.contains("ock") && !first.contains(".")) { // matches lock/unlock/tryLock...
            handleLock(args);
        } else if (first.contains(".size")) {
            handleSize(args);
        } else if (first.contains(".clear")) {
            handleClear(args);
        } else if (first.contains(".destroy")) {
            handleDestroy(args);
        } else if (first.contains(".iterator")) {
            handleIterator(args);
        } else if (first.contains(".contains")) {
            handleContains(args);
        } else if (first.contains(".stats")) {
            handStats(args);
        } else if ("t.publish".equals(first)) {
            handleTopicPublish(args);
        } else if ("q.offer".equals(first)) {
            handleQOffer(args);
        } else if ("q.take".equals(first)) {
            handleQTake(args);
        } else if ("q.poll".equals(first)) {
            handleQPoll(args);
        } else if ("q.peek".equals(first)) {
            handleQPeek(args);
        } else if ("q.capacity".equals(first)) {
            handleQCapacity(args);
        } else if ("q.offermany".equals(first)) {
            handleQOfferMany(args);
        } else if ("q.pollmany".equals(first)) {
            handleQPollMany(args);
        } else if ("s.add".equals(first)) {
            handleSetAdd(args);
        } else if ("s.remove".equals(first)) {
            handleSetRemove(args);
        } else if ("s.addmany".equals(first)) {
            handleSetAddMany(args);
        } else if ("s.removemany".equals(first)) {
            handleSetRemoveMany(args);
        } else if (first.equals("m.replace")) {
            handleMapReplace(args);
        } else if (first.equalsIgnoreCase("m.putIfAbsent")) {
            handleMapPutIfAbsent(args);
        } else if (first.equals("m.putAsync")) {
            handleMapPutAsync(args);
        } else if (first.equals("m.getAsync")) {
            handleMapGetAsync(args);
        } else if (first.equals("m.put")) {
            handleMapPut(args);
        } else if (first.equals("m.get")) {
            handleMapGet(args);
        } else if (first.equalsIgnoreCase("m.getMapEntry")) {
            handleMapGetMapEntry(args);
        } else if (first.equals("m.remove")) {
            handleMapRemove(args);
        } else if (first.equals("m.evict")) {
            handleMapEvict(args);
        } else if (first.equals("m.putmany") || first.equalsIgnoreCase("m.putAll")) {
            handleMapPutMany(args);
        } else if (first.equals("m.getmany")) {
            handleMapGetMany(args);
        } else if (first.equals("m.removemany")) {
            handleMapRemoveMany(args);
        } else if (command.equalsIgnoreCase("m.localKeys")) {
            handleMapLocalKeys();
        } else if (command.equalsIgnoreCase("m.localSize")) {
            handleMapLocalSize();
        } else if (command.equals("m.keys")) {
            handleMapKeys();
        } else if (command.equals("m.values")) {
            handleMapValues();
        } else if (command.equals("m.entries")) {
            handleMapEntries();
        } else if (first.equals("m.lock")) {
            handleMapLock(args);
        } else if (first.equalsIgnoreCase("m.tryLock")) {
            handleMapTryLock(args);
        } else if (first.equals("m.unlock")) {
            handleMapUnlock(args);
        } else if (first.contains(".addListener")) {
            handleAddListener(args);
        } else if (first.equals("m.removeMapListener")) {
            handleRemoveListener(args);
        } else if (first.equals("m.unlock")) { // NOTE(review): unreachable, duplicated above
            handleMapUnlock(args);
        } else if (first.equals("mm.put")) {
            handleMultiMapPut(args);
        } else if (first.equals("mm.get")) {
            handleMultiMapGet(args);
        } else if (first.equals("mm.remove")) {
            handleMultiMapRemove(args);
        } else if (command.equals("mm.keys")) {
            handleMultiMapKeys();
        } else if (command.equals("mm.values")) {
            handleMultiMapValues();
        } else if (command.equals("mm.entries")) {
            handleMultiMapEntries();
        } else if (first.equals("mm.lock")) {
            handleMultiMapLock(args);
        } else if (first.equalsIgnoreCase("mm.tryLock")) {
            handleMultiMapTryLock(args);
        } else if (first.equals("mm.unlock")) {
            handleMultiMapUnlock(args);
        } else if (first.equals("l.add")) {
            handleListAdd(args);
        } else if (first.equals("l.set")) {
            handleListSet(args);
        } else if ("l.addmany".equals(first)) {
            handleListAddMany(args);
        } else if (first.equals("l.remove")) {
            handleListRemove(args);
        } else if (first.equals("l.contains")) {
            handleListContains(args);
        } else if ("a.get".equals(first)) {
            handleAtomicNumberGet(args);
        } else if ("a.set".equals(first)) {
            handleAtomicNumberSet(args);
        } else if ("a.inc".equals(first)) {
            handleAtomicNumberInc(args);
        } else if ("a.dec".equals(first)) {
            handleAtomicNumberDec(args);
        } else if (first.equals("execute")) {
            execute(args);
        } else if (first.equals("partitions")) {
            handlePartitions(args);
            // } else if (first.equals("txn")) {
            // hazelcast.getTransaction().begin();
            // } else if (first.equals("commit")) {
            // hazelcast.getTransaction().commit();
            // } else if (first.equals("rollback")) {
            // hazelcast.getTransaction().rollback();
        } else if (first.equalsIgnoreCase("executeOnKey")) {
            executeOnKey(args);
        } else if (first.equalsIgnoreCase("executeOnMember")) {
            executeOnMember(args);
        } else if (first.equalsIgnoreCase("executeOnMembers")) {
            executeOnMembers(args);
            // } else if (first.equalsIgnoreCase("longOther") || first.equalsIgnoreCase("executeLongOther")) {
            // executeLongTaskOnOtherMember(args);
            //} else if (first.equalsIgnoreCase("long") || first.equalsIgnoreCase("executeLong")) {
            // executeLong(args);
        } else if (first.equalsIgnoreCase("instances")) {
            handleInstances(args);
        } else if (first.equalsIgnoreCase("quit") || first.equalsIgnoreCase("exit")) {
            System.exit(0);
        } else if (first.startsWith("e") && first.endsWith(".simulateLoad")) {
            handleExecutorSimulate(args);
        } else {
            println("type 'help' for help");
        }
    }
private void handleExecutorSimulate(String[] args) {
String first = args[0];
int threadCount = Integer.parseInt(first.substring(1, first.indexOf(".")));
if (threadCount < 1 || threadCount > 16) {
throw new RuntimeException("threadcount can't be smaller than 1 or larger than 16");
}
int taskCount = Integer.parseInt(args[1]);
int durationSec = Integer.parseInt(args[2]);
long startMs = System.currentTimeMillis();
IExecutorService executor = hazelcast.getExecutorService("e" + threadCount);
List<Future> futures = new LinkedList<Future>();
List<Member> members = new LinkedList<Member>(hazelcast.getCluster().getMembers());
int totalThreadCount = hazelcast.getCluster().getMembers().size() * threadCount;
int latchId = 0;
for (int k = 0; k < taskCount; k++) {
Member member = members.get(k % members.size());
if (taskCount % totalThreadCount == 0) {
latchId = taskCount / totalThreadCount;
hazelcast.getCountDownLatch("latch" + latchId).trySetCount(totalThreadCount);
}
Future f = executor.submitToMember(new SimulateLoadTask(durationSec, k + 1, "latch" + latchId), member);
futures.add(f);
}
for (Future f : futures) {
try {
f.get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
long durationMs = System.currentTimeMillis() - startMs;
println(format("Executed %s tasks in %s ms", taskCount, durationMs));
}
/**
* A simulated load test
*/
private static final class SimulateLoadTask implements Callable, Serializable, HazelcastInstanceAware {
private static final long serialVersionUID = 1;
private final int delay;
private final int taskId;
private final String latchId;
private transient HazelcastInstance hz;
private SimulateLoadTask(int delay, int taskId, String latchId) {
this.delay = delay;
this.taskId = taskId;
this.latchId = latchId;
}
@Override
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
this.hz = hazelcastInstance;
}
@Override
public Object call() throws Exception {
try {
Thread.sleep(delay * ONE_THOUSAND);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
hz.getCountDownLatch(latchId).countDown();
System.out.println("Finished task:" + taskId);
return null;
}
}
private void handleColon(String command) {
StringTokenizer st = new StringTokenizer(command, ";");
while (st.hasMoreTokens()) {
handleCommand(st.nextToken());
}
}
private void handleAt(String first) {
if (first.length() == 1) {
println("usage: @<file-name>");
return;
}
File f = new File(first.substring(1));
println("Executing script file " + f.getAbsolutePath());
if (f.exists()) {
try {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));
String l = br.readLine();
while (l != null) {
handleCommand(l);
l = br.readLine();
}
br.close();
} catch (IOException e) {
e.printStackTrace();
}
} else {
println("File not found! " + f.getAbsolutePath());
}
}
private void handleEcho(String command) {
if (!Thread.currentThread().getName().toLowerCase().contains("main")) {
println(" [" + Thread.currentThread().getName() + "] " + command);
} else {
println(command);
}
}
private void handleNamespace(String[] args) {
if (args.length > 1) {
namespace = args[1];
println("namespace: " + namespace);
// init();
}
}
@edu.umd.cs.findbugs.annotations.SuppressWarnings("DM_GC")
private void handleJvm() {
System.gc();
println("Memory max: " + Runtime.getRuntime().maxMemory() / ONE_KB / ONE_KB
+ "M");
println("Memory free: "
+ Runtime.getRuntime().freeMemory()
/ ONE_KB
/ ONE_KB
+ "M "
+ (int) (Runtime.getRuntime().freeMemory() * 100 / Runtime.getRuntime()
.maxMemory()) + "%");
long total = Runtime.getRuntime().totalMemory();
long free = Runtime.getRuntime().freeMemory();
println("Used Memory:" + ((total - free) / ONE_KB / ONE_KB) + "MB");
println("# procs: " + Runtime.getRuntime().availableProcessors());
println("OS info: " + ManagementFactory.getOperatingSystemMXBean().getArch()
+ " " + ManagementFactory.getOperatingSystemMXBean().getName() + " "
+ ManagementFactory.getOperatingSystemMXBean().getVersion());
println("JVM: " + ManagementFactory.getRuntimeMXBean().getVmVendor() + " "
+ ManagementFactory.getRuntimeMXBean().getVmName() + " "
+ ManagementFactory.getRuntimeMXBean().getVmVersion());
}
private void handleWhoami() {
println(hazelcast.getCluster().getLocalMember());
}
private void handleWho() {
StringBuilder sb = new StringBuilder("\n\nMembers [");
final Collection<Member> members = hazelcast.getCluster().getMembers();
sb.append(members != null ? members.size() : 0);
sb.append("] {");
if (members != null) {
for (Member member : members) {
sb.append("\n\t").append(member);
}
}
sb.append("\n}\n");
println(sb.toString());
}
private void handleAtomicNumberGet(String[] args) {
println(getAtomicNumber().get());
}
private void handleAtomicNumberSet(String[] args) {
long v = 0;
if (args.length > 1) {
v = Long.parseLong(args[1]);
}
getAtomicNumber().set(v);
println(getAtomicNumber().get());
}
private void handleAtomicNumberInc(String[] args) {
println(getAtomicNumber().incrementAndGet());
}
private void handleAtomicNumberDec(String[] args) {
println(getAtomicNumber().decrementAndGet());
}
protected void handlePartitions(String[] args) {
Set<Partition> partitions = hazelcast.getPartitionService().getPartitions();
Map<Member, Integer> partitionCounts = new HashMap<Member, Integer>();
for (Partition partition : partitions) {
Member owner = partition.getOwner();
if (owner != null) {
Integer count = partitionCounts.get(owner);
int newCount = 1;
if (count != null) {
newCount = count + 1;
}
partitionCounts.put(owner, newCount);
}
println(partition);
}
Set<Map.Entry<Member, Integer>> entries = partitionCounts.entrySet();
for (Map.Entry<Member, Integer> entry : entries) {
println(entry.getKey() + ":" + entry.getValue());
}
}
protected void handleInstances(String[] args) {
Collection<DistributedObject> distributedObjects = hazelcast.getDistributedObjects();
for (DistributedObject distributedObject : distributedObjects) {
println(distributedObject);
}
}
// ==================== list ===================================
protected void handleListContains(String[] args) {
println(getList().contains(args[1]));
}
protected void handleListRemove(String[] args) {
int index = -1;
try {
index = Integer.parseInt(args[1]);
} catch (NumberFormatException e) {
throw new RuntimeException(e);
}
if (index >= 0) {
println(getList().remove(index));
} else {
println(getList().remove(args[1]));
}
}
protected void handleListAdd(String[] args) {
if (args.length == 3) {
final int index = Integer.parseInt(args[1]);
getList().add(index, args[2]);
println("true");
} else {
println(getList().add(args[1]));
}
}
protected void handleListSet(String[] args) {
final int index = Integer.parseInt(args[1]);
println(getList().set(index, args[2]));
}
protected void handleListAddMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
int successCount = 0;
long t0 = Clock.currentTimeMillis();
for (int i = 0; i < count; i++) {
boolean success = getList().add("obj" + i);
if (success) {
successCount++;
}
}
long t1 = Clock.currentTimeMillis();
println("Added " + successCount + " objects.");
println("size = " + list.size() + ", " + successCount * ONE_THOUSAND / (t1 - t0)
+ " evt/s");
}
// ==================== map ===================================
protected void handleMapPut(String[] args) {
if (args.length == 1) {
println("m.put requires a key and a value. You have not specified either.");
} else if (args.length == 2) {
println("m.put requires a key and a value. You have only specified the key " + args[1]);
} else if (args.length > 3) {
println("m.put takes two arguments, a key and a value. You have specified more than two arguments.");
} else {
println(getMap().put(args[1], args[2]));
}
}
protected void handleMapPutAsync(String[] args) {
try {
println(getMap().putAsync(args[1], args[2]).get());
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
protected void handleMapPutIfAbsent(String[] args) {
println(getMap().putIfAbsent(args[1], args[2]));
}
protected void handleMapReplace(String[] args) {
println(getMap().replace(args[1], args[2]));
}
protected void handleMapGet(String[] args) {
println(getMap().get(args[1]));
}
protected void handleMapGetAsync(String[] args) {
try {
println(getMap().getAsync(args[1]).get());
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
protected void handleMapGetMapEntry(String[] args) {
println(getMap().getEntryView(args[1]));
}
protected void handleMapRemove(String[] args) {
println(getMap().remove(args[1]));
}
protected void handleMapEvict(String[] args) {
println(getMap().evict(args[1]));
}
protected void handleMapPutMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
int b = ONE_HUNDRED;
byte[] value = new byte[b];
if (args.length > 2) {
b = Integer.parseInt(args[2]);
value = new byte[b];
}
int start = getMap().size();
if (args.length > 3) {
start = Integer.parseInt(args[3]);
}
Map theMap = new HashMap(count);
for (int i = 0; i < count; i++) {
theMap.put("key" + (start + i), value);
}
long t0 = Clock.currentTimeMillis();
getMap().putAll(theMap);
long t1 = Clock.currentTimeMillis();
if (t1 - t0 > 1) {
println("size = " + getMap().size() + ", " + count * ONE_THOUSAND / (t1 - t0)
+ " evt/s, " + (count * ONE_THOUSAND / (t1 - t0)) * (b * 8) / ONE_KB + " Kbit/s, "
+ count * b / ONE_KB + " KB added");
}
}
protected void handleMapGetMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
for (int i = 0; i < count; i++) {
println(getMap().get("key" + i));
}
}
protected void handleMapRemoveMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
int start = 0;
if (args.length > 2) {
start = Integer.parseInt(args[2]);
}
long t0 = Clock.currentTimeMillis();
for (int i = 0; i < count; i++) {
getMap().remove("key" + (start + i));
}
long t1 = Clock.currentTimeMillis();
println("size = " + getMap().size() + ", " + count * ONE_THOUSAND / (t1 - t0) + " evt/s");
}
protected void handleMapLock(String[] args) {
getMap().lock(args[1]);
println("true");
}
protected void handleMapTryLock(String[] args) {
String key = args[1];
long time = (args.length > 2) ? Long.parseLong(args[2]) : 0;
boolean locked;
if (time == 0) {
locked = getMap().tryLock(key);
} else {
try {
locked = getMap().tryLock(key, time, TimeUnit.SECONDS);
} catch (InterruptedException e) {
locked = false;
}
}
println(locked);
}
protected void handleMapUnlock(String[] args) {
getMap().unlock(args[1]);
println("true");
}
protected void handleMapLocalKeys() {
Set set = getMap().localKeySet();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
println(it.next());
}
println("Total " + count);
}
protected void handleMapLocalSize() {
println("Local Size = " + getMap().localKeySet().size());
}
protected void handleMapKeys() {
Set set = getMap().keySet();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
println(it.next());
}
println("Total " + count);
}
protected void handleMapEntries() {
Set set = getMap().entrySet();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
Map.Entry entry = (Entry) it.next();
println(entry.getKey() + " : " + entry.getValue());
}
println("Total " + count);
}
protected void handleMapValues() {
Collection set = getMap().values();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
println(it.next());
}
println("Total " + count);
}
// ==================== multimap ===================================
protected void handleMultiMapPut(String[] args) {
println(getMultiMap().put(args[1], args[2]));
}
protected void handleMultiMapGet(String[] args) {
println(getMultiMap().get(args[1]));
}
protected void handleMultiMapRemove(String[] args) {
println(getMultiMap().remove(args[1]));
}
protected void handleMultiMapKeys() {
Set set = getMultiMap().keySet();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
println(it.next());
}
println("Total " + count);
}
protected void handleMultiMapEntries() {
Set set = getMultiMap().entrySet();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
Map.Entry entry = (Entry) it.next();
println(entry.getKey() + " : " + entry.getValue());
}
println("Total " + count);
}
protected void handleMultiMapValues() {
Collection set = getMultiMap().values();
Iterator it = set.iterator();
int count = 0;
while (it.hasNext()) {
count++;
println(it.next());
}
println("Total " + count);
}
protected void handleMultiMapLock(String[] args) {
getMultiMap().lock(args[1]);
println("true");
}
protected void handleMultiMapTryLock(String[] args) {
String key = args[1];
long time = (args.length > 2) ? Long.parseLong(args[2]) : 0;
boolean locked;
if (time == 0) {
locked = getMultiMap().tryLock(key);
} else {
try {
locked = getMultiMap().tryLock(key, time, TimeUnit.SECONDS);
} catch (InterruptedException e) {
locked = false;
}
}
println(locked);
}
protected void handleMultiMapUnlock(String[] args) {
getMultiMap().unlock(args[1]);
println("true");
}
// =======================================================
    // Prints the local statistics of the structure selected by the command
    // prefix: "m." -> map, "mm." -> multimap, "q." -> queue.
    // NOTE(review): the name looks like a typo for "handleStats"; kept unchanged
    // because the command dispatcher (outside this view) calls it by this name.
    private void handStats(String[] args) {
        String iteratorStr = args[0];
        if (iteratorStr.startsWith("m.")) {
            println(getMap().getLocalMapStats());
        } else if (iteratorStr.startsWith("mm.")) {
            println(getMultiMap().getLocalMultiMapStats());
        } else if (iteratorStr.startsWith("q.")) {
            println(getQueue().getLocalQueueStats());
        }
    }
    // Handles lock/unlock/trylock <key> [<seconds>] on the distributed lock
    // named by <key>. A 'lock' is intentionally left held across commands
    // (hence the suppressed warning): the console user pairs it with a later
    // 'unlock' command on the same key.
    @java.lang.SuppressWarnings("LockAcquiredButNotSafelyReleased")
    protected void handleLock(String[] args) {
        String lockStr = args[0];
        String key = args[1];
        Lock lock = hazelcast.getLock(key);
        if (lockStr.equalsIgnoreCase("lock")) {
            lock.lock();
            println("true");
        } else if (lockStr.equalsIgnoreCase("unlock")) {
            lock.unlock();
            println("true");
        } else if (lockStr.equalsIgnoreCase("trylock")) {
            // Optional third argument is a timeout in seconds.
            String timeout = args.length > 2 ? args[2] : null;
            if (timeout == null) {
                println(lock.tryLock());
            } else {
                long time = Long.parseLong(timeout);
                try {
                    println(lock.tryLock(time, TimeUnit.SECONDS));
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
protected void handleAddListener(String[] args) {
String first = args[0];
if (first.startsWith("s.")) {
getSet().addItemListener(this, true);
} else if (first.startsWith("m.")) {
if (args.length > 1) {
getMap().addEntryListener(this, args[1], true);
} else {
getMap().addEntryListener(this, true);
}
} else if (first.startsWith("mm.")) {
if (args.length > 1) {
getMultiMap().addEntryListener(this, args[1], true);
} else {
getMultiMap().addEntryListener(this, true);
}
} else if (first.startsWith("q.")) {
getQueue().addItemListener(this, true);
} else if (first.startsWith("t.")) {
getTopic().addMessageListener(this);
} else if (first.startsWith("l.")) {
getList().addItemListener(this, true);
}
}
protected void handleRemoveListener(String[] args) {
// String first = args[0];
// if (first.startsWith("s.")) {
// getSet().removeItemListener(this);
// } else if (first.startsWith("m.")) {
// if (args.length > 1) {
// // todo revise here
// getMap().removeEntryListener(args[1]);
// } else {
// getMap().removeEntryListener(args[0]);
// }
// } else if (first.startsWith("q.")) {
// getQueue().removeItemListener(this);
// } else if (first.startsWith("t.")) {
// getTopic().removeMessageListener(this);
// } else if (first.startsWith("l.")) {
// getList().removeItemListener(this);
// }
}
protected void handleSetAdd(String[] args) {
println(getSet().add(args[1]));
}
protected void handleSetRemove(String[] args) {
println(getSet().remove(args[1]));
}
protected void handleSetAddMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
int successCount = 0;
long t0 = Clock.currentTimeMillis();
for (int i = 0; i < count; i++) {
boolean success = getSet().add("obj" + i);
if (success) {
successCount++;
}
}
long t1 = Clock.currentTimeMillis();
println("Added " + successCount + " objects.");
println("size = " + getSet().size() + ", " + successCount * ONE_THOUSAND / (t1 - t0)
+ " evt/s");
}
protected void handleSetRemoveMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
int successCount = 0;
long t0 = Clock.currentTimeMillis();
for (int i = 0; i < count; i++) {
boolean success = getSet().remove("obj" + i);
if (success) {
successCount++;
}
}
long t1 = Clock.currentTimeMillis();
println("Removed " + successCount + " objects.");
println("size = " + getSet().size() + ", " + successCount * ONE_THOUSAND / (t1 - t0)
+ " evt/s");
}
protected void handleIterator(String[] args) {
Iterator it = null;
String iteratorStr = args[0];
if (iteratorStr.startsWith("s.")) {
it = getSet().iterator();
} else if (iteratorStr.startsWith("m.")) {
it = getMap().keySet().iterator();
} else if (iteratorStr.startsWith("mm.")) {
it = getMultiMap().keySet().iterator();
} else if (iteratorStr.startsWith("q.")) {
it = getQueue().iterator();
} else if (iteratorStr.startsWith("l.")) {
it = getList().iterator();
}
if (it != null) {
boolean remove = false;
if (args.length > 1) {
String removeStr = args[1];
remove = removeStr.equals("remove");
}
int count = 1;
while (it.hasNext()) {
print(count++ + " " + it.next());
if (remove) {
it.remove();
print(" removed");
}
println("");
}
}
}
protected void handleContains(String[] args) {
String iteratorStr = args[0];
boolean key = false;
boolean value = false;
if (iteratorStr.toLowerCase().endsWith("key")) {
key = true;
} else if (iteratorStr.toLowerCase().endsWith("value")) {
value = true;
}
String data = args[1];
boolean result = false;
if (iteratorStr.startsWith("s.")) {
result = getSet().contains(data);
} else if (iteratorStr.startsWith("m.")) {
result = (key) ? getMap().containsKey(data) : getMap().containsValue(data);
} else if (iteratorStr.startsWith("mmm.")) {
result = (key) ? getMultiMap().containsKey(data) : getMultiMap().containsValue(data);
} else if (iteratorStr.startsWith("q.")) {
result = getQueue().contains(data);
} else if (iteratorStr.startsWith("l.")) {
result = getList().contains(data);
}
println("Contains : " + result);
}
protected void handleSize(String[] args) {
int size = 0;
String iteratorStr = args[0];
if (iteratorStr.startsWith("s.")) {
size = getSet().size();
} else if (iteratorStr.startsWith("m.")) {
size = getMap().size();
} else if (iteratorStr.startsWith("mm.")) {
size = getMultiMap().size();
} else if (iteratorStr.startsWith("q.")) {
size = getQueue().size();
} else if (iteratorStr.startsWith("l.")) {
size = getList().size();
}
println("Size = " + size);
}
protected void handleClear(String[] args) {
String iteratorStr = args[0];
if (iteratorStr.startsWith("s.")) {
getSet().clear();
} else if (iteratorStr.startsWith("m.")) {
getMap().clear();
} else if (iteratorStr.startsWith("mm.")) {
getMultiMap().clear();
} else if (iteratorStr.startsWith("q.")) {
getQueue().clear();
} else if (iteratorStr.startsWith("l.")) {
getList().clear();
}
println("Cleared all.");
}
protected void handleDestroy(String[] args) {
String iteratorStr = args[0];
if (iteratorStr.startsWith("s.")) {
getSet().destroy();
} else if (iteratorStr.startsWith("m.")) {
getMap().destroy();
} else if (iteratorStr.startsWith("mm.")) {
getMultiMap().destroy();
} else if (iteratorStr.startsWith("q.")) {
getQueue().destroy();
} else if (iteratorStr.startsWith("l.")) {
getList().destroy();
} else if (iteratorStr.startsWith("t.")) {
getTopic().destroy();
}
println("Destroyed!");
}
protected void handleQOffer(String[] args) {
long timeout = 0;
if (args.length > 2) {
timeout = Long.parseLong(args[2]);
}
try {
boolean offered = getQueue().offer(args[1], timeout, TimeUnit.SECONDS);
println(offered);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void handleQTake(String[] args) {
try {
println(getQueue().take());
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void handleQPoll(String[] args) {
long timeout = 0;
if (args.length > 1) {
timeout = Long.parseLong(args[1]);
}
try {
println(getQueue().poll(timeout, TimeUnit.SECONDS));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
protected void handleTopicPublish(String[] args) {
getTopic().publish(args[1]);
}
protected void handleQOfferMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
Object value = null;
if (args.length > 2) {
value = new byte[Integer.parseInt(args[2])];
}
long t0 = Clock.currentTimeMillis();
for (int i = 0; i < count; i++) {
if (value == null) {
getQueue().offer("obj");
} else {
getQueue().offer(value);
}
}
long t1 = Clock.currentTimeMillis();
print("size = " + getQueue().size() + ", " + count * ONE_THOUSAND / (t1 - t0) + " evt/s");
if (value == null) {
println("");
} else {
int b = Integer.parseInt(args[2]);
println(", " + (count * ONE_THOUSAND / (t1 - t0)) * (b * 8) / ONE_KB + " Kbit/s, "
+ count * b / ONE_KB + " KB added");
}
}
protected void handleQPollMany(String[] args) {
int count = 1;
if (args.length > 1) {
count = Integer.parseInt(args[1]);
}
int c = 1;
for (int i = 0; i < count; i++) {
Object obj = getQueue().poll();
if (obj instanceof byte[]) {
println(c++ + " " + ((byte[]) obj).length);
} else {
println(c++ + " " + obj);
}
}
}
protected void handleQPeek(String[] args) {
println(getQueue().peek());
}
protected void handleQCapacity(String[] args) {
println(getQueue().remainingCapacity());
}
private void execute(String[] args) {
// execute <echo-string>
doExecute(false, false, args);
}
private void executeOnKey(String[] args) {
// executeOnKey <echo-string> <key>
doExecute(true, false, args);
}
private void executeOnMember(String[] args) {
// executeOnMember <echo-string> <memberIndex>
doExecute(false, true, args);
}
private void doExecute(boolean onKey, boolean onMember, String[] args) {
// executeOnKey <echo-string> <key>
try {
IExecutorService executorService = hazelcast.getExecutorService("default");
Echo callable = new Echo(args[1]);
Future<String> future;
if (onKey) {
String key = args[2];
future = executorService.submitToKeyOwner(callable, key);
} else if (onMember) {
int memberIndex = Integer.parseInt(args[2]);
List<Member> members = new LinkedList(hazelcast.getCluster().getMembers());
if (memberIndex >= members.size()) {
throw new IndexOutOfBoundsException("Member index: " + memberIndex + " must be smaller than " + members
.size());
}
Member member = members.get(memberIndex);
future = executorService.submitToMember(callable, member);
} else {
future = executorService.submit(callable);
}
println("Result: " + future.get());
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
private void executeOnMembers(String[] args) {
// executeOnMembers <echo-string>
try {
IExecutorService executorService = hazelcast.getExecutorService("default");
Echo task = new Echo(args[1]);
Map<Member, Future<String>> results = executorService.submitToAllMembers(task);
for (Future f : results.values()) {
println(f.get());
}
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
@Override
public void entryAdded(EntryEvent event) {
println(event);
}
@Override
public void entryRemoved(EntryEvent event) {
println(event);
}
@Override
public void entryUpdated(EntryEvent event) {
println(event);
}
@Override
public void entryEvicted(EntryEvent event) {
println(event);
}
@Override
public void itemAdded(ItemEvent itemEvent) {
println("Item added = " + itemEvent.getItem());
}
@Override
public void itemRemoved(ItemEvent itemEvent) {
println("Item removed = " + itemEvent.getItem());
}
@Override
public void onMessage(Message msg) {
println("Topic received = " + msg.getMessageObject());
}
/**
* Echoes to screen
*/
public static class Echo extends HazelcastInstanceAwareObject implements Callable<String>, DataSerializable {
String input;
public Echo() {
}
public Echo(String input) {
this.input = input;
}
@Override
public String call() {
getHazelcastInstance().getCountDownLatch("latch").countDown();
return getHazelcastInstance().getCluster().getLocalMember().toString() + ":" + input;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(input);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
input = in.readUTF();
}
}
/**
* A Hazelcast instance aware object
*/
private static class HazelcastInstanceAwareObject implements HazelcastInstanceAware {
HazelcastInstance hazelcastInstance;
public HazelcastInstance getHazelcastInstance() {
return hazelcastInstance;
}
@Override
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
this.hazelcastInstance = hazelcastInstance;
}
}
/**
* Handled the help command
*
* @param command
*/
protected void handleHelp(String command) {
boolean silentBefore = silent;
silent = false;
println("Commands:");
printGeneralCommands();
printQueueCommands();
printSetCommands();
printLockCommands();
printMapCommands();
printMulitiMapCommands();
printListCommands();
printAtomicLongCommands();
printExecutorServiceCommands();
silent = silentBefore;
}
private void printGeneralCommands() {
println("-- General commands");
println("echo true|false //turns on/off echo of commands (default false)");
println("silent true|false //turns on/off silent of command output (default false)");
println("#<number> <command> //repeats <number> time <command>, replace $i in <command> with current "
+ "iteration (0..<number-1>)");
println("&<number> <command> //forks <number> threads to execute <command>, "
+ "replace $t in <command> with current thread number (0..<number-1>");
println(" When using #x or &x, is is advised to use silent true as well.");
println(" When using &x with m.putmany and m.removemany, each thread will get a different share of keys unless a "
+ "start key index is specified");
println("jvm //displays info about the runtime");
println("who //displays info about the cluster");
println("whoami //displays info about this cluster member");
println("ns <string> //switch the namespace for using the distributed queue/map/set/list "
+ "<string> (defaults to \"default\"");
println("@<file> //executes the given <file> script. Use '//' for comments in the script");
println("");
}
private void printQueueCommands() {
println("-- Queue commands");
println("q.offer <string> //adds a string object to the queue");
println("q.poll //takes an object from the queue");
println("q.offermany <number> [<size>] //adds indicated number of string objects to the queue ('obj<i>' or "
+ "byte[<size>]) ");
println("q.pollmany <number> //takes indicated number of objects from the queue");
println("q.iterator [remove] //iterates the queue, remove if specified");
println("q.size //size of the queue");
println("q.clear //clears the queue");
println("");
}
private void printSetCommands() {
println("-- Set commands");
println("s.add <string> //adds a string object to the set");
println("s.remove <string> //removes the string object from the set");
println("s.addmany <number> //adds indicated number of string objects to the set ('obj<i>')");
println("s.removemany <number> //takes indicated number of objects from the set");
println("s.iterator [remove] //iterates the set, removes if specified");
println("s.size //size of the set");
println("s.clear //clears the set");
println("");
}
private void printLockCommands() {
println("-- Lock commands");
println("lock <key> //same as Hazelcast.getLock(key).lock()");
println("tryLock <key> //same as Hazelcast.getLock(key).tryLock()");
println("tryLock <key> <time> //same as tryLock <key> with timeout in seconds");
println("unlock <key> //same as Hazelcast.getLock(key).unlock()");
println("");
}
private void printMapCommands() {
println("-- Map commands");
println("m.put <key> <value> //puts an entry to the map");
println("m.remove <key> //removes the entry of given key from the map");
println("m.get <key> //returns the value of given key from the map");
println("m.putmany <number> [<size>] [<index>]//puts indicated number of entries to the map ('key<i>':byte[<size>], "
+ "<index>+(0..<number>)");
println("m.removemany <number> [<index>] //removes indicated number of entries from the map ('key<i>', "
+ "<index>+(0..<number>)");
println(" When using &x with m.putmany and m.removemany, each thread will get a different share of keys unless a "
+ "start key <index> is specified");
println("m.keys //iterates the keys of the map");
println("m.values //iterates the values of the map");
println("m.entries //iterates the entries of the map");
println("m.iterator [remove] //iterates the keys of the map, remove if specified");
println("m.size //size of the map");
println("m.localSize //local size of the map");
println("m.clear //clears the map");
println("m.destroy //destroys the map");
println("m.lock <key> //locks the key");
println("m.tryLock <key> //tries to lock the key and returns immediately");
println("m.tryLock <key> <time> //tries to lock the key within given seconds");
println("m.unlock <key> //unlocks the key");
println("m.stats //shows the local stats of the map");
println("");
}
private void printMulitiMapCommands() {
println("-- MultiMap commands");
println("mm.put <key> <value> //puts an entry to the multimap");
println("mm.get <key> //returns the value of given key from the multimap");
println("mm.remove <key> //removes the entry of given key from the multimap");
println("mm.size //size of the multimap");
println("mm.clear //clears the multimap");
println("mm.destroy //destroys the multimap");
println("mm.iterator [remove] //iterates the keys of the multimap, remove if specified");
println("mm.keys //iterates the keys of the multimap");
println("mm.values //iterates the values of the multimap");
println("mm.entries //iterates the entries of the multimap");
println("mm.lock <key> //locks the key");
println("mm.tryLock <key> //tries to lock the key and returns immediately");
println("mm.tryLock <key> <time> //tries to lock the key within given seconds");
println("mm.unlock <key> //unlocks the key");
println("mm.stats //shows the local stats of the multimap");
println("");
}
private void printExecutorServiceCommands() {
println("-- Executor Service commands:");
println("execute <echo-input> //executes an echo task on random member");
println("executeOnKey <echo-input> <key> //executes an echo task on the member that owns the given key");
println("executeOnMember <echo-input> <memberIndex> //executes an echo task on the member with given index");
println("executeOnMembers <echo-input> //executes an echo task on all of the members");
println("e<threadcount>.simulateLoad <task-count> <delaySeconds> //simulates load on executor with given number "
+ "of thread (e1..e16)");
println("");
}
private void printAtomicLongCommands() {
println("-- IAtomicLong commands:");
println("a.get");
println("a.set <long>");
println("a.inc");
println("a.dec");
print("");
}
private void printListCommands() {
println("-- List commands:");
println("l.add <string>");
println("l.add <index> <string>");
println("l.contains <string>");
println("l.remove <string>");
println("l.remove <index>");
println("l.set <index> <string>");
println("l.iterator [remove]");
println("l.size");
println("l.clear");
print("");
}
public void println(Object obj) {
if (!silent) {
System.out.println(obj);
}
}
public void print(Object obj) {
if (!silent) {
System.out.print(obj);
}
}
/**
* Starts the test application. Loads the config from classpath hazelcast.xml,
* if it fails to load, will use default config.
*
* @param args none
* @throws Exception
*/
public static void main(String[] args) throws Exception {
Config config;
try {
config = new FileSystemXmlConfig("hazelcast.xml");
} catch (FileNotFoundException e) {
config = new Config();
}
for (int k = 1; k <= LOAD_EXECUTORS_COUNT; k++) {
config.addExecutorConfig(new ExecutorConfig("e" + k).setPoolSize(k));
}
TestApp testApp = new TestApp(Hazelcast.newHazelcastInstance(null));
testApp.start(args);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_examples_TestApp.java |
77 | public class OSharedResourceExternalTimeout extends OSharedResourceTimeout {
public OSharedResourceExternalTimeout(final int timeout) {
super(timeout);
}
@Override
public void acquireExclusiveLock() throws OTimeoutException {
super.acquireExclusiveLock();
}
@Override
public void acquireSharedLock() throws OTimeoutException {
super.acquireSharedLock();
}
@Override
public void releaseExclusiveLock() {
super.releaseExclusiveLock();
}
@Override
public void releaseSharedLock() {
super.releaseSharedLock();
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_concur_resource_OSharedResourceExternalTimeout.java |
608 | public class OIndexManagerRemote extends OIndexManagerAbstract {
private static final String QUERY_DROP = "drop index %s";
public OIndexManagerRemote(final ODatabaseRecord iDatabase) {
super(iDatabase);
}
protected OIndex<?> getRemoteIndexInstance(boolean isMultiValueIndex, String type, String name, Set<String> clustersToIndex,
OIndexDefinition indexDefinition, ORID identity, ODocument configuration) {
if (isMultiValueIndex)
return new OIndexRemoteMultiValue(name, type, identity, indexDefinition, configuration, clustersToIndex);
return new OIndexRemoteOneValue(name, type, identity, indexDefinition, configuration, clustersToIndex);
}
public OIndex<?> createIndex(final String iName, final String iType, final OIndexDefinition iIndexDefinition,
final int[] iClusterIdsToIndex, final OProgressListener iProgressListener) {
final String createIndexDDL;
if (iIndexDefinition != null) {
createIndexDDL = iIndexDefinition.toCreateIndexDDL(iName, iType);
} else {
createIndexDDL = new OSimpleKeyIndexDefinition().toCreateIndexDDL(iName, iType);
}
acquireExclusiveLock();
try {
if (iProgressListener != null) {
iProgressListener.onBegin(this, 0);
}
getDatabase().command(new OCommandSQL(createIndexDDL)).execute();
document.setIdentity(new ORecordId(document.getDatabase().getStorage().getConfiguration().indexMgrRecordId));
if (iProgressListener != null) {
iProgressListener.onCompletition(this, true);
}
reload();
return preProcessBeforeReturn(indexes.get(iName.toLowerCase()));
} finally {
releaseExclusiveLock();
}
}
public OIndexManager dropIndex(final String iIndexName) {
acquireExclusiveLock();
try {
final String text = String.format(QUERY_DROP, iIndexName);
getDatabase().command(new OCommandSQL(text)).execute();
// REMOVE THE INDEX LOCALLY
indexes.remove(iIndexName.toLowerCase());
reload();
return this;
} finally {
releaseExclusiveLock();
}
}
@Override
protected void fromStream() {
acquireExclusiveLock();
try {
clearMetadata();
final Collection<ODocument> idxs = document.field(CONFIG_INDEXES);
if (idxs != null) {
for (ODocument d : idxs) {
try {
OIndexInternal<?> newIndex = OIndexes.createIndex(getDatabase(), (String) d.field(OIndexInternal.CONFIG_TYPE),
document.<String> field(OIndexInternal.ALGORITHM),
document.<String> field(OIndexInternal.VALUE_CONTAINER_ALGORITHM));
OIndexInternal.IndexMetadata newIndexMetadata = newIndex.loadMetadata(d);
addIndexInternal(getRemoteIndexInstance(newIndex instanceof OIndexMultiValues, newIndexMetadata.getType(),
newIndexMetadata.getName(), newIndexMetadata.getClustersToIndex(), newIndexMetadata.getIndexDefinition(),
(ORID) d.field(OIndexAbstract.CONFIG_MAP_RID, OType.LINK), d));
} catch (Exception e) {
OLogManager.instance().error(this, "Error on loading of index by configuration: %s", e, d);
}
}
}
} finally {
releaseExclusiveLock();
}
}
@Override
public ODocument toStream() {
throw new UnsupportedOperationException("Remote index cannot be streamed");
}
@Override
public void recreateIndexes() {
throw new UnsupportedOperationException("recreateIndexes()");
}
@Override
public void waitTillIndexRestore() {
}
@Override
public boolean autoRecreateIndexesAfterCrash() {
return false;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexManagerRemote.java |
54 | public class TypeArgumentListCompletions {
public static void addTypeArgumentListProposal(final int offset,
final CeylonParseController cpc, final Node node,
final Scope scope, final IDocument document,
final List<ICompletionProposal> result) {
final Integer startIndex2 = node.getStartIndex();
final Integer stopIndex2 = node.getStopIndex();
final String typeArgText;
try {
typeArgText = document.get(startIndex2, stopIndex2-startIndex2+1);
}
catch (BadLocationException e) {
e.printStackTrace();
return;
}
new Visitor() {
@Override
public void visit(Tree.StaticMemberOrTypeExpression that) {
Tree.TypeArguments tal = that.getTypeArguments();
Integer startIndex = tal==null ?
null : that.getTypeArguments().getStartIndex();
if (startIndex!=null && startIndex2!=null &&
startIndex.intValue()==startIndex2.intValue()) {
ProducedReference pr = that.getTarget();
Declaration d = that.getDeclaration();
if (d instanceof Functional && pr!=null) {
try {
String pref = document.get(that.getStartIndex(),
that.getStopIndex()-that.getStartIndex()+1);
addInvocationProposals(offset, pref, cpc, result, d,
pr, scope, null, typeArgText, false);
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
}
super.visit(that);
}
public void visit(Tree.SimpleType that) {
Tree.TypeArgumentList tal = that.getTypeArgumentList();
Integer startIndex = tal==null ? null : tal.getStartIndex();
if (startIndex!=null && startIndex2!=null &&
startIndex.intValue()==startIndex2.intValue()) {
Declaration d = that.getDeclarationModel();
if (d instanceof Functional) {
try {
String pref = document.get(that.getStartIndex(),
that.getStopIndex()-that.getStartIndex()+1);
addInvocationProposals(offset, pref, cpc, result, d,
that.getTypeModel(), scope, null, typeArgText,
false);
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
}
super.visit(that);
}
}.visit(cpc.getRootNode());
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_TypeArgumentListCompletions.java |
1,192 | bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
@Override
public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(new Handler());
}
}); | 0true
| src_main_java_org_elasticsearch_bulk_udp_BulkUdpService.java |
1,492 | public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private boolean isVertex;
private ElementChecker elementChecker;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
final String key = context.getConfiguration().get(KEY);
final Class valueClass = context.getConfiguration().getClass(VALUE_CLASS, String.class);
final String[] valueStrings = context.getConfiguration().getStrings(VALUES);
final Object[] values = new Object[valueStrings.length];
if (valueClass.equals(Object.class)) {
for (int i = 0; i < valueStrings.length; i++) {
values[i] = null;
}
} else if (valueClass.equals(String.class)) {
for (int i = 0; i < valueStrings.length; i++) {
values[i] = (valueStrings[i].equals(Tokens.NULL)) ? null : valueStrings[i];
}
} else if (Number.class.isAssignableFrom((valueClass))) {
for (int i = 0; i < valueStrings.length; i++) {
values[i] = (valueStrings[i].equals(Tokens.NULL)) ? null : Float.valueOf(valueStrings[i]);
}
} else if (valueClass.equals(Boolean.class)) {
for (int i = 0; i < valueStrings.length; i++) {
values[i] = (valueStrings[i].equals(Tokens.NULL)) ? null : Boolean.valueOf(valueStrings[i]);
}
} else {
throw new IOException("Class " + valueClass + " is an unsupported value class");
}
final Compare compare = Compare.valueOf(context.getConfiguration().get(COMPARE));
this.elementChecker = new ElementChecker(key, compare, values);
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.isVertex) {
if (value.hasPaths() && !this.elementChecker.isLegal(value)) {
value.clearPaths();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_FILTERED, 1L);
}
} else {
long edgesFiltered = 0;
for (Edge e : value.getEdges(Direction.BOTH)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths() && !this.elementChecker.isLegal(edge)) {
edge.clearPaths();
edgesFiltered++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_FILTERED, edgesFiltered);
}
context.write(NullWritable.get(), value);
}
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_PropertyFilterMap.java |
1,203 | public abstract class OutOfMemoryHandler {
/**
* When an <code>OutOfMemoryError</code> is caught by Hazelcast threads,
* this method is called for ALL <code>HazelcastInstance</code>s
* knows by current JVM (actually ClassLoader).
*
* <p>
* User can shutdown <tt>HazelcastInstance</tt>, call <code>System.exit()</code>,
* just log the error etc.
* Default handler tries to close socket connections to other nodes and shutdown
* <tt>HazelcastInstance</tt>.
* </p>
*
* <p>
* <b>Warning: </b> <tt>OutOfMemoryHandler</tt> may not be called although JVM throws
* <tt>OutOfMemoryError</tt>.
* Because error may be thrown from an external (user thread) thread
* and Hazelcast may not be informed about <tt>OutOfMemoryError</tt>.
* </p>
*
* @see OutOfMemoryHandler#inactivate(HazelcastInstance)
* @see OutOfMemoryHandler#tryCloseConnections(HazelcastInstance)
* @see OutOfMemoryHandler#tryStopThreads(HazelcastInstance)
* @see OutOfMemoryHandler#tryShutdown(HazelcastInstance)
*
* @param oom OutOfMemoryError thrown by JVM
* @param hazelcastInstances All HazelcastInstances known by JVM,
* can include inactive or NULL instances.
*/
public abstract void onOutOfMemory(OutOfMemoryError oom, HazelcastInstance[] hazelcastInstances);
/**
* Just inactivates <tt>HazelcastInstance</tt>; leaves threads, connections untouched.
*
* @param hazelcastInstance
*/
protected final void inactivate(final HazelcastInstance hazelcastInstance) {
OutOfMemoryErrorDispatcher.Helper.inactivate(hazelcastInstance);
}
/**
* Tries to close server socket and connections to other <tt>HazelcastInstance</tt>s.
*
* @param hazelcastInstance
*/
protected final void tryCloseConnections(HazelcastInstance hazelcastInstance) {
OutOfMemoryErrorDispatcher.Helper.tryCloseConnections(hazelcastInstance);
}
/**
* Tries to stop internal Hazelcast threads (such as service thread, IO threads, executor threads).
*
* @param hazelcastInstance
*/
protected final void tryStopThreads(final HazelcastInstance hazelcastInstance) {
OutOfMemoryErrorDispatcher.Helper.tryStopThreads(hazelcastInstance);
}
/**
* Tries to shutdown <tt>HazelcastInstance</tt> forcefully;
* including closing sockets and connections, stopping threads etc.
*
* @param hazelcastInstance
*/
protected final void tryShutdown(final HazelcastInstance hazelcastInstance) {
OutOfMemoryErrorDispatcher.Helper.tryShutdown(hazelcastInstance);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_core_OutOfMemoryHandler.java |
1,275 | public class OStorageLocalTxExecuter {
private final OStorageLocal storage;
private final OTxSegment txSegment;
private OTransaction currentTransaction;
public OStorageLocalTxExecuter(final OStorageLocal iStorage, final OStorageTxConfiguration iConfig) throws IOException {
storage = iStorage;
iConfig.path = OStorageVariableParser.DB_PATH_VARIABLE + "/txlog.otx";
txSegment = new OTxSegment(storage, iStorage.getConfiguration().txSegment);
}
public void open() throws IOException {
try {
txSegment.open();
} catch (FileNotFoundException e) {
OLogManager.instance().warn(this, "Creating new txlog file '%s'", txSegment.getFile());
create();
} catch (Exception e) {
OLogManager.instance().warn(this, "Error on opening the txlog file '%s', reset it", e, txSegment.getFile());
create();
}
}
public void create() throws IOException {
txSegment.create(0);
}
public void close() throws IOException {
txSegment.close();
}
protected OPhysicalPosition createRecord(final int iTxId, final ODataLocal iDataSegment, final OCluster iClusterSegment,
final ORecordId iRid, final byte[] iContent, final ORecordVersion iRecordVersion, final byte iRecordType) {
try {
final OPhysicalPosition ppos = storage.createRecord(iDataSegment, iClusterSegment, iContent, iRecordType, iRid,
iRecordVersion);
// SAVE INTO THE LOG THE POSITION OF THE RECORD JUST CREATED. IF TX FAILS AT THIS POINT A GHOST RECORD IS CREATED UNTIL DEFRAG
txSegment.addLog(OTxSegment.OPERATION_CREATE, iTxId, iRid.clusterId, iRid.clusterPosition, iRecordType, OVersionFactory
.instance().createVersion(), null, iDataSegment.getId());
return ppos;
} catch (IOException e) {
OLogManager.instance().error(this, "Error on creating entry in log segment: " + iClusterSegment, e,
OTransactionException.class);
return null;
}
}
/**
* Stores the new content in a new position, then saves in the log the coords of the new position. At free time the
*
* @param iTxId
* @param iClusterSegment
* @param iRid
* @param iContent
* @param iVersion
* @param iRecordType
* @return
*/
protected ORecordVersion updateRecord(final int iTxId, final OCluster iClusterSegment, final ORecordId iRid,
final byte[] iContent, final ORecordVersion iVersion, final byte iRecordType) {
try {
// READ CURRENT RECORD CONTENT
final ORawBuffer buffer = storage.readRecord(iClusterSegment, iRid, true, false);
if (buffer == null)
if (OFastConcurrentModificationException.enabled())
throw OFastConcurrentModificationException.instance();
else
throw new OConcurrentModificationException(iRid, new OSimpleVersion(), iVersion, ORecordOperation.UPDATED);
// SAVE INTO THE LOG THE POSITION OF THE OLD RECORD JUST DELETED. IF TX FAILS AT THIS POINT AS ABOVE
txSegment.addLog(OTxSegment.OPERATION_UPDATE, iTxId, iRid.clusterId, iRid.clusterPosition, iRecordType, buffer.version,
buffer.buffer, -1);
final OPhysicalPosition ppos = storage.updateRecord(iClusterSegment, iRid, iContent, iVersion, iRecordType);
if (ppos != null)
return ppos.recordVersion;
return OVersionFactory.instance().createUntrackedVersion();
} catch (IOException e) {
OLogManager.instance().error(this, "Error on updating entry #" + iRid + " in log segment: " + iClusterSegment, e,
OTransactionException.class);
}
return OVersionFactory.instance().createUntrackedVersion();
}
protected boolean deleteRecord(final int iTxId, final OCluster iClusterSegment, final OClusterPosition iPosition,
final ORecordVersion iVersion) {
try {
final ORecordId rid = new ORecordId(iClusterSegment.getId(), iPosition);
// READ CURRENT RECORD CONTENT
final ORawBuffer buffer = storage.readRecord(iClusterSegment, rid, true, false);
if (buffer != null) {
// SAVE INTO THE LOG THE OLD RECORD
final OPhysicalPosition ppos = iClusterSegment.getPhysicalPosition(new OPhysicalPosition(iPosition));
txSegment.addLog(OTxSegment.OPERATION_DELETE, iTxId, iClusterSegment.getId(), iPosition, buffer.recordType, buffer.version,
buffer.buffer, ppos.dataSegmentId);
return storage
.deleteRecord(iClusterSegment, rid, iVersion, OGlobalConfiguration.STORAGE_USE_TOMBSTONES.getValueAsBoolean()) != null;
}
} catch (IOException e) {
OLogManager.instance().error(this, "Error on deleting entry #" + iPosition + " in log segment: " + iClusterSegment, e,
OTransactionException.class);
}
return false;
}
public OTxSegment getTxSegment() {
return txSegment;
}
public void commitAllPendingRecords(final OTransaction iTx) throws IOException {
currentTransaction = iTx;
try {
// COPY ALL THE ENTRIES IN SEPARATE COLLECTION SINCE DURING THE COMMIT PHASE SOME NEW ENTRIES COULD BE CREATED AND
// CONCURRENT-EXCEPTION MAY OCCURS
final List<ORecordOperation> tmpEntries = new ArrayList<ORecordOperation>();
while (iTx.getCurrentRecordEntries().iterator().hasNext()) {
for (ORecordOperation txEntry : iTx.getCurrentRecordEntries())
tmpEntries.add(txEntry);
iTx.clearRecordEntries();
if (!tmpEntries.isEmpty()) {
for (ORecordOperation txEntry : tmpEntries)
// COMMIT ALL THE SINGLE ENTRIES ONE BY ONE
commitEntry(iTx, txEntry, iTx.isUsingLog());
}
}
// UPDATE THE CACHE ONLY IF THE ITERATOR ALLOWS IT
OTransactionAbstract.updateCacheFromEntries(iTx, iTx.getAllRecordEntries(), true);
} finally {
currentTransaction = null;
}
}
public void clearLogEntries(final OTransaction iTx) throws IOException {
// CLEAR ALL TEMPORARY RECORDS
txSegment.clearLogEntries(iTx.getId());
}
private void commitEntry(final OTransaction iTx, final ORecordOperation txEntry, final boolean iUseLog) throws IOException {
if (txEntry.type != ORecordOperation.DELETED && !txEntry.getRecord().isDirty())
return;
final ORecordId rid = (ORecordId) txEntry.getRecord().getIdentity();
if (rid.clusterId == ORID.CLUSTER_ID_INVALID && txEntry.getRecord() instanceof ODocument
&& ((ODocument) txEntry.getRecord()).getSchemaClass() != null) {
// TRY TO FIX CLUSTER ID TO THE DEFAULT CLUSTER ID DEFINED IN SCHEMA CLASS
rid.clusterId = ((ODocument) txEntry.getRecord()).getSchemaClass().getDefaultClusterId();
}
final OCluster cluster = storage.getClusterById(rid.clusterId);
final ODataLocal dataSegment = storage.getDataSegmentById(txEntry.dataSegmentId);
if (cluster.getName().equals(OMetadataDefault.CLUSTER_INDEX_NAME)
|| cluster.getName().equals(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME))
// AVOID TO COMMIT INDEX STUFF
return;
if (!(cluster instanceof OClusterLocal))
// ONLY LOCAL CLUSTER ARE INVOLVED IN TX
return;
if (txEntry.getRecord() instanceof OTxListener)
((OTxListener) txEntry.getRecord()).onEvent(txEntry, OTxListener.EVENT.BEFORE_COMMIT);
switch (txEntry.type) {
case ORecordOperation.LOADED:
break;
case ORecordOperation.CREATED: {
// CHECK 2 TIMES TO ASSURE THAT IT'S A CREATE OR AN UPDATE BASED ON RECURSIVE TO-STREAM METHOD
final byte[] stream = txEntry.getRecord().toStream();
if (stream == null) {
OLogManager.instance().warn(this, "Null serialization on committing new record %s in transaction", rid);
break;
}
final ORecordId oldRID = rid.isNew() ? rid.copy() : rid;
if (rid.isNew()) {
rid.clusterId = cluster.getId();
final OPhysicalPosition ppos;
if (iUseLog)
ppos = createRecord(iTx.getId(), dataSegment, cluster, rid, stream, txEntry.getRecord().getRecordVersion(), txEntry
.getRecord().getRecordType());
else
ppos = iTx
.getDatabase()
.getStorage()
.createRecord(txEntry.dataSegmentId, rid, stream, OVersionFactory.instance().createVersion(),
txEntry.getRecord().getRecordType(), (byte) 0, null).getResult();
rid.clusterPosition = ppos.clusterPosition;
txEntry.getRecord().getRecordVersion().copyFrom(ppos.recordVersion);
iTx.updateIdentityAfterCommit(oldRID, rid);
} else {
if (iUseLog)
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
updateRecord(iTx.getId(), cluster, rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord()
.getRecordType()));
else
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
iTx.getDatabase()
.getStorage()
.updateRecord(rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord().getRecordType(),
(byte) 0, null).getResult());
}
break;
}
case ORecordOperation.UPDATED: {
final byte[] stream = txEntry.getRecord().toStream();
if (stream == null) {
OLogManager.instance().warn(this, "Null serialization on committing updated record %s in transaction", rid);
break;
}
if (iUseLog)
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
updateRecord(iTx.getId(), cluster, rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord()
.getRecordType()));
else
txEntry
.getRecord()
.getRecordVersion()
.copyFrom(
iTx.getDatabase()
.getStorage()
.updateRecord(rid, stream, txEntry.getRecord().getRecordVersion(), txEntry.getRecord().getRecordType(),
(byte) 0, null).getResult());
break;
}
case ORecordOperation.DELETED: {
if (iUseLog)
deleteRecord(iTx.getId(), cluster, rid.clusterPosition, txEntry.getRecord().getRecordVersion());
else
iTx.getDatabase().getStorage().deleteRecord(rid, txEntry.getRecord().getRecordVersion(), (byte) 0, null);
}
break;
}
txEntry.getRecord().unsetDirty();
if (txEntry.getRecord() instanceof OTxListener)
((OTxListener) txEntry.getRecord()).onEvent(txEntry, OTxListener.EVENT.AFTER_COMMIT);
}
public boolean isCommitting() {
return currentTransaction != null;
}
public OTransaction getCurrentTransaction() {
return currentTransaction;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_OStorageLocalTxExecuter.java |
325 | public class OStoragePaginatedClusterConfiguration implements OStorageClusterConfiguration {
public static float DEFAULT_GROW_FACTOR = (float) 1.2;
public transient OStorageConfiguration root;
public int id;
public String name;
public String location;
public boolean useWal = true;
public float recordOverflowGrowFactor = DEFAULT_GROW_FACTOR;
public float recordGrowFactor = DEFAULT_GROW_FACTOR;
public String compression = OGlobalConfiguration.STORAGE_COMPRESSION_METHOD
.getValueAsString();
public OStoragePaginatedClusterConfiguration(OStorageConfiguration root, int id, String name, String location, boolean useWal,
float recordOverflowGrowFactor, float recordGrowFactor, String compression) {
this.root = root;
this.id = id;
this.name = name;
this.location = location;
this.useWal = useWal;
this.recordOverflowGrowFactor = recordOverflowGrowFactor;
this.recordGrowFactor = recordGrowFactor;
this.compression = compression;
}
@Override
public int getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getLocation() {
return location;
}
@Override
public int getDataSegmentId() {
return -1;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_config_OStoragePaginatedClusterConfiguration.java |
54 | private class ClusterListenerImpl extends ClusterListener.Adapter
{
@Override
public void enteredCluster( ClusterConfiguration clusterConfiguration )
{
// Catch up with elections
for ( Map.Entry<String, InstanceId> memberRoles : clusterConfiguration.getRoles().entrySet() )
{
elected( memberRoles.getKey(), memberRoles.getValue(),
clusterConfiguration.getUriForId( memberRoles.getValue() ) );
}
}
@Override
public void elected( String role, final InstanceId instanceId, final URI electedMember )
{
if ( role.equals( ClusterConfiguration.COORDINATOR ) )
{
// Use the cluster coordinator as master for HA
Listeners.notifyListeners( listeners, new Listeners.Notification<ClusterMemberListener>()
{
@Override
public void notify( ClusterMemberListener listener )
{
listener.coordinatorIsElected( instanceId );
}
} );
}
}
@Override
public void leftCluster( final InstanceId member )
{
// Notify unavailability of members
Listeners.notifyListeners( listeners, new Listeners.Notification<ClusterMemberListener>()
{
@Override
public void notify( ClusterMemberListener listener )
{
for ( MemberIsAvailable memberIsAvailable : clusterMembersSnapshot.getCurrentAvailable( member ) )
{
listener.memberIsUnavailable( memberIsAvailable.getRole(), member );
}
}
} );
clusterMembersSnapshot.unavailableMember( member );
}
} | 1no label
| enterprise_cluster_src_main_java_org_neo4j_cluster_member_paxos_PaxosClusterMemberEvents.java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.