Unnamed: 0
int64
0
6.45k
func
stringlengths
29
253k
target
class label
2 classes
project
stringlengths
36
167
3,127
/**
 * Listener that fails the whole engine when a background merge fails, so the
 * shard does not keep serving from a possibly corrupted index.
 */
class FailEngineOnMergeFailure implements MergeSchedulerProvider.FailureListener {
    @Override
    public void onFailedMerge(MergePolicy.MergeException e) {
        // Delegate to the enclosing engine; the merge exception becomes the failure cause.
        failEngine(e);
    }
}
1no label
src_main_java_org_elasticsearch_index_engine_internal_InternalEngine.java
1,211
/**
 * Recycler flavor that keeps a separate deque of pooled objects per thread,
 * so the hot path needs no synchronization and no cross-thread sharing.
 */
THREAD_LOCAL {
    @Override
    <T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors) {
        // availableProcessors is ignored here: capacity is per-thread, not per-CPU.
        return threadLocal(dequeFactory(c, limit));
    }
},
0true
src_main_java_org_elasticsearch_cache_recycler_CacheRecycler.java
511
/**
 * Nightly stress test: several client threads concurrently increment random
 * map entries via an optimistic replace() loop, while each thread records the
 * increments it applied locally. At the end the map contents must equal the
 * sum of all locally recorded increments for every key.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category(NightlyTest.class)
public class MapUpdateStressTest extends StressTestSupport {

    public static final int CLIENT_THREAD_COUNT = 5;
    public static final int MAP_SIZE = 100 * 1000;

    private HazelcastInstance client;
    private IMap<Integer, Integer> map;
    private StressThread[] stressThreads;

    @Before
    public void setUp() {
        super.setUp();
        ClientConfig clientConfig = new ClientConfig();
        // Retry operations so cluster topology changes do not surface as failures.
        clientConfig.setRedoOperation(true);
        client = HazelcastClient.newHazelcastClient(clientConfig);
        map = client.getMap("map");
        stressThreads = new StressThread[CLIENT_THREAD_COUNT];
        for (int k = 0; k < stressThreads.length; k++) {
            stressThreads[k] = new StressThread();
            stressThreads[k].start();
        }
    }

    @After
    public void tearDown() {
        super.tearDown();
        if (client != null) {
            client.shutdown();
        }
    }

    // Disabled: the topology-changing variant of this test.
    //@Test
    public void testChangingCluster() {
        test(true);
    }

    @Test
    public void testFixedCluster() {
        test(false);
    }

    public void test(boolean clusterChangeEnabled) {
        setClusterChangeEnabled(clusterChangeEnabled);
        fillMap();
        startAndWaitForTestCompletion();
        joinAll(stressThreads);
        assertNoUpdateFailures();
    }

    /**
     * Sums the per-thread increment tallies, compares each key's expected
     * total with the value actually stored in the map, prints every mismatch
     * and fails if any were found.
     */
    private void assertNoUpdateFailures() {
        int[] increments = new int[MAP_SIZE];
        for (StressThread t : stressThreads) {
            t.addIncrements(increments);
        }
        Set<Integer> failedKeys = new HashSet<Integer>();
        for (int k = 0; k < MAP_SIZE; k++) {
            int expectedValue = increments[k];
            int foundValue = map.get(k);
            if (expectedValue != foundValue) {
                failedKeys.add(k);
            }
        }
        if (failedKeys.isEmpty()) {
            return;
        }
        int index = 1;
        for (Integer key : failedKeys) {
            System.err.println("Failed write: " + index + " found:" + map.get(key) + " expected:" + increments[key]);
            index++;
        }
        fail("There are failed writes, number of failures:" + failedKeys.size());
    }

    /** Seeds every key in [0, MAP_SIZE) with an initial value of zero. */
    private void fillMap() {
        System.out.println("==================================================================");
        System.out.println("Inserting data in map");
        System.out.println("==================================================================");
        for (int k = 0; k < MAP_SIZE; k++) {
            map.put(k, 0);
            if (k % 10000 == 0) {
                System.out.println("Inserted data: " + k);
            }
        }
        System.out.println("==================================================================");
        System.out.println("Completed with inserting data in map");
        System.out.println("==================================================================");
    }

    public class StressThread extends TestThread {

        // Local tally of the increments this thread has applied, per key.
        private final int[] increments = new int[MAP_SIZE];

        @Override
        public void doRun() throws Exception {
            while (!isStopped()) {
                int key = random.nextInt(MAP_SIZE);
                int increment = random.nextInt(10);
                increments[key] += increment;
                // Optimistic CAS loop: retry replace() until no concurrent writer interfered.
                for (; ; ) {
                    int oldValue = map.get(key);
                    if (map.replace(key, oldValue, oldValue + increment)) {
                        break;
                    }
                }
            }
        }

        /** Adds this thread's per-key tallies into the shared totals array. */
        public void addIncrements(int[] increments) {
            for (int k = 0; k < increments.length; k++) {
                increments[k] += this.increments[k];
            }
        }
    }
}
0true
hazelcast-client_src_test_java_com_hazelcast_client_stress_MapUpdateStressTest.java
291
/**
 * Thrown when an action is addressed to a node id that is not part of the
 * cluster.
 */
public class NoSuchNodeException extends FailedNodeException {
    public NoSuchNodeException(String nodeId) {
        // No underlying cause: the node simply is not known.
        super(nodeId, "No such node [" + nodeId + "]", null);
    }
}
0true
src_main_java_org_elasticsearch_action_NoSuchNodeException.java
170
/**
 * Configuration contract shared by all transaction types: commit timing,
 * timestamp provisioning, group naming and arbitrary per-transaction options.
 */
public interface BaseTransactionConfig {

    /**
     * Returns the commit time of this transaction which is either a custom timestamp provided
     * by the user, the commit time as set by the enclosing operation, or the first time this method is called.
     *
     * @return commit timestamp for this transaction
     */
    public Timepoint getCommitTime();

    /**
     * Sets the commit time of this transaction. If a commit time has already been set, this method throws
     * an exception. Use {@link #hasCommitTime()} to check prior to setting.
     *
     * @param time the commit timestamp to assign to this transaction
     */
    public void setCommitTime(Timepoint time);

    /**
     * Returns true if a commit time has been set on this transaction.
     *
     * @return true when a commit time has already been assigned, false otherwise
     */
    public boolean hasCommitTime();

    /**
     * Returns the timestamp provider of this transaction.
     *
     * @return provider used to generate timestamps for this transaction
     */
    public TimestampProvider getTimestampProvider();

    /**
     * Returns the (possibly null) group name for this transaction.
     * Transactions are grouped under this name for reporting and error tracking purposes.
     *
     * @return group name prefix string or null
     */
    public String getGroupName();

    /**
     * True when {@link #getGroupName()} is non-null, false when null.
     *
     * @return whether a group name is configured for this transaction
     */
    public boolean hasGroupName();

    /**
     * Get an arbitrary transaction-specific option.
     *
     * @param opt option for which to return a value
     * @return value of the option
     */
    public <V> V getCustomOption(ConfigOption<V> opt);

    /**
     * Return any transaction-specific options.
     *
     * @see #getCustomOption(ConfigOption)
     * @return options for this tx
     */
    public Configuration getCustomOptions();
}
0true
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_BaseTransactionConfig.java
1,483
/**
 * Runnable scheduled by the routing service to trigger a reroute of the
 * cluster routing table on a worker thread.
 */
private class RoutingTableUpdater implements Runnable {
    @Override
    public void run() {
        // All the work happens in the enclosing service's reroute().
        reroute();
    }
}
0true
src_main_java_org_elasticsearch_cluster_routing_RoutingService.java
1,889
/**
 * Meta-annotation marking an annotation type as a scoping annotation, so the
 * injector can recognize it at runtime. It can only be placed on other
 * annotation types ({@code ANNOTATION_TYPE}).
 */
@Target(ANNOTATION_TYPE)
@Retention(RUNTIME)
public @interface ScopeAnnotation {
}
0true
src_main_java_org_elasticsearch_common_inject_ScopeAnnotation.java
929
/**
 * Controls how the local shard-level operations of a broadcast request are
 * spread over threads. Each constant carries a stable byte id used on the
 * wire; {@link #fromId(byte)} and {@link #fromString} are the two parsers.
 */
public enum BroadcastOperationThreading {
    /**
     * No threads are used, all the local shards operations will be performed on the calling
     * thread.
     */
    NO_THREADS((byte) 0),
    /**
     * The local shards operations will be performed in serial manner on a single forked thread.
     */
    SINGLE_THREAD((byte) 1),
    /**
     * Each local shard operation will execute on its own thread.
     */
    THREAD_PER_SHARD((byte) 2);

    private final byte id;

    BroadcastOperationThreading(byte id) {
        this.id = id;
    }

    /** The stable wire id of this constant. */
    public byte id() {
        return this.id;
    }

    /**
     * Resolves a constant from its wire id.
     *
     * @throws ElasticsearchIllegalArgumentException when the id is unknown
     */
    public static BroadcastOperationThreading fromId(byte id) {
        switch (id) {
            case 0:
                return NO_THREADS;
            case 1:
                return SINGLE_THREAD;
            case 2:
                return THREAD_PER_SHARD;
            default:
                throw new ElasticsearchIllegalArgumentException("No type matching id [" + id + "]");
        }
    }

    /**
     * Parses a (case-insensitive) constant name, falling back to
     * {@code defaultValue} when {@code value} is null.
     */
    public static BroadcastOperationThreading fromString(String value, BroadcastOperationThreading defaultValue) {
        return value == null ? defaultValue : BroadcastOperationThreading.valueOf(value.toUpperCase(Locale.ROOT));
    }
}
0true
src_main_java_org_elasticsearch_action_support_broadcast_BroadcastOperationThreading.java
1,701
/**
 * A {@link TreeSet} that never holds more than a fixed number of elements:
 * after every insertion the greatest elements (per the set's ordering) are
 * evicted until the configured capacity is respected again.
 */
public class BoundedTreeSet<E> extends TreeSet<E> {

    /** Maximum number of elements this set may retain. */
    private final int size;

    public BoundedTreeSet(int size) {
        this.size = size;
    }

    public BoundedTreeSet(Comparator<? super E> comparator, int size) {
        super(comparator);
        this.size = size;
    }

    @Override
    public boolean add(E e) {
        // The return value reflects the insertion itself, even when the element
        // is immediately evicted again by the capacity bound.
        final boolean changed = super.add(e);
        trimToBound();
        return changed;
    }

    @Override
    public boolean addAll(Collection<? extends E> c) {
        final boolean changed = super.addAll(c);
        trimToBound();
        return changed;
    }

    /** Evicts the greatest elements until the set fits the bound again. */
    private void trimToBound() {
        while (size() > size) {
            remove(last());
        }
    }
}
0true
src_main_java_org_elasticsearch_common_collect_BoundedTreeSet.java
1,792
/**
 * Callback interface used to report the progress of an HTTP download.
 */
public interface DownloadProgress {
    /**
     * begin a download
     */
    void beginDownload();

    /**
     * tick handler, invoked periodically while bytes are being transferred
     */
    void onTick();

    /**
     * end a download
     */
    void endDownload();
}
0true
src_main_java_org_elasticsearch_common_http_client_HttpDownloadHelper.java
132
/**
 * Completion proposal that inserts a literal initializer value at a fixed
 * document offset, replacing whatever token is already present there up to
 * the next whitespace, ';', ',' or ')'.
 */
private final class InitializerValueProposal implements ICompletionProposal, ICompletionProposalExtension2 {

    private final String text;   // replacement text to insert
    private final Image image;   // icon shown in the proposal popup
    private final int offset;    // document offset where the value starts

    private InitializerValueProposal(int offset, String text, Image image) {
        this.offset = offset;
        this.text = text;
        this.image = image;
    }

    /**
     * Computes the region starting at {@code offset} that this proposal will
     * replace: it extends until the first whitespace, ';', ',' or ')'.
     */
    protected IRegion getCurrentRegion(IDocument document) throws BadLocationException {
        int start = offset;
        int length = 0;
        for (int i=offset; i<document.getLength(); i++) {
            char ch = document.getChar(i);
            if (Character.isWhitespace(ch) || ch==';'||ch==','||ch==')') {
                break;
            }
            length++;
        }
        return new Region(start, length);
    }

    @Override
    public Image getImage() {
        return image;
    }

    @Override
    public Point getSelection(IDocument document) {
        // Place the caret right after the inserted text, with no selection.
        return new Point(offset + text.length(), 0);
    }

    public void apply(IDocument document) {
        try {
            IRegion region = getCurrentRegion(document);
            document.replace(region.getOffset(), region.getLength(), text);
        } catch (BadLocationException e) {
            // NOTE(review): failure is only dumped to stderr; consider routing to the platform log.
            e.printStackTrace();
        }
    }

    public String getDisplayString() {
        return text;
    }

    public String getAdditionalProposalInfo() {
        return null;
    }

    @Override
    public IContextInformation getContextInformation() {
        return null;
    }

    @Override
    public void apply(ITextViewer viewer, char trigger, int stateMask, int offset) {
        apply(viewer.getDocument());
    }

    @Override
    public void selected(ITextViewer viewer, boolean smartToggle) {}

    @Override
    public void unselected(ITextViewer viewer) {}

    @Override
    public boolean validate(IDocument document, int offset, DocumentEvent event) {
        try {
            // Still valid while what the user has typed so far is a prefix of the proposal text.
            IRegion region = getCurrentRegion(document);
            String prefix = document.get(region.getOffset(), offset-region.getOffset());
            return text.startsWith(prefix);
        } catch (BadLocationException e) {
            return false;
        }
    }
}
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_InitializerProposal.java
675
/**
 * TestNG suite for OLocalHashTable covering put/get/remove with sequential,
 * uniform-random and gaussian-random key distributions.
 */
@Test
public class LocalHashTableTest {
    private static final int KEYS_COUNT = 1600000;

    private ODatabaseDocumentTx databaseDocumentTx;
    private OLocalHashTable<Integer, String> localHashTable;

    @BeforeClass
    public void beforeClass() {
        String buildDirectory = System.getProperty("buildDirectory");
        if (buildDirectory == null)
            buildDirectory = ".";
        // Recreate the test database from scratch on every run.
        databaseDocumentTx = new ODatabaseDocumentTx("local:" + buildDirectory + "/localHashTableTest");
        if (databaseDocumentTx.exists()) {
            databaseDocumentTx.open("admin", "admin");
            databaseDocumentTx.drop();
        }
        databaseDocumentTx.create();
        OMurmurHash3HashFunction<Integer> murmurHash3HashFunction = new OMurmurHash3HashFunction<Integer>();
        murmurHash3HashFunction.setValueSerializer(OIntegerSerializer.INSTANCE);
        localHashTable = new OLocalHashTable<Integer, String>(".imc", ".tsc", ".obf", murmurHash3HashFunction);
        localHashTable.create("localHashTableTest", OIntegerSerializer.INSTANCE, OStringSerializer.INSTANCE, null,
            (OStorageLocal) databaseDocumentTx.getStorage());
    }

    @AfterClass
    public void afterClass() throws Exception {
        localHashTable.clear();
        localHashTable.delete();
        databaseDocumentTx.drop();
    }

    @BeforeMethod
    public void beforeMethod() {
    }

    @AfterMethod
    public void afterMethod() {
        // Each test starts with an empty table.
        localHashTable.clear();
    }

    /** Sequential keys: every put must be readable back; unknown keys return null. */
    public void testKeyPut() {
        for (int i = 0; i < KEYS_COUNT; i++) {
            localHashTable.put(i, i + "");
            Assert.assertEquals(localHashTable.get(i), i + "");
        }
        for (int i = 0; i < KEYS_COUNT; i++) {
            Assert.assertEquals(localHashTable.get(i), i + "", i + " key is absent");
        }
        for (int i = KEYS_COUNT; i < 2 * KEYS_COUNT; i++) {
            Assert.assertNull(localHashTable.get(i));
        }
    }

    /** Uniformly distributed random keys survive insertion and lookup. */
    public void testKeyPutRandomUniform() {
        final Set<Integer> keys = new HashSet<Integer>();
        final MersenneTwisterFast random = new MersenneTwisterFast();
        while (keys.size() < KEYS_COUNT) {
            int key = random.nextInt();
            localHashTable.put(key, key + "");
            keys.add(key);
            Assert.assertEquals(localHashTable.get(key), key + "");
        }
        for (int key : keys)
            Assert.assertEquals(localHashTable.get(key), "" + key);
    }

    /** Gaussian-clustered (non-uniform) random keys survive insertion and lookup. */
    public void testKeyPutRandomGaussian() {
        Set<Integer> keys = new HashSet<Integer>();
        MersenneTwisterFast random = new MersenneTwisterFast();
        keys.clear();
        while (keys.size() < KEYS_COUNT) {
            int key = (int) (random.nextGaussian() * Integer.MAX_VALUE / 2 + Integer.MAX_VALUE);
            if (key < 0)
                continue;
            localHashTable.put(key, key + "");
            keys.add(key);
            Assert.assertEquals(localHashTable.get(key), "" + key);
        }
        for (int key : keys)
            Assert.assertEquals(localHashTable.get(key), "" + key);
    }

    /** Remove every third key; removed keys read null, the rest are intact. */
    public void testKeyDeleteRandomUniform() {
        HashSet<Integer> keys = new HashSet<Integer>();
        for (int i = 0; i < KEYS_COUNT; i++) {
            localHashTable.put(i, i + "");
            keys.add(i);
        }
        for (int key : keys) {
            if (key % 3 == 0)
                localHashTable.remove(key);
        }
        for (int key : keys) {
            if (key % 3 == 0) {
                Assert.assertNull(localHashTable.get(key));
            } else {
                Assert.assertEquals(localHashTable.get(key), key + "");
            }
        }
    }

    /** Same deletion pattern as above, with gaussian-distributed keys. */
    public void testKeyDeleteRandomGaussian() {
        HashSet<Integer> keys = new HashSet<Integer>();
        MersenneTwisterFast random = new MersenneTwisterFast();
        while (keys.size() < KEYS_COUNT) {
            int key = (int) (random.nextGaussian() * Integer.MAX_VALUE / 2 + Integer.MAX_VALUE);
            if (key < 0)
                continue;
            localHashTable.put(key, key + "");
            keys.add(key);
        }
        for (int key : keys) {
            if (key % 3 == 0)
                localHashTable.remove(key);
        }
        for (int key : keys) {
            if (key % 3 == 0) {
                Assert.assertNull(localHashTable.get(key));
            } else {
                Assert.assertEquals(localHashTable.get(key), key + "");
            }
        }
    }

    /** remove() must return the removed value; deleted keys then read null. */
    public void testKeyDelete() {
        for (int i = 0; i < KEYS_COUNT; i++) {
            localHashTable.put(i, i + "");
        }
        for (int i = 0; i < KEYS_COUNT; i++) {
            if (i % 3 == 0)
                Assert.assertEquals(localHashTable.remove(i), "" + i);
        }
        for (int i = 0; i < KEYS_COUNT; i++) {
            if (i % 3 == 0)
                Assert.assertNull(localHashTable.get(i));
            else
                Assert.assertEquals(localHashTable.get(i), i + "");
        }
    }

    /** Interleaved removes and new puts keep the table consistent. */
    public void testKeyAddDelete() {
        for (int i = 0; i < KEYS_COUNT; i++)
            localHashTable.put(i, i + "");
        for (int i = 0; i < KEYS_COUNT; i++) {
            if (i % 3 == 0)
                Assert.assertEquals(localHashTable.remove(i), i + "");
            if (i % 2 == 0)
                localHashTable.put(KEYS_COUNT + i, (KEYS_COUNT + i) + "");
        }
        for (int i = 0; i < KEYS_COUNT; i++) {
            if (i % 3 == 0)
                Assert.assertNull(localHashTable.get(i));
            else
                Assert.assertEquals(localHashTable.get(i), i + "");
            if (i % 2 == 0)
                Assert.assertEquals(localHashTable.get(KEYS_COUNT + i), "" + (KEYS_COUNT + i));
        }
    }
}
0true
core_src_test_java_com_orientechnologies_orient_core_index_hashindex_local_LocalHashTableTest.java
1,957
/**
 * Client protocol request for IMap.replace(key, value): reuses the put
 * request wire format but prepares a ReplaceOperation instead of a put.
 */
public class MapReplaceRequest extends MapPutRequest {

    public MapReplaceRequest() {
    }

    public MapReplaceRequest(String name, Data key, Data value, long threadId) {
        super(name, key, value, threadId);
    }

    public MapReplaceRequest(String name, Data key, Data value, long threadId, long ttl) {
        super(name, key, value, threadId, ttl);
    }

    public int getClassId() {
        return MapPortableHook.REPLACE;
    }

    protected Operation prepareOperation() {
        ReplaceOperation op = new ReplaceOperation(name, key, value);
        // Preserve the calling thread id so per-thread lock ownership is honored.
        op.setThreadId(threadId);
        return op;
    }
}
0true
hazelcast_src_main_java_com_hazelcast_map_client_MapReplaceRequest.java
1,874
public class ModulesBuilder implements Iterable<Module> { private final List<Module> modules = Lists.newArrayList(); public ModulesBuilder add(Module... modules) { for (Module module : modules) { add(module); } return this; } public ModulesBuilder add(Module module) { modules.add(module); if (module instanceof SpawnModules) { Iterable<? extends Module> spawned = ((SpawnModules) module).spawnModules(); for (Module spawn : spawned) { add(spawn); } } return this; } @Override public Iterator<Module> iterator() { return modules.iterator(); } public Injector createInjector() { Modules.processModules(modules); Injector injector = Guice.createInjector(modules); Injectors.cleanCaches(injector); // in ES, we always create all instances as if they are eager singletons // this allows for considerable memory savings (no need to store construction info) as well as cycles ((InjectorImpl) injector).readOnlyAllSingletons(); return injector; } public Injector createChildInjector(Injector injector) { Modules.processModules(modules); Injector childInjector = injector.createChildInjector(modules); Injectors.cleanCaches(childInjector); // in ES, we always create all instances as if they are eager singletons // this allows for considerable memory savings (no need to store construction info) as well as cycles ((InjectorImpl) childInjector).readOnlyAllSingletons(); return childInjector; } }
0true
src_main_java_org_elasticsearch_common_inject_ModulesBuilder.java
2,052
/**
 * Map operation that fetches the values for the subset of the requested keys
 * owned by the partition this operation runs on.
 */
public class GetAllOperation extends AbstractMapOperation implements PartitionAwareOperation {

    Set<Data> keys = new HashSet<Data>();
    MapEntrySet entrySet;   // result populated by run(), returned via getResponse()

    public GetAllOperation(String name, Set<Data> keys) {
        super(name);
        this.keys = keys;
    }

    public GetAllOperation() {
    }

    public void run() {
        int partitionId = getPartitionId();
        RecordStore recordStore = mapService.getRecordStore(partitionId, name);
        Set<Data> partitionKeySet = new HashSet<Data>();
        // Only serve the keys this partition actually owns; the rest are
        // handled by the operations sent to the other partitions.
        for (Data key : keys) {
            if (partitionId == getNodeEngine().getPartitionService().getPartitionId(key)) {
                partitionKeySet.add(key);
            }
        }
        entrySet = recordStore.getAll(partitionKeySet);
    }

    @Override
    public Object getResponse() {
        return entrySet;
    }

    @Override
    public String toString() {
        return "GetAllOperation{" + '}';
    }

    @Override
    protected void writeInternal(ObjectDataOutput out) throws IOException {
        super.writeInternal(out);
        // -1 marks a null key set; otherwise the size followed by each key.
        if (keys == null) {
            out.writeInt(-1);
        } else {
            out.writeInt(keys.size());
            for (Data key : keys) {
                key.writeData(out);
            }
        }
    }

    @Override
    protected void readInternal(ObjectDataInput in) throws IOException {
        super.readInternal(in);
        int size = in.readInt();
        if (size > -1) {
            for (int i = 0; i < size; i++) {
                Data data = new Data();
                data.readData(in);
                keys.add(data);
            }
        }
    }
}
0true
hazelcast_src_main_java_com_hazelcast_map_operation_GetAllOperation.java
1,019
@Entity @Inheritance(strategy = InheritanceType.JOINED) @Table(name="BLC_ORDER_ATTRIBUTE") @Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blOrderElements") @AdminPresentationMergeOverrides( { @AdminPresentationMergeOverride(name = "", mergeEntries = @AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY, booleanOverrideValue = true)) } ) @AdminPresentationClass(friendlyName = "OrderAttributeImpl_baseProductAttribute") public class OrderAttributeImpl implements OrderAttribute { private static final long serialVersionUID = 1L; @Id @GeneratedValue(generator= "OrderAttributeId") @GenericGenerator( name="OrderAttributeId", strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator", parameters = { @Parameter(name="segment_value", value="OrderAttributeImpl"), @Parameter(name="entity_name", value="org.broadleafcommerce.core.catalog.domain.OrderAttributeImpl") } ) @Column(name = "ORDER_ATTRIBUTE_ID") protected Long id; @Column(name = "NAME", nullable=false) @AdminPresentation(friendlyName = "OrderAttributeImpl_Attribute_Name", order=1000, prominent=true) protected String name; /** The value. 
*/ @Column(name = "VALUE") @AdminPresentation(friendlyName = "OrderAttributeImpl_Attribute_Value", order=2000, prominent=true) protected String value; @ManyToOne(targetEntity = OrderImpl.class, optional=false) @JoinColumn(name = "ORDER_ID") protected Order order; @Override public Long getId() { return id; } @Override public void setId(Long id) { this.id = id; } @Override public String getValue() { return value; } @Override public void setValue(String value) { this.value = value; } @Override public String getName() { return name; } @Override public void setName(String name) { this.name = name; } @Override public String toString() { return value; } @Override public Order getOrder() { return order; } @Override public void setOrder(Order order) { this.order = order; } @Override public int hashCode() { return value.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; if (value == null) { return false; } return value.equals(((OrderAttribute) obj).getValue()); } }
1no label
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderAttributeImpl.java
661
// Registers the factory for the COLLECTION_ADD_BACKUP type id so that
// deserialization can materialize an empty CollectionAddBackupOperation
// to read the wire payload into (the Integer arg is the type id, unused).
constructors[COLLECTION_ADD_BACKUP] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
    public IdentifiedDataSerializable createNew(Integer arg) {
        return new CollectionAddBackupOperation();
    }
};
0true
hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java
370
/**
 * Service responsible for persisting, updating, deleting and resolving
 * {@link Translation} instances for localizable entity fields.
 */
public interface TranslationService {

    /**
     * Persists the given translation
     *
     * @param translation the translation instance to save
     * @return the persisted translation
     */
    public Translation save(Translation translation);

    /**
     * Creates a new translation object for the requested parameters, saves it, and returns the saved instance.
     *
     * <b>Note: This method will overwrite a previously existing translation if it matches on entityType, entityId,
     * fieldName, and localeCode.</b>
     *
     * @param entityType the type of the translated entity (e.g. Product)
     * @param entityId the id of the translated entity
     * @param fieldName the translated field on the entity
     * @param localeCode the locale code (e.g. "en" or "en_GB") of the translation
     * @param translatedValue the translated text
     * @return the persisted translation
     */
    public Translation save(String entityType, String entityId, String fieldName, String localeCode,
            String translatedValue);

    /**
     * Updates the given translation id with the new locale code and translated value
     *
     * @param translationId id of the translation to update
     * @param localeCode new locale code for the translation
     * @param translatedValue new translated text
     * @return the persisted translation
     */
    public Translation update(Long translationId, String localeCode, String translatedValue);

    /**
     * Deletes the given translations
     *
     * @param translationId id of the translation to delete
     */
    public void deleteTranslationById(Long translationId);

    /**
     * Finds all current translations for the specified field
     *
     * @param ceilingEntityClassname fully qualified classname of the translated entity
     * @param entityId id of the translated entity
     * @param property the translated field name
     * @return the list of translations
     */
    public List<Translation> getTranslations(String ceilingEntityClassname, String entityId, String property);

    /**
     * Attempts to find the translation object for the given parameters
     *
     * @param entity the translated entity type
     * @param entityId id of the translated entity
     * @param fieldName the translated field name
     * @param localeCode the locale code of the desired translation
     * @return the persisted translation
     */
    public Translation getTranslation(TranslatedEntity entity, String entityId, String fieldName, String localeCode);

    /**
     * Returns the translated value of the property for the given entity. For example, if entity is an instance of
     * Product and property is equal to name, this method might return "Hoppin' Hot Sauce" if we are in an English
     * locale and "Salsa de la Muerte Saltante" if we are in a Spanish locale.
     *
     * If a country is set on the locale (locale code en_GB for example), we will first look for a translation that matches
     * en_GB, and then look for a translation for en. If a translated value for the given locale is not available,
     * it will return null.
     *
     * @param entity the entity whose property should be translated
     * @param property the property to translate
     * @param locale the target locale
     * @return the translated value of the property for the given entity
     */
    public String getTranslatedValue(Object entity, String property, Locale locale);

    /**
     * @param id the translation id
     * @return the translation with that id, if any
     */
    public Translation findTranslationById(Long id);
}
0true
common_src_main_java_org_broadleafcommerce_common_i18n_service_TranslationService.java
829
/**
 * Registers the IdentifiedDataSerializable factory for all atomic-reference
 * operations. The numeric type ids below are part of the wire protocol and
 * must not be renumbered.
 */
public final class AtomicReferenceDataSerializerHook implements DataSerializerHook {

    public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.ATOMIC_REFERENCE_DS_FACTORY, -21);

    public static final int ALTER_AND_GET = 0;
    public static final int ALTER = 1;
    public static final int APPLY = 2;
    public static final int COMPARE_AND_SET = 3;
    public static final int CONTAINS = 4;
    public static final int GET_AND_ALTER = 5;
    public static final int GET_AND_SET = 6;
    public static final int GET = 7;
    public static final int IS_NULL = 8;
    public static final int SET_AND_GET = 9;
    public static final int SET_BACKUP = 10;
    public static final int SET = 11;
    public static final int REPLICATION = 12;

    @Override
    public int getFactoryId() {
        return F_ID;
    }

    @Override
    public DataSerializableFactory createFactory() {
        return new DataSerializableFactory() {
            @Override
            public IdentifiedDataSerializable create(int typeId) {
                // Map each wire type id to a fresh, empty operation instance
                // ready to be deserialized into; null for unknown ids.
                switch (typeId) {
                    case ALTER_AND_GET:
                        return new AlterAndGetOperation();
                    case ALTER:
                        return new AlterOperation();
                    case APPLY:
                        return new ApplyOperation();
                    case COMPARE_AND_SET:
                        return new CompareAndSetOperation();
                    case CONTAINS:
                        return new ContainsOperation();
                    case GET_AND_ALTER:
                        return new GetAndAlterOperation();
                    case GET_AND_SET:
                        return new GetAndSetOperation();
                    case GET:
                        return new GetOperation();
                    case IS_NULL:
                        return new IsNullOperation();
                    case SET_AND_GET:
                        return new SetAndGetOperation();
                    case SET_BACKUP:
                        return new SetBackupOperation();
                    case SET:
                        return new SetOperation();
                    case REPLICATION:
                        return new AtomicReferenceReplicationOperation();
                    default:
                        return null;
                }
            }
        };
    }
}
0true
hazelcast_src_main_java_com_hazelcast_concurrent_atomicreference_AtomicReferenceDataSerializerHook.java
299
/**
 * Ruler action delegate that creates the Ceylon-specific annotation-selection
 * action for clicks in the editor's vertical ruler.
 */
public class SelectRulerAction extends AbstractRulerActionDelegate {
    @Override
    protected IAction createAction(ITextEditor editor, IVerticalRulerInfo rulerInfo) {
        // "SelectRulerAction." is the resource-bundle key prefix for the action's labels.
        return new CeylonSelectAnnotationRulerAction(
            EditorActionMessages.ResBundle, "SelectRulerAction.", editor, rulerInfo);
    }
}
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_SelectRulerAction.java
1,630
/**
 * @deprecated Kept only for backward compatibility with old handler
 *             subclasses; extend {@link OServerPluginAbstract} directly
 *             instead. Adds no behavior of its own.
 */
@Deprecated
public abstract class OServerHandlerAbstract extends OServerPluginAbstract {
}
0true
server_src_main_java_com_orientechnologies_orient_server_handler_OServerHandlerAbstract.java
1,389
/**
 * Runs the non-composite MVRB-tree test suite against a lazy-save,
 * database-backed tree configured with deliberately tiny page size and entry
 * points so node splits occur with only a handful of entries.
 */
@Test
public class OMVRBTreeDatabaseLazySaveNonCompositeTest extends OMVRBTreeNonCompositeTest {
    private ODatabaseDocumentTx database;
    private int oldPageSize;      // saved global config, restored in afterClass()
    private int oldEntryPoints;   // saved global config, restored in afterClass()

    @BeforeClass
    public void beforeClass() {
        database = new ODatabaseDocumentTx("memory:mvrbtreeindextest").create();
        database.addCluster("indextestclsuter", OStorage.CLUSTER_TYPE.MEMORY);
    }

    @BeforeMethod
    @Override
    public void beforeMethod() throws Exception {
        // Shrink tree parameters so the tests exercise splits with few entries.
        oldPageSize = OGlobalConfiguration.MVRBTREE_NODE_PAGE_SIZE.getValueAsInteger();
        OGlobalConfiguration.MVRBTREE_NODE_PAGE_SIZE.setValue(4);
        oldEntryPoints = OGlobalConfiguration.MVRBTREE_ENTRYPOINTS.getValueAsInteger();
        OGlobalConfiguration.MVRBTREE_ENTRYPOINTS.setValue(1);
        tree = new OMVRBTreeDatabaseLazySave<Double, Double>("indextestclsuter",
            new OSimpleKeySerializer<Double>(OType.DOUBLE), OStreamSerializerLiteral.INSTANCE, 1, 5000);
        for (double i = 1; i < 10; i++) {
            tree.put(i, i);
        }
        // Flush the lazily-buffered changes and compact before each test.
        ((OMVRBTreeDatabaseLazySave<Double, Double>) tree).save();
        ((OMVRBTreeDatabaseLazySave<Double, Double>) tree).optimize(true);
    }

    @AfterClass
    public void afterClass() {
        database.drop();
        OGlobalConfiguration.MVRBTREE_NODE_PAGE_SIZE.setValue(oldPageSize);
        OGlobalConfiguration.MVRBTREE_ENTRYPOINTS.setValue(oldEntryPoints);
    }
}
0true
core_src_test_java_com_orientechnologies_orient_core_type_tree_OMVRBTreeDatabaseLazySaveNonCompositeTest.java
394
/**
 * Map of OIdentifiable values that lazily converts between raw record ids
 * (RIDs) and loaded records. The {@code status} field tracks whether the map
 * currently holds only RIDs, only records, or a mix, so conversions can be
 * skipped when already done.
 */
@SuppressWarnings({ "serial", "unchecked" })
public class ORecordLazyMap extends OTrackedMap<OIdentifiable> implements ORecordLazyMultiValue {
    final private byte recordType;
    // Tracks the homogeneity of current contents (all RIDs / all records / mixed / empty).
    private ORecordMultiValueHelper.MULTIVALUE_CONTENT_TYPE status = MULTIVALUE_CONTENT_TYPE.EMPTY;
    // While true, internal put() calls must not mark the map dirty or fire events.
    protected boolean marshalling = false;
    // When true, get()/values() transparently load RIDs into records.
    private boolean autoConvertToRecord = true;

    public ORecordLazyMap(final ODocument iSourceRecord) {
        super(iSourceRecord);
        this.recordType = ODocument.RECORD_TYPE;
    }

    public ORecordLazyMap(final ODocument iSourceRecord, final byte iRecordType) {
        super(iSourceRecord);
        this.recordType = iRecordType;
        if (iSourceRecord != null) {
            if (!iSourceRecord.isLazyLoad())
                // SET AS NON-LAZY LOAD THE COLLECTION TOO
                autoConvertToRecord = false;
        }
    }

    public ORecordLazyMap(final ODocument iSourceRecord, final Map<Object, OIdentifiable> iOrigin) {
        this(iSourceRecord);
        if (iOrigin != null && !iOrigin.isEmpty())
            putAll(iOrigin);
    }

    @Override
    public boolean containsValue(final Object o) {
        return super.containsValue(o);
    }

    @Override
    public OIdentifiable get(final Object iKey) {
        if (iKey == null)
            return null;
        // Keys are always stored as strings.
        final String key = iKey.toString();
        if (autoConvertToRecord)
            convertLink2Record(key);
        return super.get(key);
    }

    @Override
    public OIdentifiable put(final Object iKey, OIdentifiable iValue) {
        if (status == MULTIVALUE_CONTENT_TYPE.ALL_RIDS && iValue instanceof ORecord<?> && !iValue.getIdentity().isNew())
            // IT'S BETTER TO LEAVE ALL RIDS AND EXTRACT ONLY THIS ONE
            iValue = iValue.getIdentity();
        else
            status = ORecordMultiValueHelper.updateContentType(status, iValue);
        return super.put(iKey, iValue);
    }

    @Override
    public Collection<OIdentifiable> values() {
        // Callers of values() see loaded records, so convert first.
        convertLinks2Records();
        return super.values();
    }

    @Override
    public OIdentifiable remove(Object o) {
        final OIdentifiable result = super.remove(o);
        if (size() == 0)
            status = MULTIVALUE_CONTENT_TYPE.EMPTY;
        return result;
    }

    @Override
    public void clear() {
        super.clear();
        status = MULTIVALUE_CONTENT_TYPE.EMPTY;
    }

    @Override
    public String toString() {
        return ORecordMultiValueHelper.toString(this);
    }

    public boolean isAutoConvertToRecord() {
        return autoConvertToRecord;
    }

    public void setAutoConvertToRecord(boolean convertToRecord) {
        this.autoConvertToRecord = convertToRecord;
    }

    /** Loads every RID entry into its record (no-op when already all records). */
    public void convertLinks2Records() {
        if (status == MULTIVALUE_CONTENT_TYPE.ALL_RECORDS || !autoConvertToRecord)
            // PRECONDITIONS
            return;
        for (Object k : keySet())
            convertLink2Record(k);
        status = MULTIVALUE_CONTENT_TYPE.ALL_RECORDS;
    }

    /**
     * Replaces every loaded record with its RID (saving dirty records first).
     * Returns false when at least one entry could not be converted.
     */
    public boolean convertRecords2Links() {
        if (status == MULTIVALUE_CONTENT_TYPE.ALL_RIDS)
            // PRECONDITIONS
            return true;
        boolean allConverted = true;
        for (Object k : keySet())
            if (!convertRecord2Link(k))
                allConverted = false;
        if (allConverted)
            status = MULTIVALUE_CONTENT_TYPE.ALL_RIDS;
        return allConverted;
    }

    private boolean convertRecord2Link(final Object iKey) {
        if (status == MULTIVALUE_CONTENT_TYPE.ALL_RIDS)
            return true;
        final Object value = super.get(iKey);
        if (value != null)
            if (value instanceof ORecord<?> && !((ORecord<?>) value).getIdentity().isNew()) {
                if (((ORecord<?>) value).isDirty())
                    // Persist pending changes before discarding the record instance.
                    ODatabaseRecordThreadLocal.INSTANCE.get().save((ORecordInternal<?>) value);
                marshalling = true;
                try {
                    // OVERWRITE
                    super.put(iKey, ((ORecord<?>) value).getIdentity());
                } finally {
                    marshalling = false;
                }
                // CONVERTED
                return true;
            } else if (value instanceof ORID)
                // ALREADY CONVERTED
                return true;
        return false;
    }

    /**
     * Convert the item with the received key to a record.
     *
     * @param iKey
     *          Key of the item to convert
     */
    private void convertLink2Record(final Object iKey) {
        if (status == MULTIVALUE_CONTENT_TYPE.ALL_RECORDS)
            return;
        final Object value;
        if (iKey instanceof ORID)
            value = iKey;
        else
            value = super.get(iKey);
        if (value != null && value instanceof ORID) {
            final ORID rid = (ORID) value;
            marshalling = true;
            try {
                try {
                    // OVERWRITE IT
                    super.put(iKey, rid.getRecord());
                } catch (ORecordNotFoundException e) {
                    // IGNORE THIS
                }
            } finally {
                marshalling = false;
            }
        }
    }

    @Override
    public OTrackedMap<OIdentifiable> setDirty() {
        // Internal conversions must not mark the owning document dirty.
        if (!marshalling)
            return super.setDirty();
        return this;
    }

    @Override
    protected void fireCollectionChangedEvent(final OMultiValueChangeEvent<Object, OIdentifiable> event) {
        // Likewise, suppress change events during internal conversions.
        if (!marshalling)
            super.fireCollectionChangedEvent(event);
    }

    public byte getRecordType() {
        return recordType;
    }

    public Iterator<OIdentifiable> rawIterator() {
        return new OLazyRecordIterator(sourceRecord, super.values().iterator(), false);
    }

    public boolean detach() {
        return convertRecords2Links();
    }

    @Override
    public int size() {
        return super.size();
    }
}
0true
core_src_main_java_com_orientechnologies_orient_core_db_record_ORecordLazyMap.java
497
/**
 * Rewrites each element of a list through the rewriter matched to that
 * element. Returns the rewritten list, or {@code null} when no element was
 * changed — null is the "unchanged" sentinel used by the rewriter framework.
 */
private static class ListRewriter implements FieldRewriter<List<?>> {
    @Override
    public List<?> rewriteValue(List<?> listValue) {
        boolean wasRewritten = false;
        List<Object> result = new ArrayList<Object>(listValue.size());
        for (Object listItem : listValue) {
            FieldRewriter<Object> fieldRewriter = RewritersFactory.INSTANCE.findRewriter(null, null, listItem);
            Object rewrittenItem = fieldRewriter.rewriteValue(listItem);
            // A non-null result means "replaced"; null means "keep the original item".
            if (rewrittenItem != null) {
                wasRewritten = true;
                result.add(rewrittenItem);
            } else
                result.add(listItem);
        }
        if (!wasRewritten)
            return null;
        return result;
    }
}
0true
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseImport.java
1,243
// Registers a live counter metric with the profiler; the hook is polled on
// demand and reports how many memory-mapped pages have been reused.
METRIC_TYPE.COUNTER, new OProfilerHookValue() {
    public Object getValue() {
        return metricReusedPages;
    }
});
0true
core_src_main_java_com_orientechnologies_orient_core_storage_fs_OMMapManagerNew.java
914
// Wire-protocol factory for the lock service: maps each operation type id to
// a fresh, empty instance ready to be deserialized into. The ids are part of
// the serialization protocol and must stay stable.
return new DataSerializableFactory() {
    @Override
    public IdentifiedDataSerializable create(int typeId) {
        switch (typeId) {
            case AWAIT_BACKUP:
                return new AwaitBackupOperation();
            case AWAIT:
                return new AwaitOperation();
            case BEFORE_AWAIT_BACKUP:
                return new BeforeAwaitBackupOperation();
            case BEFORE_AWAIT:
                return new BeforeAwaitOperation();
            case GET_LOCK_COUNT:
                return new GetLockCountOperation();
            case GET_REMAINING_LEASETIME:
                return new GetRemainingLeaseTimeOperation();
            case IS_LOCKED:
                return new IsLockedOperation();
            case LOCK:
                return new LockOperation();
            case LOCK_BACKUP:
                return new LockBackupOperation();
            case LOCK_REPLICATION:
                return new LockReplicationOperation();
            case SIGNAL_BACKUP:
                return new SignalBackupOperation();
            case SIGNAL:
                return new SignalOperation();
            case UNLOCK_BACKUP:
                return new UnlockBackupOperation();
            case UNLOCK:
                return new UnlockOperation();
            default:
                // Unknown id: the caller treats null as an unrecognized type.
                return null;
        }
    }
};
0true
hazelcast_src_main_java_com_hazelcast_concurrent_lock_LockDataSerializerHook.java
1,174
/**
 * Benchmark transport variant backed by Netty over a real network stack.
 */
NETTY {
    @Override
    public Transport newTransport(Settings settings, ThreadPool threadPool) {
        // Empty settings: defaults are fine for the benchmark.
        return new NettyTransport(settings, threadPool, new NetworkService(ImmutableSettings.EMPTY), Version.CURRENT);
    }
};
0true
src_test_java_org_elasticsearch_benchmark_transport_TransportBenchmark.java
432
public enum LookupType { STANDARD, DROPDOWN }
0true
common_src_main_java_org_broadleafcommerce_common_presentation_client_LookupType.java
1,256
public static interface NodeListenerCallback<Response> { void doWithNode(DiscoveryNode node, ActionListener<Response> listener) throws ElasticsearchException; }
0true
src_main_java_org_elasticsearch_client_transport_TransportClientNodesService.java
827
public interface OrderAdjustment extends Adjustment { public Order getOrder(); public void init(Order order, Offer offer, String reason); public void setOrder(Order order); }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OrderAdjustment.java
4,105
static class IncludeNestedDocsWeight extends Weight { private final Query parentQuery; private final Weight parentWeight; private final Filter parentsFilter; IncludeNestedDocsWeight(Query parentQuery, Weight parentWeight, Filter parentsFilter) { this.parentQuery = parentQuery; this.parentWeight = parentWeight; this.parentsFilter = parentsFilter; } @Override public Query getQuery() { return parentQuery; } @Override public void normalize(float norm, float topLevelBoost) { parentWeight.normalize(norm, topLevelBoost); } @Override public float getValueForNormalization() throws IOException { return parentWeight.getValueForNormalization(); // this query is never boosted so just delegate... } @Override public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException { final Scorer parentScorer = parentWeight.scorer(context, true, false, acceptDocs); // no matches if (parentScorer == null) { return null; } DocIdSet parents = parentsFilter.getDocIdSet(context, acceptDocs); if (parents == null) { // No matches return null; } if (!(parents instanceof FixedBitSet)) { throw new IllegalStateException("parentFilter must return FixedBitSet; got " + parents); } int firstParentDoc = parentScorer.nextDoc(); if (firstParentDoc == DocIdSetIterator.NO_MORE_DOCS) { // No matches return null; } return new IncludeNestedDocsScorer(this, parentScorer, (FixedBitSet) parents, firstParentDoc); } @Override public Explanation explain(AtomicReaderContext context, int doc) throws IOException { return null; //Query is used internally and not by users, so explain can be empty } @Override public boolean scoresDocsOutOfOrder() { return false; } }
1no label
src_main_java_org_elasticsearch_index_search_nested_IncludeNestedDocsQuery.java
43
public class StatsCommandProcessor extends MemcacheCommandProcessor<StatsCommand> { public StatsCommandProcessor(TextCommandService textCommandService) { super(textCommandService); } public void handle(StatsCommand command) { Stats stats = textCommandService.getStats(); command.setResponse(stats); textCommandService.sendResponse(command); } public void handleRejection(StatsCommand command) { handle(command); } }
0true
hazelcast_src_main_java_com_hazelcast_ascii_memcache_StatsCommandProcessor.java
58
public class AssetNotFoundException extends RuntimeException { private static final long serialVersionUID = -6349160176427682630L; public AssetNotFoundException() { //do nothing } public AssetNotFoundException(Throwable cause) { super(cause); } public AssetNotFoundException(String message) { super(message); } public AssetNotFoundException(String message, Throwable cause) { super(message, cause); } }
0true
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_common_AssetNotFoundException.java
594
public interface OIndexDefinitionMultiValue extends OIndexDefinition { /** * Converts passed in value in the key of single index entry. * * @param param * Value to convert. * @return Index key. */ public Object createSingleValue(final Object... param); /** * Process event that contains operation on collection and extract values that should be added removed from index to reflect * collection changes in the given index. * * @param changeEvent * Event that describes operation that was performed on collection. * @param keysToAdd * Values that should be added to related index. * @param keysToRemove * Values that should be removed to related index. */ public void processChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd, final Map<Object, Integer> keysToRemove); }
0true
core_src_main_java_com_orientechnologies_orient_core_index_OIndexDefinitionMultiValue.java
701
CollectionUtils.filter(returnProducts, new Predicate() { @Override public boolean evaluate(Object arg) { return 'Y'!=((Status)((UpSaleProductImpl) arg).getRelatedProduct()).getArchived(); } });
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_domain_ProductImpl.java
2,220
public enum CombineFunction { MULT { @Override public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { return toFloat(queryBoost * queryScore * Math.min(funcScore, maxBoost)); } @Override public String getName() { return "multiply"; } @Override public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost) * queryExpl.getValue(); ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:"); res.addDetail(queryExpl); ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of"); minExpl.addDetail(funcExpl); minExpl.addDetail(new Explanation(maxBoost, "maxBoost")); res.addDetail(minExpl); res.addDetail(new Explanation(queryBoost, "queryBoost")); return res; } }, REPLACE { @Override public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { return toFloat(queryBoost * Math.min(funcScore, maxBoost)); } @Override public String getName() { return "replace"; } @Override public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost); ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:"); ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of"); minExpl.addDetail(funcExpl); minExpl.addDetail(new Explanation(maxBoost, "maxBoost")); res.addDetail(minExpl); res.addDetail(new Explanation(queryBoost, "queryBoost")); return res; } }, SUM { @Override public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { return toFloat(queryBoost * (queryScore + Math.min(funcScore, maxBoost))); } @Override public String getName() { return "sum"; } @Override public 
ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { float score = queryBoost * (Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()); ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:"); ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of"); minExpl.addDetail(funcExpl); minExpl.addDetail(new Explanation(maxBoost, "maxBoost")); ComplexExplanation sumExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of"); sumExpl.addDetail(queryExpl); sumExpl.addDetail(minExpl); res.addDetail(sumExpl); res.addDetail(new Explanation(queryBoost, "queryBoost")); return res; } }, AVG { @Override public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { return toFloat((queryBoost * (Math.min(funcScore, maxBoost) + queryScore) / 2.0)); } @Override public String getName() { return "avg"; } @Override public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { float score = toFloat(queryBoost * (queryExpl.getValue() + Math.min(funcExpl.getValue(), maxBoost)) / 2.0); ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:"); ComplexExplanation minExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of"); minExpl.addDetail(funcExpl); minExpl.addDetail(new Explanation(maxBoost, "maxBoost")); ComplexExplanation avgExpl = new ComplexExplanation(true, toFloat((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of"); avgExpl.addDetail(queryExpl); avgExpl.addDetail(minExpl); res.addDetail(avgExpl); res.addDetail(new Explanation(queryBoost, "queryBoost")); return res; } }, MIN { @Override public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { return 
toFloat(queryBoost * Math.min(queryScore, Math.min(funcScore, maxBoost))); } @Override public String getName() { return "min"; } @Override public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { float score = toFloat(queryBoost * Math.min(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost))); ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:"); ComplexExplanation innerMinExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of"); innerMinExpl.addDetail(funcExpl); innerMinExpl.addDetail(new Explanation(maxBoost, "maxBoost")); ComplexExplanation outerMinExpl = new ComplexExplanation(true, Math.min(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "min of"); outerMinExpl.addDetail(queryExpl); outerMinExpl.addDetail(innerMinExpl); res.addDetail(outerMinExpl); res.addDetail(new Explanation(queryBoost, "queryBoost")); return res; } }, MAX { @Override public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) { return toFloat(queryBoost * (Math.max(queryScore, Math.min(funcScore, maxBoost)))); } @Override public String getName() { return "max"; } @Override public ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) { float score = toFloat(queryBoost * Math.max(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost))); ComplexExplanation res = new ComplexExplanation(true, score, "function score, product of:"); ComplexExplanation innerMinExpl = new ComplexExplanation(true, Math.min(funcExpl.getValue(), maxBoost), "Math.min of"); innerMinExpl.addDetail(funcExpl); innerMinExpl.addDetail(new Explanation(maxBoost, "maxBoost")); ComplexExplanation outerMaxExpl = new ComplexExplanation(true, Math.max(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "max of"); outerMaxExpl.addDetail(queryExpl); 
outerMaxExpl.addDetail(innerMinExpl); res.addDetail(outerMaxExpl); res.addDetail(new Explanation(queryBoost, "queryBoost")); return res; } }; public abstract float combine(double queryBoost, double queryScore, double funcScore, double maxBoost); public abstract String getName(); public static float toFloat(double input) { assert deviation(input) <= 0.001 : "input " + input + " out of float scope for function score deviation: " + deviation(input); return (float) input; } private static double deviation(double input) { // only with assert! float floatVersion = (float) input; return Double.compare(floatVersion, input) == 0 || input == 0.0d ? 0 : 1.d - (floatVersion) / input; } public abstract ComplexExplanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost); }
0true
src_main_java_org_elasticsearch_common_lucene_search_function_CombineFunction.java
1,029
class ShardSingleOperationRequest extends TransportRequest { private Request request; private int shardId; ShardSingleOperationRequest() { } public ShardSingleOperationRequest(Request request, int shardId) { super(request); this.request = request; this.shardId = shardId; } public Request request() { return request; } public int shardId() { return shardId; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); request = newRequest(); request.readFrom(in); shardId = in.readVInt(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); request.writeTo(out); out.writeVInt(shardId); } }
0true
src_main_java_org_elasticsearch_action_support_single_shard_TransportShardSingleOperationAction.java
329
IPropertyChangeListener workingSetListener= new IPropertyChangeListener() { public void propertyChange(PropertyChangeEvent event) { doWorkingSetChanged(event); } };
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_explorer_PackageExplorerActionGroup.java
275
class PresentationDamageRepairer implements IPresentationDamager, IPresentationRepairer { private volatile List<CommonToken> tokens; private final CeylonEditor editor; private IDocument document; PresentationDamageRepairer(ISourceViewer sourceViewer, CeylonEditor editor) { this.editor = editor; } public IRegion getDamageRegion(ITypedRegion partition, DocumentEvent event, boolean documentPartitioningChanged) { if (tokens==null) { //parse and color the whole document the first time! return partition; } if (editor!=null && editor.isInLinkedMode()) { Position linkedPosition = getLinkedPosition(event.getOffset(), event.getLength()); if (linkedPosition == null) { return partition; } else { return new Region(linkedPosition.getOffset(), linkedPosition.getLength()); } } if (noTextChange(event)) { //it was a change to annotations - don't reparse return new Region(event.getOffset(), event.getLength()); } Region tokenRegion = getContainingTokenRegion(event); if (tokenRegion == null) { //the change is totally within a token, //and doesn't break it, return the //token extent return partition; } else { return tokenRegion; } } private Region getContainingTokenRegion(DocumentEvent event) { int tokenIndex = getTokenIndexAtCharacter(tokens, event.getOffset()-1); if (tokenIndex<0) tokenIndex=-tokenIndex; CommonToken t = tokens.get(tokenIndex); if (isWithinExistingToken(event, t)) { if (isWithinTokenChange(event, t)) { //the edit just changes the text inside //a token, leaving the rest of the //document structure unchanged return new Region(event.getOffset(), event.getText().length()); } } return null; } public boolean isWithinExistingToken(DocumentEvent event, CommonToken t) { int eventStart = event.getOffset(); int eventStop = event.getOffset()+event.getLength(); int tokenStart = t.getStartIndex(); int tokenStop = t.getStopIndex()+1; switch (t.getType()) { case CeylonLexer.MULTI_COMMENT: return tokenStart<=eventStart-2 && tokenStop>=eventStop+2; case CeylonLexer.VERBATIM_STRING: case 
CeylonLexer.AVERBATIM_STRING: return tokenStart<=eventStart-3 && tokenStop>=eventStop+3; case CeylonLexer.CHAR_LITERAL: case CeylonLexer.STRING_LITERAL: case CeylonLexer.ASTRING_LITERAL: case CeylonLexer.STRING_START: case CeylonLexer.STRING_MID: case CeylonLexer.STRING_END: return tokenStart<=event.getOffset()-1 && tokenStop>=eventStop+1; case CeylonLexer.LINE_COMMENT: return tokenStart<=eventStart-2 && tokenStop>=eventStop+1; //account for case where we delete the newline default: return tokenStart<=eventStart && tokenStop>=eventStop; } } public boolean isWithinTokenChange(DocumentEvent event, CommonToken t) { switch (t.getType()) { case CeylonLexer.WS: for (char c: event.getText().toCharArray()) { if (!Character.isWhitespace(c)) { return false; } } break; case CeylonLexer.UIDENTIFIER: case CeylonLexer.LIDENTIFIER: for (char c: event.getText().toCharArray()) { if (!Character.isJavaIdentifierPart(c)) { return false; } } break; case CeylonLexer.STRING_LITERAL: case CeylonLexer.ASTRING_LITERAL: case CeylonLexer.VERBATIM_STRING: case CeylonLexer.AVERBATIM_STRING: case CeylonLexer.STRING_START: case CeylonLexer.STRING_MID: case CeylonLexer.STRING_END: for (char c: event.getText().toCharArray()) { if (c=='"'||c=='`') { return false; } } break; case CeylonLexer.CHAR_LITERAL: for (char c: event.getText().toCharArray()) { if (c=='\'') { return false; } } break; case CeylonLexer.MULTI_COMMENT: for (char c: event.getText().toCharArray()) { if (c=='/'||c=='*') { return false; } } break; case CeylonLexer.LINE_COMMENT: for (char c: event.getText().toCharArray()) { if (c=='\n'||c=='\f'||c=='\r') { return false; } } break; default: return false; } return true; } public void createPresentation(TextPresentation presentation, ITypedRegion damage) { ANTLRStringStream input = new NewlineFixingStringStream(document.get()); CeylonLexer lexer = new CeylonLexer(input); CommonTokenStream tokenStream = new CommonTokenStream(lexer); CeylonParser parser = new CeylonParser(tokenStream); try { 
parser.compilationUnit(); } catch (RecognitionException e) { throw new RuntimeException(e); } //it sounds strange, but it's better to parse //and cache here than in getDamageRegion(), //because these methods get called in strange //orders tokens = tokenStream.getTokens(); highlightTokens(presentation, damage); } private void highlightTokens(TextPresentation presentation, ITypedRegion damage) { //int prevStartOffset= -1; //int prevEndOffset= -1; boolean inMetaLiteral=false; int inInterpolated=0; boolean afterMemberOp = false; //start iterating tokens Iterator<CommonToken> iter = tokens.iterator(); if (iter!=null) { while (iter.hasNext()) { CommonToken token= iter.next(); int tt = token.getType(); if (tt==CeylonLexer.EOF) { break; } switch (tt) { case CeylonParser.BACKTICK: inMetaLiteral = !inMetaLiteral; break; case CeylonParser.STRING_START: inInterpolated++; break; case CeylonParser.STRING_END: inInterpolated--; break; } int startOffset= token.getStartIndex(); int endOffset= token.getStopIndex()+1; if (endOffset<damage.getOffset()) continue; if (startOffset>damage.getOffset()+damage.getLength()) break; switch (tt) { case CeylonParser.STRING_MID: endOffset-=2; startOffset+=2; break; case CeylonParser.STRING_START: endOffset-=2; break; case CeylonParser.STRING_END: startOffset+=2; break; } /*if (startOffset <= prevEndOffset && endOffset >= prevStartOffset) { //this case occurs when applying a //quick fix, and causes an error //from SWT if we let it through continue; }*/ if (tt==CeylonParser.STRING_MID || tt==CeylonParser.STRING_END) { changeTokenPresentation(presentation, getInterpolationColoring(), startOffset-2,startOffset-1, inInterpolated>1 ? SWT.ITALIC : SWT.NORMAL); } changeTokenPresentation(presentation, afterMemberOp && tt==CeylonLexer.LIDENTIFIER ? 
getMemberColoring() : getColoring(token), startOffset, endOffset, inMetaLiteral || inInterpolated>1 || inInterpolated>0 && tt!=CeylonParser.STRING_START && tt!=CeylonParser.STRING_MID && tt!=CeylonParser.STRING_END? SWT.ITALIC : SWT.NORMAL); if (tt==CeylonParser.STRING_MID || tt==CeylonParser.STRING_START) { changeTokenPresentation(presentation, getInterpolationColoring(), endOffset+1,endOffset+2, inInterpolated>1 ? SWT.ITALIC : SWT.NORMAL); } //prevStartOffset= startOffset; //prevEndOffset= endOffset; afterMemberOp = tt==CeylonLexer.MEMBER_OP || tt==CeylonLexer.SAFE_MEMBER_OP|| tt==CeylonLexer.SPREAD_OP; } } } private void changeTokenPresentation(TextPresentation presentation, TextAttribute attribute, int startOffset, int endOffset, int extraStyle) { Color foreground = attribute==null ? null : attribute.getForeground(); Color background = attribute==null ? null : attribute.getBackground(); int fontStyle = attribute==null ? extraStyle : attribute.getStyle()|extraStyle; StyleRange styleRange = new StyleRange(startOffset, endOffset-startOffset, foreground, background, fontStyle); presentation.addStyleRange(styleRange); } private Position getLinkedPosition(int offset, int length) { LinkedModeModel linkedMode = editor.getLinkedMode(); if (linkedMode.anyPositionContains(offset) || linkedMode.anyPositionContains(offset+length)) { try { for (Position p: document.getPositions(linkedMode.toString())) { if (!p.isDeleted()) { if (p.includes(offset) && p.includes(offset+length)) { return p; } } } } catch (BadPositionCategoryException e) { e.printStackTrace(); } } return null; } private boolean noTextChange(DocumentEvent event) { try { return document.get(event.getOffset(), event.getLength()) .equals(event.getText()); } catch (BadLocationException e) { return false; } } public void setDocument(IDocument document) { this.document = document; } }
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_PresentationDamageRepairer.java
1,338
public abstract class IndicesClusterStateUpdateRequest<T extends IndicesClusterStateUpdateRequest<T>> extends ClusterStateUpdateRequest<T> { private String[] indices; /** * Returns the indices the operation needs to be executed on */ public String[] indices() { return indices; } /** * Sets the indices the operation needs to be executed on */ @SuppressWarnings("unchecked") public T indices(String[] indices) { this.indices = indices; return (T)this; } }
0true
src_main_java_org_elasticsearch_cluster_ack_IndicesClusterStateUpdateRequest.java
744
public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<ExplainRequest, ExplainResponse, ExplainRequestBuilder> { private QuerySourceBuilder sourceBuilder; ExplainRequestBuilder(Client client) { super((InternalClient) client, new ExplainRequest()); } public ExplainRequestBuilder(Client client, String index, String type, String id) { super((InternalClient) client, new ExplainRequest().index(index).type(type).id(id)); } /** * Sets the type to get a score explanation for. */ public ExplainRequestBuilder setType(String type) { request().type(type); return this; } /** * Sets the id to get a score explanation for. */ public ExplainRequestBuilder setId(String id) { request().id(id); return this; } /** * Sets the routing for sharding. */ public ExplainRequestBuilder setRouting(String routing) { request().routing(routing); return this; } /** * Simple sets the routing. Since the parent is only used to get to the right shard. */ public ExplainRequestBuilder setParent(String parent) { request().parent(parent); return this; } /** * Sets the shard preference. */ public ExplainRequestBuilder setPreference(String preference) { request().preference(preference); return this; } /** * Sets the query to get a score explanation for. */ public ExplainRequestBuilder setQuery(QueryBuilder query) { sourceBuilder().setQuery(query); return this; } /** * Sets the query to get a score explanation for. */ public ExplainRequestBuilder setQuery(BytesReference query) { sourceBuilder().setQuery(query); return this; } /** * Explicitly specify the fields that will be returned for the explained document. By default, nothing is returned. */ public ExplainRequestBuilder setFields(String... 
fields) { request.fields(fields); return this; } /** * Indicates whether the response should contain the stored _source * * * @param fetch * @return */ public ExplainRequestBuilder setFetchSource(boolean fetch) { FetchSourceContext context = request.fetchSourceContext(); if (context == null) { request.fetchSourceContext(new FetchSourceContext(fetch)); } else { context.fetchSource(fetch); } return this; } /** * Indicate that _source should be returned, with an "include" and/or "exclude" set which can include simple wildcard * elements. * * @param include An optional include (optionally wildcarded) pattern to filter the returned _source * @param exclude An optional exclude (optionally wildcarded) pattern to filter the returned _source */ public ExplainRequestBuilder setFetchSource(@Nullable String include, @Nullable String exclude) { return setFetchSource( include == null? Strings.EMPTY_ARRAY : new String[] {include}, exclude == null? Strings.EMPTY_ARRAY : new String[] {exclude}); } /** * Indicate that _source should be returned, with an "include" and/or "exclude" set which can include simple wildcard * elements. * * @param includes An optional list of include (optionally wildcarded) pattern to filter the returned _source * @param excludes An optional list of exclude (optionally wildcarded) pattern to filter the returned _source */ public ExplainRequestBuilder setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) { FetchSourceContext context = request.fetchSourceContext(); if (context == null) { request.fetchSourceContext(new FetchSourceContext(includes, excludes)); } else { context.fetchSource(true); context.includes(includes); context.excludes(excludes); } return this; } /** * Sets the full source of the explain request (for example, wrapping an actual query). 
*/ public ExplainRequestBuilder setSource(BytesReference source, boolean unsafe) { request().source(source, unsafe); return this; } /** * Sets whether the actual explain action should occur in a different thread if executed locally. */ public ExplainRequestBuilder operationThreaded(boolean threadedOperation) { request().operationThreaded(threadedOperation); return this; } protected void doExecute(ActionListener<ExplainResponse> listener) { if (sourceBuilder != null) { request.source(sourceBuilder); } ((Client) client).explain(request, listener); } private QuerySourceBuilder sourceBuilder() { if (sourceBuilder == null) { sourceBuilder = new QuerySourceBuilder(); } return sourceBuilder; } }
0true
src_main_java_org_elasticsearch_action_explain_ExplainRequestBuilder.java
1,599
public abstract class SystemLog { enum Type { NODE, JOIN, CONNECTION, PARTITION, CALL, NONE } protected long date = Clock.currentTimeMillis(); protected Type type = Type.NONE; public long getDate() { return date; } public Type getType() { return type; } public void setType(Type type) { this.type = type; } }
0true
hazelcast_src_main_java_com_hazelcast_logging_SystemLog.java
1,202
doubleObjectMap = build(type, limit, smartSize, availableProcessors, new Recycler.C<DoubleObjectOpenHashMap>() { @Override public DoubleObjectOpenHashMap newInstance(int sizing) { return new DoubleObjectOpenHashMap(size(sizing)); } @Override public void clear(DoubleObjectOpenHashMap value) { value.clear(); } });
0true
src_main_java_org_elasticsearch_cache_recycler_CacheRecycler.java
141
@Test public class DecimalSerializerTest { private final static int FIELD_SIZE = 9; private static final BigDecimal OBJECT = new BigDecimal(new BigInteger("20"), 2); private ODecimalSerializer decimalSerializer; private static final byte[] stream = new byte[FIELD_SIZE]; @BeforeClass public void beforeClass() { decimalSerializer = new ODecimalSerializer(); } public void testFieldSize() { Assert.assertEquals(decimalSerializer.getObjectSize(OBJECT), FIELD_SIZE); } public void testSerialize() { decimalSerializer.serialize(OBJECT, stream, 0); Assert.assertEquals(decimalSerializer.deserialize(stream, 0), OBJECT); } public void testSerializeNative() { decimalSerializer.serializeNative(OBJECT, stream, 0); Assert.assertEquals(decimalSerializer.deserializeNative(stream, 0), OBJECT); } public void testNativeDirectMemoryCompatibility() { decimalSerializer.serializeNative(OBJECT, stream, 0); ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream); try { Assert.assertEquals(decimalSerializer.deserializeFromDirectMemory(pointer, 0), OBJECT); } finally { pointer.free(); } } }
0true
commons_src_test_java_com_orientechnologies_common_serialization_types_DecimalSerializerTest.java
53
public class HttpDeleteCommandProcessor extends HttpCommandProcessor<HttpDeleteCommand> { public HttpDeleteCommandProcessor(TextCommandService textCommandService) { super(textCommandService); } public void handle(HttpDeleteCommand command) { String uri = command.getURI(); if (uri.startsWith(URI_MAPS)) { int indexEnd = uri.indexOf('/', URI_MAPS.length()); if (indexEnd == -1) { String mapName = uri.substring(URI_MAPS.length(), uri.length()); textCommandService.deleteAll(mapName); command.send200(); } else { String mapName = uri.substring(URI_MAPS.length(), indexEnd); String key = uri.substring(indexEnd + 1); textCommandService.delete(mapName, key); command.send200(); } } else if (uri.startsWith(URI_QUEUES)) { // Poll an item from the default queue in 3 seconds // http://127.0.0.1:5701/hazelcast/rest/queues/default/3 int indexEnd = uri.indexOf('/', URI_QUEUES.length()); String queueName = uri.substring(URI_QUEUES.length(), indexEnd); String secondStr = (uri.length() > (indexEnd + 1)) ? uri.substring(indexEnd + 1) : null; int seconds = (secondStr == null) ? 0 : Integer.parseInt(secondStr); Object value = textCommandService.poll(queueName, seconds); if (value == null) { command.send204(); } else { if (value instanceof byte[]) { command.setResponse(null, (byte[]) value); } else if (value instanceof RestValue) { RestValue restValue = (RestValue) value; command.setResponse(restValue.getContentType(), restValue.getValue()); } else if (value instanceof String) { command.setResponse(HttpCommand.CONTENT_TYPE_PLAIN_TEXT, stringToBytes((String) value)); } else { command.setResponse(null, textCommandService.toByteArray(value)); } } } else { command.send400(); } textCommandService.sendResponse(command); } public void handleRejection(HttpDeleteCommand command) { handle(command); } }
0true
hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpDeleteCommandProcessor.java
3,017
public class ResidentQueryParserCache extends AbstractIndexComponent implements QueryParserCache { private final Cache<QueryParserSettings, Query> cache; private volatile int maxSize; private volatile TimeValue expire; @Inject public ResidentQueryParserCache(Index index, @IndexSettings Settings indexSettings) { super(index, indexSettings); this.maxSize = componentSettings.getAsInt("max_size", 100); this.expire = componentSettings.getAsTime("expire", null); logger.debug("using [resident] query cache with max_size [{}], expire [{}]", maxSize, expire); CacheBuilder cacheBuilder = CacheBuilder.newBuilder().maximumSize(maxSize); if (expire != null) { cacheBuilder.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS); } this.cache = cacheBuilder.build(); } @Override public Query get(QueryParserSettings queryString) { return cache.getIfPresent(queryString); } @Override public void put(QueryParserSettings queryString, Query query) { if (queryString.isCacheable()) { cache.put(queryString, query); } } @Override public void clear() { cache.invalidateAll(); } @Override public void close() throws ElasticsearchException { cache.invalidateAll(); } }
1no label
src_main_java_org_elasticsearch_index_cache_query_parser_resident_ResidentQueryParserCache.java
280
public interface ActionListener<Response> { /** * A response handler. */ void onResponse(Response response); /** * A failure handler. */ void onFailure(Throwable e); }
0true
src_main_java_org_elasticsearch_action_ActionListener.java
2,406
public static class Factory { public static final Factory DEFAULT = buildDefault(); private static Factory buildDefault() { // Some numbers: // 10k =0.001: 140.4kb , 10 Hashes // 10k =0.01 : 93.6kb , 6 Hashes // 100k=0.01 : 936.0kb , 6 Hashes // 100k=0.03 : 712.7kb , 5 Hashes // 500k=0.01 : 4.5mb , 6 Hashes // 500k=0.03 : 3.4mb , 5 Hashes // 500k=0.05 : 2.9mb , 4 Hashes // 1m=0.01 : 9.1mb , 6 Hashes // 1m=0.03 : 6.9mb , 5 Hashes // 1m=0.05 : 5.9mb , 4 Hashes // 5m=0.01 : 45.7mb , 6 Hashes // 5m=0.03 : 34.8mb , 5 Hashes // 5m=0.05 : 29.7mb , 4 Hashes // 50m=0.01 : 457.0mb , 6 Hashes // 50m=0.03 : 297.3mb , 4 Hashes // 50m=0.10 : 228.5mb , 3 Hashes return buildFromString("10k=0.01,1m=0.03"); } /** * Supports just passing fpp, as in "0.01", and also ranges, like "50k=0.01,1m=0.05". If * its null, returns {@link #buildDefault()}. */ public static Factory buildFromString(@Nullable String config) { if (config == null) { return buildDefault(); } String[] sEntries = Strings.splitStringToArray(config, ','); if (sEntries.length == 0) { if (config.length() > 0) { return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))}); } return buildDefault(); } Entry[] entries = new Entry[sEntries.length]; for (int i = 0; i < sEntries.length; i++) { int index = sEntries[i].indexOf('='); entries[i] = new Entry( (int) SizeValue.parseSizeValue(sEntries[i].substring(0, index).trim()).singles(), Double.parseDouble(sEntries[i].substring(index + 1).trim()) ); } return new Factory(entries); } private final Entry[] entries; public Factory(Entry[] entries) { this.entries = entries; // the order is from the upper most expected insertions to the lowest Arrays.sort(this.entries, new Comparator<Entry>() { @Override public int compare(Entry o1, Entry o2) { return o2.expectedInsertions - o1.expectedInsertions; } }); } public BloomFilter createFilter(int expectedInsertions) { for (Entry entry : entries) { if (expectedInsertions > entry.expectedInsertions) { return 
BloomFilter.create(expectedInsertions, entry.fpp); } } return BloomFilter.create(expectedInsertions, 0.03); } public static class Entry { public final int expectedInsertions; public final double fpp; Entry(int expectedInsertions, double fpp) { this.expectedInsertions = expectedInsertions; this.fpp = fpp; } } }
0true
src_main_java_org_elasticsearch_common_util_BloomFilter.java
429
// Register a change listener that flips the shared flag whenever the tracked
// map reports a mutation, so the test can assert the event was fired.
trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() {
    public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
        changed.value = true;
    }
});
0true
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedMapTest.java
310
/**
 * Identifies where a merge resource is loaded from: the local file system or
 * the application classpath.
 */
public enum ResourceType {
    FILESYSTEM,
    CLASSPATH
}
0true
common_src_main_java_org_broadleafcommerce_common_extensibility_context_MergeFileSystemAndClassPathXMLApplicationContext.java
16
/**
 * Prune strategy that keeps transaction logs whose first start record is
 * newer than a fixed time span (timeToKeep expressed in the given unit).
 */
public static class TransactionTimeSpanPruneStrategy extends AbstractPruneStrategy
{
    private final int timeToKeep;
    private final TimeUnit unit;

    public TransactionTimeSpanPruneStrategy( FileSystemAbstraction fileSystem, int timeToKeep, TimeUnit unit )
    {
        super( fileSystem );
        this.timeToKeep = timeToKeep;
        this.unit = unit;
    }

    @Override
    protected Threshold newThreshold()
    {
        return new Threshold()
        {
            // Cut-off computed once per threshold instance: logs older than
            // this wall-clock millisecond value are eligible for pruning.
            private long lowerLimit = System.currentTimeMillis() - unit.toMillis( timeToKeep );

            @Override
            public boolean reached( File file, long version, LogLoader source )
            {
                try
                {
                    // A log version has "reached" the threshold when its first
                    // start record predates the cut-off.
                    return source.getFirstStartRecordTimestamp( version ) < lowerLimit;
                }
                catch ( IOException e )
                {
                    // Timestamp lookup failure is unrecoverable here; surface it unchecked.
                    throw new RuntimeException( e );
                }
            }
        };
    }
}
1no label
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogPruneStrategies.java
1,301
/**
 * Bootstrap entry point that starts a single standalone Hazelcast server node.
 */
public final class StartServer {

    // Utility class: not instantiable.
    private StartServer() {
    }

    /**
     * Creates a server instance of Hazelcast
     *
     * @param args none
     */
    public static void main(String[] args) {
        // A null config makes Hazelcast resolve its configuration through the
        // default lookup chain rather than an explicit Config object.
        Hazelcast.newHazelcastInstance(null);
    }
}
0true
hazelcast_src_main_java_com_hazelcast_examples_StartServer.java
1,036
/**
 * SQL command executor for "DROP CLASS &lt;class&gt;": drops the class's own indexes,
 * removes the class from the schema, deletes its default cluster when no longer
 * referenced, and rebuilds super-class indexes that touched the dropped clusters.
 */
@SuppressWarnings("unchecked")
public class OCommandExecutorSQLDropClass extends OCommandExecutorSQLAbstract implements OCommandDistributedReplicateRequest {
  public static final String KEYWORD_DROP  = "DROP";
  public static final String KEYWORD_CLASS = "CLASS";

  // Target class name; set by parse() and consumed by execute().
  private String className;

  /**
   * Parses "DROP CLASS &lt;class&gt;", validating both keywords and capturing the
   * class name.
   */
  public OCommandExecutorSQLDropClass parse(final OCommandRequest iRequest) {
    init((OCommandRequestText) iRequest);

    final StringBuilder word = new StringBuilder();

    int oldPos = 0;
    int pos = nextWord(parserText, parserTextUpperCase, oldPos, word, true);
    if (pos == -1 || !word.toString().equals(KEYWORD_DROP))
      throw new OCommandSQLParsingException("Keyword " + KEYWORD_DROP + " not found. Use " + getSyntax(), parserText, oldPos);

    pos = nextWord(parserText, parserTextUpperCase, pos, word, true);
    if (pos == -1 || !word.toString().equals(KEYWORD_CLASS))
      throw new OCommandSQLParsingException("Keyword " + KEYWORD_CLASS + " not found. Use " + getSyntax(), parserText, oldPos);

    pos = nextWord(parserText, parserTextUpperCase, pos, word, false);
    if (pos == -1)
      throw new OCommandSQLParsingException("Expected <class>. Use " + getSyntax(), parserText, pos);

    className = word.toString();
    return this;
  }

  /**
   * Execute the DROP CLASS.
   */
  public Object execute(final Map<Object, Object> iArgs) {
    if (className == null)
      throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");

    final ODatabaseRecord database = getDatabase();
    final OClass oClass = database.getMetadata().getSchema().getClass(className);
    // Unknown class: nothing to drop.
    if (oClass == null)
      return null;

    // Drop all indexes defined directly on the class before removing it.
    for (final OIndex<?> oIndex : oClass.getClassIndexes()) {
      database.getMetadata().getIndexManager().dropIndex(oIndex.getName());
    }

    final OClass superClass = oClass.getSuperClass();
    final int[] clustersToIndex = oClass.getPolymorphicClusterIds();

    // Snapshot the cluster names now; they are still needed after the class is gone.
    final String[] clusterNames = new String[clustersToIndex.length];
    for (int i = 0; i < clustersToIndex.length; i++) {
      clusterNames[i] = database.getClusterNameById(clustersToIndex[i]);
    }

    final int clusterId = oClass.getDefaultClusterId();

    // Remove the class from the schema, persist it, and reload the schema state.
    ((OSchemaProxy) database.getMetadata().getSchema()).dropClassInternal(className);
    ((OSchemaProxy) database.getMetadata().getSchema()).saveInternal();
    database.getMetadata().getSchema().reload();

    deleteDefaultCluster(clusterId);

    if (superClass == null)
      return true;

    // Super-class indexes may still reference the dropped clusters: detach those
    // clusters and rebuild each affected index.
    for (final OIndex<?> oIndex : superClass.getIndexes()) {
      for (final String clusterName : clusterNames)
        oIndex.getInternal().removeCluster(clusterName);

      OLogManager.instance()
          .info(this, "Index %s is used in super class of %s and should be rebuilt.", oIndex.getName(), className);
      oIndex.rebuild();
    }

    return true;
  }

  // Drops the class's default cluster, but only when its name matches the class
  // and no other class still uses it.
  protected void deleteDefaultCluster(int clusterId) {
    final ODatabaseRecord database = getDatabase();
    OCluster cluster = database.getStorage().getClusterById(clusterId);
    if (cluster.getName().equalsIgnoreCase(className)) {
      if (isClusterDeletable(clusterId)) {
        database.getStorage().dropCluster(clusterId, true);
      }
    }
  }

  // A cluster is deletable only when no remaining class references its id.
  protected boolean isClusterDeletable(int clusterId) {
    final ODatabaseRecord database = getDatabase();
    for (OClass iClass : database.getMetadata().getSchema().getClasses()) {
      for (int i : iClass.getClusterIds()) {
        if (i == clusterId)
          return false;
      }
    }
    return true;
  }

  @Override
  public String getSyntax() {
    return "DROP CLASS <class>";
  }
}
1no label
core_src_main_java_com_orientechnologies_orient_core_sql_OCommandExecutorSQLDropClass.java
584
public class RefreshRequest extends BroadcastOperationRequest<RefreshRequest> { private boolean force = true; RefreshRequest() { } public RefreshRequest(String... indices) { super(indices); } public boolean force() { return force; } /** * Forces calling refresh, overriding the check that dirty operations even happened. Defaults * to true (note, still lightweight if no refresh is needed). */ public RefreshRequest force(boolean force) { this.force = force; return this; } public void readFrom(StreamInput in) throws IOException { super.readFrom(in); force = in.readBoolean(); } public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(force); } }
0true
src_main_java_org_elasticsearch_action_admin_indices_refresh_RefreshRequest.java
54
/**
 * ForkJoin task that applies {@code action} to every key in the table.
 * The index range is split recursively: each iteration forks a subtask over
 * the upper half and halves the batch budget, then this task scans its own
 * remaining range sequentially.
 */
@SuppressWarnings("serial")
static final class ForEachKeyTask<K,V>
    extends BulkTask<K,V,Void> {
    // User-supplied action, invoked once per key.
    final Action<? super K> action;
    ForEachKeyTask
        (BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
         Action<? super K> action) {
        super(p, b, i, f, t);
        this.action = action;
    }
    public final void compute() {
        final Action<? super K> action;
        if ((action = this.action) != null) {
            // h is the midpoint of [i, baseLimit): fork the upper half while
            // the batch budget (halved on each fork) allows further splitting.
            for (int i = baseIndex, f, h; batch > 0 &&
                     (h = ((f = baseLimit) + i) >>> 1) > i;) {
                addToPendingCount(1);
                new ForEachKeyTask<K,V>
                    (this, batch >>>= 1, baseLimit = h, f, tab,
                     action).fork();
            }
            // Process the nodes left in this task's (possibly shrunken) range.
            for (Node<K,V> p; (p = advance()) != null;)
                action.apply(p.key);
            // Signal completion up the task tree once local work is done.
            propagateCompletion();
        }
    }
}
0true
src_main_java_jsr166e_ConcurrentHashMapV8.java
2,870
/**
 * Token filter factory producing Hunspell stemming filters for a configured
 * locale. Requires explicit analysis settings (locale/language/lang).
 */
@AnalysisSettingsRequired
public class HunspellTokenFilterFactory extends AbstractTokenFilterFactory {

    private final HunspellDictionary dictionary;
    private final boolean dedup;
    private final int recursionLevel;

    @Inject
    public HunspellTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings, HunspellService hunspellService) {
        super(index, indexSettings, name, settings);

        // Accept any of the three setting keys, in priority order.
        String locale = settings.get("locale", settings.get("language", settings.get("lang", null)));
        if (locale == null) {
            throw new ElasticsearchIllegalArgumentException("missing [locale | language | lang] configuration for hunspell token filter");
        }

        dictionary = hunspellService.getDictionary(locale);
        if (dictionary == null) {
            throw new ElasticsearchIllegalArgumentException(String.format(Locale.ROOT, "Unknown hunspell dictionary for locale [%s]", locale));
        }

        dedup = settings.getAsBoolean("dedup", true);

        recursionLevel = settings.getAsInt("recursion_level", 2);
        if (recursionLevel < 0) {
            throw new ElasticsearchIllegalArgumentException(String.format(Locale.ROOT, "Negative recursion level not allowed for hunspell [%d]", recursionLevel));
        }
    }

    @Override
    public TokenStream create(TokenStream tokenStream) {
        return new HunspellStemFilter(tokenStream, dictionary, dedup, recursionLevel);
    }

    // Whether duplicate stems are removed from the filter output.
    public boolean dedup() {
        return dedup;
    }

    // Maximum affix-stripping recursion depth passed to the stem filter.
    public int recursionLevel() {
        return recursionLevel;
    }
}
0true
src_main_java_org_elasticsearch_index_analysis_HunspellTokenFilterFactory.java
426
// Register a change listener that flips the shared flag whenever the tracked
// list reports a mutation, so the test can assert the event was fired.
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
    public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
        changed.value = true;
    }
});
0true
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedListTest.java
198
/**
 * Immutable location of a transaction inside the logical log: the log version
 * it lives in plus its byte offset, together with master/identifier/checksum
 * metadata recorded at write time.
 */
public static class TxPosition {
    final long version;
    final int masterId;
    final int identifier;
    final long position;
    final long checksum;

    public TxPosition( long version, int masterId, int identifier, long position, long checksum ) {
        this.version = version;
        this.masterId = masterId;
        this.identifier = identifier;
        this.position = position;
        this.checksum = checksum;
    }

    /**
     * True when this position precedes {@code other}: ordered primarily by log
     * version, then by byte offset within the same log.
     */
    public boolean earlierThan( TxPosition other ) {
        return version != other.version ? version < other.version
                                        : position < other.position;
    }

    @Override
    public String toString() {
        return "TxPosition[version:" + version + ", pos:" + position + "]";
    }
}
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogExtractor.java
1,157
/**
 * A priced line item attached to a {@link PaymentInfo}: description, unit
 * price, quantity, and an external system identifier.
 */
public interface AmountItem extends Serializable {

    /** Primary key. */
    Long getId();

    void setId(Long id);

    /** Short, display-oriented description of the item. */
    String getShortDescription();

    void setShortDescription(String shortDescription);

    /** Full description of the item. */
    String getDescription();

    void setDescription(String description);

    /** Price per unit. */
    BigDecimal getUnitPrice();

    void setUnitPrice(BigDecimal unitPrice);

    /** Number of units. */
    Long getQuantity();

    void setQuantity(Long quantity);

    /** Owning payment info. */
    PaymentInfo getPaymentInfo();

    void setPaymentInfo(PaymentInfo paymentInfo);

    /** Identifier of this item in an external system. */
    String getSystemId();

    void setSystemId(String systemId);
}
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_domain_AmountItem.java
1,871
// Run a transactional task on member h1: replace() must succeed only when the
// current value matches, and uncommitted changes must be visible through the
// transactional view (txMap) but not through the plain map on member 2 (map2).
boolean b = h1.executeTransaction(options, new TransactionalTask<Boolean>() {
    public Boolean execute(TransactionalTaskContext context) throws TransactionException {
        final TransactionalMap<Object, Object> txMap = context.getMap("default");
        assertEquals(true, txMap.replace("1", "1", "11"));
        assertEquals(false, txMap.replace("5", "5", "55"));
        assertEquals(false, txMap.replace("2", "1", "22"));
        assertEquals("1", map2.get("1"));
        assertEquals("11", txMap.get("1"));
        assertEquals("2", map2.get("2"));
        assertEquals("2", txMap.get("2"));
        return true;
    }
});
0true
hazelcast_src_test_java_com_hazelcast_map_MapTransactionTest.java
5,843
public class InternalSearchHit implements SearchHit { private static final Object[] EMPTY_SORT_VALUES = new Object[0]; private static final Text MAX_TERM_AS_TEXT = new StringAndBytesText(BytesRefFieldComparatorSource.MAX_TERM.utf8ToString()); private transient int docId; private float score = Float.NEGATIVE_INFINITY; private Text id; private Text type; private long version = -1; private BytesReference source; private Map<String, SearchHitField> fields = ImmutableMap.of(); private Map<String, HighlightField> highlightFields = null; private Object[] sortValues = EMPTY_SORT_VALUES; private String[] matchedQueries = Strings.EMPTY_ARRAY; private Explanation explanation; @Nullable private SearchShardTarget shard; private Map<String, Object> sourceAsMap; private byte[] sourceAsBytes; private InternalSearchHit() { } public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) { this.docId = docId; this.id = new StringAndBytesText(id); this.type = type; this.fields = fields; } public int docId() { return this.docId; } public void shardTarget(SearchShardTarget shardTarget) { this.shard = shardTarget; } public void score(float score) { this.score = score; } @Override public float score() { return this.score; } @Override public float getScore() { return score(); } public void version(long version) { this.version = version; } @Override public long version() { return this.version; } @Override public long getVersion() { return this.version; } @Override public String index() { return shard.index(); } @Override public String getIndex() { return index(); } @Override public String id() { return id.string(); } @Override public String getId() { return id(); } @Override public String type() { return type.string(); } @Override public String getType() { return type(); } /** * Returns bytes reference, also un compress the source if needed. 
*/ public BytesReference sourceRef() { try { this.source = CompressorFactory.uncompressIfNeeded(this.source); return this.source; } catch (IOException e) { throw new ElasticsearchParseException("failed to decompress source", e); } } /** * Sets representation, might be compressed.... */ public InternalSearchHit sourceRef(BytesReference source) { this.source = source; this.sourceAsBytes = null; this.sourceAsMap = null; return this; } @Override public BytesReference getSourceRef() { return sourceRef(); } /** * Internal source representation, might be compressed.... */ public BytesReference internalSourceRef() { return source; } @Override public byte[] source() { if (source == null) { return null; } if (sourceAsBytes != null) { return sourceAsBytes; } this.sourceAsBytes = sourceRef().toBytes(); return this.sourceAsBytes; } @Override public boolean isSourceEmpty() { return source == null; } @Override public Map<String, Object> getSource() { return sourceAsMap(); } @Override public String sourceAsString() { if (source == null) { return null; } try { return XContentHelper.convertToJson(sourceRef(), false); } catch (IOException e) { throw new ElasticsearchParseException("failed to convert source to a json string"); } } @Override public String getSourceAsString() { return sourceAsString(); } @SuppressWarnings({"unchecked"}) @Override public Map<String, Object> sourceAsMap() throws ElasticsearchParseException { if (source == null) { return null; } if (sourceAsMap != null) { return sourceAsMap; } sourceAsMap = SourceLookup.sourceAsMap(source); return sourceAsMap; } @Override public Iterator<SearchHitField> iterator() { return fields.values().iterator(); } @Override public SearchHitField field(String fieldName) { return fields().get(fieldName); } @Override public Map<String, SearchHitField> fields() { if (fields == null) { return ImmutableMap.of(); } return fields; } // returns the fields without handling null cases public Map<String, SearchHitField> fieldsOrNull() { return 
this.fields; } @Override public Map<String, SearchHitField> getFields() { return fields(); } public void fields(Map<String, SearchHitField> fields) { this.fields = fields; } public Map<String, HighlightField> internalHighlightFields() { return highlightFields; } @Override public Map<String, HighlightField> highlightFields() { if (highlightFields == null) { return ImmutableMap.of(); } return this.highlightFields; } @Override public Map<String, HighlightField> getHighlightFields() { return highlightFields(); } public void highlightFields(Map<String, HighlightField> highlightFields) { this.highlightFields = highlightFields; } public void sortValues(Object[] sortValues) { // LUCENE 4 UPGRADE: There must be a better way // we want to convert to a Text object here, and not BytesRef if (sortValues != null) { for (int i = 0; i < sortValues.length; i++) { if (sortValues[i] instanceof BytesRef) { sortValues[i] = new StringAndBytesText(new BytesArray((BytesRef) sortValues[i])); } } } this.sortValues = sortValues; } @Override public Object[] sortValues() { return sortValues; } @Override public Object[] getSortValues() { return sortValues(); } @Override public Explanation explanation() { return explanation; } @Override public Explanation getExplanation() { return explanation(); } public void explanation(Explanation explanation) { this.explanation = explanation; } @Override public SearchShardTarget shard() { return shard; } @Override public SearchShardTarget getShard() { return shard(); } public void shard(SearchShardTarget target) { this.shard = target; } public void matchedQueries(String[] matchedQueries) { this.matchedQueries = matchedQueries; } @Override public String[] matchedQueries() { return this.matchedQueries; } @Override public String[] getMatchedQueries() { return this.matchedQueries; } public static class Fields { static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); static final XContentBuilderString _TYPE = new 
XContentBuilderString("_type"); static final XContentBuilderString _ID = new XContentBuilderString("_id"); static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); static final XContentBuilderString _SCORE = new XContentBuilderString("_score"); static final XContentBuilderString FIELDS = new XContentBuilderString("fields"); static final XContentBuilderString HIGHLIGHT = new XContentBuilderString("highlight"); static final XContentBuilderString SORT = new XContentBuilderString("sort"); static final XContentBuilderString MATCHED_QUERIES = new XContentBuilderString("matched_queries"); static final XContentBuilderString _EXPLANATION = new XContentBuilderString("_explanation"); static final XContentBuilderString VALUE = new XContentBuilderString("value"); static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description"); static final XContentBuilderString DETAILS = new XContentBuilderString("details"); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (explanation() != null) { builder.field("_shard", shard.shardId()); builder.field("_node", shard.nodeIdText()); } builder.field(Fields._INDEX, shard.indexText()); builder.field(Fields._TYPE, type); builder.field(Fields._ID, id); if (version != -1) { builder.field(Fields._VERSION, version); } if (Float.isNaN(score)) { builder.nullField(Fields._SCORE); } else { builder.field(Fields._SCORE, score); } if (source != null) { RestXContentBuilder.restDocumentSource(source, builder, params); } if (fields != null && !fields.isEmpty()) { builder.startObject(Fields.FIELDS); for (SearchHitField field : fields.values()) { if (field.values().isEmpty()) { continue; } String fieldName = field.getName(); if (field.isMetadataField()) { builder.field(fieldName, field.value()); } else { builder.startArray(fieldName); for (Object value : field.getValues()) { builder.value(value); } builder.endArray(); } } 
builder.endObject(); } if (highlightFields != null && !highlightFields.isEmpty()) { builder.startObject(Fields.HIGHLIGHT); for (HighlightField field : highlightFields.values()) { builder.field(field.name()); if (field.fragments() == null) { builder.nullValue(); } else { builder.startArray(); for (Text fragment : field.fragments()) { builder.value(fragment); } builder.endArray(); } } builder.endObject(); } if (sortValues != null && sortValues.length > 0) { builder.startArray(Fields.SORT); for (Object sortValue : sortValues) { if (sortValue != null && sortValue.equals(MAX_TERM_AS_TEXT)) { // We don't display MAX_TERM in JSON responses in case some clients have UTF-8 parsers that wouldn't accept a // non-character in the response, even though this is valid UTF-8 builder.nullValue(); } else { builder.value(sortValue); } } builder.endArray(); } if (matchedQueries.length > 0) { builder.startArray(Fields.MATCHED_QUERIES); for (String matchedFilter : matchedQueries) { builder.value(matchedFilter); } builder.endArray(); } if (explanation() != null) { builder.field(Fields._EXPLANATION); buildExplanation(builder, explanation()); } builder.endObject(); return builder; } private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { builder.startObject(); builder.field(Fields.VALUE, explanation.getValue()); builder.field(Fields.DESCRIPTION, explanation.getDescription()); Explanation[] innerExps = explanation.getDetails(); if (innerExps != null) { builder.startArray(Fields.DETAILS); for (Explanation exp : innerExps) { buildExplanation(builder, exp); } builder.endArray(); } builder.endObject(); } public static InternalSearchHit readSearchHit(StreamInput in, InternalSearchHits.StreamContext context) throws IOException { InternalSearchHit hit = new InternalSearchHit(); hit.readFrom(in, context); return hit; } @Override public void readFrom(StreamInput in) throws IOException { readFrom(in, 
InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM)); } public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException { score = in.readFloat(); id = in.readText(); type = in.readSharedText(); version = in.readLong(); source = in.readBytesReference(); if (source.length() == 0) { source = null; } if (in.readBoolean()) { explanation = readExplanation(in); } int size = in.readVInt(); if (size == 0) { fields = ImmutableMap.of(); } else if (size == 1) { SearchHitField hitField = readSearchHitField(in); fields = ImmutableMap.of(hitField.name(), hitField); } else if (size == 2) { SearchHitField hitField1 = readSearchHitField(in); SearchHitField hitField2 = readSearchHitField(in); fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2); } else if (size == 3) { SearchHitField hitField1 = readSearchHitField(in); SearchHitField hitField2 = readSearchHitField(in); SearchHitField hitField3 = readSearchHitField(in); fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3); } else if (size == 4) { SearchHitField hitField1 = readSearchHitField(in); SearchHitField hitField2 = readSearchHitField(in); SearchHitField hitField3 = readSearchHitField(in); SearchHitField hitField4 = readSearchHitField(in); fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3, hitField4.name(), hitField4); } else if (size == 5) { SearchHitField hitField1 = readSearchHitField(in); SearchHitField hitField2 = readSearchHitField(in); SearchHitField hitField3 = readSearchHitField(in); SearchHitField hitField4 = readSearchHitField(in); SearchHitField hitField5 = readSearchHitField(in); fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3, hitField4.name(), hitField4, hitField5.name(), hitField5); } else { 
ImmutableMap.Builder<String, SearchHitField> builder = ImmutableMap.builder(); for (int i = 0; i < size; i++) { SearchHitField hitField = readSearchHitField(in); builder.put(hitField.name(), hitField); } fields = builder.build(); } size = in.readVInt(); if (size == 0) { highlightFields = ImmutableMap.of(); } else if (size == 1) { HighlightField field = readHighlightField(in); highlightFields = ImmutableMap.of(field.name(), field); } else if (size == 2) { HighlightField field1 = readHighlightField(in); HighlightField field2 = readHighlightField(in); highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2); } else if (size == 3) { HighlightField field1 = readHighlightField(in); HighlightField field2 = readHighlightField(in); HighlightField field3 = readHighlightField(in); highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3); } else if (size == 4) { HighlightField field1 = readHighlightField(in); HighlightField field2 = readHighlightField(in); HighlightField field3 = readHighlightField(in); HighlightField field4 = readHighlightField(in); highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3, field4.name(), field4); } else { ImmutableMap.Builder<String, HighlightField> builder = ImmutableMap.builder(); for (int i = 0; i < size; i++) { HighlightField field = readHighlightField(in); builder.put(field.name(), field); } highlightFields = builder.build(); } size = in.readVInt(); if (size > 0) { sortValues = new Object[size]; for (int i = 0; i < sortValues.length; i++) { byte type = in.readByte(); if (type == 0) { sortValues[i] = null; } else if (type == 1) { sortValues[i] = in.readString(); } else if (type == 2) { sortValues[i] = in.readInt(); } else if (type == 3) { sortValues[i] = in.readLong(); } else if (type == 4) { sortValues[i] = in.readFloat(); } else if (type == 5) { sortValues[i] = in.readDouble(); } else if (type == 6) { sortValues[i] = 
in.readByte(); } else if (type == 7) { sortValues[i] = in.readShort(); } else if (type == 8) { sortValues[i] = in.readBoolean(); } else if (type == 9) { sortValues[i] = in.readText(); } else { throw new IOException("Can't match type [" + type + "]"); } } } size = in.readVInt(); if (size > 0) { matchedQueries = new String[size]; for (int i = 0; i < size; i++) { matchedQueries[i] = in.readString(); } } if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) { if (in.readBoolean()) { shard = readSearchShardTarget(in); } } else if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) { int lookupId = in.readVInt(); if (lookupId > 0) { shard = context.handleShardLookup().get(lookupId); } } } @Override public void writeTo(StreamOutput out) throws IOException { writeTo(out, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM)); } public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException { out.writeFloat(score); out.writeText(id); out.writeSharedText(type); out.writeLong(version); out.writeBytesReference(source); if (explanation == null) { out.writeBoolean(false); } else { out.writeBoolean(true); writeExplanation(out, explanation); } if (fields == null) { out.writeVInt(0); } else { out.writeVInt(fields.size()); for (SearchHitField hitField : fields().values()) { hitField.writeTo(out); } } if (highlightFields == null) { out.writeVInt(0); } else { out.writeVInt(highlightFields.size()); for (HighlightField highlightField : highlightFields.values()) { highlightField.writeTo(out); } } if (sortValues.length == 0) { out.writeVInt(0); } else { out.writeVInt(sortValues.length); for (Object sortValue : sortValues) { if (sortValue == null) { out.writeByte((byte) 0); } else { Class type = sortValue.getClass(); if (type == String.class) { out.writeByte((byte) 1); out.writeString((String) sortValue); } else if (type == 
Integer.class) { out.writeByte((byte) 2); out.writeInt((Integer) sortValue); } else if (type == Long.class) { out.writeByte((byte) 3); out.writeLong((Long) sortValue); } else if (type == Float.class) { out.writeByte((byte) 4); out.writeFloat((Float) sortValue); } else if (type == Double.class) { out.writeByte((byte) 5); out.writeDouble((Double) sortValue); } else if (type == Byte.class) { out.writeByte((byte) 6); out.writeByte((Byte) sortValue); } else if (type == Short.class) { out.writeByte((byte) 7); out.writeShort((Short) sortValue); } else if (type == Boolean.class) { out.writeByte((byte) 8); out.writeBoolean((Boolean) sortValue); } else if (sortValue instanceof Text) { out.writeByte((byte) 9); out.writeText((Text) sortValue); } else { throw new IOException("Can't handle sort field value of type [" + type + "]"); } } } } if (matchedQueries.length == 0) { out.writeVInt(0); } else { out.writeVInt(matchedQueries.length); for (String matchedFilter : matchedQueries) { out.writeString(matchedFilter); } } if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) { if (shard == null) { out.writeBoolean(false); } else { out.writeBoolean(true); shard.writeTo(out); } } else if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) { if (shard == null) { out.writeVInt(0); } else { out.writeVInt(context.shardHandleLookup().get(shard)); } } } }
1no label
src_main_java_org_elasticsearch_search_internal_InternalSearchHit.java
1,565
/**
 * Thread-bound holder for workflow rollback state: carries the thread id and
 * workflow id associated with the current thread's workflow execution.
 */
public class RollbackStateLocal {

    private static final ThreadLocal<RollbackStateLocal> THREAD_LOCAL =
            ThreadLocalManager.createThreadLocal(RollbackStateLocal.class, false);

    private String threadId;
    private String workflowId;

    /** Returns the instance bound to the current thread, if any. */
    public static RollbackStateLocal getRollbackStateLocal() {
        return THREAD_LOCAL.get();
    }

    /** Binds the given instance to the current thread. */
    public static void setRollbackStateLocal(RollbackStateLocal rollbackStateLocal) {
        THREAD_LOCAL.set(rollbackStateLocal);
    }

    public String getThreadId() {
        return threadId;
    }

    public void setThreadId(String threadId) {
        this.threadId = threadId;
    }

    public String getWorkflowId() {
        return workflowId;
    }

    public void setWorkflowId(String workflowId) {
        this.workflowId = workflowId;
    }
}
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_workflow_state_RollbackStateLocal.java
253
/**
 * Mockito {@link Answer} that captures the elements of an Iterator/Iterable
 * argument (at position {@code arg}) so the test can assert on what was
 * passed to the mocked call.
 */
private class IteratorCollector<T> implements Answer<Object>
{
    // Index of the argument to inspect in each invocation.
    private final int arg;
    private final List<T> elements = new ArrayList<>();

    public IteratorCollector( int arg )
    {
        this.arg = arg;
    }

    /** Asserts that exactly the expected elements were collected, in order. */
    @SafeVarargs
    public final void assertContent( T... expected )
    {
        assertEquals( Arrays.asList( expected ), elements );
    }

    @Override
    @SuppressWarnings("unchecked")
    public Object answer( InvocationOnMock invocation ) throws Throwable
    {
        Object iterator = invocation.getArguments()[arg];
        // Accept either an Iterable or an Iterator; anything else is ignored.
        if ( iterator instanceof Iterable )
        {
            iterator = ((Iterable) iterator).iterator();
        }
        if ( iterator instanceof Iterator )
        {
            collect( (Iterator) iterator );
        }
        return null;
    }

    // Drains the iterator into the captured-elements list.
    private void collect( Iterator<T> iterator )
    {
        while ( iterator.hasNext() )
        {
            elements.add( iterator.next() );
        }
    }
}
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_xa_WriteTransactionTest.java
953
public static class OutputStream extends java.io.FilterOutputStream {
    private boolean encode;          // true = ENCODE mode, false = DECODE mode
    private int position;            // next free slot in 'buffer'
    private byte[] buffer;           // accumulates 3 raw bytes (encode) or 4 base64 chars (decode)
    private int bufferLength;        // 3 when encoding, 4 when decoding
    private int lineLength;          // characters emitted on the current output line (encode only)
    private boolean breakLines;      // insert NEW_LINE every MAX_LINE_LENGTH chars when encoding
    private byte[] b4;               // Scratch used in a few places
    private boolean suspendEncoding; // when true, bytes pass through untouched
    private int options;             // Record for later
    private byte[] decodabet;        // Local copies to avoid extra method calls

    /**
     * Constructs a {@link OBase64Utils.OutputStream} in ENCODE mode.
     *
     * @param out
     *          the <tt>java.io.OutputStream</tt> to which data will be written.
     * @since 1.3
     */
    public OutputStream(java.io.OutputStream out) {
        this(out, ENCODE);
    } // end constructor

    /**
     * Constructs a {@link OBase64Utils.OutputStream} in either ENCODE or DECODE mode.
     * <p>
     * Valid options:
     *
     * <pre>
     *   ENCODE or DECODE: Encode or Decode as data is read.
     *   DO_BREAK_LINES: don't break lines at 76 characters
     *     (only meaningful when encoding)</i>
     * </pre>
     * <p>
     * Example: <code>new Base64.OutputStream( out, Base64.ENCODE )</code>
     *
     * @param out
     *          the <tt>java.io.OutputStream</tt> to which data will be written.
     * @param options
     *          Specified options.
     * @see OBase64Utils#ENCODE
     * @see OBase64Utils#DECODE
     * @see OBase64Utils#DO_BREAK_LINES
     * @since 1.3
     */
    public OutputStream(java.io.OutputStream out, int options) {
        super(out);
        this.breakLines = (options & DO_BREAK_LINES) != 0;
        this.encode = (options & ENCODE) != 0;
        // encode consumes raw bytes 3 at a time; decode consumes base64 chars 4 at a time
        this.bufferLength = encode ? 3 : 4;
        this.buffer = new byte[bufferLength];
        this.position = 0;
        this.lineLength = 0;
        this.suspendEncoding = false;
        this.b4 = new byte[4];
        this.options = options;
        this.decodabet = getDecodabet(options);
    } // end constructor

    /**
     * Writes the byte to the output stream after converting to/from Base64 notation. When encoding, bytes are buffered three at a
     * time before the output stream actually gets a write() call. When decoding, bytes are buffered four at a time.
     *
     * @param theByte
     *          the byte to write
     * @since 1.3
     */
    @Override
    public void write(int theByte) throws java.io.IOException {
        // Encoding suspended?
        if (suspendEncoding) {
            this.out.write(theByte);
            return;
        } // end if: suspended

        // Encode?
        if (encode) {
            buffer[position++] = (byte) theByte;
            if (position >= bufferLength) { // Enough to encode.
                this.out.write(encode3to4(b4, buffer, bufferLength, options));
                lineLength += 4;
                if (breakLines && lineLength >= MAX_LINE_LENGTH) {
                    this.out.write(NEW_LINE);
                    lineLength = 0;
                } // end if: end of line
                position = 0;
            } // end if: enough to output
        } // end if: encoding
        // Else, Decoding
        else {
            // Meaningful Base64 character?
            if (decodabet[theByte & 0x7f] > WHITE_SPACE_ENC) {
                buffer[position++] = (byte) theByte;
                if (position >= bufferLength) { // Enough to output.
                    int len = OBase64Utils.decode4to3(buffer, 0, b4, 0, options);
                    out.write(b4, 0, len);
                    position = 0;
                } // end if: enough to output
            } // end if: meaningful base64 character
            else if (decodabet[theByte & 0x7f] != WHITE_SPACE_ENC) {
                // Not a base64 character and not whitespace: the input is corrupt.
                throw new java.io.IOException("Invalid character in Base64 data.");
            } // end else: not white space either
        } // end else: decoding
    } // end write

    /**
     * Calls {@link #write(int)} repeatedly until <var>len</var> bytes are written.
     *
     * @param theBytes
     *          array from which to read bytes
     * @param off
     *          offset for array
     * @param len
     *          max number of bytes to read into array
     * @since 1.3
     */
    @Override
    public void write(byte[] theBytes, int off, int len) throws java.io.IOException {
        // Encoding suspended?
        if (suspendEncoding) {
            this.out.write(theBytes, off, len);
            return;
        } // end if: suspended

        for (int i = 0; i < len; i++) {
            write(theBytes[off + i]);
        } // end for: each byte written
    } // end write

    /**
     * Method added by PHIL. [Thanks, PHIL. -Rob] This pads the buffer without closing the stream.
     *
     * @throws java.io.IOException
     *           if there's an error.
     */
    public void flushBase64() throws java.io.IOException {
        if (position > 0) {
            if (encode) {
                // Emit the final, padded quantum for the 1 or 2 buffered bytes.
                out.write(encode3to4(b4, buffer, position, options));
                position = 0;
            } // end if: encoding
            else {
                // A partial decode buffer means the input was not a multiple of 4 chars.
                throw new java.io.IOException("Base64 input not properly padded.");
            } // end else: decoding
        } // end if: buffer partially full
    } // end flush

    /**
     * Flushes and closes (I think, in the superclass) the stream.
     *
     * @since 1.3
     */
    @Override
    public void close() throws java.io.IOException {
        // 1. Ensure that pending characters are written
        flushBase64();

        // 2. Actually close the stream
        // Base class both flushes and closes.
        super.close();

        buffer = null;
        out = null;
    } // end close

    /**
     * Suspends encoding of the stream. May be helpful if you need to embed a piece of base64-encoded data in a stream.
     *
     * @throws java.io.IOException
     *           if there's an error flushing
     * @since 1.5.1
     */
    public void suspendEncoding() throws java.io.IOException {
        flushBase64();
        this.suspendEncoding = true;
    } // end suspendEncoding

    /**
     * Resumes encoding of the stream. May be helpful if you need to embed a piece of base64-encoded data in a stream.
     *
     * @since 1.5.1
     */
    public void resumeEncoding() {
        this.suspendEncoding = false;
    } // end resumeEncoding

} // end inner class OutputStream
0true
core_src_main_java_com_orientechnologies_orient_core_serialization_OBase64Utils.java
2,884
/**
 * Tests for the "limit" token filter factory: verifies the default token cap
 * and the {@code max_token_count} / {@code consume_all_tokens} settings.
 */
public class LimitTokenCountFilterFactoryTests extends ElasticsearchTokenStreamTestCase {

    @Test
    public void testDefault() throws IOException {
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("index.analysis.filter.limit_default.type", "limit")
                .build();
        AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
        // Both the explicitly configured filter and the built-in "limit" filter
        // keep only the first token by default.
        assertLimitFilter(analysisService, "limit_default", "the quick brown fox", new String[] { "the" });
        assertLimitFilter(analysisService, "limit", "the quick brown fox", new String[] { "the" });
    }

    @Test
    public void testSettings() throws IOException {
        // max_token_count=3 caps the stream at three tokens, with or without
        // consuming the remainder of the input.
        assertLimitFilter(limitOneService(3, true), "limit_1", "the quick brown fox",
                new String[] { "the", "quick", "brown" });
        assertLimitFilter(limitOneService(3, false), "limit_1", "the quick brown fox",
                new String[] { "the", "quick", "brown" });
        // A cap larger than the token count passes everything through.
        assertLimitFilter(limitOneService(17, true), "limit_1", "the quick brown fox",
                new String[] { "the", "quick", "brown", "fox" });
    }

    /** Builds an analysis service with a single "limit_1" filter using the given settings. */
    private AnalysisService limitOneService(int maxTokenCount, boolean consumeAllTokens) throws IOException {
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("index.analysis.filter.limit_1.type", "limit")
                .put("index.analysis.filter.limit_1.max_token_count", maxTokenCount)
                .put("index.analysis.filter.limit_1.consume_all_tokens", consumeAllTokens)
                .build();
        return AnalysisTestsHelper.createAnalysisServiceFromSettings(settings);
    }

    /** Tokenizes {@code source} on whitespace, applies the named filter, and checks the output. */
    private void assertLimitFilter(AnalysisService analysisService, String filterName, String source, String[] expected)
            throws IOException {
        TokenFilterFactory tokenFilter = analysisService.tokenFilter(filterName);
        Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
        assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
    }
}
0true
src_test_java_org_elasticsearch_index_analysis_LimitTokenCountFilterFactoryTests.java
1,048
/**
 * Unit tests for {@link InterfacesConfig}: enable flag, adding interfaces,
 * clearing, list replacement, duplicate suppression, and immutability of the
 * returned interface collection.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class InterfacesTest {

    final String interfaceA = "127.0.0.1";
    final String interfaceB = "127.0.0.2";
    final String interfaceC = "127.0.0.3";

    @Test
    public void testIsEnabledByDefault() {
        InterfacesConfig interfaces = new InterfacesConfig();
        assertFalse(interfaces.isEnabled());
    }

    @Test
    public void testSetEnabled() {
        InterfacesConfig interfaces = new InterfacesConfig().setEnabled(true);
        assertTrue(interfaces.isEnabled());
    }

    @Test
    public void testAddInterface() {
        InterfacesConfig interfaces = new InterfacesConfig().addInterface(interfaceA);
        assertTrue(interfaces.getInterfaces().contains(interfaceA));
    }

    @Test
    public void testClear() {
        InterfacesConfig interfaces = new InterfacesConfig()
                .addInterface(interfaceA)
                .addInterface(interfaceB)
                .addInterface(interfaceC);
        // assertEquals (instead of assertTrue on a comparison) reports the
        // actual size when the assertion fails.
        assertEquals(3, interfaces.getInterfaces().size());
        interfaces.clear();
        assertEquals(0, interfaces.getInterfaces().size());
    }

    @Test
    public void testGetInterfaceList() {
        InterfacesConfig interfaces = new InterfacesConfig();
        assertNotNull(interfaces.getInterfaces());
    }

    @Test
    public void testSetInterfaceList() {
        List<String> interfaceList = new ArrayList<String>();
        interfaceList.add(interfaceA);
        interfaceList.add(interfaceB);
        interfaceList.add(interfaceC);
        InterfacesConfig interfaces = new InterfacesConfig().setInterfaces(interfaceList);
        assertTrue(interfaces.getInterfaces().contains(interfaceA));
        assertTrue(interfaces.getInterfaces().contains(interfaceB));
        assertTrue(interfaces.getInterfaces().contains(interfaceC));
    }

    @Test
    public void shouldNotContainDuplicateInterfaces() {
        InterfacesConfig interfaces = new InterfacesConfig().addInterface(interfaceA);
        assertEquals(1, interfaces.getInterfaces().size());
        // Adding the same interface again must not grow the collection.
        interfaces.addInterface(interfaceA);
        assertEquals(1, interfaces.getInterfaces().size());
    }

    @Test(expected = UnsupportedOperationException.class)
    public void shouldNotBeModifiable() {
        new InterfacesConfig()
                .addInterface(interfaceA)
                .getInterfaces()
                .clear();
    }
}
0true
hazelcast_src_test_java_com_hazelcast_config_InterfacesTest.java
272
/**
 * Exception thrown when an operation fails to complete within its allotted time.
 * A plain subclass of {@link ElasticsearchException} that adds no state of its own;
 * the type itself carries the "timed out" meaning for callers that catch it.
 */
public class ElasticsearchTimeoutException extends ElasticsearchException {

    /**
     * @param message description of what timed out
     */
    public ElasticsearchTimeoutException(String message) {
        super(message);
    }

    /**
     * @param message description of what timed out
     * @param cause   the underlying failure, preserved for the stack trace
     */
    public ElasticsearchTimeoutException(String message, Throwable cause) {
        super(message, cause);
    }
}
0true
src_main_java_org_elasticsearch_ElasticsearchTimeoutException.java
1,384
/**
 * Immutable-ish metadata about a single mapping type: the raw (compressed) mapping
 * source plus pre-extracted settings for the _id, _routing and _timestamp fields,
 * and whether a _parent field is configured. Also implements streaming of a
 * document source to resolve id/routing/timestamp values from custom paths.
 */
public class MappingMetaData {

    /**
     * Holds the (optional) custom path of the _id field inside the document source,
     * pre-split on '.' for fast traversal during parsing.
     */
    public static class Id {

        public static final Id EMPTY = new Id(null);

        private final String path;

        private final String[] pathElements;

        public Id(String path) {
            this.path = path;
            if (path == null) {
                pathElements = Strings.EMPTY_ARRAY;
            } else {
                pathElements = Strings.delimitedListToStringArray(path, ".");
            }
        }

        public boolean hasPath() {
            return path != null;
        }

        public String path() {
            return this.path;
        }

        public String[] pathElements() {
            return this.pathElements;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Id id = (Id) o;

            if (path != null ? !path.equals(id.path) : id.path != null) return false;
            if (!Arrays.equals(pathElements, id.pathElements)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = path != null ? path.hashCode() : 0;
            result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
            return result;
        }
    }

    /**
     * Holds whether _routing is required plus its (optional) custom path inside the
     * document source, pre-split on '.'.
     */
    public static class Routing {

        public static final Routing EMPTY = new Routing(false, null);

        private final boolean required;

        private final String path;

        private final String[] pathElements;

        public Routing(boolean required, String path) {
            this.required = required;
            this.path = path;
            if (path == null) {
                pathElements = Strings.EMPTY_ARRAY;
            } else {
                pathElements = Strings.delimitedListToStringArray(path, ".");
            }
        }

        public boolean required() {
            return required;
        }

        public boolean hasPath() {
            return path != null;
        }

        public String path() {
            return this.path;
        }

        public String[] pathElements() {
            return this.pathElements;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Routing routing = (Routing) o;

            if (required != routing.required) return false;
            if (path != null ? !path.equals(routing.path) : routing.path != null) return false;
            if (!Arrays.equals(pathElements, routing.pathElements)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = (required ? 1 : 0);
            result = 31 * result + (path != null ? path.hashCode() : 0);
            result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
            return result;
        }
    }

    /**
     * Holds the _timestamp configuration: enabled flag, optional custom path, and
     * the date format used to parse timestamp values.
     */
    public static class Timestamp {

        /**
         * Normalizes a timestamp string to epoch milliseconds (as a string).
         * A numeric input is assumed to already be milliseconds and is returned
         * unchanged; otherwise it is parsed with the given formatter.
         */
        public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException {
            long ts;
            try {
                // if we manage to parse it, its a millisecond timestamp, just return the string as is
                ts = Long.parseLong(timestampAsString);
                return timestampAsString;
            } catch (NumberFormatException e) {
                try {
                    ts = dateTimeFormatter.parser().parseMillis(timestampAsString);
                } catch (RuntimeException e1) {
                    throw new TimestampParsingException(timestampAsString);
                }
            }
            return Long.toString(ts);
        }

        public static final Timestamp EMPTY = new Timestamp(false, null, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT);

        private final boolean enabled;

        private final String path;

        private final String format;

        private final String[] pathElements;

        private final FormatDateTimeFormatter dateTimeFormatter;

        public Timestamp(boolean enabled, String path, String format) {
            this.enabled = enabled;
            this.path = path;
            if (path == null) {
                pathElements = Strings.EMPTY_ARRAY;
            } else {
                pathElements = Strings.delimitedListToStringArray(path, ".");
            }
            this.format = format;
            this.dateTimeFormatter = Joda.forPattern(format);
        }

        public boolean enabled() {
            return enabled;
        }

        public boolean hasPath() {
            return path != null;
        }

        public String path() {
            return this.path;
        }

        public String[] pathElements() {
            return this.pathElements;
        }

        public String format() {
            return this.format;
        }

        public FormatDateTimeFormatter dateTimeFormatter() {
            return this.dateTimeFormatter;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            Timestamp timestamp = (Timestamp) o;

            if (enabled != timestamp.enabled) return false;
            if (dateTimeFormatter != null ? !dateTimeFormatter.equals(timestamp.dateTimeFormatter) : timestamp.dateTimeFormatter != null)
                return false;
            if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false;
            if (path != null ? !path.equals(timestamp.path) : timestamp.path != null) return false;
            if (!Arrays.equals(pathElements, timestamp.pathElements)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = (enabled ? 1 : 0);
            result = 31 * result + (path != null ? path.hashCode() : 0);
            result = 31 * result + (format != null ? format.hashCode() : 0);
            result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
            result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0);
            return result;
        }
    }

    private final String type;

    private final CompressedString source;

    // Mutable only via updateDefaultMapping(), which fills in EMPTY placeholders.
    private Id id;
    private Routing routing;
    private Timestamp timestamp;
    private boolean hasParentField;

    /** Extracts metadata straight from a parsed {@code DocumentMapper}. */
    public MappingMetaData(DocumentMapper docMapper) {
        this.type = docMapper.type();
        this.source = docMapper.mappingSource();
        this.id = new Id(docMapper.idFieldMapper().path());
        this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path());
        this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), docMapper.timestampFieldMapper().dateTimeFormatter().format());
        this.hasParentField = docMapper.parentFieldMapper().active();
    }

    /**
     * Parses metadata from a compressed mapping source; the single root key of the
     * mapping is taken as the type name.
     */
    public MappingMetaData(CompressedString mapping) throws IOException {
        this.source = mapping;
        Map<String, Object> mappingMap = XContentHelper.createParser(mapping.compressed(), 0, mapping.compressed().length).mapOrderedAndClose();
        if (mappingMap.size() != 1) {
            throw new ElasticsearchIllegalStateException("Can't derive type from mapping, no root type: " + mapping.string());
        }
        this.type = mappingMap.keySet().iterator().next();
        initMappers((Map<String, Object>) mappingMap.get(this.type));
    }

    public MappingMetaData(Map<String, Object> mapping) throws IOException {
        this(mapping.keySet().iterator().next(), mapping);
    }

    public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
        this.type = type;
        XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
        this.source = new CompressedString(mappingBuilder.bytes());
        Map<String, Object> withoutType = mapping;
        // Unwrap the type-keyed root object if present.
        if (mapping.size() == 1 && mapping.containsKey(type)) {
            withoutType = (Map<String, Object>) mapping.get(type);
        }
        initMappers(withoutType);
    }

    /**
     * Pulls _id/_routing/_timestamp/_parent settings out of the (type-stripped)
     * mapping map, falling back to the EMPTY sentinels when absent.
     */
    private void initMappers(Map<String, Object> withoutType) {
        if (withoutType.containsKey("_id")) {
            String path = null;
            Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_id");
            for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("path")) {
                    path = fieldNode.toString();
                }
            }
            this.id = new Id(path);
        } else {
            this.id = Id.EMPTY;
        }
        if (withoutType.containsKey("_routing")) {
            boolean required = false;
            String path = null;
            Map<String, Object> routingNode = (Map<String, Object>) withoutType.get("_routing");
            for (Map.Entry<String, Object> entry : routingNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("required")) {
                    required = nodeBooleanValue(fieldNode);
                } else if (fieldName.equals("path")) {
                    path = fieldNode.toString();
                }
            }
            this.routing = new Routing(required, path);
        } else {
            this.routing = Routing.EMPTY;
        }
        if (withoutType.containsKey("_timestamp")) {
            boolean enabled = false;
            String path = null;
            String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT;
            Map<String, Object> timestampNode = (Map<String, Object>) withoutType.get("_timestamp");
            for (Map.Entry<String, Object> entry : timestampNode.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    enabled = nodeBooleanValue(fieldNode);
                } else if (fieldName.equals("path")) {
                    path = fieldNode.toString();
                } else if (fieldName.equals("format")) {
                    format = fieldNode.toString();
                }
            }
            this.timestamp = new Timestamp(enabled, path, format);
        } else {
            this.timestamp = Timestamp.EMPTY;
        }
        if (withoutType.containsKey("_parent")) {
            this.hasParentField = true;
        } else {
            this.hasParentField = false;
        }
    }

    public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
        this.type = type;
        this.source = source;
        this.id = id;
        this.routing = routing;
        this.timestamp = timestamp;
        this.hasParentField = hasParentField;
    }

    /**
     * Replaces EMPTY sentinel settings with those of the default mapping.
     * Note: compares by identity against the EMPTY constants on purpose.
     */
    void updateDefaultMapping(MappingMetaData defaultMapping) {
        if (id == Id.EMPTY) {
            id = defaultMapping.id();
        }
        if (routing == Routing.EMPTY) {
            routing = defaultMapping.routing();
        }
        if (timestamp == Timestamp.EMPTY) {
            timestamp = defaultMapping.timestamp();
        }
    }

    public String type() {
        return this.type;
    }

    public CompressedString source() {
        return this.source;
    }

    public boolean hasParentField() {
        return hasParentField;
    }

    /**
     * Converts the serialized compressed form of the mappings into a parsed map.
     */
    public Map<String, Object> sourceAsMap() throws IOException {
        Map<String, Object> mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length, true).v2();
        if (mapping.size() == 1 && mapping.containsKey(type())) {
            // the type name is the root value, reduce it
            mapping = (Map<String, Object>) mapping.get(type());
        }
        return mapping;
    }

    /**
     * Converts the serialized compressed form of the mappings into a parsed map.
     */
    public Map<String, Object> getSourceAsMap() throws IOException {
        return sourceAsMap();
    }

    public Id id() {
        return this.id;
    }

    public Routing routing() {
        return this.routing;
    }

    public Timestamp timestamp() {
        return this.timestamp;
    }

    /**
     * Builds a parse context that only looks for the values that (a) were not
     * supplied externally and (b) have a custom path configured.
     */
    public ParseContext createParseContext(@Nullable String id, @Nullable String routing, @Nullable String timestamp) {
        return new ParseContext(
                id == null && id().hasPath(),
                routing == null && routing().hasPath(),
                timestamp == null && timestamp().hasPath()
        );
    }

    public void parse(XContentParser parser, ParseContext parseContext) throws IOException {
        innerParse(parser, parseContext);
    }

    /**
     * Walks the document source, descending into nested objects along the
     * configured id/routing/timestamp paths. The locationX counters in the
     * context track the current depth along each path; recursion restores them
     * on the way back out. Stops as soon as all needed values are resolved.
     */
    private void innerParse(XContentParser parser, ParseContext context) throws IOException {
        if (!context.parsingStillNeeded()) {
            return;
        }

        XContentParser.Token t = parser.currentToken();
        if (t == null) {
            t = parser.nextToken();
        }
        if (t == XContentParser.Token.START_OBJECT) {
            t = parser.nextToken();
        }
        // The path element expected at the current depth for each value still needed.
        String idPart = context.idParsingStillNeeded() ? id().pathElements()[context.locationId] : null;
        String routingPart = context.routingParsingStillNeeded() ? routing().pathElements()[context.locationRouting] : null;
        String timestampPart = context.timestampParsingStillNeeded() ? timestamp().pathElements()[context.locationTimestamp] : null;

        for (; t == XContentParser.Token.FIELD_NAME; t = parser.nextToken()) {
            // Must point to field name
            String fieldName = parser.currentName();
            // And then the value...
            t = parser.nextToken();

            boolean incLocationId = false;
            boolean incLocationRouting = false;
            boolean incLocationTimestamp = false;

            if (context.idParsingStillNeeded() && fieldName.equals(idPart)) {
                if (context.locationId + 1 == id.pathElements().length) {
                    if (!t.isValue()) {
                        throw new MapperParsingException("id field must be a value but was either an object or an array");
                    }
                    context.id = parser.textOrNull();
                    context.idResolved = true;
                } else {
                    incLocationId = true;
                }
            }

            if (context.routingParsingStillNeeded() && fieldName.equals(routingPart)) {
                if (context.locationRouting + 1 == routing.pathElements().length) {
                    context.routing = parser.textOrNull();
                    context.routingResolved = true;
                } else {
                    incLocationRouting = true;
                }
            }

            if (context.timestampParsingStillNeeded() && fieldName.equals(timestampPart)) {
                if (context.locationTimestamp + 1 == timestamp.pathElements().length) {
                    context.timestamp = parser.textOrNull();
                    context.timestampResolved = true;
                } else {
                    incLocationTimestamp = true;
                }
            }

            if (incLocationId || incLocationRouting || incLocationTimestamp) {
                // At least one path continues into this field: recurse if it is an
                // object, bumping (and afterwards restoring) the depth counters.
                if (t == XContentParser.Token.START_OBJECT) {
                    context.locationId += incLocationId ? 1 : 0;
                    context.locationRouting += incLocationRouting ? 1 : 0;
                    context.locationTimestamp += incLocationTimestamp ? 1 : 0;
                    innerParse(parser, context);
                    context.locationId -= incLocationId ? 1 : 0;
                    context.locationRouting -= incLocationRouting ? 1 : 0;
                    context.locationTimestamp -= incLocationTimestamp ? 1 : 0;
                }
            } else {
                // Irrelevant subtree: skip it wholesale.
                parser.skipChildren();
            }

            if (!context.parsingStillNeeded()) {
                return;
            }
        }
    }

    /**
     * Serializes the metadata. Field order must match {@link #readFrom}.
     */
    public static void writeTo(MappingMetaData mappingMd, StreamOutput out) throws IOException {
        out.writeString(mappingMd.type());
        mappingMd.source().writeTo(out);
        // id
        if (mappingMd.id().hasPath()) {
            out.writeBoolean(true);
            out.writeString(mappingMd.id().path());
        } else {
            out.writeBoolean(false);
        }
        // routing
        out.writeBoolean(mappingMd.routing().required());
        if (mappingMd.routing().hasPath()) {
            out.writeBoolean(true);
            out.writeString(mappingMd.routing().path());
        } else {
            out.writeBoolean(false);
        }
        // timestamp
        out.writeBoolean(mappingMd.timestamp().enabled());
        if (mappingMd.timestamp().hasPath()) {
            out.writeBoolean(true);
            out.writeString(mappingMd.timestamp().path());
        } else {
            out.writeBoolean(false);
        }
        out.writeString(mappingMd.timestamp().format());
        out.writeBoolean(mappingMd.hasParentField());
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        MappingMetaData that = (MappingMetaData) o;

        // NOTE: hasParentField is intentionally not part of equality here.
        if (!id.equals(that.id)) return false;
        if (!routing.equals(that.routing)) return false;
        if (!source.equals(that.source)) return false;
        if (!timestamp.equals(that.timestamp)) return false;
        if (!type.equals(that.type)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = type.hashCode();
        result = 31 * result + source.hashCode();
        result = 31 * result + id.hashCode();
        result = 31 * result + routing.hashCode();
        result = 31 * result + timestamp.hashCode();
        return result;
    }

    /**
     * Deserializes metadata written by {@link #writeTo}; field order must match.
     */
    public static MappingMetaData readFrom(StreamInput in) throws IOException {
        String type = in.readString();
        CompressedString source = CompressedString.readCompressedString(in);
        // id
        Id id = new Id(in.readBoolean() ? in.readString() : null);
        // routing
        Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? in.readString() : null);
        // timestamp
        Timestamp timestamp = new Timestamp(in.readBoolean(), in.readBoolean() ? in.readString() : null, in.readString());
        final boolean hasParentField = in.readBoolean();
        return new MappingMetaData(type, source, id, routing, timestamp, hasParentField);
    }

    /**
     * Mutable state threaded through {@link #innerParse}: which values still
     * need resolving, the current depth along each configured path, and the
     * resolved values themselves.
     */
    public static class ParseContext {
        final boolean shouldParseId;
        final boolean shouldParseRouting;
        final boolean shouldParseTimestamp;

        // Current index into the respective pathElements array during traversal.
        int locationId = 0;
        int locationRouting = 0;
        int locationTimestamp = 0;

        boolean idResolved;
        boolean routingResolved;
        boolean timestampResolved;

        String id;
        String routing;
        String timestamp;

        public ParseContext(boolean shouldParseId, boolean shouldParseRouting, boolean shouldParseTimestamp) {
            this.shouldParseId = shouldParseId;
            this.shouldParseRouting = shouldParseRouting;
            this.shouldParseTimestamp = shouldParseTimestamp;
        }

        /**
         * The id value parsed, <tt>null</tt> if does not require parsing, or not resolved.
         */
        public String id() {
            return id;
        }

        /**
         * Does id parsing really needed at all?
         */
        public boolean shouldParseId() {
            return shouldParseId;
        }

        /**
         * Has id been resolved during the parsing phase.
         */
        public boolean idResolved() {
            return idResolved;
        }

        /**
         * Is id parsing still needed?
         */
        public boolean idParsingStillNeeded() {
            return shouldParseId && !idResolved;
        }

        /**
         * The routing value parsed, <tt>null</tt> if does not require parsing, or not resolved.
         */
        public String routing() {
            return routing;
        }

        /**
         * Does routing parsing really needed at all?
         */
        public boolean shouldParseRouting() {
            return shouldParseRouting;
        }

        /**
         * Has routing been resolved during the parsing phase.
         */
        public boolean routingResolved() {
            return routingResolved;
        }

        /**
         * Is routing parsing still needed?
         */
        public boolean routingParsingStillNeeded() {
            return shouldParseRouting && !routingResolved;
        }

        /**
         * The timestamp value parsed, <tt>null</tt> if does not require parsing, or not resolved.
         */
        public String timestamp() {
            return timestamp;
        }

        /**
         * Does timestamp parsing really needed at all?
         */
        public boolean shouldParseTimestamp() {
            return shouldParseTimestamp;
        }

        /**
         * Has timestamp been resolved during the parsing phase.
         */
        public boolean timestampResolved() {
            return timestampResolved;
        }

        /**
         * Is timestamp parsing still needed?
         */
        public boolean timestampParsingStillNeeded() {
            return shouldParseTimestamp && !timestampResolved;
        }

        /**
         * Do we really need parsing?
         */
        public boolean shouldParse() {
            return shouldParseId || shouldParseRouting || shouldParseTimestamp;
        }

        /**
         * Is parsing still needed?
         */
        public boolean parsingStillNeeded() {
            return idParsingStillNeeded() || routingParsingStillNeeded() || timestampParsingStillNeeded();
        }
    }
}
1no label
src_main_java_org_elasticsearch_cluster_metadata_MappingMetaData.java
279
/**
 * Transaction-log command covering a schema-rule change. Carries the dynamic
 * records making up the rule both before and after the change, so the command
 * can be both applied and inspected during recovery.
 */
static class SchemaRuleCommand extends Command
{
    private final NeoStore neoStore;
    private final IndexingService indexes;
    private final SchemaStore store;
    private final Collection<DynamicRecord> recordsBefore;
    private final Collection<DynamicRecord> recordsAfter;
    private final SchemaRule schemaRule;
    // Mutable: recovery may rewrite the owning transaction id via setTxId().
    private long txId;

    SchemaRuleCommand( NeoStore neoStore, SchemaStore store, IndexingService indexes,
            Collection<DynamicRecord> recordsBefore, Collection<DynamicRecord> recordsAfter,
            SchemaRule schemaRule, long txId )
    {
        // Key and mode (CREATE/UPDATE/DELETE) are derived from the first "after" record.
        super( first( recordsAfter ).getId(), Mode.fromRecordState( first( recordsAfter ) ) );
        this.neoStore = neoStore;
        this.indexes = indexes;
        this.store = store;
        this.recordsBefore = recordsBefore;
        this.recordsAfter = recordsAfter;
        this.schemaRule = schemaRule;
        this.txId = txId;
    }

    @Override
    public void accept( CommandRecordVisitor visitor )
    {
        visitor.visitSchemaRule( recordsAfter );
    }

    @Override
    public String toString()
    {
        if ( schemaRule != null )
        {
            return getMode() + ":" + schemaRule.toString();
        }
        // schemaRule can be null when the records could not be deserialized.
        return "SchemaRule" + recordsAfter;
    }

    @Override
    void removeFromCache( CacheAccessBackDoor cacheAccess )
    {
        cacheAccess.removeSchemaRuleFromCache( getKey() );
    }

    /** Read-only view of the post-change records. */
    Collection<DynamicRecord> getRecordsAfter()
    {
        return unmodifiableCollection( recordsAfter );
    }

    /**
     * Applies the command: persists the new records into the schema store and
     * notifies the indexing service / neo store about index or constraint changes.
     */
    @Override
    public void execute()
    {
        for ( DynamicRecord record : recordsAfter )
        {
            store.updateRecord( record );
        }

        if ( schemaRule instanceof IndexRule )
        {
            switch ( getMode() )
            {
            case UPDATE:
                // Shouldn't we be more clear about that we are waiting for an index to come online here?
                // right now we just assume that an update to index records means wait for it to be online.
                if ( ((IndexRule) schemaRule).isConstraintIndex() )
                {
                    try
                    {
                        indexes.activateIndex( schemaRule.getId() );
                    }
                    catch ( IndexNotFoundKernelException | IndexActivationFailedKernelException
                            | IndexPopulationFailedKernelException e )
                    {
                        throw new IllegalStateException( "Unable to enable constraint, backing index is not online.", e );
                    }
                }
                break;
            case CREATE:
                indexes.createIndex( (IndexRule) schemaRule );
                break;
            case DELETE:
                indexes.dropIndex( (IndexRule)schemaRule );
                break;
            default:
                throw new IllegalStateException( getMode().name() );
            }
        }

        if( schemaRule instanceof UniquenessConstraintRule )
        {
            switch ( getMode() )
            {
            case UPDATE:
            case CREATE:
                // Remember which transaction introduced the latest constraint.
                neoStore.setLatestConstraintIntroducingTx( txId );
                break;
            case DELETE:
                break;
            default:
                throw new IllegalStateException( getMode().name() );
            }
        }
    }

    /**
     * Serializes the command: type byte, before/after record sets, an
     * isCreated flag byte, then the owning transaction id.
     */
    @Override
    public void writeToFile( LogBuffer buffer ) throws IOException
    {
        buffer.put( SCHEMA_RULE_COMMAND );
        writeDynamicRecords( buffer, recordsBefore );
        writeDynamicRecords( buffer, recordsAfter );
        buffer.put( first( recordsAfter ).isCreated() ? (byte) 1 : 0);
        buffer.putLong( txId );
    }

    public SchemaRule getSchemaRule()
    {
        return schemaRule;
    }

    public long getTxId()
    {
        return txId;
    }

    public void setTxId( long txId )
    {
        this.txId = txId;
    }

    /**
     * Mirror image of {@link #writeToFile(LogBuffer)}: reconstructs a command
     * from a channel. Throws IllegalStateException when the stream ends before
     * the isCreated flag or txId could be read.
     */
    static Command readFromFile( NeoStore neoStore, IndexingService indexes, ReadableByteChannel byteChannel,
            ByteBuffer buffer ) throws IOException
    {
        Collection<DynamicRecord> recordsBefore = new ArrayList<>();
        readDynamicRecords( byteChannel, buffer, recordsBefore, COLLECTION_DYNAMIC_RECORD_ADDER );

        Collection<DynamicRecord> recordsAfter = new ArrayList<>();
        readDynamicRecords( byteChannel, buffer, recordsAfter, COLLECTION_DYNAMIC_RECORD_ADDER );

        if ( !readAndFlip( byteChannel, buffer, 1 ) )
        {
            throw new IllegalStateException( "Missing SchemaRule.isCreated flag in deserialization" );
        }

        byte isCreated = buffer.get();
        if ( 1 == isCreated )
        {
            for ( DynamicRecord record : recordsAfter )
            {
                record.setCreated();
            }
        }

        if ( !readAndFlip( byteChannel, buffer, 8 ) )
        {
            throw new IllegalStateException( "Missing SchemaRule.txId in deserialization" );
        }
        long txId = buffer.getLong();

        // Prefer the "after" records; fall back to "before" when the rule was deleted.
        SchemaRule rule = first( recordsAfter ).inUse() ?
                          readSchemaRule( recordsAfter ) :
                          readSchemaRule( recordsBefore );

        return new SchemaRuleCommand( neoStore, neoStore != null ? neoStore.getSchemaStore() : null, indexes,
                recordsBefore, recordsAfter, rule, txId );
    }

    private static SchemaRule readSchemaRule( Collection<DynamicRecord> recordsBefore )
    {
        assert first(recordsBefore).inUse() : "Asked to deserialize schema records that were not in use.";

        SchemaRule rule;
        // Concatenate the dynamic records' payload into a single buffer before decoding.
        ByteBuffer deserialized = AbstractDynamicStore.concatData( recordsBefore, new byte[100] );
        try
        {
            rule = SchemaRule.Kind.deserialize( first( recordsBefore ).getId(), deserialized );
        }
        catch ( MalformedSchemaRuleException e )
        {
            // TODO This is bad. We should probably just shut down if that happens
            throw launderedException( e );
        }
        return rule;
    }
}
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_nioneo_xa_Command.java
366
/**
 * Reducer that sums all integer values emitted for a key and returns the total.
 */
public static class TestReducer extends Reducer<String, Integer, Integer> {

    // Running total; excluded from Java serialization (transient).
    private transient int runningTotal = 0;

    /** Folds one mapped value into the running total. */
    @Override
    public void reduce(Integer value) {
        runningTotal = runningTotal + value;
    }

    /** Emits the accumulated sum as the final result. */
    @Override
    public Integer finalizeReduce() {
        return runningTotal;
    }
}
0true
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
1,292
// Submit a no-op cluster-state update and record, via the atomics and latches
// declared in the enclosing test, which ack-related callbacks actually fire.
clusterService.submitStateUpdateTask("test", new AckedClusterStateUpdateTask() {
    @Override
    public boolean mustAck(DiscoveryNode discoveryNode) {
        // Require an acknowledgement from every node.
        return true;
    }

    @Override
    public void onAllNodesAcked(@Nullable Throwable t) {
        // Every node acked before the timeout; release the waiting test thread.
        allNodesAcked.set(true);
        latch.countDown();
    }

    @Override
    public void onAckTimeout() {
        // The ack window elapsed before all nodes responded.
        ackTimeout.set(true);
        latch.countDown();
    }

    @Override
    public TimeValue ackTimeout() {
        return TimeValue.timeValueSeconds(10);
    }

    @Override
    public TimeValue timeout() {
        return TimeValue.timeValueSeconds(10);
    }

    @Override
    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
        processedLatch.countDown();
    }

    @Override
    public ClusterState execute(ClusterState currentState) throws Exception {
        // Record that the task ran; publish a rebuilt (content-identical) state.
        executed.set(true);
        return ClusterState.builder(currentState).build();
    }

    @Override
    public void onFailure(String source, Throwable t) {
        logger.error("failed to execute callback in test {}", t, source);
        onFailure.set(true);
        latch.countDown();
    }
});
0true
src_test_java_org_elasticsearch_cluster_ClusterServiceTests.java
827
// Drop a class while holding the storage lock so no concurrent schema change
// can interleave with the check-then-drop sequence below.
getDatabase().getStorage().callInLock(new Callable<Object>() {
    @Override
    public Object call() throws Exception {
        final OClass cls = classes.get(key);
        if (cls == null)
            throw new OSchemaException("Class " + iClassName + " was not found in current database");

        // Refuse to drop while subclasses still depend on this class.
        if (cls.getBaseClasses().hasNext())
            throw new OSchemaException("Class " + iClassName
                + " cannot be dropped because it has sub classes. Remove the dependencies before trying to drop it again");

        // Delegate the actual removal to the SQL command layer.
        final StringBuilder cmd = new StringBuilder("drop class ");
        cmd.append(iClassName);
        Object result = getDatabase().command(new OCommandSQL(cmd.toString())).execute();

        // Only evict from the local class map if the command reported success.
        if (result instanceof Boolean && (Boolean) result) {
            classes.remove(key);
        }

        // Refresh database metadata and this schema from storage afterwards.
        getDatabase().reload();
        reload();

        return null;
    }
}, true); // NOTE(review): 'true' flag presumably requests the exclusive lock — confirm against OStorage.callInLock
1no label
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OSchemaShared.java
243
/**
 * Stub NodeStore that performs no real I/O: writes are captured as strings in a
 * shared recording list so the test can assert on command ordering.
 */
private static class RecordingNodeStore extends NodeStore
{
    /** Sink the test swaps in to observe the sequence of store writes. */
    private final AtomicReference<List<String>> currentRecording;

    public RecordingNodeStore( AtomicReference<List<String>> currentRecording )
    {
        // No backing files, configs or id generators — everything is stubbed out.
        super( null, null, null, null, null, null, null );
        this.currentRecording = currentRecording;
    }

    // Rather than persisting anything, log which action hit a node record.
    @Override
    public void updateRecord( NodeRecord record )
    {
        String entry = commandActionToken( record ) + " node";
        currentRecording.get().add( entry );
    }

    // Storage lifecycle hooks are deliberate no-ops: there is no real store here.
    @Override
    protected void checkStorage()
    {
    }

    @Override
    protected void checkVersion()
    {
    }

    @Override
    protected void loadStorage()
    {
    }

    // Fabricate an in-use record on demand instead of reading from disk.
    @Override
    public NodeRecord getRecord( long id )
    {
        NodeRecord stub = new NodeRecord( id, -1, -1 );
        stub.setInUse( true );
        return stub;
    }
}
0true
community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_xa_WriteTransactionCommandOrderingTest.java
768
/**
 * Intent hint for bulk read workloads. Both lifecycle hooks are intentionally
 * empty, so activating this intent currently changes no database settings —
 * it exists so callers can declare the intent and future tuning can be added here.
 */
public class OIntentMassiveRead implements OIntent {
  // No-op: nothing is reconfigured when a massive read begins.
  public void begin(final ODatabaseRaw iDatabase, final Object... iArgs) {
  }

  // No-op: nothing to restore when the massive read ends.
  public void end(final ODatabaseRaw iDatabase) {
  }
}
0true
core_src_main_java_com_orientechnologies_orient_core_intent_OIntentMassiveRead.java
3,152
/**
 * Base class for per-index field data implementations: holds the field name,
 * field data type and the cache used to load/evict per-segment field data.
 */
public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends AbstractIndexComponent implements IndexFieldData<FD> {

    private final FieldMapper.Names fieldNames;
    protected final FieldDataType fieldDataType;
    protected final IndexFieldDataCache cache;

    public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
                                  FieldDataType fieldDataType, IndexFieldDataCache cache) {
        super(index, indexSettings);
        this.fieldNames = fieldNames;
        this.fieldDataType = fieldDataType;
        this.cache = cache;
    }

    @Override
    public FieldMapper.Names getFieldNames() {
        return this.fieldNames;
    }

    // Evict all cached field data for this field (keyed by its index name).
    @Override
    public void clear() {
        cache.clear(fieldNames.indexName());
    }

    // Evict cached field data associated with the given reader only.
    @Override
    public void clear(IndexReader reader) {
        cache.clear(reader);
    }

    /**
     * Loads (or fetches from cache) the atomic field data for one segment.
     * Any failure is rethrown as an ElasticsearchException; existing
     * ElasticsearchExceptions pass through unchanged.
     */
    @Override
    public final FD load(AtomicReaderContext context) {
        try {
            FD fd = cache.load(context, this);
            return fd;
        } catch (Throwable e) {
            if (e instanceof ElasticsearchException) {
                throw (ElasticsearchException) e;
            } else {
                throw new ElasticsearchException(e.getMessage(), e);
            }
        }
    }

    /**
     * A {@code PerValueEstimator} is a sub-class that can be used to estimate
     * the memory overhead for loading the data. Each field data
     * implementation should implement its own {@code PerValueEstimator} if it
     * intends to take advantage of the MemoryCircuitBreaker.
     * <p/>
     * Note that the .beforeLoad(...) and .afterLoad(...) methods must be
     * manually called.
     */
    public interface PerValueEstimator {

        /**
         * @return the number of bytes for the given term
         */
        public long bytesPerValue(BytesRef term);

        /**
         * Execute any pre-loading estimations for the terms. May also
         * optionally wrap a {@link TermsEnum} in a
         * {@link RamAccountingTermsEnum}
         * which will estimate the memory on a per-term basis.
         *
         * @param terms terms to be estimated
         * @return A TermsEnum for the given terms
         * @throws IOException
         */
        public TermsEnum beforeLoad(Terms terms) throws IOException;

        /**
         * Possibly adjust a circuit breaker after field data has been loaded,
         * now that the actual amount of memory used by the field data is known
         *
         * @param termsEnum  terms that were loaded
         * @param actualUsed actual field data memory usage
         */
        public void afterLoad(TermsEnum termsEnum, long actualUsed);
    }
}
0true
src_main_java_org_elasticsearch_index_fielddata_AbstractIndexFieldData.java
1,009
/**
 * Read-only semaphore operation reporting how many permits are currently available.
 */
public class AvailableOperation extends SemaphoreOperation implements IdentifiedDataSerializable {

    public AvailableOperation() {
    }

    public AvailableOperation(String name) {
        // NOTE(review): -1 permit count appears to mean "no permits requested" —
        // confirm against the SemaphoreOperation contract.
        super(name, -1);
    }

    /** Queries the permit state and publishes the available count as the response. */
    @Override
    public void run() throws Exception {
        final int available = getPermit().getAvailable();
        response = available;
    }

    @Override
    public int getId() {
        return SemaphoreDataSerializerHook.AVAILABLE_OPERATION;
    }

    @Override
    public int getFactoryId() {
        return SemaphoreDataSerializerHook.F_ID;
    }
}
0true
hazelcast_src_main_java_com_hazelcast_concurrent_semaphore_operations_AvailableOperation.java
153
/**
 * Replays a serialized transaction log stream and translates each log entry and
 * store command into callbacks on a {@link Visitor}.
 */
public class TransactionReader
{
    /**
     * Receives one callback per log entry / store command. Every callback is
     * keyed by the transaction-local identifier of the originating entry.
     */
    public interface Visitor
    {
        void visitStart( int localId, byte[] globalTransactionId, int masterId, int myId, long startTimestamp );

        void visitPrepare( int localId, long prepareTimestamp );

        void visitCommit( int localId, boolean twoPhase, long txId, long commitTimestamp );

        void visitDone( int localId );

        void visitUpdateNode( int localId, NodeRecord node );

        void visitDeleteNode( int localId, long node );

        void visitUpdateRelationship( int localId, RelationshipRecord node );

        void visitDeleteRelationship( int localId, long node );

        void visitUpdateProperty( int localId, PropertyRecord node );

        void visitDeleteProperty( int localId, long node );

        void visitUpdateRelationshipTypeToken( int localId, RelationshipTypeTokenRecord node );

        void visitDeleteRelationshipTypeToken( int localId, int node );

        void visitUpdateLabelToken( int localId, LabelTokenRecord node );

        void visitDeleteLabelToken( int localId, int node );

        void visitUpdatePropertyKeyToken( int localId, PropertyKeyTokenRecord node );

        void visitDeletePropertyKeyToken( int localId, int node );

        void visitUpdateNeoStore( int localId, NeoStoreRecord node );

        void visitDeleteNeoStore( int localId, long node );

        void visitDeleteSchemaRule( int localId, Collection<DynamicRecord> records, long id );

        void visitUpdateSchemaRule( int localId, Collection<DynamicRecord> records );
    }

    // Command deserializer detached from any live store (all store refs are null).
    private static final XaCommandFactory COMMAND_FACTORY = new XaCommandFactory()
    {
        @Override
        public XaCommand readCommand( ReadableByteChannel byteChannel, ByteBuffer buffer ) throws IOException
        {
            return Command.readCommand( null, null, byteChannel, buffer );
        }
    };

    // Scratch buffer shared across entries — this reader is NOT thread-safe.
    private final ByteBuffer buffer = ByteBuffer.wrap( new byte[256] );

    /**
     * Reads entries from {@code source} until exhausted, dispatching each one to
     * the matching {@code visitor} callback (commands are further broken down by
     * {@link CommandVisitor}).
     */
    public void read( ReadableByteChannel source, Visitor visitor ) throws IOException
    {
        for ( LogEntry entry; null != (entry = readEntry( source )); )
        {
            if ( entry instanceof LogEntry.Command )
            {
                Command command = (Command) ((LogEntry.Command) entry).getXaCommand();
                command.accept( new CommandVisitor( entry.getIdentifier(), visitor ) );
            }
            else if ( entry instanceof LogEntry.Start )
            {
                LogEntry.Start start = (LogEntry.Start) entry;
                visitor.visitStart( start.getIdentifier(), start.getXid().getGlobalTransactionId(), start.getMasterId(),
                        start.getLocalId(), start.getTimeWritten() );
            }
            else if ( entry instanceof LogEntry.Prepare )
            {
                LogEntry.Prepare prepare = (LogEntry.Prepare) entry;
                visitor.visitPrepare( prepare.getIdentifier(), prepare.getTimeWritten() );
            }
            else if ( entry instanceof LogEntry.OnePhaseCommit )
            {
                LogEntry.OnePhaseCommit commit = (LogEntry.OnePhaseCommit) entry;
                visitor.visitCommit( commit.getIdentifier(), false, commit.getTxId(), commit.getTimeWritten() );
            }
            else if ( entry instanceof LogEntry.TwoPhaseCommit )
            {
                LogEntry.TwoPhaseCommit commit = (LogEntry.TwoPhaseCommit) entry;
                visitor.visitCommit( commit.getIdentifier(), true, commit.getTxId(), commit.getTimeWritten() );
            }
            else if ( entry instanceof LogEntry.Done )
            {
                LogEntry.Done done = (LogEntry.Done) entry;
                visitor.visitDone( done.getIdentifier() );
            }
        }
    }

    private LogEntry readEntry( ReadableByteChannel source ) throws IOException
    {
        return LogIoUtils.readEntry( buffer, source, COMMAND_FACTORY );
    }

    /**
     * Adapts a store-command visit into the corresponding update/delete callback
     * on the outer {@link Visitor}; a record not in use is treated as a delete.
     */
    private static class CommandVisitor implements CommandRecordVisitor
    {
        private final int localId;
        private final Visitor visitor;

        public CommandVisitor( int localId, Visitor visitor )
        {
            this.localId = localId;
            this.visitor = visitor;
        }

        @Override
        public void visitNode( NodeRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeleteNode( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdateNode( localId, record );
            }
        }

        @Override
        public void visitRelationship( RelationshipRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeleteRelationship( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdateRelationship( localId, record );
            }
        }

        @Override
        public void visitProperty( PropertyRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeleteProperty( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdateProperty( localId, record );
            }
        }

        @Override
        public void visitRelationshipTypeToken( RelationshipTypeTokenRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeleteRelationshipTypeToken( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdateRelationshipTypeToken( localId, record );
            }
        }

        @Override
        public void visitLabelToken( LabelTokenRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeleteLabelToken( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdateLabelToken( localId, record );
            }
        }

        @Override
        public void visitPropertyKeyToken( PropertyKeyTokenRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeletePropertyKeyToken( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdatePropertyKeyToken( localId, record );
            }
        }

        @Override
        public void visitNeoStore( NeoStoreRecord record )
        {
            if ( !record.inUse() )
            {
                visitor.visitDeleteNeoStore( localId, record.getId() );
            }
            else
            {
                visitor.visitUpdateNeoStore( localId, record );
            }
        }

        @Override
        public void visitSchemaRule( Collection<DynamicRecord> records )
        {
            // Delete-vs-update is decided by the first record's in-use flag.
            if ( ! records.isEmpty() )
            {
                DynamicRecord first = records.iterator().next();
                if ( !first.inUse() )
                {
                    visitor.visitDeleteSchemaRule( localId, records, first.getId() );
                }
                else
                {
                    visitor.visitUpdateSchemaRule( localId, records );
                }
            }
        }
    }
}
0true
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_TransactionReader.java
44
/**
 * Positions at which an identifier occurrence can appear; each constant records
 * whether that occurrence is a reference to some other declaration.
 */
public enum OccurrenceLocation {

    // Non-reference occurrences: structural positions in declarations/expressions.
    EXISTS(false),
    NONEMPTY(false),
    IS(false),
    EXTENDS(false),
    SATISFIES(false),
    CLASS_ALIAS(false),
    OF(false),
    UPPER_BOUND(false),
    TYPE_ALIAS(false),
    CASE(false),
    CATCH(false),
    IMPORT(false),
    EXPRESSION(false),
    PARAMETER_LIST(false),
    TYPE_PARAMETER_LIST(false),
    TYPE_ARGUMENT_LIST(false),
    META(false),

    // Reference occurrences: the token names another declaration.
    PACKAGE_REF(true),
    MODULE_REF(true),
    INTERFACE_REF(true),
    CLASS_REF(true),
    ALIAS_REF(true),
    TYPE_PARAMETER_REF(true),
    VALUE_REF(true),
    FUNCTION_REF(true),

    // Documentation link; flagged as a non-reference here.
    DOCLINK(false);

    /** True when this occurrence denotes a reference to another declaration. */
    public final boolean reference;

    OccurrenceLocation(boolean isReference) {
        this.reference = isReference;
    }
}
0true
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_OccurrenceLocation.java
4,896
/**
 * Handles {@code GET /_cat/recovery[/{index}]}: fetches the cluster state and the
 * recovery-enabled index status, then renders a table of recovering replica shards.
 */
public class RestRecoveryAction extends AbstractCatAction {

    @Inject
    protected RestRecoveryAction(Settings settings, Client client, RestController restController) {
        super(settings, client);
        restController.registerHandler(GET, "/_cat/recovery", this);
        restController.registerHandler(GET, "/_cat/recovery/{index}", this);
    }

    @Override
    void documentation(StringBuilder sb) {
        sb.append("/_cat/recovery\n");
        sb.append("/_cat/recovery/{index}\n");
    }

    @Override
    public void doRequest(final RestRequest request, final RestChannel channel) {
        final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));

        // Only node information is needed from the cluster state (for host/ip/name columns).
        final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
        clusterStateRequest.clear().nodes(true);
        clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
        clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));

        client.admin().cluster().state(clusterStateRequest, new ActionListener<ClusterStateResponse>() {
            @Override
            public void onResponse(final ClusterStateResponse clusterStateResponse) {
                // Second hop: ask for index status including recovery details.
                IndicesStatusRequest indicesStatusRequest = new IndicesStatusRequest(indices);
                indicesStatusRequest.recovery(true);
                indicesStatusRequest.operationThreading(BroadcastOperationThreading.SINGLE_THREAD);
                client.admin().indices().status(indicesStatusRequest, new ActionListener<IndicesStatusResponse>() {
                    @Override
                    public void onResponse(IndicesStatusResponse indicesStatusResponse) {
                        Map<String, Long> primarySizes = new HashMap<String, Long>();
                        Set<ShardStatus> replicas = new HashSet<ShardStatus>();

                        // Loop through all the shards in the index status, keeping
                        // track of the primary shard size with a Map and the
                        // recovering shards in a Set of ShardStatus objects
                        for (ShardStatus shardStatus : indicesStatusResponse.getShards()) {
                            if (shardStatus.getShardRouting().primary()) {
                                primarySizes.put(shardStatus.getShardRouting().getIndex() + shardStatus.getShardRouting().getId(),
                                        shardStatus.getStoreSize().bytes());
                            } else if (shardStatus.getState() == IndexShardState.RECOVERING) {
                                replicas.add(shardStatus);
                            }
                        }

                        try {
                            channel.sendResponse(RestTable.buildResponse(buildRecoveryTable(request, clusterStateResponse, primarySizes, replicas), request, channel));
                        } catch (Throwable e) {
                            // Rendering failed: fall back to an error response; log if even that fails.
                            try {
                                channel.sendResponse(new XContentThrowableRestResponse(request, e));
                            } catch (IOException e2) {
                                logger.error("Unable to send recovery status response", e2);
                            }
                        }
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        try {
                            channel.sendResponse(new XContentThrowableRestResponse(request, e));
                        } catch (IOException e1) {
                            logger.error("Failed to send failure response", e1);
                        }
                    }
                });
            }

            @Override
            public void onFailure(Throwable e) {
                try {
                    channel.sendResponse(new XContentThrowableRestResponse(request, e));
                } catch (IOException e1) {
                    logger.error("Failed to send failure response", e1);
                }
            }
        });
    }

    @Override
    Table getTableWithHeader(RestRequest request) {
        Table t = new Table();
        t.startHeaders().addCell("index", "alias:i,idx;desc:index name")
                .addCell("shard", "alias:s,sh;desc:shard name")
                .addCell("target", "alias:t;text-align:right;desc:bytes of source shard")
                .addCell("recovered", "alias:r;text-align:right;desc:bytes recovered so far")
                .addCell("percent", "alias:per,ratio;text-align:right;desc:percent recovered so far")
                .addCell("host", "alias:h;desc:node host where source shard lives")
                .addCell("ip", "desc:node ip where source shard lives")
                .addCell("node", "alias:n;desc:node name where source shard lives")
                .endHeaders();
        return t;
    }

    /**
     * buildRecoveryTable will build a table of recovery information suitable
     * for displaying at the command line.
     *
     * @param request the incoming REST request (controls table formatting)
     * @param state Current cluster state.
     * @param primarySizes A Map of {@code index + shardId} strings to store size for all primary shards.
     * @param recoveringReplicas A Set of {@link org.elasticsearch.action.admin.indices.status.ShardStatus} objects for each recovering replica to be displayed.
     * @return A table containing index, shardId, node, target size, recovered size and percentage for each recovering replica
     */
    public Table buildRecoveryTable(RestRequest request, ClusterStateResponse state, Map<String, Long> primarySizes, Set<ShardStatus> recoveringReplicas) {
        Table t = getTableWithHeader(request);
        for (ShardStatus status : recoveringReplicas) {
            DiscoveryNode node = state.getState().nodes().get(status.getShardRouting().currentNodeId());

            String index = status.getShardRouting().getIndex();
            int id = status.getShardId();
            long replicaSize = status.getStoreSize().bytes();
            // May be null when no primary size was recorded for this index+shard key.
            Long primarySize = primarySizes.get(index + id);
            t.startRow();
            t.addCell(index);
            t.addCell(id);
            t.addCell(primarySize);
            t.addCell(replicaSize);
            t.addCell(primarySize == null ? null : String.format(Locale.ROOT, "%1.1f%%", 100.0 * (float) replicaSize / primarySize));
            t.addCell(node == null ? null : node.getHostName());
            t.addCell(node == null ? null : node.getHostAddress());
            t.addCell(node == null ? null : node.name());
            t.endRow();
        }
        return t;
    }
}
1no label
src_main_java_org_elasticsearch_rest_action_cat_RestRecoveryAction.java
279
/**
 * Runs the Thrift store test suite against an SSL-configured embedded Cassandra.
 */
@Category({ CassandraSSLTests.class })
public class ThriftSSLStoreTest extends ThriftStoreTest {

    /** Boot a clean embedded Cassandra once for the whole class. */
    @BeforeClass
    public static void startCassandra() {
        CassandraStorageSetup.startCleanEmbedded();
    }

    /** Supplies the SSL variant of the Thrift storage configuration. */
    @Override
    public ModifiableConfiguration getBaseStorageConfiguration() {
        final String keyspaceName = this.getClass().getSimpleName();
        return CassandraStorageSetup.getCassandraThriftSSLConfiguration(keyspaceName);
    }
}
0true
titan-cassandra_src_test_java_com_thinkaurelius_titan_diskstorage_cassandra_thrift_ThriftSSLStoreTest.java
3,098
/**
 * Callback that receives a Lucene index-commit snapshot together with the
 * matching translog snapshot and produces a result of type {@code T}.
 *
 * @param <T> result type produced by the handler
 */
static interface SnapshotHandler<T> {
    T snapshot(SnapshotIndexCommit snapshotIndexCommit, Translog.Snapshot translogSnapshot) throws EngineException;
}
0true
src_main_java_org_elasticsearch_index_engine_Engine.java
662
// Registers the factory slot for COLLECTION_GET_ALL: deserialization obtains a
// fresh, empty CollectionGetAllOperation (presumably populated afterwards from
// the wire format — confirm against the hook's readData flow).
constructors[COLLECTION_GET_ALL] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
    public IdentifiedDataSerializable createNew(Integer arg) {
        return new CollectionGetAllOperation();
    }
};
0true
hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java
1,471
/**
 * SQL graph function {@code in()}: vertex-to-vertex traversal along incoming
 * edges, optionally restricted to the given edge labels.
 */
public class OSQLFunctionIn extends OSQLFunctionMove {
    public static final String NAME = "in";

    public OSQLFunctionIn() {
        // Accepts from zero up to an unbounded number of label arguments.
        super(NAME, 0, -1);
    }

    @Override
    protected Object move(final OrientBaseGraph graph, final OIdentifiable iRecord, final String[] iLabels) {
        final Direction direction = Direction.IN;
        return v2v(graph, iRecord, direction, iLabels);
    }
}
1no label
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_functions_OSQLFunctionIn.java
362
/**
 * Action descriptor for unregistering a snapshot repository.
 * Singleton — use {@link #INSTANCE}; the constructor is private.
 */
public class DeleteRepositoryAction extends ClusterAction<DeleteRepositoryRequest, DeleteRepositoryResponse, DeleteRepositoryRequestBuilder> {

    public static final DeleteRepositoryAction INSTANCE = new DeleteRepositoryAction();
    public static final String NAME = "cluster/repository/delete";

    private DeleteRepositoryAction() {
        super(NAME);
    }

    @Override
    public DeleteRepositoryRequestBuilder newRequestBuilder(ClusterAdminClient client) {
        return new DeleteRepositoryRequestBuilder(client);
    }

    @Override
    public DeleteRepositoryResponse newResponse() {
        return new DeleteRepositoryResponse();
    }
}
0true
src_main_java_org_elasticsearch_action_admin_cluster_repositories_delete_DeleteRepositoryAction.java
3,451
public class LocalIndexShardGateway extends AbstractIndexShardComponent implements IndexShardGateway { private final ThreadPool threadPool; private final InternalIndexShard indexShard; private final RecoveryStatus recoveryStatus = new RecoveryStatus(); private volatile ScheduledFuture flushScheduler; private final TimeValue syncInterval; @Inject public LocalIndexShardGateway(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool, IndexShard indexShard) { super(shardId, indexSettings); this.threadPool = threadPool; this.indexShard = (InternalIndexShard) indexShard; syncInterval = componentSettings.getAsTime("sync", TimeValue.timeValueSeconds(5)); if (syncInterval.millis() > 0) { this.indexShard.translog().syncOnEachOperation(false); flushScheduler = threadPool.schedule(syncInterval, ThreadPool.Names.SAME, new Sync()); } else if (syncInterval.millis() == 0) { flushScheduler = null; this.indexShard.translog().syncOnEachOperation(true); } else { flushScheduler = null; } } @Override public String toString() { return "local"; } @Override public RecoveryStatus recoveryStatus() { return recoveryStatus; } @Override public void recover(boolean indexShouldExists, RecoveryStatus recoveryStatus) throws IndexShardGatewayRecoveryException { recoveryStatus.index().startTime(System.currentTimeMillis()); recoveryStatus.updateStage(RecoveryStatus.Stage.INDEX); long version = -1; long translogId = -1; try { SegmentInfos si = null; try { si = Lucene.readSegmentInfos(indexShard.store().directory()); } catch (Throwable e) { String files = "_unknown_"; try { files = Arrays.toString(indexShard.store().directory().listAll()); } catch (Throwable e1) { files += " (failure=" + ExceptionsHelper.detailedMessage(e1) + ")"; } if (indexShouldExists && indexShard.store().indexStore().persistent()) { throw new IndexShardGatewayRecoveryException(shardId(), "shard allocated for local recovery (post api), should exist, but doesn't, current files: " + files, e); } } if (si != null) 
{ if (indexShouldExists) { version = si.getVersion(); if (si.getUserData().containsKey(Translog.TRANSLOG_ID_KEY)) { translogId = Long.parseLong(si.getUserData().get(Translog.TRANSLOG_ID_KEY)); } else { translogId = version; } logger.trace("using existing shard data, translog id [{}]", translogId); } else { // it exists on the directory, but shouldn't exist on the FS, its a leftover (possibly dangling) // its a "new index create" API, we have to do something, so better to clean it than use same data logger.trace("cleaning existing shard, shouldn't exists"); IndexWriter writer = new IndexWriter(indexShard.store().directory(), new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER).setOpenMode(IndexWriterConfig.OpenMode.CREATE)); writer.close(); } } } catch (Throwable e) { throw new IndexShardGatewayRecoveryException(shardId(), "failed to fetch index version after copying it over", e); } recoveryStatus.index().updateVersion(version); recoveryStatus.index().time(System.currentTimeMillis() - recoveryStatus.index().startTime()); // since we recover from local, just fill the files and size try { int numberOfFiles = 0; long totalSizeInBytes = 0; for (String name : indexShard.store().directory().listAll()) { numberOfFiles++; totalSizeInBytes += indexShard.store().directory().fileLength(name); } recoveryStatus.index().files(numberOfFiles, totalSizeInBytes, numberOfFiles, totalSizeInBytes); } catch (Exception e) { // ignore } recoveryStatus.start().startTime(System.currentTimeMillis()); recoveryStatus.updateStage(RecoveryStatus.Stage.START); if (translogId == -1) { // no translog files, bail indexShard.postRecovery("post recovery from gateway, no translog"); // no index, just start the shard and bail recoveryStatus.start().time(System.currentTimeMillis() - recoveryStatus.start().startTime()); recoveryStatus.start().checkIndexTime(indexShard.checkIndexTook()); return; } // move an existing translog, if exists, to "recovering" state, and start reading from it FsTranslog 
translog = (FsTranslog) indexShard.translog(); String translogName = "translog-" + translogId; String recoverTranslogName = translogName + ".recovering"; File recoveringTranslogFile = null; for (File translogLocation : translog.locations()) { File tmpRecoveringFile = new File(translogLocation, recoverTranslogName); if (!tmpRecoveringFile.exists()) { File tmpTranslogFile = new File(translogLocation, translogName); if (tmpTranslogFile.exists()) { for (int i = 0; i < 3; i++) { if (tmpTranslogFile.renameTo(tmpRecoveringFile)) { recoveringTranslogFile = tmpRecoveringFile; break; } } } } else { recoveringTranslogFile = tmpRecoveringFile; break; } } if (recoveringTranslogFile == null || !recoveringTranslogFile.exists()) { // no translog to recovery from, start and bail // no translog files, bail indexShard.postRecovery("post recovery from gateway, no translog"); // no index, just start the shard and bail recoveryStatus.start().time(System.currentTimeMillis() - recoveryStatus.start().startTime()); recoveryStatus.start().checkIndexTime(indexShard.checkIndexTook()); return; } // recover from the translog file indexShard.performRecoveryPrepareForTranslog(); recoveryStatus.start().time(System.currentTimeMillis() - recoveryStatus.start().startTime()); recoveryStatus.start().checkIndexTime(indexShard.checkIndexTook()); recoveryStatus.translog().startTime(System.currentTimeMillis()); recoveryStatus.updateStage(RecoveryStatus.Stage.TRANSLOG); FileInputStream fs = null; try { fs = new FileInputStream(recoveringTranslogFile); InputStreamStreamInput si = new InputStreamStreamInput(fs); while (true) { Translog.Operation operation; try { int opSize = si.readInt(); operation = TranslogStreams.readTranslogOperation(si); } catch (EOFException e) { // ignore, not properly written the last op break; } catch (IOException e) { // ignore, not properly written last op break; } try { indexShard.performRecoveryOperation(operation); recoveryStatus.translog().addTranslogOperations(1); } catch 
(ElasticsearchException e) { if (e.status() == RestStatus.BAD_REQUEST) { // mainly for MapperParsingException and Failure to detect xcontent logger.info("ignoring recovery of a corrupt translog entry", e); } else { throw e; } } } } catch (Throwable e) { // we failed to recovery, make sure to delete the translog file (and keep the recovering one) indexShard.translog().closeWithDelete(); throw new IndexShardGatewayRecoveryException(shardId, "failed to recover shard", e); } finally { try { fs.close(); } catch (IOException e) { // ignore } } indexShard.performRecoveryFinalization(true); recoveringTranslogFile.delete(); recoveryStatus.translog().time(System.currentTimeMillis() - recoveryStatus.translog().startTime()); } @Override public String type() { return "local"; } @Override public SnapshotStatus snapshot(Snapshot snapshot) { return null; } @Override public SnapshotStatus lastSnapshotStatus() { return null; } @Override public SnapshotStatus currentSnapshotStatus() { return null; } @Override public boolean requiresSnapshot() { return false; } @Override public boolean requiresSnapshotScheduling() { return false; } @Override public void close() { if (flushScheduler != null) { flushScheduler.cancel(false); } } @Override public SnapshotLock obtainSnapshotLock() throws Exception { return NO_SNAPSHOT_LOCK; } class Sync implements Runnable { @Override public void run() { // don't re-schedule if its closed..., we are done if (indexShard.state() == IndexShardState.CLOSED) { return; } if (indexShard.state() == IndexShardState.STARTED && indexShard.translog().syncNeeded()) { threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(new Runnable() { @Override public void run() { try { indexShard.translog().sync(); } catch (Exception e) { if (indexShard.state() == IndexShardState.STARTED) { logger.warn("failed to sync translog", e); } } if (indexShard.state() != IndexShardState.CLOSED) { flushScheduler = threadPool.schedule(syncInterval, ThreadPool.Names.SAME, Sync.this); } } }); } 
else { flushScheduler = threadPool.schedule(syncInterval, ThreadPool.Names.SAME, Sync.this); } } } }
1no label
src_main_java_org_elasticsearch_index_gateway_local_LocalIndexShardGateway.java
535
static enum FrameworkType { PERSISTENCE, GENERAL, LOGGING, UI, XML, UTILITY, SCHEDULER, CACHE, RULES, ECOMMERCE, OTHER }
0true
common_src_main_java_org_broadleafcommerce_common_util_PomEvaluator.java
1,907
static class Person implements Serializable { String name; int age; Person() { } Person(String name, int age) { this.name = name; this.age = age; } }
0true
hazelcast_src_test_java_com_hazelcast_map_QueryListenerTest.java
2,555
public class DiscoveryService extends AbstractLifecycleComponent<DiscoveryService> { private final TimeValue initialStateTimeout; private final Discovery discovery; private volatile boolean initialStateReceived; @Inject public DiscoveryService(Settings settings, Discovery discovery) { super(settings); this.discovery = discovery; this.initialStateTimeout = componentSettings.getAsTime("initial_state_timeout", TimeValue.timeValueSeconds(30)); } @Override protected void doStart() throws ElasticsearchException { final CountDownLatch latch = new CountDownLatch(1); InitialStateDiscoveryListener listener = new InitialStateDiscoveryListener() { @Override public void initialStateProcessed() { latch.countDown(); } }; discovery.addListener(listener); try { discovery.start(); if (initialStateTimeout.millis() > 0) { try { logger.trace("waiting for {} for the initial state to be set by the discovery", initialStateTimeout); if (latch.await(initialStateTimeout.millis(), TimeUnit.MILLISECONDS)) { logger.trace("initial state set from discovery"); initialStateReceived = true; } else { initialStateReceived = false; logger.warn("waited for {} and no initial state was set by the discovery", initialStateTimeout); } } catch (InterruptedException e) { // ignore } } } finally { discovery.removeListener(listener); } logger.info(discovery.nodeDescription()); } @Override protected void doStop() throws ElasticsearchException { discovery.stop(); } @Override protected void doClose() throws ElasticsearchException { discovery.close(); } public DiscoveryNode localNode() { return discovery.localNode(); } /** * Returns <tt>true</tt> if the initial state was received within the timeout waiting for it * on {@link #doStart()}. */ public boolean initialStateReceived() { return initialStateReceived; } public String nodeDescription() { return discovery.nodeDescription(); } /** * Publish all the changes to the cluster from the master (can be called just by the master). 
The publish * process should not publish this state to the master as well! (the master is sending it...). * <p/> * The {@link org.elasticsearch.discovery.Discovery.AckListener} allows to acknowledge the publish * event based on the response gotten from all nodes */ public void publish(ClusterState clusterState, Discovery.AckListener ackListener) { if (lifecycle.started()) { discovery.publish(clusterState, ackListener); } } public static String generateNodeId(Settings settings) { String seed = settings.get("discovery.id.seed"); if (seed != null) { Strings.randomBase64UUID(new Random(Long.parseLong(seed))); } return Strings.randomBase64UUID(); } }
1no label
src_main_java_org_elasticsearch_discovery_DiscoveryService.java
1,512
public class OObjectEnumLazyMap<TYPE extends Enum> extends HashMap<Object, Object> implements Serializable, OObjectLazyEnumSerializer<Map<Object, Object>> { private static final long serialVersionUID = -8606432090996808181L; private final ORecord<?> sourceRecord; private final Map<Object, Object> underlying; private boolean converted = false; private final Class<Enum> enumClass; public OObjectEnumLazyMap(final Class<Enum> iEnumClass, final ORecord<?> iSourceRecord, final Map<Object, Object> iRecordMap) { super(); this.sourceRecord = iSourceRecord; this.underlying = iRecordMap; converted = iRecordMap.isEmpty(); this.enumClass = iEnumClass; } public OObjectEnumLazyMap(final Class<Enum> iEnumClass, final ORecord<?> iSourceRecord, final Map<Object, Object> iRecordMap, final Map<Object, Object> iSourceMap) { this(iEnumClass, iSourceRecord, iRecordMap); putAll(iSourceMap); } @Override public int size() { return underlying.size(); } @Override public boolean isEmpty() { return underlying.isEmpty(); } @Override public boolean containsKey(final Object k) { return underlying.containsKey(k); } @Override public boolean containsValue(final Object o) { boolean underlyingContains = underlying.containsValue(o.toString()); return underlyingContains || super.containsValue(o); } @Override public Object put(final Object iKey, final Object e) { setDirty(); underlying.put(iKey, ((TYPE) e).name()); return super.put(iKey, e); } @Override public Object remove(final Object iKey) { underlying.remove((String) iKey); setDirty(); return super.remove(iKey); } @Override public void clear() { converted = true; underlying.clear(); super.clear(); setDirty(); } public boolean isConverted() { return converted; } @Override public String toString() { return underlying.toString(); } @Override public Set<java.util.Map.Entry<Object, Object>> entrySet() { convertAll(); return super.entrySet(); } @Override public Object get(final Object iKey) { convert(iKey); return super.get(iKey); } @Override public 
Set<Object> keySet() { convertAll(); return underlying.keySet(); } @Override public void putAll(final Map<? extends Object, ? extends Object> iMap) { for (java.util.Map.Entry<? extends Object, ? extends Object> e : iMap.entrySet()) { put(e.getKey(), e.getValue()); } } @Override public Collection<Object> values() { convertAll(); return super.values(); } public void setDirty() { if (sourceRecord != null) sourceRecord.setDirty(); } /** * Assure that the requested key is converted. */ private void convert(final Object iKey) { if (converted) return; if (super.containsKey(iKey)) return; Object o = underlying.get(String.valueOf(iKey)); if (o instanceof Number) super.put(iKey, enumClass.getEnumConstants()[((Number) o).intValue()]); else super.put(iKey, Enum.valueOf(enumClass, o.toString())); } public void detach() { convertAll(); } public void detach(boolean nonProxiedInstance) { convertAll(); } public void detachAll(boolean nonProxiedInstance) { convertAll(); } @Override public Map<Object, Object> getNonOrientInstance() { Map<Object, Object> map = new HashMap<Object, Object>(); map.putAll((Map<Object, Object>) this); return map; } @Override public Object getUnderlying() { return underlying; } /** * Converts all the items */ protected void convertAll() { if (converted) return; for (java.util.Map.Entry<Object, Object> e : underlying.entrySet()) { if (e.getValue() instanceof Number) super.put(e.getKey(), enumClass.getEnumConstants()[((Number) e.getValue()).intValue()]); else super.put(e.getKey(), Enum.valueOf(enumClass, e.getValue().toString())); } converted = true; } }
0true
object_src_main_java_com_orientechnologies_orient_object_enumerations_OObjectEnumLazyMap.java
1,347
@Repository("blStoreDao") public class StoreDaoImpl implements StoreDao { @PersistenceContext(unitName = "blPU") private EntityManager em; @SuppressWarnings("unchecked") public Store readStoreByStoreCode(final String storeCode) { Query query = em.createNamedQuery("FIND_STORE_BY_STORE_CODE"); query.setParameter("abbreviation", storeCode.toUpperCase()); //TODO use the property injection for "org.hibernate.cacheable" like the other daos query.setHint("org.hibernate.cacheable", true); List result = query.getResultList(); return (result.size() > 0) ? (Store) result.get(0) : null; } @SuppressWarnings("unchecked") public List<Store> readAllStores() { Query query = em.createNamedQuery("BC_FIND_ALL_STORES"); query.setHint("org.hibernate.cacheable", true); List results = query.getResultList(); return results; } }
0true
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_store_dao_StoreDaoImpl.java
3,372
static class LongValues extends DenseLongValues { private final PackedInts.Mutable values; private final long minValue; LongValues(PackedInts.Mutable values, long minValue) { super(false); this.values = values; this.minValue = minValue; } @Override public long nextValue() { return minValue + values.get(docId); } }
0true
src_main_java_org_elasticsearch_index_fielddata_plain_PackedArrayAtomicFieldData.java
247
public static class XBuilder { private Builder<Pair<Long, BytesRef>> builder; private int maxSurfaceFormsPerAnalyzedForm; private IntsRef scratchInts = new IntsRef(); private final PairOutputs<Long, BytesRef> outputs; private boolean hasPayloads; private BytesRef analyzed = new BytesRef(); private final SurfaceFormAndPayload[] surfaceFormsAndPayload; private int count; private ObjectIntOpenHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f); private int payloadSep; public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads, int payloadSep) { this.payloadSep = payloadSep; this.outputs = new PairOutputs<Long, BytesRef>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()); this.builder = new Builder<Pair<Long, BytesRef>>(FST.INPUT_TYPE.BYTE1, outputs); this.maxSurfaceFormsPerAnalyzedForm = maxSurfaceFormsPerAnalyzedForm; this.hasPayloads = hasPayloads; surfaceFormsAndPayload = new SurfaceFormAndPayload[maxSurfaceFormsPerAnalyzedForm]; } public void startTerm(BytesRef analyzed) { this.analyzed.copyBytes(analyzed); this.analyzed.grow(analyzed.length+2); } private final static class SurfaceFormAndPayload implements Comparable<SurfaceFormAndPayload> { BytesRef payload; long weight; public SurfaceFormAndPayload(BytesRef payload, long cost) { super(); this.payload = payload; this.weight = cost; } @Override public int compareTo(SurfaceFormAndPayload o) { int res = compare(weight, o.weight); if (res == 0 ){ return payload.compareTo(o.payload); } return res; } public static int compare(long x, long y) { return (x < y) ? -1 : ((x == y) ? 0 : 1); } } public void addSurface(BytesRef surface, BytesRef payload, long cost) throws IOException { int surfaceIndex = -1; long encodedWeight = cost == -1 ? cost : encodeWeight(cost); /* * we need to check if we have seen this surface form, if so only use the * the surface form with the highest weight and drop the rest no matter if * the payload differs. 
*/ if (count >= maxSurfaceFormsPerAnalyzedForm) { // More than maxSurfaceFormsPerAnalyzedForm // dups: skip the rest: return; } BytesRef surfaceCopy; if (count > 0 && seenSurfaceForms.containsKey(surface)) { surfaceIndex = seenSurfaceForms.lget(); SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex]; if (encodedWeight >= surfaceFormAndPayload.weight) { return; } surfaceCopy = BytesRef.deepCopyOf(surface); } else { surfaceIndex = count++; surfaceCopy = BytesRef.deepCopyOf(surface); seenSurfaceForms.put(surfaceCopy, surfaceIndex); } BytesRef payloadRef; if (!hasPayloads) { payloadRef = surfaceCopy; } else { int len = surface.length + 1 + payload.length; final BytesRef br = new BytesRef(len); System.arraycopy(surface.bytes, surface.offset, br.bytes, 0, surface.length); br.bytes[surface.length] = (byte) payloadSep; System.arraycopy(payload.bytes, payload.offset, br.bytes, surface.length + 1, payload.length); br.length = len; payloadRef = br; } if (surfaceFormsAndPayload[surfaceIndex] == null) { surfaceFormsAndPayload[surfaceIndex] = new SurfaceFormAndPayload(payloadRef, encodedWeight); } else { surfaceFormsAndPayload[surfaceIndex].payload = payloadRef; surfaceFormsAndPayload[surfaceIndex].weight = encodedWeight; } } public void finishTerm(long defaultWeight) throws IOException { ArrayUtil.timSort(surfaceFormsAndPayload, 0, count); int deduplicator = 0; analyzed.bytes[analyzed.offset + analyzed.length] = 0; analyzed.length += 2; for (int i = 0; i < count; i++) { analyzed.bytes[analyzed.offset + analyzed.length - 1 ] = (byte) deduplicator++; Util.toIntsRef(analyzed, scratchInts); SurfaceFormAndPayload candiate = surfaceFormsAndPayload[i]; long cost = candiate.weight == -1 ? 
encodeWeight(Math.min(Integer.MAX_VALUE, defaultWeight)) : candiate.weight; builder.add(scratchInts, outputs.newPair(cost, candiate.payload)); } seenSurfaceForms.clear(); count = 0; } public FST<Pair<Long, BytesRef>> build() throws IOException { return builder.finish(); } public boolean hasPayloads() { return hasPayloads; } public int maxSurfaceFormsPerAnalyzedForm() { return maxSurfaceFormsPerAnalyzedForm; } }
0true
src_main_java_org_apache_lucene_search_suggest_analyzing_XAnalyzingSuggester.java
1,019
transportService.sendRequest(node, transportAction, request, transportOptions(), new BaseTransportResponseHandler<Response>() { @Override public Response newInstance() { return newResponse(); } @Override public String executor() { return ThreadPool.Names.SAME; } @Override public void handleResponse(Response response) { listener.onResponse(response); } @Override public void handleException(TransportException exp) { // if we got disconnected from the node, or the node / shard is not in the right state (being closed) if (exp.unwrapCause() instanceof ConnectTransportException || exp.unwrapCause() instanceof NodeClosedException || retryOnFailure(exp)) { operationStarted.set(false); // we already marked it as started when we executed it (removed the listener) so pass false // to re-add to the cluster listener retry(false, null); } else { listener.onFailure(exp); } } });
0true
src_main_java_org_elasticsearch_action_support_single_instance_TransportInstanceSingleOperationAction.java
65
public class OSharedLockEntry<REQUESTER_TYPE> { /** The requester lock : generally {@link Thread} or {@link Runnable}. */ protected REQUESTER_TYPE requester; /** * Count shared locks held by this requester for the resource. * <p> * Used for reentrancy : when the same requester acquire a shared lock for the same resource in a nested code. */ protected int countSharedLocks; /** Next shared lock for the same resource by an other requester. */ protected OSharedLockEntry<REQUESTER_TYPE> nextSharedLock; protected OSharedLockEntry() { } public OSharedLockEntry(final REQUESTER_TYPE iRequester) { super(); requester = iRequester; countSharedLocks = 1; } }
0true
commons_src_main_java_com_orientechnologies_common_concur_lock_OSharedLockEntry.java
1,749
FACTOR() { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double longitudeDifference = targetLongitude - sourceLongitude; double a = Math.toRadians(90D - sourceLatitude); double c = Math.toRadians(90D - targetLatitude); return (Math.cos(a) * Math.cos(c)) + (Math.sin(a) * Math.sin(c) * Math.cos(Math.toRadians(longitudeDifference))); } @Override public double normalize(double distance, DistanceUnit unit) { return Math.cos(distance / unit.getEarthRadius()); } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new FactorFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } },
0true
src_main_java_org_elasticsearch_common_geo_GeoDistance.java
343
public class NodeValueMerge extends BaseHandler { protected String delimiter = " "; protected String regex = "[\\s\\n\\r]+"; public Node[] merge(List<Node> nodeList1, List<Node> nodeList2, List<Node> exhaustedNodes) { if (CollectionUtils.isEmpty(nodeList1) || CollectionUtils.isEmpty(nodeList2)) { return null; } Node node1 = nodeList1.get(0); Node node2 = nodeList2.get(0); String[] items1 = node1.getNodeValue().split(getRegEx()); String[] items2 = node2.getNodeValue().split(getRegEx()); Set<String> finalItems = new LinkedHashSet<String>(); for (String anItems1 : items1) { finalItems.add(anItems1.trim()); } for (String anItems2 : items2) { finalItems.add(anItems2.trim()); } StringBuilder sb = new StringBuilder(); Iterator<String> itr = finalItems.iterator(); while (itr.hasNext()) { sb.append(itr.next()); if (itr.hasNext()) { sb.append(getDelimiter()); } } node1.setNodeValue(sb.toString()); node2.setNodeValue(sb.toString()); Node[] response = new Node[nodeList2.size()]; for (int j=0;j<response.length;j++){ response[j] = nodeList2.get(j); } return response; } public String getDelimiter() { return delimiter; } public String getRegEx() { return regex; } }
0true
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_handlers_NodeValueMerge.java
1,644
public class RuntimeStateRequest implements ConsoleRequest { //todo: unused. private static final long LOCK_TIME_THRESHOLD = TimeUnit.SECONDS.toMillis(300); @Override public int getType() { return ConsoleRequestConstants.REQUEST_TYPE_CLUSTER_STATE; } private Collection<LockResource> collectLockState(HazelcastInstanceImpl instance) { LockService lockService = instance.node.nodeEngine.getService(LockService.SERVICE_NAME); return lockService.getAllLocks(); } @Override public void writeResponse(ManagementCenterService mcs, ObjectDataOutput dos) throws Exception { HazelcastInstanceImpl instance = mcs.getHazelcastInstance(); Node node = instance.node; ClusterServiceImpl cluster = node.getClusterService(); InternalPartitionService partitionService = node.partitionService; Collection<LockResource> lockedRecords = collectLockState(instance); Map<Address, Connection> connectionMap = node.connectionManager.getReadonlyConnectionMap(); ClusterRuntimeState clusterRuntimeState = new ClusterRuntimeState( cluster.getMembers(), partitionService.getPartitions(), partitionService.getActiveMigrations(), connectionMap, lockedRecords); clusterRuntimeState.writeData(dos); } @Override public Object readResponse(ObjectDataInput in) throws IOException { ClusterRuntimeState clusterRuntimeState = new ClusterRuntimeState(); clusterRuntimeState.readData(in); return clusterRuntimeState; } @Override public void writeData(ObjectDataOutput out) throws IOException { } @Override public void readData(ObjectDataInput in) throws IOException { } }
0true
hazelcast_src_main_java_com_hazelcast_management_request_RuntimeStateRequest.java
1,566
public class ClusterRebalanceAllocationDecider extends AllocationDecider { /** * An enum representation for the configured re-balance type. */ public static enum ClusterRebalanceType { /** * Re-balancing is allowed once a shard replication group is active */ ALWAYS, /** * Re-balancing is allowed only once all primary shards on all indices are active. */ INDICES_PRIMARIES_ACTIVE, /** * Re-balancing is allowed only once all shards on all indices are active. */ INDICES_ALL_ACTIVE } private final ClusterRebalanceType type; @Inject public ClusterRebalanceAllocationDecider(Settings settings) { super(settings); String allowRebalance = settings.get("cluster.routing.allocation.allow_rebalance", "indices_all_active"); if ("always".equalsIgnoreCase(allowRebalance)) { type = ClusterRebalanceType.ALWAYS; } else if ("indices_primaries_active".equalsIgnoreCase(allowRebalance) || "indicesPrimariesActive".equalsIgnoreCase(allowRebalance)) { type = ClusterRebalanceType.INDICES_PRIMARIES_ACTIVE; } else if ("indices_all_active".equalsIgnoreCase(allowRebalance) || "indicesAllActive".equalsIgnoreCase(allowRebalance)) { type = ClusterRebalanceType.INDICES_ALL_ACTIVE; } else { logger.warn("[cluster.routing.allocation.allow_rebalance] has a wrong value {}, defaulting to 'indices_all_active'", allowRebalance); type = ClusterRebalanceType.INDICES_ALL_ACTIVE; } logger.debug("using [cluster.routing.allocation.allow_rebalance] with [{}]", type.toString().toLowerCase(Locale.ROOT)); } @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { if (type == ClusterRebalanceType.INDICES_PRIMARIES_ACTIVE) { // check if there are unassigned primaries. if ( allocation.routingNodes().hasUnassignedPrimaries() ) { return allocation.decision(Decision.NO, "cluster has unassigned primary shards"); } // check if there are initializing primaries that don't have a relocatingNodeId entry. 
if ( allocation.routingNodes().hasInactivePrimaries() ) { return allocation.decision(Decision.NO, "cluster has inactive primary shards"); } return allocation.decision(Decision.YES, "all primary shards are active"); } if (type == ClusterRebalanceType.INDICES_ALL_ACTIVE) { // check if there are unassigned shards. if ( allocation.routingNodes().hasUnassignedShards() ) { return allocation.decision(Decision.NO, "cluster has unassigned shards"); } // in case all indices are assigned, are there initializing shards which // are not relocating? if ( allocation.routingNodes().hasInactiveShards() ) { return allocation.decision(Decision.NO, "cluster has inactive shards"); } } // type == Type.ALWAYS return allocation.decision(Decision.YES, "all shards are active"); } }
0true
src_main_java_org_elasticsearch_cluster_routing_allocation_decider_ClusterRebalanceAllocationDecider.java
6,067
public class TermSuggestion extends Suggestion<TermSuggestion.Entry> { public static Comparator<Suggestion.Entry.Option> SCORE = new Score(); public static Comparator<Suggestion.Entry.Option> FREQUENCY = new Frequency(); // Same behaviour as comparators in suggest module, but for SuggestedWord // Highest score first, then highest freq first, then lowest term first public static class Score implements Comparator<Suggestion.Entry.Option> { @Override public int compare(Suggestion.Entry.Option first, Suggestion.Entry.Option second) { // first criteria: the distance int cmp = Float.compare(second.getScore(), first.getScore()); if (cmp != 0) { return cmp; } return FREQUENCY.compare(first, second); } } // Same behaviour as comparators in suggest module, but for SuggestedWord // Highest freq first, then highest score first, then lowest term first public static class Frequency implements Comparator<Suggestion.Entry.Option> { @Override public int compare(Suggestion.Entry.Option first, Suggestion.Entry.Option second) { // first criteria: the popularity int cmp = ((TermSuggestion.Entry.Option) second).getFreq() - ((TermSuggestion.Entry.Option) first).getFreq(); if (cmp != 0) { return cmp; } // second criteria (if first criteria is equal): the distance cmp = Float.compare(second.getScore(), first.getScore()); if (cmp != 0) { return cmp; } // third criteria: term text return first.getText().compareTo(second.getText()); } } public static final int TYPE = 1; private Sort sort; public TermSuggestion() { } public TermSuggestion(String name, int size, Sort sort) { super(name, size); this.sort = sort; } public int getType() { return TYPE; } @Override protected Comparator<Option> sortComparator() { switch (sort) { case SCORE: return SCORE; case FREQUENCY: return FREQUENCY; default: throw new ElasticsearchException("Could not resolve comparator for sort key: [" + sort + "]"); } } @Override protected void innerReadFrom(StreamInput in) throws IOException { super.innerReadFrom(in); sort = 
Sort.fromId(in.readByte()); } @Override public void innerWriteTo(StreamOutput out) throws IOException { super.innerWriteTo(out); out.writeByte(sort.id()); } protected Entry newEntry() { return new Entry(); } /** * Represents a part from the suggest text with suggested options. */ public static class Entry extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry<TermSuggestion.Entry.Option> { Entry(Text text, int offset, int length) { super(text, offset, length); } Entry() { } @Override protected Option newOption() { return new Option(); } /** * Contains the suggested text with its document frequency and score. */ public static class Option extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option { static class Fields { static final XContentBuilderString FREQ = new XContentBuilderString("freq"); } private int freq; protected Option(Text text, int freq, float score) { super(text, score); this.freq = freq; } @Override protected void mergeInto(Suggestion.Entry.Option otherOption) { super.mergeInto(otherOption); freq += ((Option) otherOption).freq; } protected Option() { super(); } public void setFreq(int freq) { this.freq = freq; } /** * @return How often this suggested text appears in the index. */ public int getFreq() { return freq; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); freq = in.readVInt(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(freq); } @Override protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { builder = super.innerToXContent(builder, params); builder.field(Fields.FREQ, freq); return builder; } } } }
1no label
src_main_java_org_elasticsearch_search_suggest_term_TermSuggestion.java
1,162
/**
 * A distributed, cluster-wide implementation of a count-down latch: a
 * synchronization aid that allows one or more threads to wait until a set of
 * operations being performed in other threads completes.
 */
public interface ICountDownLatch extends DistributedObject {

    /**
     * Causes the current thread to wait until the latch has counted down to
     * zero, an exception is thrown, or the specified waiting time elapses.
     * <p/>
     * <p>If the current count is zero then this method returns immediately
     * with the value {@code true}.
     * <p/>
     * <p>If the current count is greater than zero then the current
     * thread becomes disabled for thread scheduling purposes and lies
     * dormant until one of five things happens:
     * <ul>
     * <li>The count reaches zero due to invocations of the
     * {@link #countDown} method;
     * <li>This ICountDownLatch instance is destroyed;
     * <li>The countdown owner becomes disconnected;
     * <li>Some other thread {@linkplain Thread#interrupt interrupts}
     * the current thread; or
     * <li>The specified waiting time elapses.
     * </ul>
     * <p/>
     * <p>If the count reaches zero then the method returns with the
     * value {@code true}.
     * <p/>
     * <p>If the current thread:
     * <ul>
     * <li>has its interrupted status set on entry to this method; or
     * <li>is {@linkplain Thread#interrupt interrupted} while waiting,
     * </ul>
     * then {@link InterruptedException} is thrown and the current thread's
     * interrupted status is cleared.
     * <p>If the specified waiting time elapses then the value {@code false}
     * is returned. If the time is less than or equal to zero, the method
     * will not wait at all.
     *
     * @param timeout the maximum time to wait
     * @param unit    the time unit of the {@code timeout} argument
     * @return {@code true} if the count reached zero and {@code false}
     *         if the waiting time elapsed before the count reached zero
     * @throws InterruptedException  if the current thread is interrupted
     * @throws IllegalStateException if the hazelcast instance is shut down while waiting
     */
    public boolean await(long timeout, TimeUnit unit) throws InterruptedException;

    /**
     * Decrements the count of the latch, releasing all waiting threads if
     * the count reaches zero.
     * <p/>
     * If the current count is greater than zero then it is decremented.
     * If the new count is zero:
     * <ul>
     * <li>All waiting threads are re-enabled for thread scheduling purposes; and
     * <li>Countdown owner is set to {@code null}.
     * </ul>
     * <p/>
     * If the current count equals zero then nothing happens.
     */
    public void countDown();

    /**
     * Returns the current count.
     *
     * @return the current count
     */
    public int getCount();

    /**
     * Sets the count to the given value if the current count is zero.
     * <p/>If the count is not zero then this method does nothing and returns {@code false}.
     *
     * @param count the number of times {@link #countDown} must be invoked
     *              before threads can pass through {@link #await}
     * @return {@code true} if the new count was set or {@code false} if the current
     *         count is not zero
     * @throws IllegalArgumentException if {@code count} is negative
     */
    public boolean trySetCount(int count);
}
0true
hazelcast_src_main_java_com_hazelcast_core_ICountDownLatch.java