Unnamed: 0 (int64, 0–6.45k) | func (string, 29–253k chars) | target (class label, 2 classes) | project (string, 36–167 chars)
---|---|---|---|
2,278 | protected static final Recycler.C<byte[]> RECYCLER_C = new Recycler.C<byte[]>() {
@Override
public byte[] newInstance(int sizing) {
// the sizing hint is deliberately ignored; the test uses a fixed page size
return new byte[10];
}
@Override
public void clear(byte[] value) {
Arrays.fill(value, (byte) 0);
}
}; | 0true
| src_test_java_org_elasticsearch_common_recycler_AbstractRecyclerTests.java |
3,566 | public static class Fields {
// Mapping field names
public static final String ANALYZER = "analyzer";
public static final ParseField INDEX_ANALYZER = new ParseField("index_analyzer");
public static final ParseField SEARCH_ANALYZER = new ParseField("search_analyzer");
public static final ParseField PRESERVE_SEPARATORS = new ParseField("preserve_separators");
public static final ParseField PRESERVE_POSITION_INCREMENTS = new ParseField("preserve_position_increments");
public static final String PAYLOADS = "payloads";
public static final String TYPE = "type";
public static final ParseField MAX_INPUT_LENGTH = new ParseField("max_input_length", "max_input_len");
// Content field names
public static final String CONTENT_FIELD_NAME_INPUT = "input";
public static final String CONTENT_FIELD_NAME_OUTPUT = "output";
public static final String CONTENT_FIELD_NAME_PAYLOAD = "payload";
public static final String CONTENT_FIELD_NAME_WEIGHT = "weight";
} | 0true
| src_main_java_org_elasticsearch_index_mapper_core_CompletionFieldMapper.java |
989 | public class ORecordSerializerDocument2Binary implements ORecordSerializer {
public static final String NAME = "ORecordDocument2binary";
protected ORecordSchemaAware<?> newObject(ODatabaseRecord iDatabase, String iClassName) throws InstantiationException,
IllegalAccessException {
return new ODocument();
}
public ORecordInternal<?> fromStream(ODatabaseRecord iDatabase, byte[] iSource) {
// TODO: HANDLE FACTORIES
return fromStream(iSource, null, null);
}
public ORecordInternal<?> fromStream(byte[] iSource, ORecordInternal<?> iRecord, String[] iFields) {
ODocument record = (ODocument) iRecord;
if (iRecord == null)
record = new ODocument();
ByteArrayInputStream stream = null;
DataInputStream in = null;
try {
stream = new ByteArrayInputStream(iSource);
in = new DataInputStream(stream);
// UNMARSHALL ALL THE PROPERTIES
Object value;
int length;
byte[] buffer;
for (OProperty p : record.getSchemaClass().properties()) {
value = null;
switch (p.getType()) {
case BINARY:
length = in.readInt();
if (length >= 0) {
// != NULL
buffer = new byte[length];
in.readFully(buffer);
value = buffer;
}
break;
case BOOLEAN:
value = in.readBoolean();
break;
case DATE:
case DATETIME:
long date = in.readLong();
if (date > -1)
value = new Date(date);
break;
case DOUBLE:
value = in.readDouble();
break;
case EMBEDDED:
length = in.readInt();
if (length >= 0) {
// != NULL
buffer = new byte[length];
in.readFully(buffer);
value = new ODocument(p.getLinkedClass().getName()).fromStream(buffer);
}
break;
case EMBEDDEDLIST:
break;
case EMBEDDEDSET:
break;
case FLOAT:
value = in.readFloat();
break;
case INTEGER:
value = in.readInt();
break;
case LINK:
value = new ORecordId(in.readInt(), OClusterPositionFactory.INSTANCE.fromStream((InputStream) in));
break;
case LINKLIST:
break;
case LINKSET:
break;
case LONG:
value = in.readLong();
break;
case SHORT:
value = in.readShort();
break;
case STRING:
value = in.readUTF();
break;
}
record.field(p.getName(), value);
}
} catch (Exception e) {
OLogManager.instance().error(this, "Error on unmarshalling object in binary format: " + record.getIdentity(), e);
} finally {
try {
if (stream != null)
stream.close();
if (in != null)
in.close();
} catch (IOException e) {
// ignore: failing to close an in-memory stream is harmless
}
}
// return 'record' (not 'iRecord'), which holds the new document when iRecord was null
return record;
}
public byte[] toStream(final ORecordInternal<?> iRecord, boolean iOnlyDelta) {
ODocument record = (ODocument) iRecord;
ByteArrayOutputStream stream = null;
DataOutputStream out = null;
try {
stream = new ByteArrayOutputStream();
out = new DataOutputStream(stream);
// MARSHALL ALL THE PROPERTIES
Object value;
byte[] buffer;
for (OProperty p : record.getSchemaClass().properties()) {
value = record.field(p.getName());
switch (p.getType()) {
case BINARY:
if (value == null)
// NULL: WRITE -1 AS LENGTH
out.writeInt(-1);
else {
buffer = (byte[]) value;
out.writeInt(buffer.length);
out.write(buffer);
}
break;
case BOOLEAN:
out.writeBoolean(value != null);
if (value != null)
out.writeBoolean((Boolean) value);
break;
case DATE:
case DATETIME:
out.writeLong(value != null ? ((Date) value).getTime() : -1);
break;
case DOUBLE:
out.writeBoolean(value != null);
if (value != null)
out.writeDouble((Double) value);
break;
case EMBEDDED:
if (value == null)
// NULL: WRITE -1 AS LENGTH
out.writeInt(-1);
else {
buffer = ((ORecordInternal<?>) value).toStream();
out.writeInt(buffer.length);
out.write(buffer);
}
break;
case EMBEDDEDLIST:
break;
case EMBEDDEDSET:
break;
case FLOAT:
out.writeBoolean(value != null);
if (value != null)
out.writeFloat((Float) value);
break;
case INTEGER:
out.writeBoolean(value != null);
if (value != null)
out.writeInt((Integer) value);
break;
case LINK:
out.writeBoolean(value != null);
if (value != null) {
if (!(value instanceof ORecord<?>))
throw new ODatabaseException("Invalid property value in '" + p.getName() + "': found " + value.getClass()
+ " while it was expected a ORecord");
ORID rid = ((ORecord<?>) value).getIdentity();
out.writeInt(rid.getClusterId());
out.write(rid.getClusterPosition().toStream());
}
break;
case LINKLIST:
break;
case LINKSET:
break;
case LONG:
out.writeBoolean(value != null);
if (value != null)
out.writeLong((Long) value);
break;
case SHORT:
out.writeBoolean(value != null);
if (value != null)
out.writeShort((Short) value);
break;
case STRING:
out.writeBoolean(value != null);
if (value != null)
out.writeUTF((String) value);
break;
}
}
return stream.toByteArray();
} catch (Exception e) {
OLogManager.instance().error(this, "Error on marshalling object in binary format: " + iRecord.getIdentity(), e);
} finally {
try {
if (stream != null)
stream.close();
if (out != null)
out.close();
} catch (IOException e) {
// ignore: failing to close an in-memory stream is harmless
}
}
return null;
}
@Override
public String toString() {
return NAME;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_record_string_ORecordSerializerDocument2Binary.java |
1,999 | public static class BasicMapStoreFactory implements MapStoreFactory<String, String> {
@Override
public MapLoader<String, String> newMapStore(String mapName, final Properties properties) {
return new MapStore<String, String>() {
@Override
public void store(String key, String value) {
}
@Override
public void storeAll(Map map) {
}
@Override
public void delete(String key) {
}
@Override
public void deleteAll(Collection keys) {
}
@Override
public String load(String key) {
return properties.getProperty(key.toString());
}
@Override
public Map<String, String> loadAll(Collection<String> keys) {
Map<String, String> map = new HashMap<String, String>();
for (String key : keys) {
map.put(key, properties.getProperty(key));
}
return map;
}
@Override
public Set<String> loadAllKeys() {
return new HashSet<String>(properties.stringPropertyNames());
}
};
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_mapstore_MapStoreTest.java |
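As a point of reference, here is a minimal sketch of how such a factory might be wired into a map configuration. It assumes the Hazelcast 3.x MapStoreConfig API; the map name and property values are illustrative.
Properties props = new Properties();
props.setProperty("key1", "value1");
MapStoreConfig mapStoreConfig = new MapStoreConfig();
mapStoreConfig.setEnabled(true);
mapStoreConfig.setFactoryImplementation(new BasicMapStoreFactory());
mapStoreConfig.setProperties(props);
Config config = new Config();
config.getMapConfig("mapStoreBacked").setMapStoreConfig(mapStoreConfig);
HazelcastInstance hz = Hazelcast.newHazelcastInstance(config);
// a get() that misses in-memory falls through to MapLoader.load, which reads the properties
assertEquals("value1", hz.getMap("mapStoreBacked").get("key1"));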
1,470 | public class MutableShardRouting extends ImmutableShardRouting {
public MutableShardRouting(ShardRouting copy) {
super(copy);
}
public MutableShardRouting(ShardRouting copy, long version) {
super(copy);
this.version = version;
}
public MutableShardRouting(String index, int shardId, String currentNodeId, boolean primary, ShardRoutingState state, long version) {
super(index, shardId, currentNodeId, primary, state, version);
}
public MutableShardRouting(String index, int shardId, String currentNodeId,
String relocatingNodeId, boolean primary, ShardRoutingState state, long version) {
super(index, shardId, currentNodeId, relocatingNodeId, null, primary, state, version);
}
public MutableShardRouting(String index, int shardId, String currentNodeId,
String relocatingNodeId, RestoreSource restoreSource, boolean primary, ShardRoutingState state, long version) {
super(index, shardId, currentNodeId, relocatingNodeId, restoreSource, primary, state, version);
}
/**
* Assign this shard to a node.
*
* @param nodeId id of the node to assign this shard to
*/
void assignToNode(String nodeId) {
version++;
if (currentNodeId == null) {
assert state == ShardRoutingState.UNASSIGNED;
state = ShardRoutingState.INITIALIZING;
currentNodeId = nodeId;
relocatingNodeId = null;
} else if (state == ShardRoutingState.STARTED) {
state = ShardRoutingState.RELOCATING;
relocatingNodeId = nodeId;
} else if (state == ShardRoutingState.RELOCATING) {
assert nodeId.equals(relocatingNodeId);
}
}
/**
* Relocate the shard to another node.
*
* @param relocatingNodeId id of the node to relocate the shard
*/
void relocate(String relocatingNodeId) {
version++;
assert state == ShardRoutingState.STARTED;
state = ShardRoutingState.RELOCATING;
this.relocatingNodeId = relocatingNodeId;
}
/**
* Cancel relocation of a shard. The shard's state must be
* <code>RELOCATING</code>.
*/
void cancelRelocation() {
version++;
assert state == ShardRoutingState.RELOCATING;
assert assignedToNode();
assert relocatingNodeId != null;
state = ShardRoutingState.STARTED;
relocatingNodeId = null;
}
/**
* Set the shard's state to <code>UNASSIGNED</code> and clear the current
* and relocating node ids.
*/
void deassignNode() {
version++;
assert state != ShardRoutingState.UNASSIGNED;
state = ShardRoutingState.UNASSIGNED;
this.currentNodeId = null;
this.relocatingNodeId = null;
}
/**
* Set the shard's state to <code>STARTED</code>. The shard's state must be
* <code>INITIALIZING</code> or <code>RELOCATING</code>. Any relocation will be
* canceled.
*/
void moveToStarted() {
version++;
assert state == ShardRoutingState.INITIALIZING || state == ShardRoutingState.RELOCATING;
relocatingNodeId = null;
restoreSource = null;
state = ShardRoutingState.STARTED;
}
/**
* Make the shard primary. Throws an {@link IllegalShardRoutingStateException}
* if the shard is already primary.
*/
void moveToPrimary() {
version++;
if (primary) {
throw new IllegalShardRoutingStateException(this, "Already primary, can't move to primary");
}
primary = true;
}
/**
* Set the primary shard to non-primary
*/
void moveFromPrimary() {
version++;
if (!primary) {
throw new IllegalShardRoutingStateException(this, "Not primary, can't move to replica");
}
primary = false;
}
private long hashVersion = version-1;
private int hashCode = 0;
@Override
public int hashCode() {
hashCode = (hashVersion != version ? super.hashCode() : hashCode);
hashVersion = version;
return hashCode;
}
} | 0true
| src_main_java_org_elasticsearch_cluster_routing_MutableShardRouting.java |
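For orientation, here is an illustrative walk-through of the state machine the mutators above implement. It is a sketch only: the mutators are package-private, so this code would have to live in the same package, and the node ids are made up.
MutableShardRouting shard = new MutableShardRouting("idx", 0, null, true, ShardRoutingState.UNASSIGNED, 1);
shard.assignToNode("node1"); // UNASSIGNED -> INITIALIZING on node1
shard.moveToStarted();       // INITIALIZING -> STARTED
shard.relocate("node2");     // STARTED -> RELOCATING towards node2
shard.cancelRelocation();    // back to STARTED on node1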
1,501 | public class OObjectEntitySerializerTest {
private OObjectDatabaseTx databaseTx;
@BeforeClass
protected void setUp() throws Exception {
databaseTx = new OObjectDatabaseTx("memory:OObjectEntitySerializerTest");
databaseTx.create();
databaseTx.getEntityManager().registerEntityClass(ExactEntity.class);
}
@AfterClass
protected void tearDown() {
databaseTx.close();
}
@Test
public void testCallbacksHierarchy() {
ExactEntity entity = new ExactEntity();
databaseTx.save(entity);
assertTrue(entity.callbackExecuted());
}
@Test
public void testCallbacksHierarchyUpdate() {
ExactEntity entity = new ExactEntity();
entity = databaseTx.save(entity);
entity.reset();
databaseTx.save(entity);
assertTrue(entity.callbackExecuted());
}
} | 0true
| object_src_test_java_com_orientechnologies_orient_object_enhancement_OObjectEntitySerializerTest.java |
1,340 | Collections.sort(entry.getValue().getFacetValues(), new Comparator<SearchFacetResultDTO>() {
public int compare(SearchFacetResultDTO o1, SearchFacetResultDTO o2) {
if (o1.getValue() != null && o2.getValue() != null) {
return o1.getValue().compareTo(o2.getValue());
} else if (o1.getMinValue() != null && o2.getMinValue() != null) {
return o1.getMinValue().compareTo(o2.getMinValue());
}
return 0; // Don't know how to compare
}
}); | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_service_solr_SolrSearchServiceImpl.java |
1,272 | new OProfilerHookValue() {
public Object getValue() {
return getHoleSize();
}
}, "db.*.data.holeSize"); | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_OStorageLocal.java |
390 | clusterService.submitStateUpdateTask("cluster_update_settings", Priority.URGENT, new AckedClusterStateUpdateTask() {
private volatile boolean changed = false;
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
return true;
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
if (changed) {
reroute(true);
} else {
listener.onResponse(new ClusterUpdateSettingsResponse(true, transientUpdates.build(), persistentUpdates.build()));
}
}
@Override
public void onAckTimeout() {
if (changed) {
reroute(false);
} else {
listener.onResponse(new ClusterUpdateSettingsResponse(false, transientUpdates.build(), persistentUpdates.build()));
}
}
private void reroute(final boolean updateSettingsAcked) {
clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings", Priority.URGENT, new AckedClusterStateUpdateTask() {
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
//we wait for the reroute ack only if the update settings was acknowledged
return updateSettingsAcked;
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
//we return when the cluster reroute is acked (the acknowledged flag depends on whether the update settings was acknowledged)
listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
}
@Override
public void onAckTimeout() {
//we return when the cluster reroute ack times out (acknowledged false)
listener.onResponse(new ClusterUpdateSettingsResponse(false, transientUpdates.build(), persistentUpdates.build()));
}
@Override
public TimeValue ackTimeout() {
return request.timeout();
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void onFailure(String source, Throwable t) {
//if the reroute fails we only log
logger.debug("failed to perform [{}]", t, source);
}
@Override
public ClusterState execute(final ClusterState currentState) {
// now, reroute in case things that require it changed (e.g. number of replicas)
RoutingAllocation.Result routingResult = allocationService.reroute(currentState);
if (!routingResult.changed()) {
return currentState;
}
return ClusterState.builder(currentState).routingResult(routingResult).build();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
});
}
@Override
public TimeValue ackTimeout() {
return request.timeout();
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void onFailure(String source, Throwable t) {
logger.debug("failed to perform [{}]", t, source);
listener.onFailure(t);
}
@Override
public ClusterState execute(final ClusterState currentState) {
ImmutableSettings.Builder transientSettings = ImmutableSettings.settingsBuilder();
transientSettings.put(currentState.metaData().transientSettings());
for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
if (dynamicSettings.hasDynamicSetting(entry.getKey()) || entry.getKey().startsWith("logger.")) {
String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
if (error == null) {
transientSettings.put(entry.getKey(), entry.getValue());
transientUpdates.put(entry.getKey(), entry.getValue());
changed = true;
} else {
logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
}
} else {
logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
}
}
ImmutableSettings.Builder persistentSettings = ImmutableSettings.settingsBuilder();
persistentSettings.put(currentState.metaData().persistentSettings());
for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
if (dynamicSettings.hasDynamicSetting(entry.getKey()) || entry.getKey().startsWith("logger.")) {
String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
if (error == null) {
persistentSettings.put(entry.getKey(), entry.getValue());
persistentUpdates.put(entry.getKey(), entry.getValue());
changed = true;
} else {
logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
}
} else {
logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
}
}
if (!changed) {
return currentState;
}
MetaData.Builder metaData = MetaData.builder(currentState.metaData())
.persistentSettings(persistentSettings.build())
.transientSettings(transientSettings.build());
ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
boolean updatedReadOnly = metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false);
if (updatedReadOnly) {
blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
} else {
blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
}
return builder(currentState).metaData(metaData).blocks(blocks).build();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
}); | 1no label
| src_main_java_org_elasticsearch_action_admin_cluster_settings_TransportClusterUpdateSettingsAction.java |
1,474 | public interface HazelcastRegion<Cache extends RegionCache> extends Region {
HazelcastInstance getInstance();
Cache getCache();
ILogger getLogger();
} | 0true
| hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_region_HazelcastRegion.java |
199 | public abstract class Mutation<E,K> {
private List<E> additions;
private List<K> deletions;
public Mutation(List<E> additions, List<K> deletions) {
Preconditions.checkNotNull(additions);
Preconditions.checkNotNull(deletions);
if (additions.isEmpty()) this.additions=null;
else this.additions = Lists.newArrayList(additions);
if (deletions.isEmpty()) this.deletions=null;
else this.deletions = Lists.newArrayList(deletions);
}
public Mutation() {
this.additions = null;
this.deletions = null;
}
/**
* Whether this mutation has additions
* @return true if there is at least one addition
*/
public boolean hasAdditions() {
return additions!=null && !additions.isEmpty();
}
/**
* Whether this mutation has deletions
* @return true if there is at least one deletion
*/
public boolean hasDeletions() {
return deletions != null && !deletions.isEmpty();
}
/**
* Returns the list of additions in this mutation
* @return the list of additions, or an immutable empty list if there are none
*/
public List<E> getAdditions() {
if (additions==null) return ImmutableList.of();
return additions;
}
/**
* Returns the list of deletions in this mutation.
*
* @return the list of deletions, or an immutable empty list if there are none
*/
public List<K> getDeletions() {
if (deletions==null) return ImmutableList.of();
return deletions;
}
/**
* Adds a new entry as an addition to this mutation
*
* @param entry the entry to add
*/
public void addition(E entry) {
if (additions==null) additions = new ArrayList<E>();
additions.add(entry);
}
/**
* Adds a new key as a deletion to this mutation
*
* @param key the key to delete
*/
public void deletion(K key) {
if (deletions==null) deletions = new ArrayList<K>();
deletions.add(key);
}
/**
* Merges another mutation into this mutation. Ensures that all additions and deletions
* are added to this mutation. Does not remove duplicates if such exist - this needs to be ensured by the caller.
*
* @param m the mutation to merge into this one
*/
public void merge(Mutation<E,K> m) {
Preconditions.checkNotNull(m);
if (null != m.additions) {
if (null == additions) additions = m.additions;
else additions.addAll(m.additions);
}
if (null != m.deletions) {
if (null == deletions) deletions = m.deletions;
else deletions.addAll(m.deletions);
}
}
public boolean isEmpty() {
return getTotalMutations()==0;
}
public int getTotalMutations() {
return (additions==null?0:additions.size()) + (deletions==null?0:deletions.size());
}
/**
* Consolidates this mutation by removing redundant deletions. A deletion is considered redundant if
* it is identical to some addition under the provided conversion functions since we consider additions to apply logically after deletions.
* Hence, such a deletion would be applied and immediately overwritten by an addition. To avoid this
* inefficiency, consolidation should be called.
* <p>
* The provided conversion functions map additions and deletions into some object space V for comparison.
* An addition is considered identical to a deletion if both map to the same object (i.e. equals=true) with the respective
* conversion functions.
* <p>
* It needs to be ensured that V objects have valid hashCode() and equals() implementations.
*
* @param convertAdds Function which maps additions onto comparison objects.
* @param convertDels Function which maps deletions onto comparison objects.
*/
public<V> void consolidate(Function<E,V> convertAdds, Function<K,V> convertDels) {
if (hasDeletions() && hasAdditions()) {
Set<V> adds = Sets.newHashSet(Iterables.transform(additions,convertAdds));
Iterator<K> iter = deletions.iterator();
while (iter.hasNext()) {
if (adds.contains(convertDels.apply(iter.next()))) iter.remove();
}
}
}
public abstract void consolidate();
/**
* Checks whether this mutation is consolidated in the sense of {@link #consolidate(com.google.common.base.Function, com.google.common.base.Function)}.
* This should only be used in assertions and tests due to the performance penalty.
*
* @param convertAdds Function which maps additions onto comparison objects.
* @param convertDels Function which maps deletions onto comparison objects.
* @param <V> type of the comparison objects
* @return true if no deletion is redundant with respect to the additions
*/
public<V> boolean isConsolidated(Function<E,V> convertAdds, Function<K,V> convertDels) {
int delBefore = getDeletions().size();
consolidate(convertAdds,convertDels);
return getDeletions().size()==delBefore;
}
public abstract boolean isConsolidated();
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_Mutation.java |
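To make the consolidation contract concrete, here is a minimal sketch of a subclass whose additions and deletions live in the same space, using Guava's identity function (the class name is made up):
class StringMutation extends Mutation<String, String> {
StringMutation(List<String> additions, List<String> deletions) { super(additions, deletions); }
@Override
public void consolidate() { consolidate(Functions.<String>identity(), Functions.<String>identity()); }
@Override
public boolean isConsolidated() { return isConsolidated(Functions.<String>identity(), Functions.<String>identity()); }
}
StringMutation m = new StringMutation(Lists.newArrayList("a"), Lists.newArrayList("a", "b"));
m.consolidate();
// the deletion "a" was redundant with the addition "a" and has been dropped
assert m.getDeletions().equals(ImmutableList.of("b"));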
378 | @RunWith(HazelcastParallelClassRunner.class)
@Category(NightlyTest.class)
public class ClientMultiMapListenerStressTest {
static final int MAX_SECONDS = 60 * 10;
static final String MAP_NAME = randomString();
static final int NUMBER_OF_CLIENTS = 8;
static final int THREADS_PER_CLIENT = 8;
static HazelcastInstance server;
@BeforeClass
public static void init() {
server = Hazelcast.newHazelcastInstance();
}
@AfterClass
public static void destroy() {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Category(ProblematicTest.class)
@Test
public void listenerAddStressTest() throws InterruptedException {
final PutItemsThread[] putThreads = new PutItemsThread[NUMBER_OF_CLIENTS * THREADS_PER_CLIENT];
int idx=0;
for(int i=0; i<NUMBER_OF_CLIENTS; i++){
HazelcastInstance client = HazelcastClient.newHazelcastClient();
for(int j=0; j<THREADS_PER_CLIENT; j++){
PutItemsThread t = new PutItemsThread(client);
putThreads[idx++]=t;
}
}
for(int i=0; i<putThreads.length; i++){
putThreads[i].start();
}
MultiMap mm = server.getMultiMap(MAP_NAME);
assertJoinable(MAX_SECONDS, putThreads );
assertEquals(PutItemsThread.MAX_ITEMS * putThreads.length, mm.size());
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
for(int i=0; i<putThreads.length; i++){
putThreads[i].assertResult(PutItemsThread.MAX_ITEMS * putThreads.length);
}
}
});
}
public class PutItemsThread extends Thread{
public static final int MAX_ITEMS = 1000;
public final MyEntryListener listener = new MyEntryListener();
public HazelcastInstance hzInstance;
public MultiMap mm;
public String id;
public PutItemsThread(HazelcastInstance hzInstance){
this.id = randomString();
this.hzInstance = hzInstance;
this.mm = hzInstance.getMultiMap(MAP_NAME);
mm.addEntryListener(listener, true);
}
public void run(){
for(int i=0; i< MAX_ITEMS; i++){
mm.put(id+i, id+i);
}
}
public void assertResult(int target){
System.out.println("listener "+id+" add events received "+listener.add.get());
assertEquals(target, listener.add.get());
}
}
static class MyEntryListener implements EntryListener {
public AtomicInteger add = new AtomicInteger(0);
public void entryAdded(EntryEvent event) {
add.incrementAndGet();
}
public void entryRemoved(EntryEvent event) {
}
public void entryUpdated(EntryEvent event) {
}
public void entryEvicted(EntryEvent event) {
}
};
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapListenerStressTest.java |
1,676 | public abstract class AbstractFsBlobContainer extends AbstractBlobContainer {
protected final FsBlobStore blobStore;
protected final File path;
public AbstractFsBlobContainer(FsBlobStore blobStore, BlobPath blobPath, File path) {
super(blobPath);
this.blobStore = blobStore;
this.path = path;
}
public File filePath() {
return this.path;
}
public ImmutableMap<String, BlobMetaData> listBlobs() throws IOException {
File[] files = path.listFiles();
if (files == null || files.length == 0) {
return ImmutableMap.of();
}
// using MapBuilder and not ImmutableMap.Builder as it seems like File#listFiles might return duplicate files!
MapBuilder<String, BlobMetaData> builder = MapBuilder.newMapBuilder();
for (File file : files) {
if (file.isFile()) {
builder.put(file.getName(), new PlainBlobMetaData(file.getName(), file.length()));
}
}
return builder.immutableMap();
}
public boolean deleteBlob(String blobName) throws IOException {
return new File(path, blobName).delete();
}
@Override
public boolean blobExists(String blobName) {
return new File(path, blobName).exists();
}
@Override
public void readBlob(final String blobName, final ReadBlobListener listener) {
blobStore.executor().execute(new Runnable() {
@Override
public void run() {
byte[] buffer = new byte[blobStore.bufferSizeInBytes()];
FileInputStream is = null;
try {
is = new FileInputStream(new File(path, blobName));
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
listener.onPartial(buffer, 0, bytesRead);
}
} catch (Throwable t) {
IOUtils.closeWhileHandlingException(is);
listener.onFailure(t);
return;
}
try {
IOUtils.closeWhileHandlingException(is);
listener.onCompleted();
} catch (Throwable t) {
listener.onFailure(t);
}
}
});
}
} | 0true
| src_main_java_org_elasticsearch_common_blobstore_fs_AbstractFsBlobContainer.java |
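For illustration, a sketch of a ReadBlobListener that buffers a blob into memory, using only the three callbacks readBlob invokes above. It assumes the listener interface is nested in BlobContainer and that blocking on a latch is acceptable at the call site.
final ByteArrayOutputStream content = new ByteArrayOutputStream();
final CountDownLatch done = new CountDownLatch(1);
container.readBlob("my-blob", new BlobContainer.ReadBlobListener() {
@Override
public void onPartial(byte[] data, int offset, int size) {
content.write(data, offset, size); // accumulate each chunk
}
@Override
public void onCompleted() {
done.countDown();
}
@Override
public void onFailure(Throwable t) {
done.countDown(); // give up; content may be partial
}
});
done.await();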
2,087 | public class ThrowableObjectInputStream extends ObjectInputStream {
private final ClassLoader classLoader;
public ThrowableObjectInputStream(InputStream in) throws IOException {
this(in, null);
}
public ThrowableObjectInputStream(InputStream in, ClassLoader classLoader) throws IOException {
super(in);
this.classLoader = classLoader;
}
@Override
protected void readStreamHeader() throws IOException, StreamCorruptedException {
int version = readByte() & 0xFF;
if (version != STREAM_VERSION) {
throw new StreamCorruptedException(
"Unsupported version: " + version);
}
}
@Override
protected ObjectStreamClass readClassDescriptor()
throws IOException, ClassNotFoundException {
int type = read();
if (type < 0) {
throw new EOFException();
}
switch (type) {
case ThrowableObjectOutputStream.TYPE_EXCEPTION:
return ObjectStreamClass.lookup(Exception.class);
case ThrowableObjectOutputStream.TYPE_STACKTRACEELEMENT:
return ObjectStreamClass.lookup(StackTraceElement.class);
case ThrowableObjectOutputStream.TYPE_FAT_DESCRIPTOR:
return super.readClassDescriptor();
case ThrowableObjectOutputStream.TYPE_THIN_DESCRIPTOR:
String className = readUTF();
Class<?> clazz = loadClass(className);
return ObjectStreamClass.lookup(clazz);
default:
throw new StreamCorruptedException(
"Unexpected class descriptor type: " + type);
}
}
@Override
protected Class<?> resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException {
String className = desc.getName();
try {
return loadClass(className);
} catch (ClassNotFoundException ex) {
return super.resolveClass(desc);
}
}
protected Class<?> loadClass(String className) throws ClassNotFoundException {
Class<?> clazz;
ClassLoader classLoader = this.classLoader;
if (classLoader == null) {
classLoader = Classes.getDefaultClassLoader();
}
if (classLoader != null) {
clazz = classLoader.loadClass(className);
} else {
clazz = Class.forName(className);
}
return clazz;
}
} | 0true
| src_main_java_org_elasticsearch_common_io_ThrowableObjectInputStream.java |
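A hypothetical round-trip, assuming the matching ThrowableObjectOutputStream counterpart referenced by the type constants above writes the compact header and descriptors this stream expects:
ByteArrayOutputStream bos = new ByteArrayOutputStream();
ThrowableObjectOutputStream out = new ThrowableObjectOutputStream(bos);
out.writeObject(new IllegalStateException("boom"));
out.close();
ThrowableObjectInputStream in = new ThrowableObjectInputStream(new ByteArrayInputStream(bos.toByteArray()));
Throwable t = (Throwable) in.readObject();
in.close();
assert "boom".equals(t.getMessage());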
1,172 | public static class FieldOrder {
public static final int REFNUMBER = 3000;
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_domain_PaymentInfoImpl.java |
1,452 | public class MultiShipInstructionForm implements Serializable {
private static final long serialVersionUID = 1L;
protected String deliveryMessage;
protected PersonalMessage personalMessage = new PersonalMessageImpl();
protected Long fulfillmentGroupId;
public String getDeliveryMessage() {
return deliveryMessage;
}
public void setDeliveryMessage(String deliveryMessage) {
this.deliveryMessage = deliveryMessage;
}
public PersonalMessage getPersonalMessage() {
return personalMessage;
}
public void setPersonalMessage(PersonalMessage personalMessage) {
this.personalMessage = personalMessage;
}
public Long getFulfillmentGroupId() {
return fulfillmentGroupId;
}
public void setFulfillmentGroupId(Long id) {
this.fulfillmentGroupId = id;
}
} | 0true
| core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_checkout_model_MultiShipInstructionForm.java |
1,173 | @edu.umd.cs.findbugs.annotations.SuppressWarnings("SE_BAD_FIELD")
public class InitialMembershipEvent extends EventObject {
private static final long serialVersionUID = -2010865371829087371L;
private final Set<Member> members;
public InitialMembershipEvent(Cluster cluster, Set<Member> members) {
super(cluster);
this.members = members;
}
/**
* Returns an immutable set of ordered members at the moment this {@link InitialMembershipListener} is
* registered. See {@link com.hazelcast.core.Cluster#getMembers()} for more information.
*
* @return a set of members.
*/
public Set<Member> getMembers() {
return members;
}
/**
* Returns the cluster of the event.
*
* @return the cluster of the event.
*/
public Cluster getCluster() {
return (Cluster) getSource();
}
@Override
public String toString() {
return "MembershipInitializeEvent {" + members + "}";
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_core_InitialMembershipEvent.java |
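A minimal sketch of a listener that consumes this event, assuming the Hazelcast 3.x InitialMembershipListener and MembershipListener interfaces:
class MemberCounter implements InitialMembershipListener {
private final AtomicInteger size = new AtomicInteger();
public void init(InitialMembershipEvent event) {
size.set(event.getMembers().size()); // seed from the initial snapshot
}
public void memberAdded(MembershipEvent event) {
size.incrementAndGet();
}
public void memberRemoved(MembershipEvent event) {
size.decrementAndGet();
}
public void memberAttributeChanged(MemberAttributeEvent event) {
}
}
// registered via hazelcastInstance.getCluster().addMembershipListener(new MemberCounter());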
1,736 | public abstract class CompressedStreamOutput<T extends CompressorContext> extends StreamOutput {
private final StreamOutput out;
protected final T context;
protected byte[] uncompressed;
protected int uncompressedLength;
private int position = 0;
private boolean closed;
public CompressedStreamOutput(StreamOutput out, T context) throws IOException {
this.out = out;
this.context = context;
super.setVersion(out.getVersion());
writeHeader(out);
}
@Override
public StreamOutput setVersion(Version version) {
out.setVersion(version);
return super.setVersion(version);
}
@Override
public void write(int b) throws IOException {
if (position >= uncompressedLength) {
flushBuffer();
}
uncompressed[position++] = (byte) b;
}
@Override
public void writeByte(byte b) throws IOException {
if (position >= uncompressedLength) {
flushBuffer();
}
uncompressed[position++] = b;
}
@Override
public void writeBytes(byte[] input, int offset, int length) throws IOException {
// ES, check if length is 0, and don't write in this case
if (length == 0) {
return;
}
final int BUFFER_LEN = uncompressedLength;
// simple case first: buffering only (for trivially short writes)
int free = BUFFER_LEN - position;
if (free >= length) {
System.arraycopy(input, offset, uncompressed, position, length);
position += length;
return;
}
// fill partial input as much as possible and flush
if (position > 0) {
System.arraycopy(input, offset, uncompressed, position, free);
position += free;
flushBuffer();
offset += free;
length -= free;
}
// then write intermediate full block, if any, without copying:
while (length >= BUFFER_LEN) {
compress(input, offset, BUFFER_LEN, out);
offset += BUFFER_LEN;
length -= BUFFER_LEN;
}
// and finally, copy leftovers in input, if any
if (length > 0) {
System.arraycopy(input, offset, uncompressed, 0, length);
}
position = length;
}
@Override
public void flush() throws IOException {
flushBuffer();
out.flush();
}
@Override
public void close() throws IOException {
if (!closed) {
flushBuffer();
closed = true;
doClose();
out.close();
}
}
protected abstract void doClose() throws IOException;
@Override
public void reset() throws IOException {
position = 0;
out.reset();
}
private void flushBuffer() throws IOException {
if (position > 0) {
compress(uncompressed, 0, position, out);
position = 0;
}
}
protected abstract void writeHeader(StreamOutput out) throws IOException;
/**
* Compresses the data into the output
*/
protected abstract void compress(byte[] data, int offset, int len, StreamOutput out) throws IOException;
} | 0true
| src_main_java_org_elasticsearch_common_compress_CompressedStreamOutput.java |
954 | class ProducerThread extends TestThread {
private final ILock lock;
private final ICondition condition;
ProducerThread(int id, ILock lock, ICondition condition) {
super("ProducerThread-" + id);
this.lock = lock;
this.condition = condition;
}
void runSingleIteration() throws InterruptedException {
lock.lock();
try {
while (object != null) {
condition.await();
}
object = "";
condition.signalAll();
} finally {
lock.unlock();
}
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_concurrent_lock_ProducerConsumerConditionStressTest.java |
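The consumer counterpart is the mirror image of the producer's protocol; a sketch, assuming the same shared object field and TestThread base class from the enclosing test:
class ConsumerThread extends TestThread {
private final ILock lock;
private final ICondition condition;
ConsumerThread(int id, ILock lock, ICondition condition) {
super("ConsumerThread-" + id);
this.lock = lock;
this.condition = condition;
}
void runSingleIteration() throws InterruptedException {
lock.lock();
try {
while (object == null) {
condition.await(); // wait until the producer has published
}
object = null; // consume
condition.signalAll(); // wake the producer
} finally {
lock.unlock();
}
}
}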
1,661 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class BasicMapTest extends HazelcastTestSupport {
private static final int instanceCount = 3;
private static final Random rand = new Random();
private HazelcastInstance[] instances;
@Before
public void init() {
TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(instanceCount);
Config config = new Config();
instances = factory.newInstances(config);
}
private HazelcastInstance getInstance() {
return instances[rand.nextInt(instanceCount)];
}
@Test
public void testBoxedPrimitives(){
IMap map = getInstance().getMap("testPrimitives");
assertPutGet(map,new Boolean(true));
assertPutGet(map,new Boolean(false));
assertPutGet(map, new Integer(10));
assertPutGet(map, new Short((short)10));
assertPutGet(map,new Byte((byte)10));
assertPutGet(map,new Long(10));
assertPutGet(map, new Float(10));
assertPutGet(map, new Double(10));
assertPutGet(map,new Character('x'));
}
public void assertPutGet(Map map, Object value){
String key = UUID.randomUUID().toString();
map.put(key, value);
assertEquals(value, map.get(key));
}
@Test
public void testArrays(){
IMap map = getInstance().getMap("testArrays");
boolean[] booleanArray = {true, false};
map.put("boolean", booleanArray);
assertTrue(Arrays.equals(booleanArray, (boolean[])map.get("boolean")));
int[] intArray = {1, 2};
map.put("int", intArray);
assertArrayEquals(intArray, (int[]) map.get("int"));
short[] shortArray = {(short)1, (short)2};
map.put("short", shortArray);
assertArrayEquals(shortArray, (short[]) map.get("short"));
byte[] byteArray = {(byte)1, (byte)2};
map.put("byte", byteArray);
assertArrayEquals(byteArray, (byte[]) map.get("byte"));
long[] longArray = {1l, 2l};
map.put("long", longArray);
assertArrayEquals(longArray,(long[])map.get("long"));
float[] floatArray = {(float)1, (float)2};
map.put("float", floatArray);
assertTrue(Arrays.equals(floatArray, (float[]) map.get("float")));
double[] doubleArray = {(double)1, (double)2};
map.put("double", doubleArray);
assertTrue(Arrays.equals(doubleArray, (double[]) map.get("double")));
char[] charArray = {'1', '2'};
map.put("char", charArray);
assertArrayEquals(charArray, (char[])map.get("char"));
Object[] objectArray = {"foo",null,new Integer(3)};
map.put("object", objectArray);
assertArrayEquals(objectArray, (Object[])map.get("object"));
}
@Test
public void testMapPutAndGet() {
IMap<String, String> map = getInstance().getMap("testMapPutAndGet");
String value = map.put("Hello", "World");
assertEquals("World", map.get("Hello"));
assertEquals(1, map.size());
assertNull(value);
value = map.put("Hello", "World");
assertEquals("World", map.get("Hello"));
assertEquals(1, map.size());
assertEquals("World", value);
value = map.put("Hello", "New World");
assertEquals("World", value);
assertEquals("New World", map.get("Hello"));
}
@Test
public void testMapPutIfAbsent() {
IMap<String, String> map = getInstance().getMap("testMapPutIfAbsent");
assertEquals(map.putIfAbsent("key1", "value1"), null);
assertEquals(map.putIfAbsent("key2", "value2"), null);
assertEquals(map.putIfAbsent("key1", "valueX"), "value1");
assertEquals(map.get("key1"), "value1");
assertEquals(map.size(), 2);
}
@Test
public void testMapGetNullIsNotAllowed() {
IMap<String, String> map = getInstance().getMap("testMapGetNullIsNotAllowed");
try {
map.get(null);
fail();
} catch (Exception e) {
assertTrue(e instanceof NullPointerException);
}
}
@Test
public void valuesToArray() {
IMap<String, String> map = getInstance().getMap("valuesToArray");
assertEquals(0, map.size());
map.put("a", "1");
map.put("b", "2");
map.put("c", "3");
assertEquals(3, map.size());
{
final Object[] values = map.values().toArray();
Arrays.sort(values);
assertArrayEquals(new Object[]{"1", "2", "3"}, values);
}
{
final String[] values = map.values().toArray(new String[3]);
Arrays.sort(values);
assertArrayEquals(new String[]{"1", "2", "3"}, values);
}
{
final String[] values = map.values().toArray(new String[2]);
Arrays.sort(values);
assertArrayEquals(new String[]{"1", "2", "3"}, values);
}
{
final String[] values = map.values().toArray(new String[5]);
Arrays.sort(values, 0, 3);
assertArrayEquals(new String[]{"1", "2", "3", null, null}, values);
}
}
@Test
public void testMapEvictAndListener() throws InterruptedException {
IMap<String, String> map = getInstance().getMap("testMapEvictAndListener");
final String value1 = "/home/data/file1.dat";
final String value2 = "/home/data/file2.dat";
final List<String> newList = new CopyOnWriteArrayList<String>();
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
map.addEntryListener(new EntryAdapter<String, String>() {
public void entryEvicted(EntryEvent<String, String> event) {
if (value1.equals(event.getValue())) {
newList.add(event.getValue());
latch1.countDown();
} else if (value2.equals(event.getValue())) {
newList.add(event.getValue());
latch2.countDown();
}
}
}, true);
map.put("key", value1, 1, TimeUnit.SECONDS);
assertTrue(latch1.await(10, TimeUnit.SECONDS));
map.put("key", value2, 1, TimeUnit.SECONDS);
assertTrue(latch2.await(10, TimeUnit.SECONDS));
assertEquals(value1, newList.get(0));
assertEquals(value2, newList.get(1));
}
@Test
public void testMapEntryListener() {
IMap<String, String> map = getInstance().getMap("testMapEntryListener");
final CountDownLatch latchAdded = new CountDownLatch(1);
final CountDownLatch latchRemoved = new CountDownLatch(1);
final CountDownLatch latchUpdated = new CountDownLatch(1);
map.addEntryListener(new EntryListener<String, String>() {
public void entryAdded(EntryEvent event) {
assertEquals("world", event.getValue());
assertEquals("hello", event.getKey());
latchAdded.countDown();
}
public void entryRemoved(EntryEvent event) {
assertEquals("hello", event.getKey());
assertEquals("new world", event.getValue());
latchRemoved.countDown();
}
public void entryUpdated(EntryEvent event) {
assertEquals("world", event.getOldValue());
assertEquals("new world", event.getValue());
assertEquals("hello", event.getKey());
latchUpdated.countDown();
}
public void entryEvicted(EntryEvent event) {
entryRemoved(event);
}
}, true);
map.put("hello", "world");
map.put("hello", "new world");
map.remove("hello");
try {
assertTrue(latchAdded.await(5, TimeUnit.SECONDS));
assertTrue(latchUpdated.await(5, TimeUnit.SECONDS));
assertTrue(latchRemoved.await(5, TimeUnit.SECONDS));
} catch (InterruptedException e) {
e.printStackTrace();
assertFalse(e.getMessage(), true);
}
}
/** Test for issue #181 */
@Test
public void testMapKeyListenerWithRemoveAndUnlock() throws InterruptedException {
IMap<String, String> map = getInstance().getMap("testMapKeyListenerWithRemoveAndUnlock");
final String key = "key";
final int count = 20;
final CountDownLatch latch = new CountDownLatch(count * 2);
map.addEntryListener(new EntryAdapter<String, String>() {
public void entryAdded(final EntryEvent<String, String> e) {
testEvent(e);
}
public void entryRemoved(final EntryEvent<String, String> e) {
testEvent(e);
}
private void testEvent(final EntryEvent<String, String> e) {
if (key.equals(e.getKey())) {
latch.countDown();
} else {
fail("Invalid event: " + e);
}
}
}, key, true);
for (int i = 0; i < count; i++) {
map.lock(key);
map.put(key, "value");
map.remove(key);
map.unlock(key);
}
assertTrue("Listener events are missing! Remaining: " + latch.getCount(),
latch.await(5, TimeUnit.SECONDS));
}
@Test
public void testMapEntrySetWhenRemoved() {
IMap<String, String> map = getInstance().getMap("testMapEntrySetWhenRemoved");
map.put("Hello", "World");
map.remove("Hello");
Set<IMap.Entry<String, String>> set = map.entrySet();
for (IMap.Entry<String, String> e : set) {
fail("Iterator should not contain removed entry, found " + e.getKey());
}
}
@Test
public void testMapRemove() {
IMap<String, String> map = getInstance().getMap("testMapRemove");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
assertEquals(map.remove("key1"), "value1");
assertEquals(map.size(), 2);
assertEquals(map.remove("key1"), null);
assertEquals(map.size(), 2);
assertEquals(map.remove("key3"), "value3");
assertEquals(map.size(), 1);
}
@Test
public void testMapDelete() {
IMap<String, String> map = getInstance().getMap("testMapRemove");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
map.delete("key1");
assertEquals(map.size(), 2);
map.delete("key1");
assertEquals(map.size(), 2);
map.delete("key3");
assertEquals(map.size(), 1);
}
@Test
public void testMapClear_nonEmptyMap() {
IMap<String, String> map = getInstance().getMap("testMapClear");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
map.clear();
assertEquals(map.size(), 0);
assertEquals(map.get("key1"), null);
assertEquals(map.get("key2"), null);
assertEquals(map.get("key3"), null);
}
@Test
public void testMapClear_emptyMap() {
String mapName = "testMapClear_emptyMap";
HazelcastInstance hz = getInstance();
IMap<String, String> map = hz.getMap(mapName);
map.clear();
assertEquals(map.size(), 0);
//this test is going to be enabled as soon as the size has been fixed (since it also triggers unwanted recordstore creation)
//we need to make sure there are no unwanted recordstores (consumes memory) being created because of the clear.
//so we are going to check one of the partitions if it has a recordstore and then we can safely assume that the
//rest of the partitions have no record store either.
//MapService mapService = getNode(hz).nodeEngine.getService(MapService.SERVICE_NAME);
//RecordStore recordStore = mapService.getPartitionContainer(1).getExistingRecordStore(mapName);
//assertNull(recordStore);
}
@Test
public void testMapEvict() {
IMap<String, String> map = getInstance().getMap("testMapEvict");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
assertEquals(map.remove("key1"), "value1");
assertEquals(map.size(), 2);
assertEquals(map.remove("key1"), null);
assertEquals(map.size(), 2);
assertEquals(map.remove("key3"), "value3");
assertEquals(map.size(), 1);
}
@Test
public void testMapTryRemove() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapTryRemove");
map.put("key1", "value1");
map.lock("key1");
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
final CountDownLatch latch3 = new CountDownLatch(1);
final AtomicBoolean firstBool = new AtomicBoolean();
final AtomicBoolean secondBool = new AtomicBoolean();
Thread thread = new Thread(new Runnable() {
public void run() {
try {
firstBool.set(map.tryRemove("key1", 1, TimeUnit.SECONDS));
latch2.countDown();
latch1.await();
secondBool.set(map.tryRemove("key1", 1, TimeUnit.SECONDS));
latch3.countDown();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
});
thread.start();
latch2.await();
map.unlock("key1");
latch1.countDown();
latch3.await();
assertFalse(firstBool.get());
assertTrue(secondBool.get());
thread.join();
}
@Test
public void testMapRemoveIfSame() {
IMap<String, String> map = getInstance().getMap("testMapRemoveIfSame");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
assertFalse(map.remove("key1", "nan"));
assertEquals(map.size(), 3);
assertTrue(map.remove("key1", "value1"));
assertEquals(map.size(), 2);
assertTrue(map.remove("key2", "value2"));
assertTrue(map.remove("key3", "value3"));
assertEquals(map.size(), 0);
}
@Test
public void testMapSet() {
IMap<String, String> map = getInstance().getMap("testMapSet");
map.put("key1", "value1");
assertEquals(map.get("key1"), "value1");
assertEquals(map.size(), 1);
map.set("key1", "valueX", 0, TimeUnit.MILLISECONDS);
assertEquals(map.size(), 1);
assertEquals(map.get("key1"), "valueX");
map.set("key2", "value2", 0, TimeUnit.MILLISECONDS);
assertEquals(map.size(), 2);
assertEquals(map.get("key1"), "valueX");
assertEquals(map.get("key2"), "value2");
}
@Test
public void testMapContainsKey() {
IMap<String, String> map = getInstance().getMap("testMapContainsKey");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
assertEquals(map.containsKey("key1"), true);
assertEquals(map.containsKey("key5"), false);
map.remove("key1");
assertEquals(map.containsKey("key1"), false);
assertEquals(map.containsKey("key2"), true);
assertEquals(map.containsKey("key5"), false);
}
@Test
public void testMapKeySet() {
IMap<String, String> map = getInstance().getMap("testMapKeySet");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
HashSet<String> actual = new HashSet<String>();
actual.add("key1");
actual.add("key2");
actual.add("key3");
assertEquals(map.keySet(), actual);
}
@Test
public void testMapLocalKeySet() {
IMap<String, String> map = getInstance().getMap("testMapKeySet");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
HashSet<String> actual = new HashSet<String>();
actual.add("key1");
actual.add("key2");
actual.add("key3");
assertEquals(map.keySet(), actual);
}
@Test
public void testMapValues() {
IMap<String, String> map = getInstance().getMap("testMapValues");
map.put("key1", "value1");
map.put("key2", "value2");
map.put("key3", "value3");
map.put("key4", "value3");
List<String> values = new ArrayList<String>(map.values());
List<String> actual = new ArrayList<String>();
actual.add("value1");
actual.add("value2");
actual.add("value3");
actual.add("value3");
Collections.sort(values);
Collections.sort(actual);
assertEquals(values, actual);
}
@Test
public void testMapContainsValue() {
IMap map = getInstance().getMap("testMapContainsValue");
map.put(1, 1);
map.put(2, 2);
map.put(3, 3);
assertTrue(map.containsValue(1));
assertFalse(map.containsValue(5));
map.remove(1);
assertFalse(map.containsValue(1));
assertTrue(map.containsValue(2));
assertFalse(map.containsValue(5));
}
@Test
public void testMapIsEmpty() {
IMap<String, String> map = getInstance().getMap("testMapIsEmpty");
assertTrue(map.isEmpty());
map.put("key1", "value1");
assertFalse(map.isEmpty());
map.remove("key1");
assertTrue(map.isEmpty());
}
@Test
public void testMapSize() {
IMap map = getInstance().getMap("testMapSize");
assertEquals(map.size(), 0);
map.put(1, 1);
assertEquals(map.size(), 1);
map.put(2, 2);
map.put(3, 3);
assertEquals(map.size(), 3);
}
@Test
public void testMapReplace() {
IMap map = getInstance().getMap("testMapReplace");
map.put(1, 1);
assertNull(map.replace(2, 1));
assertNull(map.get(2));
map.put(2, 2);
assertEquals(2, map.replace(2, 3));
assertEquals(3, map.get(2));
}
@Test
public void testMapReplaceIfSame() {
IMap map = getInstance().getMap("testMapReplaceIfSame");
map.put(1, 1);
assertFalse(map.replace(1, 2, 3));
assertTrue(map.replace(1, 1, 2));
assertEquals(map.get(1), 2);
map.put(2, 2);
assertTrue(map.replace(2, 2, 3));
assertEquals(map.get(2), 3);
assertTrue(map.replace(2, 3, 4));
assertEquals(map.get(2), 4);
}
@Test
public void testMapLockAndUnlockAndTryLock() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapLockAndUnlockAndTryLock");
map.lock("key0");
map.lock("key1");
map.lock("key2");
map.lock("key3");
final AtomicBoolean check1 = new AtomicBoolean(false);
final AtomicBoolean check2 = new AtomicBoolean(false);
final CountDownLatch latch0 = new CountDownLatch(1);
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
final CountDownLatch latch3 = new CountDownLatch(1);
Thread thread = new Thread(new Runnable() {
public void run() {
try {
check1.set(map.tryLock("key0"));
check2.set(map.tryLock("key0", 3000, TimeUnit.MILLISECONDS));
latch0.countDown();
map.put("key1", "value1");
latch1.countDown();
map.put("key2", "value2");
latch2.countDown();
map.put("key3", "value3");
latch3.countDown();
} catch (Exception e) {
fail(e.getMessage());
}
}
});
thread.start();
Thread.sleep(1000);
map.unlock("key0");
assertTrue(latch0.await(3, TimeUnit.SECONDS));
assertFalse(check1.get());
assertTrue(check2.get());
map.unlock("key1");
assertTrue(latch1.await(3, TimeUnit.SECONDS));
map.unlock("key2");
assertTrue(latch2.await(3, TimeUnit.SECONDS));
map.unlock("key3");
assertTrue(latch3.await(3, TimeUnit.SECONDS));
}
@Test
public void testMapIsLocked() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapIsLocked");
map.lock("key1");
assertTrue(map.isLocked("key1"));
assertFalse(map.isLocked("key2"));
final CountDownLatch latch = new CountDownLatch(1);
final AtomicBoolean b1 = new AtomicBoolean();
final AtomicBoolean b2 = new AtomicBoolean();
Thread thread = new Thread(new Runnable() {
public void run() {
try {
b1.set(map.isLocked("key1"));
b2.set(map.isLocked("key2"));
latch.countDown();
} catch (Exception e) {
fail(e.getMessage());
}
}
});
thread.start();
latch.await();
assertTrue(b1.get());
assertFalse(b2.get());
thread.join();
}
@Test
public void testEntrySet() {
final IMap<Object, Object> map = getInstance().getMap("testEntrySet");
map.put(1, 1);
map.put(2, 2);
map.put(3, 3);
map.put(4, 4);
map.put(5, 5);
Set<Map.Entry> entrySet = new HashSet<Map.Entry>();
entrySet.add(new AbstractMap.SimpleImmutableEntry(1, 1));
entrySet.add(new AbstractMap.SimpleImmutableEntry(2, 2));
entrySet.add(new AbstractMap.SimpleImmutableEntry(3, 3));
entrySet.add(new AbstractMap.SimpleImmutableEntry(4, 4));
entrySet.add(new AbstractMap.SimpleImmutableEntry(5, 5));
assertEquals(entrySet, map.entrySet());
}
@Test
public void testEntryView() {
Config config = new Config();
config.getMapConfig("default").setStatisticsEnabled(true);
HazelcastInstance instance = getInstance();
final IMap<Integer, Integer> map = instance.getMap("testEntryView");
final TimeUnit timeUnit = TimeUnit.NANOSECONDS;
long time1 = timeUnit.toMillis(System.nanoTime());
map.put(1, 1);
map.put(2, 2);
map.put(3, 3);
long time2 = timeUnit.toMillis(System.nanoTime());
map.get(3);
map.get(3);
long time3 = timeUnit.toMillis(System.nanoTime());
map.put(2, 22);
EntryView<Integer, Integer> entryView1 = map.getEntryView(1);
EntryView<Integer, Integer> entryView2 = map.getEntryView(2);
EntryView<Integer, Integer> entryView3 = map.getEntryView(3);
assertEquals((Integer) 1, entryView1.getKey());
assertEquals((Integer) 2, entryView2.getKey());
assertEquals((Integer) 3, entryView3.getKey());
assertEquals((Integer) 1, entryView1.getValue());
assertEquals((Integer) 22, entryView2.getValue());
assertEquals((Integer) 3, entryView3.getValue());
assertEquals(0, entryView1.getHits());
assertEquals(1, entryView2.getHits());
assertEquals(2, entryView3.getHits());
assertEquals(0, entryView1.getVersion());
assertEquals(1, entryView2.getVersion());
assertEquals(0, entryView3.getVersion());
assertTrue(entryView1.getCreationTime() >= time1 && entryView1.getCreationTime() <= time2);
assertTrue(entryView2.getCreationTime() >= time1 && entryView2.getCreationTime() <= time2);
assertTrue(entryView3.getCreationTime() >= time1 && entryView3.getCreationTime() <= time2);
assertTrue(entryView1.getLastAccessTime() >= time1 && entryView1.getLastAccessTime() <= time2);
assertTrue(entryView2.getLastAccessTime() >= time3);
assertTrue(entryView3.getLastAccessTime() >= time2 && entryView3.getLastAccessTime() <= time3);
assertTrue(entryView1.getLastUpdateTime() >= time1 && entryView1.getLastUpdateTime() <= time2);
assertTrue(entryView2.getLastUpdateTime() >= time3);
assertTrue(entryView3.getLastUpdateTime() >= time1 && entryView3.getLastUpdateTime() <= time2);
}
@Test
public void testMapTryPut() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapTryPut");
final String key1 = "key1";
final String key2 = "key2";
map.lock(key1);
final AtomicInteger counter = new AtomicInteger(6);
final CountDownLatch latch = new CountDownLatch(1);
Thread thread = new Thread(new Runnable() {
public void run() {
try {
if (!map.tryPut(key1, "value1", 100, TimeUnit.MILLISECONDS))
counter.decrementAndGet();
if (map.get(key1) == null)
counter.decrementAndGet();
if (map.tryPut(key2, "value", 100, TimeUnit.MILLISECONDS))
counter.decrementAndGet();
if (map.get(key2).equals("value"))
counter.decrementAndGet();
if (map.tryPut(key1, "value1", 5, TimeUnit.SECONDS))
counter.decrementAndGet();
if (map.get(key1).equals("value1"))
counter.decrementAndGet();
latch.countDown();
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
});
thread.start();
Thread.sleep(1000);
map.unlock("key1");
assertTrue(latch.await(10, TimeUnit.SECONDS));
assertEquals(0, counter.get());
thread.join(10000);
}
@Test
public void testGetPutRemoveAsync() {
final IMap<Object, Object> map = getInstance().getMap("testGetPutRemoveAsync");
Future<Object> ff = map.putAsync(1, 1);
try {
assertEquals(null, ff.get());
assertEquals(1, map.putAsync(1, 2).get());
assertEquals(2, map.getAsync(1).get());
assertEquals(2, map.removeAsync(1).get());
assertEquals(0, map.size());
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
@Test
public void testGetAllPutAll() throws InterruptedException {
warmUpPartitions(instances);
final IMap<Object, Object> map = getInstance().getMap("testGetAllPutAll");
Set ss = new HashSet();
ss.add(1);
ss.add(3);
map.getAll(ss);
assertTrue(map.isEmpty());
Map mm = new HashMap();
int size = 100;
for (int i = 0; i < size; i++) {
mm.put(i, i);
}
map.putAll(mm);
assertEquals(size, map.size());
for (int i = 0; i < size; i++) {
assertEquals(map.get(i), i);
}
size = 10000;
for (int i = 0; i < size; i++) {
mm.put(i, i);
}
map.putAll(mm);
assertEquals(size, map.size());
for (int i = 0; i < size; i++) {
assertEquals(map.get(i), i);
}
ss = new HashSet();
ss.add(1);
ss.add(3);
Map m2 = map.getAll(ss);
assertEquals(m2.size(), 2);
assertEquals(m2.get(1), 1);
assertEquals(m2.get(3), 3);
}
@Test
// todo fails in parallel
public void testPutAllBackup() throws InterruptedException {
HazelcastInstance instance1 = instances[0];
HazelcastInstance instance2 = instances[1];
final IMap<Object, Object> map = instance1.getMap("testPutAllBackup");
final IMap<Object, Object> map2 = instance2.getMap("testPutAllBackup");
warmUpPartitions(instances);
Map mm = new HashMap();
final int size = 100;
for (int i = 0; i < size; i++) {
mm.put(i, i);
}
map.putAll(mm);
assertEquals(size, map.size());
for (int i = 0; i < size; i++) {
assertEquals(i, map.get(i));
}
instance2.shutdown();
assertEquals(size, map.size());
for (int i = 0; i < size; i++) {
assertEquals(i, map.get(i));
}
}
@Test
// todo fails in parallel
public void testPutAllTooManyEntriesWithBackup() throws InterruptedException {
HazelcastInstance instance1 = instances[0];
HazelcastInstance instance2 = instances[1];
final IMap<Object, Object> map = instance1.getMap("testPutAllBackup");
final IMap<Object, Object> map2 = instance2.getMap("testPutAllBackup");
warmUpPartitions(instances);
Map mm = new HashMap();
final int size = 10000;
for (int i = 0; i < size; i++) {
mm.put(i, i);
}
map.putAll(mm);
assertEquals(size, map.size());
for (int i = 0; i < size; i++) {
assertEquals(i, map.get(i));
}
instance2.shutdown();
assertEquals(size, map.size());
for (int i = 0; i < size; i++) {
assertEquals(i, map.get(i));
}
}
@Test
public void testMapListenersWithValue() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapListenersWithValue");
final Object[] addedKey = new Object[1];
final Object[] addedValue = new Object[1];
final Object[] updatedKey = new Object[1];
final Object[] oldValue = new Object[1];
final Object[] newValue = new Object[1];
final Object[] removedKey = new Object[1];
final Object[] removedValue = new Object[1];
EntryListener<Object, Object> listener = new EntryListener<Object, Object>() {
public void entryAdded(EntryEvent<Object, Object> event) {
addedKey[0] = event.getKey();
addedValue[0] = event.getValue();
}
public void entryRemoved(EntryEvent<Object, Object> event) {
removedKey[0] = event.getKey();
removedValue[0] = event.getOldValue();
}
public void entryUpdated(EntryEvent<Object, Object> event) {
updatedKey[0] = event.getKey();
oldValue[0] = event.getOldValue();
newValue[0] = event.getValue();
}
public void entryEvicted(EntryEvent<Object, Object> event) {
}
};
map.addEntryListener(listener, true);
map.put("key", "value");
map.put("key", "value2");
map.remove("key");
Thread.sleep(1000);
assertEquals(addedKey[0], "key");
assertEquals(addedValue[0], "value");
assertEquals(updatedKey[0], "key");
assertEquals(oldValue[0], "value");
assertEquals(newValue[0], "value2");
assertEquals(removedKey[0], "key");
assertEquals(removedValue[0], "value2");
}
@Test
public void testMapQueryListener() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapQueryListener");
final Object[] addedKey = new Object[1];
final Object[] addedValue = new Object[1];
final Object[] updatedKey = new Object[1];
final Object[] oldValue = new Object[1];
final Object[] newValue = new Object[1];
final Object[] removedKey = new Object[1];
final Object[] removedValue = new Object[1];
EntryListener<Object, Object> listener = new EntryListener<Object, Object>() {
public void entryAdded(EntryEvent<Object, Object> event) {
addedKey[0] = event.getKey();
addedValue[0] = event.getValue();
}
public void entryRemoved(EntryEvent<Object, Object> event) {
removedKey[0] = event.getKey();
removedValue[0] = event.getOldValue();
}
public void entryUpdated(EntryEvent<Object, Object> event) {
updatedKey[0] = event.getKey();
oldValue[0] = event.getOldValue();
newValue[0] = event.getValue();
}
public void entryEvicted(EntryEvent<Object, Object> event) {
}
};
map.addEntryListener(listener, new StartsWithPredicate("a"), null, true);
map.put("key1", "abc");
map.put("key2", "bcd");
map.put("key2", "axyz");
map.remove("key1");
Thread.sleep(1000);
assertEquals(addedKey[0], "key1");
assertEquals(addedValue[0], "abc");
assertEquals(updatedKey[0], "key2");
assertEquals(oldValue[0], "bcd");
assertEquals(newValue[0], "axyz");
assertEquals(removedKey[0], "key1");
assertEquals(removedValue[0], "abc");
}
static class StartsWithPredicate implements Predicate<Object, Object>, Serializable {
String pref;
StartsWithPredicate(String pref) {
this.pref = pref;
}
public boolean apply(Map.Entry<Object, Object> mapEntry) {
String val = (String) mapEntry.getValue();
return val != null && val.startsWith(pref);
}
}
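// In testMapQueryListener above, this predicate filters events by value: the
// put of "bcd" never reaches the listener, while the later put of "axyz"
// matches and is delivered as an update event whose oldValue is "bcd".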
@Test
public void testMapListenersWithValueAndKeyFiltered() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapListenersWithValueAndKeyFiltered");
final Object[] addedKey = new Object[1];
final Object[] addedValue = new Object[1];
final Object[] updatedKey = new Object[1];
final Object[] oldValue = new Object[1];
final Object[] newValue = new Object[1];
final Object[] removedKey = new Object[1];
final Object[] removedValue = new Object[1];
EntryListener<Object, Object> listener = new EntryListener<Object, Object>() {
public void entryAdded(EntryEvent<Object, Object> event) {
addedKey[0] = event.getKey();
addedValue[0] = event.getValue();
}
public void entryRemoved(EntryEvent<Object, Object> event) {
removedKey[0] = event.getKey();
removedValue[0] = event.getOldValue();
}
public void entryUpdated(EntryEvent<Object, Object> event) {
updatedKey[0] = event.getKey();
oldValue[0] = event.getOldValue();
newValue[0] = event.getValue();
}
public void entryEvicted(EntryEvent<Object, Object> event) {
}
};
map.addEntryListener(listener, "key", true);
map.put("keyx", "valuex");
map.put("key", "value");
map.put("key", "value2");
map.put("keyx", "valuex2");
map.put("keyz", "valuez");
map.remove("keyx");
map.remove("key");
map.remove("keyz");
Thread.sleep(1000);
assertEquals(addedKey[0], "key");
assertEquals(addedValue[0], "value");
assertEquals(updatedKey[0], "key");
assertEquals(oldValue[0], "value");
assertEquals(newValue[0], "value2");
assertEquals(removedKey[0], "key");
assertEquals(removedValue[0], "value2");
}
@Test
public void testMapListenersWithoutValue() throws InterruptedException {
final IMap<Object, Object> map = getInstance().getMap("testMapListenersWithoutValue");
final Object[] addedKey = new Object[1];
final Object[] addedValue = new Object[1];
final Object[] updatedKey = new Object[1];
final Object[] oldValue = new Object[1];
final Object[] newValue = new Object[1];
final Object[] removedKey = new Object[1];
final Object[] removedValue = new Object[1];
EntryListener<Object, Object> listener = new EntryListener<Object, Object>() {
public void entryAdded(EntryEvent<Object, Object> event) {
addedKey[0] = event.getKey();
addedValue[0] = event.getValue();
}
public void entryRemoved(EntryEvent<Object, Object> event) {
removedKey[0] = event.getKey();
removedValue[0] = event.getOldValue();
}
public void entryUpdated(EntryEvent<Object, Object> event) {
updatedKey[0] = event.getKey();
oldValue[0] = event.getOldValue();
newValue[0] = event.getValue();
}
public void entryEvicted(EntryEvent<Object, Object> event) {
}
};
map.addEntryListener(listener, false);
map.put("key", "value");
map.put("key", "value2");
map.remove("key");
Thread.sleep(1000);
assertEquals(addedKey[0], "key");
assertEquals(addedValue[0], null);
assertEquals(updatedKey[0], "key");
assertEquals(oldValue[0], null);
assertEquals(newValue[0], null);
assertEquals(removedKey[0], "key");
assertEquals(removedValue[0], null);
}
@Test
public void testPutWithTtl2() throws InterruptedException {
}
@Test
public void testPutWithTtl() throws InterruptedException {
IMap<String, String> map = getInstance().getMap("testPutWithTtl");
final CountDownLatch latch = new CountDownLatch(1);
map.addEntryListener(new EntryAdapter<String, String>() {
public void entryEvicted(EntryEvent<String, String> event) {
latch.countDown();
}
}, true);
// ttl should be bigger than 5sec (= sync backup wait timeout)
map.put("key", "value", 6, TimeUnit.SECONDS);
assertEquals("value", map.get("key"));
assertTrue(latch.await(20, TimeUnit.SECONDS));
assertNull(map.get("key"));
}
@Test
public void testMapEntryProcessor() throws InterruptedException {
IMap<Integer, Integer> map = getInstance().getMap("testMapEntryProcessor");
map.put(1, 1);
EntryProcessor entryProcessor = new SampleEntryProcessor();
map.executeOnKey(1, entryProcessor);
assertEquals(map.get(1), (Object) 2);
}
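// The single executeOnKey call turns 1 into 2 because SampleEntryProcessor
// (defined near the bottom of this class) increments the entry value in
// process() on the primary and repeats the change in processBackup() on backups.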
@Test
@Category(ProblematicTest.class)
public void testMapLoaderLoadUpdatingIndex() throws Exception {
MapConfig mapConfig = getInstance().getConfig().getMapConfig("testMapLoaderLoadUpdatingIndex");
List<MapIndexConfig> indexConfigs = mapConfig.getMapIndexConfigs();
indexConfigs.add(new MapIndexConfig("name", true));
SampleIndexableObjectMapLoader loader = new SampleIndexableObjectMapLoader();
MapStoreConfig storeConfig = new MapStoreConfig();
storeConfig.setFactoryImplementation(loader);
mapConfig.setMapStoreConfig(storeConfig);
IMap<Integer, SampleIndexableObject> map = getInstance().getMap("testMapLoaderLoadUpdatingIndex");
for (int i = 0; i < 10; i++) {
map.put(i, new SampleIndexableObject("My-" + i, i));
}
SqlPredicate predicate = new SqlPredicate("name='My-5'");
Set<Entry<Integer, SampleIndexableObject>> result = map.entrySet(predicate);
assertEquals(1, result.size());
assertEquals(5, (int) result.iterator().next().getValue().value);
map.destroy();
loader.preloadValues = true;
map = getInstance().getMap("testMapLoaderLoadUpdatingIndex");
assertFalse(map.isEmpty());
predicate = new SqlPredicate("name='My-5'");
result = map.entrySet(predicate);
assertEquals(1, result.size());
assertEquals(5, (int) result.iterator().next().getValue().value);
}
@Test
public void testIfWeCarryRecordVersionInfoToReplicas() {
final String mapName = randomMapName();
final int mapSize = 1000;
final int nodeCount = 3;
final int expectedRecordVersion = 3;
final TestHazelcastInstanceFactory factory = new TestHazelcastInstanceFactory(nodeCount);
final Config config = new Config();
final HazelcastInstance node1 = factory.newHazelcastInstance(config);
final HazelcastInstance node2 = factory.newHazelcastInstance(config);
final IMap<Integer, Integer> map1 = node1.getMap(mapName);
for (int i = 0; i < mapSize; i++) {
map1.put(i, 0);//version 0.
map1.put(i, 1);//version 1.
map1.put(i, 2);//version 2.
map1.put(i, 3);//version 3.
}
final HazelcastInstance node3 = factory.newHazelcastInstance(config);
node1.shutdown();
node2.shutdown();
final IMap<Integer, Integer> map3 = node3.getMap(mapName);
for (int i = 0; i < mapSize; i++) {
final EntryView<Integer, Integer> entryView = map3.getEntryView(i);
assertEquals(expectedRecordVersion, entryView.getVersion());
}
}
@Test
public void testNullChecks() {
final IMap<String, String> map = getInstance().getMap("testNullChecks");
Runnable runnable;
runnable = new Runnable() { public void run() { map.containsKey(null); } };
assertRunnableThrowsNullPointerException(runnable, "containsKey(null)");
runnable = new Runnable() { public void run() { map.containsValue(null); } };
assertRunnableThrowsNullPointerException(runnable, "containsValue(null)");
runnable = new Runnable() { public void run() { map.get(null); } };
assertRunnableThrowsNullPointerException(runnable, "get(null)");
runnable = new Runnable() { public void run() { map.put(null, "value"); } };
assertRunnableThrowsNullPointerException(runnable, "put(null, \"value\")");
runnable = new Runnable() { public void run() { map.put("key", null); } };
assertRunnableThrowsNullPointerException(runnable, "put(\"key\", null)");
runnable = new Runnable() { public void run() { map.remove(null); } };
assertRunnableThrowsNullPointerException(runnable, "remove(null)");
runnable = new Runnable() { public void run() { map.remove(null, "value"); } };
assertRunnableThrowsNullPointerException(runnable, "remove(null, \"value\")");
runnable = new Runnable() { public void run() { map.remove("key", null); } };
assertRunnableThrowsNullPointerException(runnable, "remove(\"key\", null)");
runnable = new Runnable() { public void run() { map.delete(null); } };
assertRunnableThrowsNullPointerException(runnable, "delete(null)");
final Set<String> keys = new HashSet<String>();
keys.add("key");
keys.add(null);
runnable = new Runnable() { public void run() { map.getAll(keys); } };
assertRunnableThrowsNullPointerException(runnable, "remove(keys)");
runnable = new Runnable() { public void run() { map.getAsync(null); } };
assertRunnableThrowsNullPointerException(runnable, "getAsync(null)");
runnable = new Runnable() { public void run() { map.putAsync(null, "value"); } };
assertRunnableThrowsNullPointerException(runnable, "putAsync(null, \"value\")");
runnable = new Runnable() { public void run() { map.putAsync("key", null); } };
assertRunnableThrowsNullPointerException(runnable, "putAsync(\"key\", null)");
runnable = new Runnable() { public void run() { map.putAsync(null, "value", 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "putAsync(null, \"value\", 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.putAsync("key", null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "putAsync(\"key\", null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.removeAsync(null); } };
assertRunnableThrowsNullPointerException(runnable, "removeAsync(null)");
runnable = new Runnable() { public void run() { map.tryRemove(null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "tryRemove(null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.tryPut(null, "value", 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "tryPut(null, \"value\", 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.tryPut("key", null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "tryPut(\"key\", null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.putTransient(null, "value", 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "putTransient(null, \"value\", 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.putTransient("key", null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "putTransient(\"key\", null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.putIfAbsent(null, "value"); } };
assertRunnableThrowsNullPointerException(runnable, "putIfAbsent(null, \"value\")");
runnable = new Runnable() { public void run() { map.putIfAbsent("key", null); } };
assertRunnableThrowsNullPointerException(runnable, "putIfAbsent(\"key\", null)");
runnable = new Runnable() { public void run() { map.putIfAbsent(null, "value", 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "putIfAbsent(null, \"value\", 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.putIfAbsent("key", null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "putIfAbsent(\"key\", null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.replace(null, "oldValue", "newValue"); } };
assertRunnableThrowsNullPointerException(runnable, "replace(null, \"oldValue\", \"newValue\")");
runnable = new Runnable() { public void run() { map.replace("key", null, "newValue"); } };
assertRunnableThrowsNullPointerException(runnable, "replace(\"key\", null, \"newValue\")");
runnable = new Runnable() { public void run() { map.replace("key", "oldValue", null); } };
assertRunnableThrowsNullPointerException(runnable, "replace(\"key\", \"oldValue\", null)");
runnable = new Runnable() { public void run() { map.replace(null, "value"); } };
assertRunnableThrowsNullPointerException(runnable, "replace(null, \"value\")");
runnable = new Runnable() { public void run() { map.replace("key", null); } };
assertRunnableThrowsNullPointerException(runnable, "replace(\"key\", null)");
runnable = new Runnable() { public void run() { map.set(null, "value"); } };
assertRunnableThrowsNullPointerException(runnable, "set(null, \"value\")");
runnable = new Runnable() { public void run() { map.set("key", null); } };
assertRunnableThrowsNullPointerException(runnable, "set(\"key\", null)");
runnable = new Runnable() { public void run() { map.set(null, "value", 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "set(null, \"value\", 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.set("key", null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "set(\"key\", null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.lock(null); } };
assertRunnableThrowsNullPointerException(runnable, "lock(null)");
runnable = new Runnable() { public void run() { map.lock(null, 1, TimeUnit.SECONDS); } };
assertRunnableThrowsNullPointerException(runnable, "lock(null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.isLocked(null); } };
assertRunnableThrowsNullPointerException(runnable, "isLocked(null)");
runnable = new Runnable() { public void run() { map.tryLock(null); } };
assertRunnableThrowsNullPointerException(runnable, "tryLock(null)");
runnable = new Runnable() { public void run() {
try {
map.tryLock(null, 1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
} };
assertRunnableThrowsNullPointerException(runnable, "tryLock(null, 1, TimeUnit.SECONDS)");
runnable = new Runnable() { public void run() { map.unlock(null); } };
assertRunnableThrowsNullPointerException(runnable, "unlock(null)");
runnable = new Runnable() { public void run() { map.forceUnlock(null); } };
assertRunnableThrowsNullPointerException(runnable, "forceUnlock(null)");
runnable = new Runnable() { public void run() { map.getEntryView(null); } };
assertRunnableThrowsNullPointerException(runnable, "getEntryView(null)");
runnable = new Runnable() { public void run() { map.evict(null); } };
assertRunnableThrowsNullPointerException(runnable, "evict(null)");
runnable = new Runnable() { public void run() { map.executeOnKey(null, new SampleEntryProcessor()); } };
assertRunnableThrowsNullPointerException(runnable, "executeOnKey(null, entryProcessor)");
final Map<String, String> mapWithNullKey = new HashMap<String, String>();
mapWithNullKey.put("key", "value");
mapWithNullKey.put(null, "nullKey");
runnable = new Runnable() { public void run() { map.putAll(mapWithNullKey); } };
assertRunnableThrowsNullPointerException(runnable, "map.putAll(mapWithNullKey)");
final Map<String, String> mapWithNullValue = new HashMap<String, String>();
mapWithNullValue.put("key", "value");
mapWithNullValue.put("nullValue", null);
runnable = new Runnable() { public void run() { map.putAll(mapWithNullValue); } };
assertRunnableThrowsNullPointerException(runnable, "map.putAll(mapWithNullValue)");
// We need to run the putAll() tests a second time passing in a map with more than (partitionCount * 3) entries,
// because MapProxySupport#putAllInternal() takes a different code path if there are more than that many entries.
final int entryLimit = (instanceCount * 3) + 1;
for (int i = 0; i < entryLimit; i++) {
mapWithNullKey.put("key" + i, "value" + i);
}
runnable = new Runnable() { public void run() { map.putAll(mapWithNullKey); } };
assertRunnableThrowsNullPointerException(runnable, "map.putAll(mapWithNullKey)");
for (int i = 0; i < entryLimit; i++) {
mapWithNullValue.put("key" + i, "value" + i);
}
runnable = new Runnable() { public void run() { map.putAll(mapWithNullValue); } };
assertRunnableThrowsNullPointerException(runnable, "map.putAll(mapWithNullValue)");
}
public void assertRunnableThrowsNullPointerException(Runnable runnable, String description) {
boolean threwNpe = false;
try {
runnable.run();
} catch (NullPointerException npe) {
threwNpe = true;
}
assertTrue(description + " did not throw a NullPointerException.", threwNpe);
}
static class SampleEntryProcessor implements EntryProcessor, EntryBackupProcessor, Serializable {
public Object process(Map.Entry entry) {
entry.setValue((Integer) entry.getValue() + 1);
return true;
}
public EntryBackupProcessor getBackupProcessor() {
return SampleEntryProcessor.this;
}
public void processBackup(Map.Entry entry) {
entry.setValue((Integer) entry.getValue() + 1);
}
}
public static class SampleIndexableObject implements Serializable {
String name;
Integer value;
public SampleIndexableObject() {
}
public SampleIndexableObject(String name, Integer value) {
this.name = name;
this.value = value;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getValue() {
return value;
}
public void setValue(Integer value) {
this.value = value;
}
}
public static class SampleIndexableObjectMapLoader
implements MapLoader<Integer, SampleIndexableObject>, MapStoreFactory<Integer, SampleIndexableObject> {
private SampleIndexableObject[] values = new SampleIndexableObject[10];
private Set<Integer> keys = new HashSet<Integer>();
boolean preloadValues = false;
public SampleIndexableObjectMapLoader() {
for (int i = 0; i < 10; i++) {
keys.add(i);
values[i] = new SampleIndexableObject("My-" + i, i);
}
}
@Override
public SampleIndexableObject load(Integer key) {
if (!preloadValues) return null;
return values[key];
}
@Override
public Map<Integer, SampleIndexableObject> loadAll(Collection<Integer> keys) {
if (!preloadValues) return Collections.emptyMap();
Map<Integer, SampleIndexableObject> data = new HashMap<Integer, SampleIndexableObject>();
for (Integer key : keys) {
data.put(key, values[key]);
}
return data;
}
@Override
public Set<Integer> loadAllKeys() {
if (!preloadValues) return Collections.emptySet();
return Collections.unmodifiableSet(keys);
}
@Override
public MapLoader<Integer, SampleIndexableObject> newMapStore(String mapName, Properties properties) {
return this;
}
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_map_BasicMapTest.java |
775 | public class CollectionRollbackBackupOperation extends CollectionOperation implements BackupOperation {
private long itemId;
private boolean removeOperation;
public CollectionRollbackBackupOperation() {
}
public CollectionRollbackBackupOperation(String name, long itemId, boolean removeOperation) {
super(name);
this.itemId = itemId;
this.removeOperation = removeOperation;
}
@Override
public int getId() {
return CollectionDataSerializerHook.COLLECTION_ROLLBACK_BACKUP;
}
@Override
public void beforeRun() throws Exception {
}
@Override
public void run() throws Exception {
if (removeOperation) {
getOrCreateContainer().rollbackRemoveBackup(itemId);
} else {
getOrCreateContainer().rollbackAddBackup(itemId);
}
}
@Override
public void afterRun() throws Exception {
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeLong(itemId);
out.writeBoolean(removeOperation);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
itemId = in.readLong();
removeOperation = in.readBoolean();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_txn_CollectionRollbackBackupOperation.java |
1,776 | new Thread() {
final IMap map = instances[0].getMap(mapName);
public void run() {
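// Watcher thread: polls the map size once a second and records failure if the
// size ever exceeds the expected maximum (size * k) by more than 10%.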
try {
Thread.sleep(1000);
while (latch.getCount() != 0) {
try {
int msize = map.size();
if (msize > (size * k + size * k * 10 / 100)) {
success.set(false);
break;
}
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start(); | 0true
| hazelcast_src_test_java_com_hazelcast_map_EvictionTest.java |
875 | public class CountDownLatchInfo implements DataSerializable {
private String name;
private int count;
public CountDownLatchInfo() {
}
public CountDownLatchInfo(String name) {
this.name = name;
}
public int countDown() {
if (count > 0) {
count--;
}
return count;
}
public int getCount() {
return count;
}
public String getName() {
return name;
}
public boolean setCount(int count) {
if (this.count > 0 || count <= 0) {
return false;
}
this.count = count;
return true;
}
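// setCount only succeeds on an unset latch with a positive count; e.g. on a
// fresh instance setCount(3) returns true, a subsequent setCount(5) returns
// false until countDown() has brought the count back to 0.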
public void setCountDirect(int count) {
this.count = count;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(name);
out.writeInt(count);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
name = in.readUTF();
count = in.readInt();
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("LocalCountDownLatch");
sb.append("{name='").append(name).append('\'');
sb.append(", count=").append(count);
sb.append('}');
return sb.toString();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_countdownlatch_CountDownLatchInfo.java |
543 | public class TransportDeleteMappingAction extends TransportMasterNodeOperationAction<DeleteMappingRequest, DeleteMappingResponse> {
private final MetaDataMappingService metaDataMappingService;
private final TransportFlushAction flushAction;
private final TransportDeleteByQueryAction deleteByQueryAction;
private final TransportRefreshAction refreshAction;
private final DestructiveOperations destructiveOperations;
@Inject
public TransportDeleteMappingAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, MetaDataMappingService metaDataMappingService,
TransportDeleteByQueryAction deleteByQueryAction, TransportRefreshAction refreshAction,
TransportFlushAction flushAction, NodeSettingsService nodeSettingsService) {
super(settings, transportService, clusterService, threadPool);
this.metaDataMappingService = metaDataMappingService;
this.deleteByQueryAction = deleteByQueryAction;
this.refreshAction = refreshAction;
this.flushAction = flushAction;
this.destructiveOperations = new DestructiveOperations(logger, settings, nodeSettingsService);
}
@Override
protected String executor() {
// no need for fork on another thread pool, we go async right away
return ThreadPool.Names.SAME;
}
@Override
protected String transportAction() {
return DeleteMappingAction.NAME;
}
@Override
protected DeleteMappingRequest newRequest() {
return new DeleteMappingRequest();
}
@Override
protected DeleteMappingResponse newResponse() {
return new DeleteMappingResponse();
}
@Override
protected void doExecute(DeleteMappingRequest request, ActionListener<DeleteMappingResponse> listener) {
destructiveOperations.failDestructive(request.indices());
super.doExecute(request, listener);
}
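// masterOperation chains four asynchronous steps: flush the target indices,
// delete all documents of the matching types via delete-by-query, refresh,
// and finally remove the mapping metadata from the cluster state
// (removeMapping runs on both refresh success and failure).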
@Override
protected ClusterBlockException checkBlock(DeleteMappingRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices());
}
@Override
protected void masterOperation(final DeleteMappingRequest request, final ClusterState state, final ActionListener<DeleteMappingResponse> listener) throws ElasticsearchException {
request.indices(state.metaData().concreteIndices(request.indices(), request.indicesOptions()));
flushAction.execute(Requests.flushRequest(request.indices()), new ActionListener<FlushResponse>() {
@Override
public void onResponse(FlushResponse flushResponse) {
// get all types that need to be deleted.
ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> result = clusterService.state().metaData().findMappings(
request.indices(), request.types()
);
// create OrFilter with type filters within to account for different types
BoolFilterBuilder filterBuilder = new BoolFilterBuilder();
Set<String> types = new HashSet<String>();
for (ObjectObjectCursor<String, ImmutableOpenMap<String, MappingMetaData>> typesMeta : result) {
for (ObjectObjectCursor<String, MappingMetaData> type : typesMeta.value) {
filterBuilder.should(new TypeFilterBuilder(type.key));
types.add(type.key);
}
}
if (types.size() == 0) {
throw new TypeMissingException(new Index("_all"), request.types(), "No index has the type.");
}
request.types(types.toArray(new String[types.size()]));
QuerySourceBuilder querySourceBuilder = new QuerySourceBuilder()
.setQuery(QueryBuilders.filteredQuery(QueryBuilders.matchAllQuery(), filterBuilder));
deleteByQueryAction.execute(Requests.deleteByQueryRequest(request.indices()).source(querySourceBuilder), new ActionListener<DeleteByQueryResponse>() {
@Override
public void onResponse(DeleteByQueryResponse deleteByQueryResponse) {
refreshAction.execute(Requests.refreshRequest(request.indices()), new ActionListener<RefreshResponse>() {
@Override
public void onResponse(RefreshResponse refreshResponse) {
removeMapping();
}
@Override
public void onFailure(Throwable e) {
removeMapping();
}
protected void removeMapping() {
DeleteMappingClusterStateUpdateRequest clusterStateUpdateRequest = new DeleteMappingClusterStateUpdateRequest()
.indices(request.indices()).types(request.types())
.ackTimeout(request.timeout())
.masterNodeTimeout(request.masterNodeTimeout());
metaDataMappingService.removeMapping(clusterStateUpdateRequest, new ClusterStateUpdateListener() {
@Override
public void onResponse(ClusterStateUpdateResponse response) {
listener.onResponse(new DeleteMappingResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable t) {
listener.onFailure(t);
}
});
}
});
}
@Override
public void onFailure(Throwable t) {
listener.onFailure(t);
}
});
}
@Override
public void onFailure(Throwable t) {
listener.onFailure(t);
}
});
}
} | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_TransportDeleteMappingAction.java |
411 | trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
changed.value = true;
}
}); | 0true
| core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedListTest.java |
154 | class RefineFormalMembersProposal implements ICompletionProposal,
ICompletionProposalExtension6 {
private final Tree.CompilationUnit rootNode;
private final String description;
private Node node;
public RefineFormalMembersProposal(Node node,
Tree.CompilationUnit rootNode,
String description) {
this.node = node;
this.description = description;
this.rootNode = rootNode;
}
@Override
public Point getSelection(IDocument doc) {
return null;
}
@Override
public Image getImage() {
return FORMAL_REFINEMENT;
}
@Override
public String getDisplayString() {
return description;
}
@Override
public StyledString getStyledDisplayString() {
return Highlights.styleProposal(getDisplayString(), false);
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
//TODO: list the members that will be refined!
return null;
}
@Override
public void apply(IDocument doc) {
try {
refineFormalMembers(doc);
}
catch (ExecutionException e) {
e.printStackTrace();
}
}
private void refineFormalMembers(IDocument document)
throws ExecutionException {
if (rootNode==null) return;
TextChange change =
new DocumentChange("Refine Members", document);
change.setEdit(new MultiTextEdit());
//TODO: copy/pasted from CeylonQuickFixAssistant
Tree.Body body;
int offset;
if (node instanceof Tree.ClassDefinition) {
body = ((Tree.ClassDefinition) node).getClassBody();
offset = -1;
}
else if (node instanceof Tree.InterfaceDefinition) {
body = ((Tree.InterfaceDefinition) node).getInterfaceBody();
offset = -1;
}
else if (node instanceof Tree.ObjectDefinition) {
body = ((Tree.ObjectDefinition) node).getClassBody();
offset = -1;
}
else if (node instanceof Tree.ClassBody ||
node instanceof Tree.InterfaceBody) {
body = (Tree.Body) node;
IEditorPart editor = EditorUtil.getCurrentEditor();
if (editor instanceof CeylonEditor) {
offset = ((CeylonEditor) editor).getSelection().getOffset();
}
else {
offset = -1;
}
}
else {
//TODO run a visitor to find the containing body!
return; //TODO popup error dialog
}
boolean isInterface = body instanceof Tree.InterfaceBody;
List<Statement> statements = body.getStatements();
String indent;
// String bodyIndent = getIndent(body, document);
String bodyIndent = getIndent(node, document);
String delim = getDefaultLineDelimiter(document);
if (statements.isEmpty()) {
indent = delim + bodyIndent + getDefaultIndent();
if (offset<0) offset = body.getStartIndex()+1;
}
else {
Statement statement = statements.get(statements.size()-1);
indent = delim + getIndent(statement, document);
if (offset<0) offset = statement.getStopIndex()+1;
}
StringBuilder result = new StringBuilder();
Set<Declaration> already = new HashSet<Declaration>();
ClassOrInterface ci = (ClassOrInterface) node.getScope();
Unit unit = node.getUnit();
Set<String> ambiguousNames = new HashSet<String>();
//TODO: does not return unrefined overloaded
// versions of a method with one overload
// already refined
Collection<DeclarationWithProximity> proposals =
getProposals(node, ci, rootNode).values();
for (DeclarationWithProximity dwp: proposals) {
Declaration dec = dwp.getDeclaration();
for (Declaration d: overloads(dec)) {
try {
if (d.isFormal() &&
ci.isInheritedFromSupertype(d)) {
appendRefinementText(isInterface, indent, result, ci, unit, d);
importSignatureTypes(d, rootNode, already);
ambiguousNames.add(d.getName());
}
}
catch (Exception e) {
e.printStackTrace();
}
}
}
for (TypeDeclaration superType: ci.getSupertypeDeclarations()) {
for (Declaration m: superType.getMembers()) {
try {
if (m.isShared()) {
Declaration r = ci.getMember(m.getName(), null, false);
if ((r==null ||
!r.refines(m) &&
!r.getContainer().equals(ci)) &&
ambiguousNames.add(m.getName())) {
appendRefinementText(isInterface, indent, result, ci, unit, m);
importSignatureTypes(m, rootNode, already);
}
}
}
catch (Exception e) {
e.printStackTrace();
}
}
}
try {
if (document.getChar(offset)=='}' && result.length()>0) {
result.append(delim).append(bodyIndent);
}
}
catch (BadLocationException e) {
e.printStackTrace();
}
applyImports(change, already, rootNode, document);
change.addEdit(new InsertEdit(offset, result.toString()));
change.initializeValidationData(null);
try {
getWorkspace().run(new PerformChangeOperation(change),
new NullProgressMonitor());
}
catch (CoreException ce) {
throw new ExecutionException("Error cleaning imports", ce);
}
}
private void appendRefinementText(boolean isInterface, String indent,
StringBuilder result, ClassOrInterface ci, Unit unit,
Declaration member) {
ProducedReference pr = getRefinedProducedReference(ci, member);
String rtext = getRefinementTextFor(member, pr, unit,
isInterface, ci, indent, true);
result.append(indent).append(rtext).append(indent);
}
static void addRefineFormalMembersProposal(Collection<ICompletionProposal> proposals,
Node node, Tree.CompilationUnit rootNode, boolean ambiguousError) {
for (ICompletionProposal p: proposals) {
if (p instanceof RefineFormalMembersProposal) {
return;
}
}
if (node instanceof Tree.ClassBody ||
node instanceof Tree.InterfaceBody ||
node instanceof Tree.ClassDefinition ||
node instanceof Tree.InterfaceDefinition ||
node instanceof Tree.ObjectDefinition) {
Scope scope = node.getScope();
if (scope instanceof ClassOrInterface) {
ClassOrInterface ci = (ClassOrInterface) scope;
String desc = ambiguousError ?
"Refine inherited ambiguous and formal members of '" + ci.getName() + "'":
"Refine inherited formal members of '" + ci.getName() + "'";
proposals.add(new RefineFormalMembersProposal(node, rootNode, desc));
}
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_RefineFormalMembersProposal.java |
3,661 | public static class Builder extends NumberFieldMapper.Builder<Builder, BoostFieldMapper> {
protected Float nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name, new FieldType(Defaults.FIELD_TYPE));
builder = this;
}
public Builder nullValue(float nullValue) {
this.nullValue = nullValue;
return this;
}
@Override
public BoostFieldMapper build(BuilderContext context) {
return new BoostFieldMapper(name, buildIndexName(context),
precisionStep, boost, fieldType, docValues, nullValue, postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_internal_BoostFieldMapper.java |
1,080 | public class MulticastConfig {
public final static boolean DEFAULT_ENABLED = true;
public final static String DEFAULT_MULTICAST_GROUP = "224.2.2.3";
public final static int DEFAULT_MULTICAST_PORT = 54327;
public final static int DEFAULT_MULTICAST_TIMEOUT_SECONDS = 2;
public final static int DEFAULT_MULTICAST_TTL = 32;
private boolean enabled = DEFAULT_ENABLED;
private String multicastGroup = DEFAULT_MULTICAST_GROUP;
private int multicastPort = DEFAULT_MULTICAST_PORT;
private int multicastTimeoutSeconds = DEFAULT_MULTICAST_TIMEOUT_SECONDS;
private int multicastTimeToLive = DEFAULT_MULTICAST_TTL;
private final Set<String> trustedInterfaces = new HashSet<String>();
/**
* Check if the multicast discovery mechanism has been enabled.
*
* @return the enabled
*/
public boolean isEnabled() {
return enabled;
}
/**
* Enables or disables the multicast discovery mechanism
*
* @param enabled the enabled to set: true when enabled, false when disabled.
* @return the updated MulticastConfig
*/
public MulticastConfig setEnabled(boolean enabled) {
this.enabled = enabled;
return this;
}
/**
* Gets the multicast group.
*
* @return the multicastGroup
*/
public String getMulticastGroup() {
return multicastGroup;
}
/**
* Sets the multicast-group.
*
* @param multicastGroup the multicastGroup to set
* @return the updated MulticastConfig
* @throws IllegalArgumentException if multicastGroup is null or empty.
* @see #getMulticastGroup()
* @see #setMulticastPort(int)
*/
public MulticastConfig setMulticastGroup(String multicastGroup) {
this.multicastGroup = hasText(multicastGroup, "multicastGroup");
return this;
}
/**
* Gets the multicast port.
*
* @return the multicastPort
* @see #setMulticastPort(int)
*/
public int getMulticastPort() {
return multicastPort;
}
/**
* Sets the multicast port.
*
* @param multicastPort the multicastPort to set
* @return the updated MulticastConfig
* @see #getMulticastPort()
* @see #setMulticastGroup(String)
* @throws IllegalArgumentException if multicastPort is smaller than 0.
*/
public MulticastConfig setMulticastPort(int multicastPort) {
if (multicastPort < 0) {
throw new IllegalArgumentException("multicastPort can't be smaller than 0");
}
this.multicastPort = multicastPort;
return this;
}
/**
* Gets the multicast timeout in seconds.
*
* @return the multicastTimeoutSeconds
* @see #setMulticastTimeoutSeconds(int)
*/
public int getMulticastTimeoutSeconds() {
return multicastTimeoutSeconds;
}
/**
* Specifies the time in seconds that a node should wait for a valid multicast response from another node running
* in the network before declaring itself as master node and creating its own cluster. This applies only to the startup
* of nodes where no master has been assigned yet. If you specify a high value such as 60 seconds, each node
* waits up to 60 seconds before continuing until a master is selected, so be careful with high values. If
* the value is set too low, nodes may give up too early and create their own cluster.
*
* @param multicastTimeoutSeconds the multicastTimeoutSeconds to set
* @return the updated MulticastConfig
* @see #getMulticastTimeoutSeconds()
*/
public MulticastConfig setMulticastTimeoutSeconds(int multicastTimeoutSeconds) {
this.multicastTimeoutSeconds = multicastTimeoutSeconds;
return this;
}
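// Usage sketch (the accessor chain below assumes the standard Hazelcast
// Config/JoinConfig API and is illustrative only):
// Config config = new Config();
// MulticastConfig mc = config.getNetworkConfig().getJoin().getMulticastConfig();
// mc.setEnabled(true)
//    .setMulticastGroup(DEFAULT_MULTICAST_GROUP)
//    .setMulticastPort(DEFAULT_MULTICAST_PORT)
//    .setMulticastTimeoutSeconds(10); // wait up to 10s for an existing master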
/**
* Gets the trusted interfaces.
*
* @return the trusted interface.
* @see #setTrustedInterfaces(java.util.Set)
*/
public Set<String> getTrustedInterfaces() {
return trustedInterfaces;
}
/**
* Sets the trusted interfaces.
*
* By default, when the set of trusted interfaces is empty, a Hazelcast member accepts join requests
* from every member. With trusted interfaces you can control which members' join requests are
* accepted.
*
* The interface is an ip address where the last octet can be a wildcard '*' or a range '10-20'.
*
* @param interfaces the new trusted interfaces.
* @return the updated MulticastConfig.
* @throws IllegalArgumentException if interfaces is null.
*/
public MulticastConfig setTrustedInterfaces(Set<String> interfaces) {
isNotNull(interfaces,"interfaces");
trustedInterfaces.clear();
trustedInterfaces.addAll(interfaces);
return this;
}
/**
* Adds a trusted interface.
*
* @param ip the ip of the trusted interface.
* @return the updated MulticastConfig.
* @throws IllegalArgumentException if ip is null.
* @see #setTrustedInterfaces(java.util.Set)
*/
public MulticastConfig addTrustedInterface(final String ip) {
trustedInterfaces.add(isNotNull(ip,"ip"));
return this;
}
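// Illustrative values only: the last octet may be a wildcard or a range, e.g.
// multicastConfig.addTrustedInterface("192.168.1.*");     // any host on 192.168.1.x
// multicastConfig.addTrustedInterface("192.168.2.10-20"); // hosts .10 through .20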
/**
* Gets the time to live of the multicast package.
*
* @return the time to live
* @see java.net.MulticastSocket#setTimeToLive(int)
* @see #setMulticastTimeToLive(int)
*/
public int getMulticastTimeToLive() {
return multicastTimeToLive;
}
/**
* Sets the time to live for the multicast package; a value between 0..255.
*
* See this <a href="http://www.tldp.org/HOWTO/Multicast-HOWTO-2.html">link</a> for more information.
*
* @param multicastTimeToLive the time to live.
* @return the updated MulticastConfig.
* @throws IllegalArgumentException if time to live is smaller than 0 or larger than 255.
* @see #getMulticastTimeToLive()
* @see java.net.MulticastSocket#setTimeToLive(int)
*/
public MulticastConfig setMulticastTimeToLive(final int multicastTimeToLive) {
if (multicastTimeToLive < 0 || multicastTimeToLive > 255) {
throw new IllegalArgumentException("multicastTimeToLive out of range");
}
this.multicastTimeToLive = multicastTimeToLive;
return this;
}
@Override
public String toString() {
return "MulticastConfig [enabled=" + enabled
+ ", multicastGroup=" + multicastGroup
+ ", multicastPort=" + multicastPort
+ ", multicastTimeToLive=" + multicastTimeToLive
+ ", multicastTimeoutSeconds=" + multicastTimeoutSeconds
+ ", trustedInterfaces=" + trustedInterfaces +
"]";
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_config_MulticastConfig.java |
2,355 | public class PortsRange {
private final String portRange;
public PortsRange(String portRange) {
this.portRange = portRange;
}
public int[] ports() throws NumberFormatException {
final IntArrayList ports = new IntArrayList();
iterate(new PortCallback() {
@Override
public boolean onPortNumber(int portNumber) {
ports.add(portNumber);
return false;
}
});
return ports.toArray();
}
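// Usage sketch (illustrative range spec only):
// int[] resolved = new PortsRange("9300-9302,9305").ports();
// // resolved -> [9300, 9301, 9302, 9305]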
public boolean iterate(PortCallback callback) throws NumberFormatException {
StringTokenizer st = new StringTokenizer(portRange, ",");
boolean success = false;
while (st.hasMoreTokens() && !success) {
String portToken = st.nextToken().trim();
int index = portToken.indexOf('-');
if (index == -1) {
int portNumber = Integer.parseInt(portToken.trim());
success = callback.onPortNumber(portNumber);
if (success) {
break;
}
} else {
int startPort = Integer.parseInt(portToken.substring(0, index).trim());
int endPort = Integer.parseInt(portToken.substring(index + 1).trim());
if (endPort < startPort) {
throw new IllegalArgumentException("Start port [" + startPort + "] must be greater than end port [" + endPort + "]");
}
for (int i = startPort; i <= endPort; i++) {
success = callback.onPortNumber(i);
if (success) {
break;
}
}
}
}
return success;
}
public static interface PortCallback {
boolean onPortNumber(int portNumber);
}
} | 0true
| src_main_java_org_elasticsearch_common_transport_PortsRange.java |
707 | public class TransportShardBulkAction extends TransportShardReplicationOperationAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {
private final MappingUpdatedAction mappingUpdatedAction;
private final UpdateHelper updateHelper;
private final boolean allowIdGeneration;
@Inject
public TransportShardBulkAction(Settings settings, TransportService transportService, ClusterService clusterService,
IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
MappingUpdatedAction mappingUpdatedAction, UpdateHelper updateHelper) {
super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);
this.mappingUpdatedAction = mappingUpdatedAction;
this.updateHelper = updateHelper;
this.allowIdGeneration = settings.getAsBoolean("action.allow_id_generation", true);
}
@Override
protected String executor() {
return ThreadPool.Names.BULK;
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected TransportRequestOptions transportOptions() {
return BulkAction.INSTANCE.transportOptions(settings);
}
@Override
protected BulkShardRequest newRequestInstance() {
return new BulkShardRequest();
}
@Override
protected BulkShardRequest newReplicaRequestInstance() {
return new BulkShardRequest();
}
@Override
protected BulkShardResponse newResponseInstance() {
return new BulkShardResponse();
}
@Override
protected String transportAction() {
return BulkAction.NAME + "/shard";
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, BulkShardRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, BulkShardRequest request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, request.index());
}
@Override
protected ShardIterator shards(ClusterState clusterState, BulkShardRequest request) {
return clusterState.routingTable().index(request.index()).shard(request.shardId()).shardsIt();
}
@Override
protected PrimaryResponse<BulkShardResponse, BulkShardRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
final BulkShardRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.IndexingOperation[] ops = null;
Set<Tuple<String, String>> mappingsToUpdate = null;
BulkItemResponse[] responses = new BulkItemResponse[request.items().length];
long[] preVersions = new long[request.items().length];
for (int requestIndex = 0; requestIndex < request.items().length; requestIndex++) {
BulkItemRequest item = request.items()[requestIndex];
if (item.request() instanceof IndexRequest) {
IndexRequest indexRequest = (IndexRequest) item.request();
try {
WriteResult result = shardIndexOperation(request, indexRequest, clusterState, indexShard, true);
// add the response
IndexResponse indexResponse = result.response();
responses[requestIndex] = new BulkItemResponse(item.id(), indexRequest.opType().lowercase(), indexResponse);
preVersions[requestIndex] = result.preVersion;
if (result.mappingToUpdate != null) {
if (mappingsToUpdate == null) {
mappingsToUpdate = Sets.newHashSet();
}
mappingsToUpdate.add(result.mappingToUpdate);
}
if (result.op != null) {
if (ops == null) {
ops = new Engine.IndexingOperation[request.items().length];
}
ops[requestIndex] = result.op;
}
} catch (Throwable e) {
// rethrow the failure if we are going to retry on primary and let parent failure to handle it
if (retryPrimaryException(e)) {
// restore updated versions...
for (int j = 0; j < requestIndex; j++) {
applyVersion(request.items()[j], preVersions[j]);
}
throw (ElasticsearchException) e;
}
if (e instanceof ElasticsearchException && ((ElasticsearchException) e).status() == RestStatus.CONFLICT) {
logger.trace("[{}][{}] failed to execute bulk item (index) {}", e, shardRequest.request.index(), shardRequest.shardId, indexRequest);
} else {
logger.debug("[{}][{}] failed to execute bulk item (index) {}", e, shardRequest.request.index(), shardRequest.shardId, indexRequest);
}
responses[requestIndex] = new BulkItemResponse(item.id(), indexRequest.opType().lowercase(),
new BulkItemResponse.Failure(indexRequest.index(), indexRequest.type(), indexRequest.id(), e));
// nullify the request so it won't execute on the replicas
request.items()[requestIndex] = null;
}
} else if (item.request() instanceof DeleteRequest) {
DeleteRequest deleteRequest = (DeleteRequest) item.request();
try {
// add the response
DeleteResponse deleteResponse = shardDeleteOperation(deleteRequest, indexShard).response();
responses[requestIndex] = new BulkItemResponse(item.id(), "delete", deleteResponse);
} catch (Throwable e) {
// rethrow the failure if we are going to retry on primary and let parent failure to handle it
if (retryPrimaryException(e)) {
// restore updated versions...
for (int j = 0; j < requestIndex; j++) {
applyVersion(request.items()[j], preVersions[j]);
}
throw (ElasticsearchException) e;
}
if (e instanceof ElasticsearchException && ((ElasticsearchException) e).status() == RestStatus.CONFLICT) {
logger.trace("[{}][{}] failed to execute bulk item (delete) {}", e, shardRequest.request.index(), shardRequest.shardId, deleteRequest);
} else {
logger.debug("[{}][{}] failed to execute bulk item (delete) {}", e, shardRequest.request.index(), shardRequest.shardId, deleteRequest);
}
responses[requestIndex] = new BulkItemResponse(item.id(), "delete",
new BulkItemResponse.Failure(deleteRequest.index(), deleteRequest.type(), deleteRequest.id(), e));
// nullify the request so it won't execute on the replicas
request.items()[requestIndex] = null;
}
} else if (item.request() instanceof UpdateRequest) {
UpdateRequest updateRequest = (UpdateRequest) item.request();
// We need to do the requested retries plus the initial attempt. We don't do < 1+retry_on_conflict because retry_on_conflict may be Integer.MAX_VALUE
for (int updateAttemptsCount = 0; updateAttemptsCount <= updateRequest.retryOnConflict(); updateAttemptsCount++) {
UpdateResult updateResult;
try {
updateResult = shardUpdateOperation(clusterState, request, updateRequest, indexShard);
} catch (Throwable t) {
updateResult = new UpdateResult(null, null, false, t, null);
}
if (updateResult.success()) {
switch (updateResult.result.operation()) {
case UPSERT:
case INDEX:
WriteResult result = updateResult.writeResult;
IndexRequest indexRequest = updateResult.request();
BytesReference indexSourceAsBytes = indexRequest.source();
// add the response
IndexResponse indexResponse = result.response();
UpdateResponse updateResponse = new UpdateResponse(indexResponse.getIndex(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.isCreated());
if (updateRequest.fields() != null && updateRequest.fields().length > 0) {
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true);
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
}
responses[requestIndex] = new BulkItemResponse(item.id(), "update", updateResponse);
preVersions[requestIndex] = result.preVersion;
if (result.mappingToUpdate != null) {
if (mappingsToUpdate == null) {
mappingsToUpdate = Sets.newHashSet();
}
mappingsToUpdate.add(result.mappingToUpdate);
}
if (result.op != null) {
if (ops == null) {
ops = new Engine.IndexingOperation[request.items().length];
}
ops[requestIndex] = result.op;
}
// Replace the update request with the translated index request to execute on the replica.
request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), indexRequest);
break;
case DELETE:
DeleteResponse response = updateResult.writeResult.response();
DeleteRequest deleteRequest = updateResult.request();
updateResponse = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), false);
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null));
responses[requestIndex] = new BulkItemResponse(item.id(), "update", updateResponse);
// Replace the update request with the translated delete request to execute on the replica.
request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest);
break;
case NONE:
responses[requestIndex] = new BulkItemResponse(item.id(), "update", updateResult.noopResult);
request.items()[requestIndex] = null; // No need to go to the replica
break;
}
// NOTE: Breaking out of the retry_on_conflict loop!
break;
} else if (updateResult.failure()) {
Throwable t = updateResult.error;
if (updateResult.retry) {
// updateAttemptsCount is 0-based and marks the current attempt; once it equals retryOnConflict we are out of retries
if (updateAttemptsCount >= updateRequest.retryOnConflict()) {
// we can't try any more
responses[requestIndex] = new BulkItemResponse(item.id(), "update",
new BulkItemResponse.Failure(updateRequest.index(), updateRequest.type(), updateRequest.id(), t));
request.items()[requestIndex] = null; // do not send to replicas
}
} else {
// rethrow the failure if we are going to retry on primary and let parent failure to handle it
if (retryPrimaryException(t)) {
// restore updated versions...
for (int j = 0; j < requestIndex; j++) {
applyVersion(request.items()[j], preVersions[j]);
}
throw (ElasticsearchException) t;
}
if (updateResult.result == null) {
responses[requestIndex] = new BulkItemResponse(item.id(), "update", new BulkItemResponse.Failure(updateRequest.index(), updateRequest.type(), updateRequest.id(), t));
} else {
switch (updateResult.result.operation()) {
case UPSERT:
case INDEX:
IndexRequest indexRequest = updateResult.request();
if (t instanceof ElasticsearchException && ((ElasticsearchException) t).status() == RestStatus.CONFLICT) {
logger.trace("[{}][{}] failed to execute bulk item (index) {}", t, shardRequest.request.index(), shardRequest.shardId, indexRequest);
} else {
logger.debug("[{}][{}] failed to execute bulk item (index) {}", t, shardRequest.request.index(), shardRequest.shardId, indexRequest);
}
responses[requestIndex] = new BulkItemResponse(item.id(), indexRequest.opType().lowercase(),
new BulkItemResponse.Failure(indexRequest.index(), indexRequest.type(), indexRequest.id(), t));
break;
case DELETE:
DeleteRequest deleteRequest = updateResult.request();
if (t instanceof ElasticsearchException && ((ElasticsearchException) t).status() == RestStatus.CONFLICT) {
logger.trace("[{}][{}] failed to execute bulk item (delete) {}", t, shardRequest.request.index(), shardRequest.shardId, deleteRequest);
} else {
logger.debug("[{}][{}] failed to execute bulk item (delete) {}", t, shardRequest.request.index(), shardRequest.shardId, deleteRequest);
}
responses[requestIndex] = new BulkItemResponse(item.id(), "delete",
new BulkItemResponse.Failure(deleteRequest.index(), deleteRequest.type(), deleteRequest.id(), t));
break;
}
}
// nullify the request so it won't execute on the replicas
request.items()[requestIndex] = null;
// NOTE: Breaking out of the retry_on_conflict loop!
break;
}
}
}
}
assert responses[requestIndex] != null; // we must have set a response somewhere.
}
if (mappingsToUpdate != null) {
for (Tuple<String, String> mappingToUpdate : mappingsToUpdate) {
updateMappingOnMaster(mappingToUpdate.v1(), mappingToUpdate.v2());
}
}
if (request.refresh()) {
try {
indexShard.refresh(new Engine.Refresh("refresh_flag_bulk").force(false));
} catch (Throwable e) {
// ignore
}
}
BulkShardResponse response = new BulkShardResponse(new ShardId(request.index(), request.shardId()), responses);
return new PrimaryResponse<BulkShardResponse, BulkShardRequest>(shardRequest.request, response, ops);
}
static class WriteResult {
final Object response;
final long preVersion;
final Tuple<String, String> mappingToUpdate;
final Engine.IndexingOperation op;
WriteResult(Object response, long preVersion, Tuple<String, String> mappingToUpdate, Engine.IndexingOperation op) {
this.response = response;
this.preVersion = preVersion;
this.mappingToUpdate = mappingToUpdate;
this.op = op;
}
@SuppressWarnings("unchecked")
<T> T response() {
return (T) response;
}
}
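// WriteResult bundles everything the primary needs after one item executes:
// the item response, the version before the write (restored if the whole bulk
// retries on the primary), an optional (index, type) pair whose mapping must
// be updated on the master, and the engine operation that accompanies the
// primary response.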
private WriteResult shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState,
IndexShard indexShard, boolean processed) {
// validate, if routing is required, that we got routing
MappingMetaData mappingMd = clusterState.metaData().index(request.index()).mappingOrDefault(indexRequest.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (indexRequest.routing() == null) {
throw new RoutingMissingException(indexRequest.index(), indexRequest.type(), indexRequest.id());
}
}
if (!processed) {
indexRequest.process(clusterState.metaData(), indexRequest.index(), mappingMd, allowIdGeneration);
}
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, indexRequest.source()).type(indexRequest.type()).id(indexRequest.id())
.routing(indexRequest.routing()).parent(indexRequest.parent()).timestamp(indexRequest.timestamp()).ttl(indexRequest.ttl());
long version;
boolean created;
Engine.IndexingOperation op;
if (indexRequest.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse).version(indexRequest.version()).versionType(indexRequest.versionType()).origin(Engine.Operation.Origin.PRIMARY);
indexShard.index(index);
version = index.version();
op = index;
created = index.created();
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse).version(indexRequest.version()).versionType(indexRequest.versionType()).origin(Engine.Operation.Origin.PRIMARY);
indexShard.create(create);
version = create.version();
op = create;
created = true;
}
long preVersion = indexRequest.version();
// update the version on request so it will happen on the replicas
indexRequest.version(version);
// update mapping on master if needed; we won't update the same type twice, since once it's changed it won't have mappers added
Tuple<String, String> mappingsToUpdate = null;
if (op.parsedDoc().mappingsModified()) {
mappingsToUpdate = Tuple.tuple(indexRequest.index(), indexRequest.type());
}
IndexResponse indexResponse = new IndexResponse(indexRequest.index(), indexRequest.type(), indexRequest.id(), version, created);
return new WriteResult(indexResponse, preVersion, mappingsToUpdate, op);
}
private WriteResult shardDeleteOperation(DeleteRequest deleteRequest, IndexShard indexShard) {
Engine.Delete delete = indexShard.prepareDelete(deleteRequest.type(), deleteRequest.id(), deleteRequest.version()).versionType(deleteRequest.versionType()).origin(Engine.Operation.Origin.PRIMARY);
indexShard.delete(delete);
// update the request with the version so it will go to the replicas
deleteRequest.version(delete.version());
DeleteResponse deleteResponse = new DeleteResponse(deleteRequest.index(), deleteRequest.type(), deleteRequest.id(), delete.version(), delete.found());
return new WriteResult(deleteResponse, deleteRequest.version(), null, null);
}
static class UpdateResult {
final UpdateHelper.Result result;
final ActionRequest actionRequest;
final boolean retry;
final Throwable error;
final WriteResult writeResult;
final UpdateResponse noopResult;
UpdateResult(UpdateHelper.Result result, ActionRequest actionRequest, boolean retry, Throwable error, WriteResult writeResult) {
this.result = result;
this.actionRequest = actionRequest;
this.retry = retry;
this.error = error;
this.writeResult = writeResult;
this.noopResult = null;
}
UpdateResult(UpdateHelper.Result result, ActionRequest actionRequest, WriteResult writeResult) {
this.result = result;
this.actionRequest = actionRequest;
this.writeResult = writeResult;
this.retry = false;
this.error = null;
this.noopResult = null;
}
public UpdateResult(UpdateHelper.Result result, UpdateResponse updateResponse) {
this.result = result;
this.noopResult = updateResponse;
this.actionRequest = null;
this.writeResult = null;
this.retry = false;
this.error = null;
}
boolean failure() {
return error != null;
}
boolean success() {
return noopResult != null || writeResult != null;
}
@SuppressWarnings("unchecked")
<T extends ActionRequest> T request() {
return (T) actionRequest;
}
}
private UpdateResult shardUpdateOperation(ClusterState clusterState, BulkShardRequest bulkShardRequest, UpdateRequest updateRequest, IndexShard indexShard) {
UpdateHelper.Result translate = updateHelper.prepare(updateRequest, indexShard);
switch (translate.operation()) {
case UPSERT:
case INDEX:
IndexRequest indexRequest = translate.action();
try {
WriteResult result = shardIndexOperation(bulkShardRequest, indexRequest, clusterState, indexShard, false);
return new UpdateResult(translate, indexRequest, result);
} catch (Throwable t) {
t = ExceptionsHelper.unwrapCause(t);
boolean retry = false;
if (t instanceof VersionConflictEngineException || (t instanceof DocumentAlreadyExistsException && translate.operation() == UpdateHelper.Operation.UPSERT)) {
retry = true;
}
return new UpdateResult(translate, indexRequest, retry, t, null);
}
case DELETE:
DeleteRequest deleteRequest = translate.action();
try {
WriteResult result = shardDeleteOperation(deleteRequest, indexShard);
return new UpdateResult(translate, deleteRequest, result);
} catch (Throwable t) {
t = ExceptionsHelper.unwrapCause(t);
boolean retry = false;
if (t instanceof VersionConflictEngineException) {
retry = true;
}
return new UpdateResult(translate, deleteRequest, retry, t, null);
}
case NONE:
UpdateResponse updateResponse = translate.action();
return new UpdateResult(translate, updateResponse);
default:
throw new ElasticsearchIllegalStateException("Illegal update operation " + translate.operation());
}
}
protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
final BulkShardRequest request = shardRequest.request;
for (int i = 0; i < request.items().length; i++) {
BulkItemRequest item = request.items()[i];
if (item == null) {
continue;
}
if (item.request() instanceof IndexRequest) {
IndexRequest indexRequest = (IndexRequest) item.request();
try {
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, indexRequest.source()).type(indexRequest.type()).id(indexRequest.id())
.routing(indexRequest.routing()).parent(indexRequest.parent()).timestamp(indexRequest.timestamp()).ttl(indexRequest.ttl());
if (indexRequest.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse).version(indexRequest.version()).origin(Engine.Operation.Origin.REPLICA);
indexShard.index(index);
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse).version(indexRequest.version()).origin(Engine.Operation.Origin.REPLICA);
indexShard.create(create);
}
} catch (Throwable e) {
// ignore, we are on backup
}
} else if (item.request() instanceof DeleteRequest) {
DeleteRequest deleteRequest = (DeleteRequest) item.request();
try {
Engine.Delete delete = indexShard.prepareDelete(deleteRequest.type(), deleteRequest.id(), deleteRequest.version()).origin(Engine.Operation.Origin.REPLICA);
indexShard.delete(delete);
} catch (Throwable e) {
// ignore, we are on backup
}
}
}
if (request.refresh()) {
try {
indexShard.refresh(new Engine.Refresh("refresh_flag_bulk").force(false));
} catch (Throwable e) {
// ignore
}
}
}
private void updateMappingOnMaster(final String index, final String type) {
try {
MapperService mapperService = indicesService.indexServiceSafe(index).mapperService();
final DocumentMapper documentMapper = mapperService.documentMapper(type);
if (documentMapper == null) { // should not happen
return;
}
IndexMetaData metaData = clusterService.state().metaData().index(index);
if (metaData == null) {
return;
}
// we generate the order id before we get the mapping to send and refresh the source, so
// if two updates happen concurrently, we know that the later order will include the previous one
long orderId = mappingUpdatedAction.generateNextMappingUpdateOrder();
documentMapper.refreshSource();
DiscoveryNode node = clusterService.localNode();
final MappingUpdatedAction.MappingUpdatedRequest request = new MappingUpdatedAction.MappingUpdatedRequest(index, metaData.uuid(), type, documentMapper.mappingSource(), orderId, node != null ? node.id() : null);
mappingUpdatedAction.execute(request, new ActionListener<MappingUpdatedAction.MappingUpdatedResponse>() {
@Override
public void onResponse(MappingUpdatedAction.MappingUpdatedResponse mappingUpdatedResponse) {
// all is well
}
@Override
public void onFailure(Throwable e) {
logger.warn("failed to update master on updated mapping for {}", e, request);
}
});
} catch (Throwable e) {
logger.warn("failed to update master on updated mapping for index [{}], type [{}]", e, index, type);
}
}
private void applyVersion(BulkItemRequest item, long version) {
if (item.request() instanceof IndexRequest) {
((IndexRequest) item.request()).version(version);
} else if (item.request() instanceof DeleteRequest) {
((DeleteRequest) item.request()).version(version);
} else {
// log?
}
}
} | 1no label
| src_main_java_org_elasticsearch_action_bulk_TransportShardBulkAction.java |
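The shardUpdateOperation logic in the record above retries an update only for specific failures. A minimal sketch of that retry rule, using hypothetical stand-in types rather than the real engine exception classes:

// Hypothetical stand-ins for the engine exceptions referenced in the record.
class VersionConflict extends RuntimeException {}
class DocAlreadyExists extends RuntimeException {}

enum UpdateOp { UPSERT, INDEX, DELETE, NONE }

final class RetryRule {
    // Version conflicts always retry; "document already exists" retries
    // only when the translated operation was an upsert.
    static boolean shouldRetry(Throwable t, UpdateOp op) {
        return t instanceof VersionConflict
                || (t instanceof DocAlreadyExists && op == UpdateOp.UPSERT);
    }

    public static void main(String[] args) {
        System.out.println(shouldRetry(new VersionConflict(), UpdateOp.INDEX));   // true
        System.out.println(shouldRetry(new DocAlreadyExists(), UpdateOp.INDEX));  // false
        System.out.println(shouldRetry(new DocAlreadyExists(), UpdateOp.UPSERT)); // true
    }
}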
1,076 | @Service("blFulfillmentOptionService")
@Transactional("blTransactionManager")
public class FulfillmentOptionServiceImpl implements FulfillmentOptionService {
@Resource(name = "blFulfillmentOptionDao")
FulfillmentOptionDao fulfillmentOptionDao;
@Override
public FulfillmentOption readFulfillmentOptionById(Long fulfillmentOptionId) {
return fulfillmentOptionDao.readFulfillmentOptionById(fulfillmentOptionId);
}
@Override
public FulfillmentOption save(FulfillmentOption option) {
return fulfillmentOptionDao.save(option);
}
@Override
public List<FulfillmentOption> readAllFulfillmentOptions() {
return fulfillmentOptionDao.readAllFulfillmentOptions();
}
@Override
public List<FulfillmentOption> readAllFulfillmentOptionsByFulfillmentType(FulfillmentType type) {
return fulfillmentOptionDao.readAllFulfillmentOptionsByFulfillmentType(type);
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_FulfillmentOptionServiceImpl.java |
1,778 | private class SortableValue implements Comparable<SortableValue> {
private Integer pos;
private Serializable entity;
private Class<?> entityClass;
private String containingPropertyName;
private Object bean;
public SortableValue(Object bean, Serializable entity, Integer pos, String containingPropertyName) {
this.bean = bean;
this.entity = entity;
this.pos = pos;
this.entityClass = entity.getClass();
this.containingPropertyName = containingPropertyName;
}
public int compareTo(SortableValue o) {
return pos.compareTo(o.pos) * -1;
}
public String getContainingPropertyName() {
return containingPropertyName;
}
private Object getBean() {
return bean;
}
@Override
public int hashCode() {
int prime = 31;
int result = 1;
result = prime * result + getOuterType().hashCode();
result = prime * result + (entityClass == null ? 0 : entityClass.hashCode());
result = prime * result + (pos == null ? 0 : pos.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SortableValue other = (SortableValue) obj;
if (!getOuterType().equals(other.getOuterType()))
return false;
if (entityClass == null) {
if (other.entityClass != null)
return false;
} else if (!entityClass.equals(other.entityClass))
return false;
if (pos == null) {
if (other.pos != null)
return false;
} else if (!pos.equals(other.pos))
return false;
return true;
}
private FieldManager getOuterType() {
return FieldManager.this;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_FieldManager.java |
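SortableValue.compareTo above negates the natural order of pos, producing a descending sort. A minimal standalone sketch of the same idiom on plain integers (all names are illustrative):

import java.util.Arrays;
import java.util.List;

public class DescendingDemo {
    public static void main(String[] args) {
        List<Integer> positions = Arrays.asList(2, 5, 1);
        // Negating compareTo reverses the natural ascending order.
        positions.sort((a, b) -> a.compareTo(b) * -1);
        System.out.println(positions); // [5, 2, 1]
    }
}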
29 | public class IncrementCommand extends AbstractTextCommand {
String key;
int value;
boolean noreply;
ByteBuffer response;
public IncrementCommand(TextCommandType type, String key, int value, boolean noReply) {
super(type);
this.key = key;
this.value = value;
this.noreply = noReply;
}
public boolean writeTo(ByteBuffer destination) {
while (destination.hasRemaining() && response.hasRemaining()) {
destination.put(response.get());
}
return !response.hasRemaining();
}
public boolean readFrom(ByteBuffer source) {
return true;
}
public boolean shouldReply() {
return !noreply;
}
public String getKey() {
return key;
}
public int getValue() {
return value;
}
public void setResponse(byte[] value) {
this.response = ByteBuffer.wrap(value);
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_ascii_memcache_IncrementCommand.java |
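The writeTo method above implements the standard partial-write pattern: copy while both buffers have room, and report completion once the response is drained. A minimal self-contained sketch (buffer sizes and contents are illustrative):

import java.nio.ByteBuffer;

public class PartialWriteDemo {
    // Copies bytes from response into destination until either is exhausted;
    // returns true once the whole response has been written.
    static boolean writeTo(ByteBuffer destination, ByteBuffer response) {
        while (destination.hasRemaining() && response.hasRemaining()) {
            destination.put(response.get());
        }
        return !response.hasRemaining();
    }

    public static void main(String[] args) {
        ByteBuffer response = ByteBuffer.wrap("5\r\n".getBytes());
        ByteBuffer out = ByteBuffer.allocate(2); // smaller than the response
        System.out.println(writeTo(out, response)); // false: one byte left over
        out.clear();
        System.out.println(writeTo(out, response)); // true: response fully drained
    }
}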
2,500 | END_OBJECT {
@Override
public boolean isValue() {
return false;
}
}, | 0true
| src_main_java_org_elasticsearch_common_xcontent_XContentParser.java |
1,624 | @ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numNodes = 1)
public class ClusterSettingsTests extends ElasticsearchIntegrationTest {
@Test
public void clusterNonExistingSettingsUpdate() {
String key1 = "no_idea_what_you_are_talking_about";
int value1 = 10;
ClusterUpdateSettingsResponse response = client().admin().cluster()
.prepareUpdateSettings()
.setTransientSettings(ImmutableSettings.builder().put(key1, value1).build())
.get();
assertThat(response.getTransientSettings().getAsMap().entrySet(), Matchers.emptyIterable());
}
@Test
public void clusterSettingsUpdateResponse() {
String key1 = "indices.cache.filter.size";
int value1 = 10;
String key2 = DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION;
boolean value2 = true;
Settings transientSettings1 = ImmutableSettings.builder().put(key1, value1).build();
Settings persistentSettings1 = ImmutableSettings.builder().put(key2, value2).build();
ClusterUpdateSettingsResponse response1 = client().admin().cluster()
.prepareUpdateSettings()
.setTransientSettings(transientSettings1)
.setPersistentSettings(persistentSettings1)
.execute()
.actionGet();
assertThat(response1.getTransientSettings().get(key1), notNullValue());
assertThat(response1.getTransientSettings().get(key2), nullValue());
assertThat(response1.getPersistentSettings().get(key1), nullValue());
assertThat(response1.getPersistentSettings().get(key2), notNullValue());
Settings transientSettings2 = ImmutableSettings.builder().put(key1, value1).put(key2, value2).build();
Settings persistentSettings2 = ImmutableSettings.EMPTY;
ClusterUpdateSettingsResponse response2 = client().admin().cluster()
.prepareUpdateSettings()
.setTransientSettings(transientSettings2)
.setPersistentSettings(persistentSettings2)
.execute()
.actionGet();
assertThat(response2.getTransientSettings().get(key1), notNullValue());
assertThat(response2.getTransientSettings().get(key2), notNullValue());
assertThat(response2.getPersistentSettings().get(key1), nullValue());
assertThat(response2.getPersistentSettings().get(key2), nullValue());
Settings transientSettings3 = ImmutableSettings.EMPTY;
Settings persistentSettings3 = ImmutableSettings.builder().put(key1, value1).put(key2, value2).build();
ClusterUpdateSettingsResponse response3 = client().admin().cluster()
.prepareUpdateSettings()
.setTransientSettings(transientSettings3)
.setPersistentSettings(persistentSettings3)
.execute()
.actionGet();
assertThat(response3.getTransientSettings().get(key1), nullValue());
assertThat(response3.getTransientSettings().get(key2), nullValue());
assertThat(response3.getPersistentSettings().get(key1), notNullValue());
assertThat(response3.getPersistentSettings().get(key2), notNullValue());
}
} | 0true
| src_test_java_org_elasticsearch_cluster_settings_ClusterSettingsTests.java |
222 | public class Orient extends OListenerManger<OOrientListener> {
public static final String ORIENTDB_HOME = "ORIENTDB_HOME";
public static final String URL_SYNTAX = "<engine>:<db-type>:<db-name>[?<db-param>=<db-value>[&]]*";
protected static final Orient instance = new Orient();
protected static boolean registerDatabaseByPath = false;
protected final Map<String, OEngine> engines = new HashMap<String, OEngine>();
protected final Map<String, OStorage> storages = new HashMap<String, OStorage>();
protected final Set<ODatabaseLifecycleListener> dbLifecycleListeners = new HashSet<ODatabaseLifecycleListener>();
protected final ODatabaseFactory databaseFactory = new ODatabaseFactory();
protected final OScriptManager scriptManager = new OScriptManager();
protected OClusterFactory clusterFactory = new ODefaultClusterFactory();
protected ORecordFactoryManager recordFactoryManager = new ORecordFactoryManager();
protected OrientShutdownHook shutdownHook;
protected final Timer timer = new Timer(true);
protected final ThreadGroup threadGroup = new ThreadGroup("OrientDB");
protected final AtomicInteger serialId = new AtomicInteger();
protected OMemoryWatchDog memoryWatchDog;
protected OProfilerMBean profiler = new OProfiler();
protected ODatabaseThreadLocalFactory databaseThreadFactory;
protected volatile boolean active = false;
protected Orient() {
super(new OAdaptiveLock(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean()));
startup();
}
public Orient startup() {
getLock().lock();
try {
if (active)
// ALREADY ACTIVE
return this;
shutdownHook = new OrientShutdownHook();
// REGISTER THE EMBEDDED ENGINE
registerEngine(new OEngineLocal());
registerEngine(new OEngineLocalPaginated());
registerEngine(new OEngineMemory());
registerEngine("com.orientechnologies.orient.client.remote.OEngineRemote");
if (OGlobalConfiguration.PROFILER_ENABLED.getValueAsBoolean())
// ACTIVATE RECORDING OF THE PROFILER
profiler.startRecording();
if (OGlobalConfiguration.ENVIRONMENT_DUMP_CFG_AT_STARTUP.getValueAsBoolean())
OGlobalConfiguration.dumpConfiguration(System.out);
memoryWatchDog = new OMemoryWatchDog();
active = true;
return this;
} finally {
getLock().unlock();
}
}
public Orient shutdown() {
getLock().lock();
try {
if (!active)
return this;
active = false;
if (memoryWatchDog != null) {
// SHUT IT DOWN AND WAIT FOR COMPLETION
memoryWatchDog.interrupt();
try {
memoryWatchDog.join();
} catch (InterruptedException e) {
}
}
if (shutdownHook != null) {
shutdownHook.cancel();
shutdownHook = null;
}
OLogManager.instance().debug(this, "Orient Engine is shutting down...");
// CALL THE SHUTDOWN ON ALL THE LISTENERS
for (OOrientListener l : browseListeners()) {
if (l != null)
l.onShutdown();
}
// SHUTDOWN ENGINES
for (OEngine engine : engines.values())
engine.shutdown();
engines.clear();
if (databaseFactory != null)
// CLOSE ALL DATABASES
databaseFactory.shutdown();
if (storages != null) {
// CLOSE ALL THE STORAGES
final List<OStorage> storagesCopy = new ArrayList<OStorage>(storages.values());
for (OStorage stg : storagesCopy) {
OLogManager.instance().info(this, "Shutting down storage: " + stg.getName() + "...");
stg.close(true);
}
}
if (OMMapManagerLocator.getInstance() != null)
OMMapManagerLocator.getInstance().shutdown();
if (threadGroup != null)
// STOP ALL THE PENDING THREADS
threadGroup.interrupt();
resetListeners();
timer.purge();
profiler.shutdown();
OLogManager.instance().info(this, "Orient Engine shutdown complete\n");
} finally {
getLock().unlock();
}
return this;
}
public OStorage loadStorage(String iURL) {
if (iURL == null || iURL.length() == 0)
throw new IllegalArgumentException("URL missed");
if (iURL.endsWith("/"))
iURL = iURL.substring(0, iURL.length() - 1);
// SEARCH FOR ENGINE
int pos = iURL.indexOf(':');
if (pos <= 0)
throw new OConfigurationException("Error in database URL: the engine was not specified. Syntax is: " + URL_SYNTAX
+ ". URL was: " + iURL);
final String engineName = iURL.substring(0, pos);
getLock().lock();
try {
final OEngine engine = engines.get(engineName.toLowerCase());
if (engine == null)
throw new OConfigurationException("Error on opening database: the engine '" + engineName + "' was not found. URL was: "
+ iURL + ". Registered engines are: " + engines.keySet());
// SEARCH FOR DB-NAME
iURL = iURL.substring(pos + 1);
pos = iURL.indexOf('?');
Map<String, String> parameters = null;
String dbPath = null;
if (pos > 0) {
dbPath = iURL.substring(0, pos);
iURL = iURL.substring(pos + 1);
// PARSE PARAMETERS
parameters = new HashMap<String, String>();
String[] pairs = iURL.split("&");
String[] kv;
for (String pair : pairs) {
kv = pair.split("=");
if (kv.length < 2)
throw new OConfigurationException("Error on opening database: parameter has no value. Syntax is: " + URL_SYNTAX
+ ". URL was: " + iURL);
parameters.put(kv[0], kv[1]);
}
} else
dbPath = iURL;
final String dbName = registerDatabaseByPath ? dbPath : OIOUtils.getRelativePathIfAny(dbPath, null);
OStorage storage;
if (engine.isShared()) {
// SEARCH IF ALREADY USED
storage = storages.get(dbName);
if (storage == null) {
// NOT FOUND: CREATE IT
storage = engine.createStorage(dbPath, parameters);
storages.put(dbName, storage);
}
} else {
// REGISTER IT WITH A SERIAL NAME TO AVOID BEING REUSED
storage = engine.createStorage(dbPath, parameters);
storages.put(dbName + "__" + serialId.incrementAndGet(), storage);
}
for (OOrientListener l : browseListeners())
l.onStorageRegistered(storage);
return storage;
} finally {
getLock().unlock();
}
}
public OStorage registerStorage(final OStorage iStorage) throws IOException {
getLock().lock();
try {
for (OOrientListener l : browseListeners())
l.onStorageRegistered(iStorage);
if (!storages.containsKey(iStorage.getName()))
storages.put(iStorage.getName(), iStorage);
} finally {
getLock().unlock();
}
return iStorage;
}
public OStorage getStorage(final String iDbName) {
getLock().lock();
try {
return storages.get(iDbName);
} finally {
getLock().unlock();
}
}
public void registerEngine(final OEngine iEngine) {
getLock().lock();
try {
engines.put(iEngine.getName(), iEngine);
} finally {
getLock().unlock();
}
}
private void registerEngine(final String iClassName) {
try {
final Class<?> cls = Class.forName(iClassName);
registerEngine((OEngine) cls.newInstance());
} catch (Exception e) {
}
}
/**
* Returns the engine by its name.
*
* @param iEngineName
* Engine name to retrieve
* @return OEngine instance if found, otherwise null
*/
public OEngine getEngine(final String iEngineName) {
getLock().lock();
try {
return engines.get(iEngineName);
} finally {
getLock().unlock();
}
}
public Set<String> getEngines() {
getLock().lock();
try {
return Collections.unmodifiableSet(engines.keySet());
} finally {
getLock().unlock();
}
}
public void unregisterStorageByName(final String iName) {
final String dbName = registerDatabaseByPath ? iName : OIOUtils.getRelativePathIfAny(iName, null);
final OStorage stg = storages.get(dbName);
unregisterStorage(stg);
}
public void unregisterStorage(final OStorage iStorage) {
if (!active)
// SHUTTING DOWN OR NOT ACTIVE: RETURN
return;
if (iStorage == null)
return;
getLock().lock();
try {
// UNREGISTER ALL THE LISTENERS ONE BY ONE, AVOIDING SELF-RECURSION BY REMOVING THEM FROM THE LIST
final Iterable<OOrientListener> listenerCopy = getListenersCopy();
for (Iterator<OOrientListener> it = listenerCopy.iterator(); it.hasNext();) {
final OOrientListener l = it.next();
unregisterListener(l);
l.onStorageUnregistered(iStorage);
}
for (Entry<String, OStorage> s : storages.entrySet()) {
if (s.getValue().equals(iStorage)) {
storages.remove(s.getKey());
break;
}
}
} finally {
getLock().unlock();
}
}
public Collection<OStorage> getStorages() {
getLock().lock();
try {
return new ArrayList<OStorage>(storages.values());
} finally {
getLock().unlock();
}
}
public Timer getTimer() {
return timer;
}
public void removeShutdownHook() {
if (shutdownHook != null)
Runtime.getRuntime().removeShutdownHook(shutdownHook);
}
public Iterator<ODatabaseLifecycleListener> getDbLifecycleListeners() {
return dbLifecycleListeners.iterator();
}
public void addDbLifecycleListener(final ODatabaseLifecycleListener iListener) {
dbLifecycleListeners.add(iListener);
}
public void removeDbLifecycleListener(final ODatabaseLifecycleListener iListener) {
dbLifecycleListeners.remove(iListener);
}
public static Orient instance() {
return instance;
}
public ThreadGroup getThreadGroup() {
return threadGroup;
}
public ODatabaseThreadLocalFactory getDatabaseThreadFactory() {
return databaseThreadFactory;
}
public OMemoryWatchDog getMemoryWatchDog() {
return memoryWatchDog;
}
public ORecordFactoryManager getRecordFactoryManager() {
return recordFactoryManager;
}
public OClusterFactory getClusterFactory() {
return clusterFactory;
}
public ODatabaseFactory getDatabaseFactory() {
return databaseFactory;
}
public void setRecordFactoryManager(final ORecordFactoryManager iRecordFactoryManager) {
recordFactoryManager = iRecordFactoryManager;
}
public static String getHomePath() {
String v = System.getProperty("orient.home");
if (v == null)
v = System.getProperty(ORIENTDB_HOME);
if (v == null)
v = System.getenv(ORIENTDB_HOME);
return v;
}
public void setClusterFactory(final OClusterFactory clusterFactory) {
this.clusterFactory = clusterFactory;
}
public OProfilerMBean getProfiler() {
return profiler;
}
public void registerThreadDatabaseFactory(final ODatabaseThreadLocalFactory iDatabaseFactory) {
databaseThreadFactory = iDatabaseFactory;
}
public OScriptManager getScriptManager() {
return scriptManager;
}
/**
* Tells whether databases are registered by path. Default is false. Setting it to true allows multiple databases with the
* same name to exist in different paths.
*
* @see #setRegisterDatabaseByPath(boolean)
* @return true if databases are registered by path, false otherwise
*/
public static boolean isRegisterDatabaseByPath() {
return registerDatabaseByPath;
}
/**
* Registers databases by path. Default is false. Setting it to true allows multiple databases with the same name to exist
* in different paths.
*
* @param iValue true to register databases by path
*/
public static void setRegisterDatabaseByPath(final boolean iValue) {
registerDatabaseByPath = iValue;
}
public void setProfiler(final OProfilerMBean iProfiler) {
profiler = iProfiler;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_Orient.java |
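loadStorage above splits a database URL into engine, path, and parameters per URL_SYNTAX. A minimal sketch of that parsing in isolation; the Parsed holder type and the example URL are illustrative, not part of the original API:

import java.util.HashMap;
import java.util.Map;

public class UrlParseDemo {
    static final class Parsed {
        String engine, path;
        Map<String, String> params = new HashMap<String, String>();
    }

    static Parsed parse(String url) {
        Parsed p = new Parsed();
        int pos = url.indexOf(':');
        if (pos <= 0) throw new IllegalArgumentException("engine not specified: " + url);
        p.engine = url.substring(0, pos);
        String rest = url.substring(pos + 1);
        int q = rest.indexOf('?');
        p.path = q > 0 ? rest.substring(0, q) : rest;
        if (q > 0) {
            // Parameters follow the path as &-separated key=value pairs.
            for (String pair : rest.substring(q + 1).split("&")) {
                String[] kv = pair.split("=");
                if (kv.length < 2) throw new IllegalArgumentException("parameter has no value: " + pair);
                p.params.put(kv[0], kv[1]);
            }
        }
        return p;
    }

    public static void main(String[] args) {
        Parsed p = parse("plocal:/tmp/databases/demo?minPool=1&maxPool=8");
        System.out.println(p.engine + " " + p.path + " " + p.params);
    }
}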
669 | constructors[COLLECTION_TXN_ADD_BACKUP] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new CollectionTxnAddBackupOperation();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java |
1,242 | public interface InternalIndicesAdminClient extends IndicesAdminClient, InternalGenericClient {
} | 0true
| src_main_java_org_elasticsearch_client_internal_InternalIndicesAdminClient.java |
686 | clusterService.submitStateUpdateTask("put_warmer [" + request.name() + "]", new AckedClusterStateUpdateTask() {
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
return true;
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
listener.onResponse(new PutWarmerResponse(true));
}
@Override
public void onAckTimeout() {
listener.onResponse(new PutWarmerResponse(false));
}
@Override
public TimeValue ackTimeout() {
return request.timeout();
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void onFailure(String source, Throwable t) {
logger.debug("failed to put warmer [{}] on indices [{}]", t, request.name(), request.searchRequest().indices());
listener.onFailure(t);
}
@Override
public ClusterState execute(ClusterState currentState) {
MetaData metaData = currentState.metaData();
String[] concreteIndices = metaData.concreteIndices(request.searchRequest().indices(), request.searchRequest().indicesOptions());
BytesReference source = null;
if (request.searchRequest().source() != null && request.searchRequest().source().length() > 0) {
source = request.searchRequest().source();
} else if (request.searchRequest().extraSource() != null && request.searchRequest().extraSource().length() > 0) {
source = request.searchRequest().extraSource();
}
// now replace it on the metadata
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
for (String index : concreteIndices) {
IndexMetaData indexMetaData = metaData.index(index);
if (indexMetaData == null) {
throw new IndexMissingException(new Index(index));
}
IndexWarmersMetaData warmers = indexMetaData.custom(IndexWarmersMetaData.TYPE);
if (warmers == null) {
logger.info("[{}] putting warmer [{}]", index, request.name());
warmers = new IndexWarmersMetaData(new IndexWarmersMetaData.Entry(request.name(), request.searchRequest().types(), source));
} else {
boolean found = false;
List<IndexWarmersMetaData.Entry> entries = new ArrayList<IndexWarmersMetaData.Entry>(warmers.entries().size() + 1);
for (IndexWarmersMetaData.Entry entry : warmers.entries()) {
if (entry.name().equals(request.name())) {
found = true;
entries.add(new IndexWarmersMetaData.Entry(request.name(), request.searchRequest().types(), source));
} else {
entries.add(entry);
}
}
if (!found) {
logger.info("[{}] put warmer [{}]", index, request.name());
entries.add(new IndexWarmersMetaData.Entry(request.name(), request.searchRequest().types(), source));
} else {
logger.info("[{}] update warmer [{}]", index, request.name());
}
warmers = new IndexWarmersMetaData(entries.toArray(new IndexWarmersMetaData.Entry[entries.size()]));
}
IndexMetaData.Builder indexBuilder = IndexMetaData.builder(indexMetaData).putCustom(IndexWarmersMetaData.TYPE, warmers);
mdBuilder.put(indexBuilder);
}
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
}); | 0true
| src_main_java_org_elasticsearch_action_admin_indices_warmer_put_TransportPutWarmerAction.java |
1,135 | public class OSQLMethodAsDecimal extends OAbstractSQLMethod {
public static final String NAME = "asdecimal";
public OSQLMethodAsDecimal() {
super(NAME);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
ioResult = ioResult != null ? new BigDecimal(ioResult.toString().trim()) : null;
return ioResult;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsDecimal.java |
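The execute method above is a null-safe, whitespace-tolerant conversion to BigDecimal. A minimal sketch of the same conversion in isolation:

import java.math.BigDecimal;

public class AsDecimalDemo {
    // Mirrors the method body: null stays null, anything else is trimmed and parsed.
    static BigDecimal asDecimal(Object value) {
        return value != null ? new BigDecimal(value.toString().trim()) : null;
    }

    public static void main(String[] args) {
        System.out.println(asDecimal(" 120.50 ")); // 120.50
        System.out.println(asDecimal(null));       // null
    }
}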
111 | public interface LogProcessorBuilder {
/**
* Returns the identifier of the transaction log to be processed by this processor.
*
* @return the identifier of the transaction log
*/
public String getLogIdentifier();
/**
* Sets the identifier of this processor. This String should uniquely identify a log processing instance and will be used to record
* up to which position in the log the log processor has advanced. In case of instance failure or instance restart,
* the log processor can then pick up where it left off.
* <p/>
* Setting an identifier is optional; it is only needed if recording the processing state is desired.
*
* @param name a unique identifier for this processing instance
* @return this builder
*/
public LogProcessorBuilder setProcessorIdentifier(String name);
/**
* Sets the time at which this log processor should start processing transaction log entries
*
* @param sinceEpoch the start time, measured since the epoch
* @param unit the time unit of {@code sinceEpoch}
* @return this builder
*/
public LogProcessorBuilder setStartTime(long sinceEpoch, TimeUnit unit);
/**
* Indicates that the transaction log processor should process newly added events.
*
* @return this builder
*/
public LogProcessorBuilder setStartTimeNow();
/**
* Adds a {@link ChangeProcessor} to this transaction log processor. These are executed independently.
* @param processor the change processor to execute for each transaction log entry
* @return this builder
*/
public LogProcessorBuilder addProcessor(ChangeProcessor processor);
/**
* Sets how often this log processor should attempt to retry executing a contained {@link ChangeProcessor} in case of failure.
* @param attempts the maximum number of retry attempts
* @return this builder
*/
public LogProcessorBuilder setRetryAttempts(int attempts);
/**
* Builds this transaction log processor and starts processing the log.
*/
public void build();
} | 0true
| titan-core_src_main_java_com_thinkaurelius_titan_core_log_LogProcessorBuilder.java |
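The builder above is naturally driven as a fluent chain. A minimal usage sketch, assuming a LogProcessorBuilder and a ChangeProcessor implementation are supplied by the surrounding application; package names are inferred from the record's file path, and the identifier string is illustrative:

import java.util.concurrent.TimeUnit;
import com.thinkaurelius.titan.core.log.ChangeProcessor;
import com.thinkaurelius.titan.core.log.LogProcessorBuilder;

public class LogProcessorSetup {
    static void start(LogProcessorBuilder builder, ChangeProcessor auditProcessor) {
        builder.setProcessorIdentifier("audit-processor-1")                 // enables resume after restart
               .setStartTime(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
               .addProcessor(auditProcessor)
               .setRetryAttempts(3)
               .build();                                                    // starts processing the log
    }
}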
222 | public class CeylonRangeIndicator extends Annotation implements IAnnotationPresentation {
CeylonRangeIndicator() {
setText("<b>Current declaration</b>");
}
@Override
public int getLayer() {
return DEFAULT_LAYER;
}
@Override
public void paint(GC gc, Canvas canvas, Rectangle bounds) {
Point canvasSize = canvas.getSize();
int x = 0;
int y = bounds.y;
int w = canvasSize.x;
int h = bounds.height;
if (y + h > canvasSize.y) {
h = canvasSize.y - y;
}
if (y < 0) {
h = h + y;
y = 0;
}
if (h <= 0) {
return;
}
Color color = getCurrentThemeColor("rangeIndicatorAnnotation");
gc.setBackground(color);
Image patternImage = getPatternImage(canvas, color);
gc.drawImage(patternImage, 0, 0, w, h, x, y, w, h);
patternImage.dispose();
// gc.setAlpha(85);
// gc.fillRectangle(x, y, w, h);
gc.setAlpha(255);
gc.fillRectangle(x, bounds.y, w, 1);
gc.fillRectangle(x, bounds.y + bounds.height - 1, w, 1);
}
public static Image getPatternImage(Control control, Color color) {
Point size = control.getSize();
Display display = control.getDisplay();
Color bgColor = Display.getCurrent().getSystemColor(SWT.COLOR_WIDGET_BACKGROUND);
RGB rgbs[] = new RGB[] {
new RGB(color.getRed(), color.getGreen(), color.getBlue()),
new RGB(bgColor.getRed(), bgColor.getGreen(), bgColor.getBlue()) };
ImageData imageData = new ImageData(size.x, size.y, 1, new PaletteData(rgbs));
for (int y = 0; y < size.y; y++) {
for (int x = 0; x < size.x; x++) {
imageData.setPixel(x, y, (x + y) % 2);
}
}
return new Image(display, imageData);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_CeylonRangeIndicator.java |
690 | public class CollectionItem implements Comparable<CollectionItem>, IdentifiedDataSerializable {
protected long itemId;
protected Data value;
protected final long creationTime;
public CollectionItem() {
creationTime = Clock.currentTimeMillis();
}
public CollectionItem(long itemId, Data value) {
this();
this.itemId = itemId;
this.value = value;
}
public long getItemId() {
return itemId;
}
public Data getValue() {
return value;
}
public void setValue(Data value) {
this.value = value;
}
public long getCreationTime() {
return creationTime;
}
@Override
public int compareTo(CollectionItem o) {
long otherItemId = o.getItemId();
if (itemId > otherItemId) {
return 1;
} else if (itemId < otherItemId) {
return -1;
}
return 0;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof CollectionItem)) {
return false;
}
CollectionItem item = (CollectionItem) o;
if (value != null ? !value.equals(item.value) : item.value != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return value != null ? value.hashCode() : 0;
}
@Override
public int getFactoryId() {
return CollectionDataSerializerHook.F_ID;
}
@Override
public int getId() {
return CollectionDataSerializerHook.COLLECTION_ITEM;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeLong(itemId);
out.writeObject(value);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
itemId = in.readLong();
value = in.readObject();
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_collection_CollectionItem.java |
727 | public class OSBTree<K, V> extends ODurableComponent implements OTreeInternal<K, V> {
private static final int MAX_KEY_SIZE = OGlobalConfiguration.SBTREE_MAX_KEY_SIZE
.getValueAsInteger();
private static final int MAX_EMBEDDED_VALUE_SIZE = OGlobalConfiguration.SBTREE_MAX_EMBEDDED_VALUE_SIZE
.getValueAsInteger();
private static final OAlwaysLessKey ALWAYS_LESS_KEY = new OAlwaysLessKey();
private static final OAlwaysGreaterKey ALWAYS_GREATER_KEY = new OAlwaysGreaterKey();
private final static long ROOT_INDEX = 0;
private final Comparator<? super K> comparator = ODefaultComparator.INSTANCE;
private OStorageLocalAbstract storage;
private String name;
private final String dataFileExtension;
private ODiskCache diskCache;
private long fileId;
private int keySize;
private OBinarySerializer<K> keySerializer;
private OType[] keyTypes;
private OBinarySerializer<V> valueSerializer;
private final boolean durableInNonTxMode;
private static final ODurablePage.TrackMode txTrackMode = ODurablePage.TrackMode
.valueOf(OGlobalConfiguration.INDEX_TX_MODE
.getValueAsString().toUpperCase());
public OSBTree(String dataFileExtension, int keySize, boolean durableInNonTxMode) {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean());
this.dataFileExtension = dataFileExtension;
this.keySize = keySize;
this.durableInNonTxMode = durableInNonTxMode;
}
public void create(String name, OBinarySerializer<K> keySerializer, OBinarySerializer<V> valueSerializer, OType[] keyTypes,
OStorageLocalAbstract storageLocal) {
acquireExclusiveLock();
try {
this.storage = storageLocal;
this.keyTypes = keyTypes;
this.diskCache = storage.getDiskCache();
this.name = name;
this.keySerializer = keySerializer;
this.valueSerializer = valueSerializer;
fileId = diskCache.openFile(name + dataFileExtension);
initDurableComponent(storageLocal);
OCacheEntry rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootPointer = rootCacheEntry.getCachePointer();
rootPointer.acquireExclusiveLock();
try {
super.startDurableOperation(null);
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootPointer.getDataPointer(), true, keySerializer, keyTypes,
valueSerializer, getTrackMode());
rootBucket.setKeySerializerId(keySerializer.getId());
rootBucket.setValueSerializerId(valueSerializer.getId());
rootBucket.setTreeSize(0);
super.logPageChanges(rootBucket, fileId, ROOT_INDEX, true);
rootCacheEntry.markDirty();
} finally {
rootPointer.releaseExclusiveLock();
diskCache.release(rootCacheEntry);
}
super.endDurableOperation(null, false);
} catch (IOException e) {
try {
super.endDurableOperation(null, true);
} catch (IOException e1) {
OLogManager.instance().error(this, "Error during sbtree data rollback", e1);
}
throw new OSBTreeException("Error creation of sbtree with name" + name, e);
} finally {
releaseExclusiveLock();
}
}
private void initDurableComponent(OStorageLocalAbstract storageLocal) {
OWriteAheadLog writeAheadLog = storageLocal.getWALInstance();
init(writeAheadLog);
}
public String getName() {
acquireSharedLock();
try {
return name;
} finally {
releaseSharedLock();
}
}
public V get(K key) {
if (key == null)
return null;
acquireSharedLock();
try {
key = keySerializer.preprocess(key, (Object[]) keyTypes);
BucketSearchResult bucketSearchResult = findBucket(key, PartialSearchMode.NONE);
if (bucketSearchResult.itemIndex < 0)
return null;
long pageIndex = bucketSearchResult.getLastPathItem();
OCacheEntry keyBucketCacheEntry = diskCache.load(fileId, pageIndex, false);
OCachePointer keyBucketPointer = keyBucketCacheEntry.getCachePointer();
try {
OSBTreeBucket<K, V> keyBucket = new OSBTreeBucket<K, V>(keyBucketPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, ODurablePage.TrackMode.NONE);
OSBTreeBucket.SBTreeEntry<K, V> treeEntry = keyBucket.getEntry(bucketSearchResult.itemIndex);
return readValue(treeEntry.value);
} finally {
diskCache.release(keyBucketCacheEntry);
}
} catch (IOException e) {
throw new OSBTreeException("Error during retrieving of sbtree with name " + name, e);
} finally {
releaseSharedLock();
}
}
public void put(K key, V value) {
acquireExclusiveLock();
final OStorageTransaction transaction = storage.getStorageTransaction();
try {
final int keySize = keySerializer.getObjectSize(key, (Object[]) keyTypes);
final int valueSize = valueSerializer.getObjectSize(value);
if (keySize > MAX_KEY_SIZE)
throw new OSBTreeException("Key size is more than allowed, operation was canceled. Current key size " + keySize
+ ", allowed " + MAX_KEY_SIZE);
final boolean createLinkToTheValue = valueSize > MAX_EMBEDDED_VALUE_SIZE;
key = keySerializer.preprocess(key, (Object[]) keyTypes);
startDurableOperation(transaction);
long valueLink = -1;
if (createLinkToTheValue)
valueLink = createLinkToTheValue(value);
final OSBTreeValue<V> treeValue = new OSBTreeValue<V>(createLinkToTheValue, valueLink, createLinkToTheValue ? null : value);
BucketSearchResult bucketSearchResult = findBucket(key, PartialSearchMode.NONE);
OCacheEntry keyBucketCacheEntry = diskCache.load(fileId, bucketSearchResult.getLastPathItem(), false);
OCachePointer keyBucketPointer = keyBucketCacheEntry.getCachePointer();
keyBucketPointer.acquireExclusiveLock();
OSBTreeBucket<K, V> keyBucket = new OSBTreeBucket<K, V>(keyBucketPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, getTrackMode());
int insertionIndex;
int sizeDiff;
if (bucketSearchResult.itemIndex >= 0) {
int updateResult = keyBucket.updateValue(bucketSearchResult.itemIndex, treeValue);
if (updateResult == 1) {
logPageChanges(keyBucket, fileId, keyBucketCacheEntry.getPageIndex(), false);
keyBucketCacheEntry.markDirty();
}
if (updateResult >= 0) {
keyBucketPointer.releaseExclusiveLock();
diskCache.release(keyBucketCacheEntry);
endDurableOperation(transaction, false);
return;
} else {
assert updateResult == -1;
long removedLinkedValue = keyBucket.remove(bucketSearchResult.itemIndex);
if (removedLinkedValue >= 0)
removeLinkedValue(removedLinkedValue);
insertionIndex = bucketSearchResult.itemIndex;
sizeDiff = 0;
}
} else {
insertionIndex = -bucketSearchResult.itemIndex - 1;
sizeDiff = 1;
}
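// addEntry fails when the bucket is full: split the bucket and retry the
// insertion against whichever half now covers the key.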
while (!keyBucket.addEntry(insertionIndex, new OSBTreeBucket.SBTreeEntry<K, V>(-1, -1, key, treeValue), true)) {
logPageChanges(keyBucket, fileId, keyBucketCacheEntry.getPageIndex(), false);
keyBucketPointer.releaseExclusiveLock();
diskCache.release(keyBucketCacheEntry);
bucketSearchResult = splitBucket(bucketSearchResult.path, insertionIndex, key);
insertionIndex = bucketSearchResult.itemIndex;
keyBucketCacheEntry = diskCache.load(fileId, bucketSearchResult.getLastPathItem(), false);
keyBucketPointer = keyBucketCacheEntry.getCachePointer();
keyBucketPointer.acquireExclusiveLock();
keyBucket = new OSBTreeBucket<K, V>(keyBucketPointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
getTrackMode());
}
logPageChanges(keyBucket, fileId, bucketSearchResult.getLastPathItem(), false);
keyBucketCacheEntry.markDirty();
keyBucketPointer.releaseExclusiveLock();
diskCache.release(keyBucketCacheEntry);
if (sizeDiff != 0)
setSize(size() + sizeDiff);
endDurableOperation(transaction, false);
} catch (IOException e) {
rollback(transaction);
throw new OSBTreeException("Error during index update with key " + key + " and value " + value, e);
} finally {
releaseExclusiveLock();
}
}
private void removeLinkedValue(long removedLink) throws IOException {
long nextPage = removedLink;
do {
removedLink = nextPage;
OCacheEntry valueEntry = diskCache.load(fileId, removedLink, false);
OCachePointer valuePointer = valueEntry.getCachePointer();
try {
OSBTreeValuePage valuePage = new OSBTreeValuePage(valuePointer.getDataPointer(), getTrackMode(), false);
nextPage = valuePage.getNextPage();
} finally {
diskCache.release(valueEntry);
}
removeValuePage(removedLink);
} while (nextPage >= 0);
}
private void removeValuePage(long pageIndex) throws IOException {
long prevFreeListItem;
OCacheEntry rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootCachePointer = rootCacheEntry.getCachePointer();
rootCachePointer.acquireExclusiveLock();
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootCachePointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, getTrackMode());
try {
prevFreeListItem = rootBucket.getValuesFreeListFirstIndex();
rootBucket.setValuesFreeListFirstIndex(pageIndex);
rootCacheEntry.markDirty();
logPageChanges(rootBucket, fileId, ROOT_INDEX, false);
} finally {
rootCachePointer.releaseExclusiveLock();
diskCache.release(rootCacheEntry);
}
OCacheEntry valueEntry = diskCache.load(fileId, pageIndex, false);
OCachePointer valuePointer = valueEntry.getCachePointer();
valuePointer.acquireExclusiveLock();
try {
OSBTreeValuePage valuePage = new OSBTreeValuePage(valuePointer.getDataPointer(), getTrackMode(), false);
valuePage.setNextFreeListPage(prevFreeListItem);
valueEntry.markDirty();
logPageChanges(valuePage, fileId, pageIndex, false);
} finally {
valuePointer.releaseExclusiveLock();
diskCache.release(valueEntry);
}
}
private long createLinkToTheValue(V value) throws IOException {
byte[] serializeValue = new byte[valueSerializer.getObjectSize(value)];
valueSerializer.serializeNative(value, serializeValue, 0);
final int amountOfPages = OSBTreeValuePage.calculateAmountOfPage(serializeValue.length);
int position = 0;
long freeListPageIndex = allocateValuePageFromFreeList();
OCacheEntry cacheEntry;
if (freeListPageIndex < 0)
cacheEntry = diskCache.allocateNewPage(fileId);
else
cacheEntry = diskCache.load(fileId, freeListPageIndex, false);
final long valueLink = cacheEntry.getPageIndex();
OCachePointer cachePointer = cacheEntry.getCachePointer();
cachePointer.acquireExclusiveLock();
try {
OSBTreeValuePage valuePage = new OSBTreeValuePage(cachePointer.getDataPointer(), getTrackMode(), freeListPageIndex >= 0);
position = valuePage.fillBinaryContent(serializeValue, position);
valuePage.setNextFreeListPage(-1);
valuePage.setNextPage(-1);
cacheEntry.markDirty();
if (freeListPageIndex < 0)
logPageChanges(valuePage, fileId, cacheEntry.getPageIndex(), true);
else
logPageChanges(valuePage, fileId, cacheEntry.getPageIndex(), false);
} finally {
cachePointer.releaseExclusiveLock();
diskCache.release(cacheEntry);
}
long prevPage = valueLink;
for (int i = 1; i < amountOfPages; i++) {
freeListPageIndex = allocateValuePageFromFreeList();
if (freeListPageIndex < 0)
cacheEntry = diskCache.allocateNewPage(fileId);
else
cacheEntry = diskCache.load(fileId, freeListPageIndex, false);
cachePointer = cacheEntry.getCachePointer();
cachePointer.acquireExclusiveLock();
try {
OSBTreeValuePage valuePage = new OSBTreeValuePage(cachePointer.getDataPointer(), getTrackMode(), freeListPageIndex >= 0);
position = valuePage.fillBinaryContent(serializeValue, position);
valuePage.setNextFreeListPage(-1);
valuePage.setNextPage(-1);
cacheEntry.markDirty();
if (freeListPageIndex < 0)
logPageChanges(valuePage, fileId, cacheEntry.getPageIndex(), true);
else
logPageChanges(valuePage, fileId, cacheEntry.getPageIndex(), false);
} finally {
cachePointer.releaseExclusiveLock();
diskCache.release(cacheEntry);
}
OCacheEntry prevPageCacheEntry = diskCache.load(fileId, prevPage, false);
OCachePointer prevPageCachePointer = prevPageCacheEntry.getCachePointer();
prevPageCachePointer.acquireExclusiveLock();
try {
OSBTreeValuePage valuePage = new OSBTreeValuePage(prevPageCachePointer.getDataPointer(), getTrackMode(),
freeListPageIndex >= 0);
valuePage.setNextPage(cacheEntry.getPageIndex());
prevPageCacheEntry.markDirty();
logPageChanges(valuePage, fileId, prevPage, false);
} finally {
prevPageCachePointer.releaseExclusiveLock();
diskCache.release(prevPageCacheEntry);
}
prevPage = cacheEntry.getPageIndex();
}
return valueLink;
}
private long allocateValuePageFromFreeList() throws IOException {
OCacheEntry rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootCachePointer = rootCacheEntry.getCachePointer();
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootCachePointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, ODurablePage.TrackMode.NONE);
long freeListFirstIndex;
try {
freeListFirstIndex = rootBucket.getValuesFreeListFirstIndex();
} finally {
diskCache.release(rootCacheEntry);
}
if (freeListFirstIndex >= 0) {
OCacheEntry freePageEntry = diskCache.load(fileId, freeListFirstIndex, false);
OCachePointer freePageCachePointer = freePageEntry.getCachePointer();
OSBTreeValuePage valuePage = new OSBTreeValuePage(freePageCachePointer.getDataPointer(), getTrackMode(), false);
freePageCachePointer.acquireExclusiveLock();
try {
long nextFreeListIndex = valuePage.getNextFreeListPage();
rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
rootCachePointer = rootCacheEntry.getCachePointer();
rootCachePointer.acquireExclusiveLock();
rootBucket = new OSBTreeBucket<K, V>(rootCachePointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
getTrackMode());
try {
rootBucket.setValuesFreeListFirstIndex(nextFreeListIndex);
rootCacheEntry.markDirty();
logPageChanges(rootBucket, fileId, ROOT_INDEX, false);
} finally {
rootCachePointer.releaseExclusiveLock();
diskCache.release(rootCacheEntry);
}
valuePage.setNextFreeListPage(-1);
freePageEntry.markDirty();
logPageChanges(valuePage, fileId, freePageEntry.getPageIndex(), false);
} finally {
freePageCachePointer.releaseExclusiveLock();
diskCache.release(freePageEntry);
}
return freePageEntry.getPageIndex();
}
return -1;
}
private void rollback(OStorageTransaction transaction) {
try {
endDurableOperation(transaction, true);
} catch (IOException e1) {
OLogManager.instance().error(this, "Error during sbtree operation rollback", e1);
}
}
public void close(boolean flush) {
acquireExclusiveLock();
try {
diskCache.closeFile(fileId, flush);
} catch (IOException e) {
throw new OSBTreeException("Error during close of index " + name, e);
} finally {
releaseExclusiveLock();
}
}
public void close() {
close(true);
}
public void clear() {
acquireExclusiveLock();
OStorageTransaction transaction = storage.getStorageTransaction();
try {
startDurableOperation(transaction);
diskCache.truncateFile(fileId);
OCacheEntry cacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootPointer = cacheEntry.getCachePointer();
rootPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootPointer.getDataPointer(), true, keySerializer, keyTypes,
valueSerializer, getTrackMode());
rootBucket.setKeySerializerId(keySerializer.getId());
rootBucket.setValueSerializerId(valueSerializer.getId());
rootBucket.setTreeSize(0);
logPageChanges(rootBucket, fileId, ROOT_INDEX, true);
cacheEntry.markDirty();
} finally {
rootPointer.releaseExclusiveLock();
diskCache.release(cacheEntry);
}
endDurableOperation(transaction, false);
} catch (IOException e) {
rollback(transaction);
throw new OSBTreeException("Error during clear of sbtree with name " + name, e);
} finally {
releaseExclusiveLock();
}
}
public void delete() {
acquireExclusiveLock();
try {
diskCache.deleteFile(fileId);
} catch (IOException e) {
throw new OSBTreeException("Error during delete of sbtree with name " + name, e);
} finally {
releaseExclusiveLock();
}
}
public void deleteWithoutLoad(String name, OStorageLocalAbstract storageLocal) {
acquireExclusiveLock();
try {
final ODiskCache diskCache = storageLocal.getDiskCache();
final long fileId = diskCache.openFile(name + dataFileExtension);
diskCache.deleteFile(fileId);
} catch (IOException ioe) {
throw new OSBTreeException("Exception during deletion of sbtree " + name, ioe);
} finally {
releaseExclusiveLock();
}
}
public void load(String name, OType[] keyTypes, OStorageLocalAbstract storageLocal) {
acquireExclusiveLock();
try {
this.storage = storageLocal;
this.keyTypes = keyTypes;
diskCache = storage.getDiskCache();
this.name = name;
fileId = diskCache.openFile(name + dataFileExtension);
OCacheEntry rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootPointer = rootCacheEntry.getCachePointer();
try {
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, ODurablePage.TrackMode.NONE);
keySerializer = (OBinarySerializer<K>) OBinarySerializerFactory.INSTANCE.getObjectSerializer(rootBucket
.getKeySerializerId());
valueSerializer = (OBinarySerializer<V>) OBinarySerializerFactory.INSTANCE.getObjectSerializer(rootBucket
.getValueSerializerId());
} finally {
diskCache.release(rootCacheEntry);
}
initDurableComponent(storageLocal);
} catch (IOException e) {
throw new OSBTreeException("Exception during loading of sbtree " + name, e);
} finally {
releaseExclusiveLock();
}
}
private void setSize(long size) throws IOException {
OCacheEntry rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootPointer = rootCacheEntry.getCachePointer();
rootPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, getTrackMode());
rootBucket.setTreeSize(size);
logPageChanges(rootBucket, fileId, ROOT_INDEX, false);
rootCacheEntry.markDirty();
} finally {
rootPointer.releaseExclusiveLock();
diskCache.release(rootCacheEntry);
}
}
@Override
public long size() {
acquireSharedLock();
try {
OCacheEntry rootCacheEntry = diskCache.load(fileId, ROOT_INDEX, false);
OCachePointer rootPointer = rootCacheEntry.getCachePointer();
try {
OSBTreeBucket<K, V> rootBucket = new OSBTreeBucket<K, V>(rootPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, ODurablePage.TrackMode.NONE);
return rootBucket.getTreeSize();
} finally {
diskCache.release(rootCacheEntry);
}
} catch (IOException e) {
throw new OSBTreeException("Error during retrieving of size of index " + name);
} finally {
releaseSharedLock();
}
}
@Override
public V remove(K key) {
acquireExclusiveLock();
OStorageTransaction transaction = storage.getStorageTransaction();
try {
key = keySerializer.preprocess(key, (Object[]) keyTypes);
BucketSearchResult bucketSearchResult = findBucket(key, PartialSearchMode.NONE);
if (bucketSearchResult.itemIndex < 0)
return null;
OCacheEntry keyBucketCacheEntry = diskCache.load(fileId, bucketSearchResult.getLastPathItem(), false);
OCachePointer keyBucketPointer = keyBucketCacheEntry.getCachePointer();
keyBucketPointer.acquireExclusiveLock();
try {
startDurableOperation(transaction);
OSBTreeBucket<K, V> keyBucket = new OSBTreeBucket<K, V>(keyBucketPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, getTrackMode());
final OSBTreeValue<V> removed = keyBucket.getEntry(bucketSearchResult.itemIndex).value;
final V value = readValue(removed);
long removedValueLink = keyBucket.remove(bucketSearchResult.itemIndex);
if (removedValueLink >= 0)
removeLinkedValue(removedValueLink);
logPageChanges(keyBucket, fileId, keyBucketCacheEntry.getPageIndex(), false);
keyBucketCacheEntry.markDirty();
setSize(size() - 1);
endDurableOperation(transaction, false);
return value;
} finally {
keyBucketPointer.releaseExclusiveLock();
diskCache.release(keyBucketCacheEntry);
}
} catch (IOException e) {
rollback(transaction);
throw new OSBTreeException("Error during removing key " + key + " from sbtree " + name, e);
} finally {
releaseExclusiveLock();
}
}
@Override
protected void endDurableOperation(OStorageTransaction transaction, boolean rollback) throws IOException {
if (transaction == null && !durableInNonTxMode)
return;
super.endDurableOperation(transaction, rollback);
}
@Override
protected void startDurableOperation(OStorageTransaction transaction) throws IOException {
if (transaction == null && !durableInNonTxMode)
return;
super.startDurableOperation(transaction);
}
@Override
protected void logPageChanges(ODurablePage localPage, long fileId, long pageIndex, boolean isNewPage) throws IOException {
final OStorageTransaction transaction = storage.getStorageTransaction();
if (transaction == null && !durableInNonTxMode)
return;
super.logPageChanges(localPage, fileId, pageIndex, isNewPage);
}
@Override
protected ODurablePage.TrackMode getTrackMode() {
final OStorageTransaction transaction = storage.getStorageTransaction();
if (transaction == null && !durableInNonTxMode)
return ODurablePage.TrackMode.NONE;
final ODurablePage.TrackMode trackMode = super.getTrackMode();
if (!trackMode.equals(ODurablePage.TrackMode.NONE))
return txTrackMode;
return trackMode;
}
public Collection<V> getValuesMinor(K key, boolean inclusive, final int maxValuesToFetch) {
final List<V> result = new ArrayList<V>();
loadEntriesMinor(key, inclusive, new RangeResultListener<K, V>() {
@Override
public boolean addResult(Map.Entry<K, V> entry) {
result.add(entry.getValue());
if (maxValuesToFetch > -1 && result.size() >= maxValuesToFetch)
return false;
return true;
}
});
return result;
}
public void loadEntriesMinor(K key, boolean inclusive, RangeResultListener<K, V> listener) {
acquireSharedLock();
try {
key = keySerializer.preprocess(key, (Object[]) keyTypes);
final PartialSearchMode partialSearchMode;
if (inclusive)
partialSearchMode = PartialSearchMode.HIGHEST_BOUNDARY;
else
partialSearchMode = PartialSearchMode.LOWEST_BOUNDARY;
BucketSearchResult bucketSearchResult = findBucket(key, partialSearchMode);
long pageIndex = bucketSearchResult.getLastPathItem();
int index;
if (bucketSearchResult.itemIndex >= 0) {
index = inclusive ? bucketSearchResult.itemIndex : bucketSearchResult.itemIndex - 1;
} else {
index = -bucketSearchResult.itemIndex - 2;
}
boolean firstBucket = true;
resultsLoop: while (true) {
long nextPageIndex = -1;
OCacheEntry cacheEntry = diskCache.load(fileId, pageIndex, false);
final OCachePointer pointer = cacheEntry.getCachePointer();
try {
OSBTreeBucket<K, V> bucket = new OSBTreeBucket<K, V>(pointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
if (!firstBucket)
index = bucket.size() - 1;
for (int i = index; i >= 0; i--) {
if (!listener.addResult(convertToMapEntry(bucket.getEntry(i))))
break resultsLoop;
}
if (bucket.getLeftSibling() >= 0)
nextPageIndex = bucket.getLeftSibling();
else
break;
} finally {
diskCache.release(cacheEntry);
}
pageIndex = nextPageIndex;
firstBucket = false;
}
} catch (IOException ioe) {
throw new OSBTreeException("Error during fetch of minor values for key " + key + " in sbtree " + name);
} finally {
releaseSharedLock();
}
}
public Collection<V> getValuesMajor(K key, boolean inclusive, final int maxValuesToFetch) {
final List<V> result = new ArrayList<V>();
loadEntriesMajor(key, inclusive, new RangeResultListener<K, V>() {
@Override
public boolean addResult(Map.Entry<K, V> entry) {
result.add(entry.getValue());
if (maxValuesToFetch > -1 && result.size() >= maxValuesToFetch)
return false;
return true;
}
});
return result;
}
public void loadEntriesMajor(K key, boolean inclusive, RangeResultListener<K, V> listener) {
acquireSharedLock();
try {
key = keySerializer.preprocess(key, (Object[]) keyTypes);
final PartialSearchMode partialSearchMode;
if (inclusive)
partialSearchMode = PartialSearchMode.LOWEST_BOUNDARY;
else
partialSearchMode = PartialSearchMode.HIGHEST_BOUNDARY;
BucketSearchResult bucketSearchResult = findBucket(key, partialSearchMode);
long pageIndex = bucketSearchResult.getLastPathItem();
int index;
if (bucketSearchResult.itemIndex >= 0) {
index = inclusive ? bucketSearchResult.itemIndex : bucketSearchResult.itemIndex + 1;
} else {
index = -bucketSearchResult.itemIndex - 1;
}
resultsLoop: while (true) {
long nextPageIndex = -1;
final OCacheEntry cacheEntry = diskCache.load(fileId, pageIndex, false);
final OCachePointer pointer = cacheEntry.getCachePointer();
try {
OSBTreeBucket<K, V> bucket = new OSBTreeBucket<K, V>(pointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
int bucketSize = bucket.size();
for (int i = index; i < bucketSize; i++) {
if (!listener.addResult(convertToMapEntry(bucket.getEntry(i))))
break resultsLoop;
}
if (bucket.getRightSibling() >= 0)
nextPageIndex = bucket.getRightSibling();
else
break;
} finally {
diskCache.release(cacheEntry);
}
pageIndex = nextPageIndex;
index = 0;
}
} catch (IOException ioe) {
throw new OSBTreeException("Error during fetch of major values for key " + key + " in sbtree " + name);
} finally {
releaseSharedLock();
}
}
public Collection<V> getValuesBetween(K keyFrom, boolean fromInclusive, K keyTo, boolean toInclusive, final int maxValuesToFetch) {
final List<V> result = new ArrayList<V>();
loadEntriesBetween(keyFrom, fromInclusive, keyTo, toInclusive, new RangeResultListener<K, V>() {
@Override
public boolean addResult(Map.Entry<K, V> entry) {
result.add(entry.getValue());
if (maxValuesToFetch > 0 && result.size() >= maxValuesToFetch)
return false;
return true;
}
});
return result;
}
@Override
public K firstKey() {
acquireSharedLock();
try {
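// Iterative depth-first descent with an explicit stack (path); empty subtrees are
// skipped by backtracking to the parent and advancing to the next child.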
LinkedList<PagePathItemUnit> path = new LinkedList<PagePathItemUnit>();
long bucketIndex = ROOT_INDEX;
OCacheEntry cacheEntry = diskCache.load(fileId, bucketIndex, false);
OCachePointer cachePointer = cacheEntry.getCachePointer();
int itemIndex = 0;
OSBTreeBucket<K, V> bucket = new OSBTreeBucket<K, V>(cachePointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
try {
while (true) {
if (!bucket.isLeaf()) {
if (bucket.isEmpty() || itemIndex > bucket.size()) {
if (!path.isEmpty()) {
PagePathItemUnit pagePathItemUnit = path.removeLast();
bucketIndex = pagePathItemUnit.pageIndex;
itemIndex = pagePathItemUnit.itemIndex + 1;
} else
return null;
} else {
path.add(new PagePathItemUnit(bucketIndex, itemIndex));
if (itemIndex < bucket.size()) {
OSBTreeBucket.SBTreeEntry<K, V> entry = bucket.getEntry(itemIndex);
bucketIndex = entry.leftChild;
} else {
OSBTreeBucket.SBTreeEntry<K, V> entry = bucket.getEntry(itemIndex - 1);
bucketIndex = entry.rightChild;
}
itemIndex = 0;
}
} else {
if (bucket.isEmpty()) {
if (!path.isEmpty()) {
PagePathItemUnit pagePathItemUnit = path.removeLast();
bucketIndex = pagePathItemUnit.pageIndex;
itemIndex = pagePathItemUnit.itemIndex + 1;
} else
return null;
} else {
return bucket.getKey(0);
}
}
diskCache.release(cacheEntry);
cacheEntry = diskCache.load(fileId, bucketIndex, false);
cachePointer = cacheEntry.getCachePointer();
bucket = new OSBTreeBucket<K, V>(cachePointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
}
} finally {
diskCache.release(cacheEntry);
}
} catch (IOException e) {
throw new OSBTreeException("Error during finding first key in sbtree [" + name + "]");
} finally {
releaseSharedLock();
}
}
public K lastKey() {
acquireSharedLock();
try {
LinkedList<PagePathItemUnit> path = new LinkedList<PagePathItemUnit>();
long bucketIndex = ROOT_INDEX;
OCacheEntry cacheEntry = diskCache.load(fileId, bucketIndex, false);
OCachePointer cachePointer = cacheEntry.getCachePointer();
OSBTreeBucket<K, V> bucket = new OSBTreeBucket<K, V>(cachePointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
int itemIndex = bucket.size() - 1;
try {
while (true) {
if (!bucket.isLeaf()) {
if (itemIndex < -1) {
if (!path.isEmpty()) {
PagePathItemUnit pagePathItemUnit = path.removeLast();
bucketIndex = pagePathItemUnit.pageIndex;
itemIndex = pagePathItemUnit.itemIndex - 1;
} else
return null;
} else {
path.add(new PagePathItemUnit(bucketIndex, itemIndex));
if (itemIndex > -1) {
OSBTreeBucket.SBTreeEntry<K, V> entry = bucket.getEntry(itemIndex);
bucketIndex = entry.rightChild;
} else {
OSBTreeBucket.SBTreeEntry<K, V> entry = bucket.getEntry(0);
bucketIndex = entry.leftChild;
}
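// Sentinel: signal that itemIndex must be recomputed from the size of the bucket
// that is about to be loaded (see the check after the load below).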
itemIndex = OSBTreeBucket.MAX_PAGE_SIZE_BYTES + 1;
}
} else {
if (bucket.isEmpty()) {
if (!path.isEmpty()) {
PagePathItemUnit pagePathItemUnit = path.removeLast();
bucketIndex = pagePathItemUnit.pageIndex;
itemIndex = pagePathItemUnit.itemIndex - 1;
} else
return null;
} else {
return bucket.getKey(bucket.size() - 1);
}
}
diskCache.release(cacheEntry);
cacheEntry = diskCache.load(fileId, bucketIndex, false);
cachePointer = cacheEntry.getCachePointer();
bucket = new OSBTreeBucket<K, V>(cachePointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
if (itemIndex == OSBTreeBucket.MAX_PAGE_SIZE_BYTES + 1)
itemIndex = bucket.size() - 1;
}
} finally {
diskCache.release(cacheEntry);
}
} catch (IOException e) {
throw new OSBTreeException("Error during finding first key in sbtree [" + name + "]");
} finally {
releaseSharedLock();
}
}
public void loadEntriesBetween(K keyFrom, boolean fromInclusive, K keyTo, boolean toInclusive,
OTreeInternal.RangeResultListener<K, V> listener) {
acquireSharedLock();
try {
keyFrom = keySerializer.preprocess(keyFrom, (Object[]) keyTypes);
keyTo = keySerializer.preprocess(keyTo, (Object[]) keyTypes);
PartialSearchMode partialSearchModeFrom;
if (fromInclusive)
partialSearchModeFrom = PartialSearchMode.LOWEST_BOUNDARY;
else
partialSearchModeFrom = PartialSearchMode.HIGHEST_BOUNDARY;
BucketSearchResult bucketSearchResultFrom = findBucket(keyFrom, partialSearchModeFrom);
long pageIndexFrom = bucketSearchResultFrom.getLastPathItem();
int indexFrom;
if (bucketSearchResultFrom.itemIndex >= 0) {
indexFrom = fromInclusive ? bucketSearchResultFrom.itemIndex : bucketSearchResultFrom.itemIndex + 1;
} else {
indexFrom = -bucketSearchResultFrom.itemIndex - 1;
}
PartialSearchMode partialSearchModeTo;
if (toInclusive)
partialSearchModeTo = PartialSearchMode.HIGHEST_BOUNDARY;
else
partialSearchModeTo = PartialSearchMode.LOWEST_BOUNDARY;
BucketSearchResult bucketSearchResultTo = findBucket(keyTo, partialSearchModeTo);
long pageIndexTo = bucketSearchResultTo.getLastPathItem();
int indexTo;
if (bucketSearchResultTo.itemIndex >= 0) {
indexTo = toInclusive ? bucketSearchResultTo.itemIndex : bucketSearchResultTo.itemIndex - 1;
} else {
indexTo = -bucketSearchResultTo.itemIndex - 2;
}
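// Walk leaf buckets from pageIndexFrom to pageIndexTo through right-sibling links,
// clamping the scanned range to [startIndex, endIndex] inside each bucket.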
int startIndex = indexFrom;
int endIndex;
long pageIndex = pageIndexFrom;
resultsLoop: while (true) {
long nextPageIndex = -1;
final OCacheEntry cacheEntry = diskCache.load(fileId, pageIndex, false);
final OCachePointer pointer = cacheEntry.getCachePointer();
try {
OSBTreeBucket<K, V> bucket = new OSBTreeBucket<K, V>(pointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
ODurablePage.TrackMode.NONE);
if (pageIndex != pageIndexTo)
endIndex = bucket.size() - 1;
else
endIndex = indexTo;
for (int i = startIndex; i <= endIndex; i++) {
if (!listener.addResult(convertToMapEntry(bucket.getEntry(i))))
break resultsLoop;
}
if (pageIndex == pageIndexTo)
break;
if (bucket.getRightSibling() >= 0)
nextPageIndex = bucket.getRightSibling();
else
break;
} finally {
diskCache.release(cacheEntry);
}
pageIndex = nextPageIndex;
startIndex = 0;
}
} catch (IOException ioe) {
throw new OSBTreeException("Error during fetch of values between key " + keyFrom + " and key " + keyTo + " in sbtree " + name);
} finally {
releaseSharedLock();
}
}
public void flush() {
acquireSharedLock();
try {
try {
diskCache.flushBuffer();
} catch (IOException e) {
throw new OSBTreeException("Error during flush of sbtree [" + name + "] data");
}
} finally {
releaseSharedLock();
}
}
private BucketSearchResult splitBucket(List<Long> path, int keyIndex, K keyToInsert) throws IOException {
long pageIndex = path.get(path.size() - 1);
OCacheEntry bucketEntry = diskCache.load(fileId, pageIndex, false);
OCachePointer bucketPointer = bucketEntry.getCachePointer();
bucketPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> bucketToSplit = new OSBTreeBucket<K, V>(bucketPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, getTrackMode());
final boolean splitLeaf = bucketToSplit.isLeaf();
final int bucketSize = bucketToSplit.size();
int indexToSplit = bucketSize >>> 1;
final K separationKey = bucketToSplit.getKey(indexToSplit);
final List<OSBTreeBucket.SBTreeEntry<K, V>> rightEntries = new ArrayList<OSBTreeBucket.SBTreeEntry<K, V>>(indexToSplit);
final int startRightIndex = splitLeaf ? indexToSplit : indexToSplit + 1;
for (int i = startRightIndex; i < bucketSize; i++)
rightEntries.add(bucketToSplit.getEntry(i));
if (pageIndex != ROOT_INDEX) {
OCacheEntry rightBucketEntry = diskCache.allocateNewPage(fileId);
OCachePointer rightBucketPointer = rightBucketEntry.getCachePointer();
rightBucketPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> newRightBucket = new OSBTreeBucket<K, V>(rightBucketPointer.getDataPointer(), splitLeaf,
keySerializer, keyTypes, valueSerializer, getTrackMode());
newRightBucket.addAll(rightEntries);
bucketToSplit.shrink(indexToSplit);
if (splitLeaf) {
long rightSiblingPageIndex = bucketToSplit.getRightSibling();
newRightBucket.setRightSibling(rightSiblingPageIndex);
newRightBucket.setLeftSibling(pageIndex);
bucketToSplit.setRightSibling(rightBucketEntry.getPageIndex());
if (rightSiblingPageIndex >= 0) {
final OCacheEntry rightSiblingBucketEntry = diskCache.load(fileId, rightSiblingPageIndex, false);
final OCachePointer rightSiblingPointer = rightSiblingBucketEntry.getCachePointer();
rightSiblingPointer.acquireExclusiveLock();
OSBTreeBucket<K, V> rightSiblingBucket = new OSBTreeBucket<K, V>(rightSiblingPointer.getDataPointer(), keySerializer,
keyTypes, valueSerializer, getTrackMode());
try {
rightSiblingBucket.setLeftSibling(rightBucketEntry.getPageIndex());
logPageChanges(rightSiblingBucket, fileId, rightSiblingPageIndex, false);
rightSiblingBucketEntry.markDirty();
} finally {
rightSiblingPointer.releaseExclusiveLock();
diskCache.release(rightSiblingBucketEntry);
}
}
}
long parentIndex = path.get(path.size() - 2);
OCacheEntry parentCacheEntry = diskCache.load(fileId, parentIndex, false);
OCachePointer parentPointer = parentCacheEntry.getCachePointer();
parentPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> parentBucket = new OSBTreeBucket<K, V>(parentPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, getTrackMode());
OSBTreeBucket.SBTreeEntry<K, V> parentEntry = new OSBTreeBucket.SBTreeEntry<K, V>(pageIndex,
rightBucketEntry.getPageIndex(), separationKey, null);
int insertionIndex = parentBucket.find(separationKey);
assert insertionIndex < 0;
insertionIndex = -insertionIndex - 1;
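// If the parent has no room for the promoted key, release it and split it recursively;
// the loop then retries the insertion against the parent bucket returned by that split.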
while (!parentBucket.addEntry(insertionIndex, parentEntry, true)) {
parentPointer.releaseExclusiveLock();
diskCache.release(parentCacheEntry);
BucketSearchResult bucketSearchResult = splitBucket(path.subList(0, path.size() - 1), insertionIndex, separationKey);
parentIndex = bucketSearchResult.getLastPathItem();
parentCacheEntry = diskCache.load(fileId, parentIndex, false);
parentPointer = parentCacheEntry.getCachePointer();
parentPointer.acquireExclusiveLock();
insertionIndex = bucketSearchResult.itemIndex;
parentBucket = new OSBTreeBucket<K, V>(parentPointer.getDataPointer(), keySerializer, keyTypes, valueSerializer,
getTrackMode());
}
logPageChanges(parentBucket, fileId, parentIndex, false);
} finally {
parentCacheEntry.markDirty();
parentPointer.releaseExclusiveLock();
diskCache.release(parentCacheEntry);
}
logPageChanges(newRightBucket, fileId, rightBucketEntry.getPageIndex(), true);
} finally {
rightBucketEntry.markDirty();
rightBucketPointer.releaseExclusiveLock();
diskCache.release(rightBucketEntry);
}
logPageChanges(bucketToSplit, fileId, pageIndex, false);
ArrayList<Long> resultPath = new ArrayList<Long>(path.subList(0, path.size() - 1));
if (comparator.compare(keyToInsert, separationKey) < 0) {
resultPath.add(pageIndex);
return new BucketSearchResult(keyIndex, resultPath);
}
resultPath.add(rightBucketEntry.getPageIndex());
if (splitLeaf) {
return new BucketSearchResult(keyIndex - indexToSplit, resultPath);
}
return new BucketSearchResult(keyIndex - indexToSplit - 1, resultPath);
} else {
final long freeListPage = bucketToSplit.getValuesFreeListFirstIndex();
final long treeSize = bucketToSplit.getTreeSize();
final byte keySerializeId = bucketToSplit.getKeySerializerId();
final byte valueSerializerId = bucketToSplit.getValueSerializerId();
final List<OSBTreeBucket.SBTreeEntry<K, V>> leftEntries = new ArrayList<OSBTreeBucket.SBTreeEntry<K, V>>(indexToSplit);
for (int i = 0; i < indexToSplit; i++)
leftEntries.add(bucketToSplit.getEntry(i));
OCacheEntry leftBucketEntry = diskCache.allocateNewPage(fileId);
OCachePointer leftBucketPointer = leftBucketEntry.getCachePointer();
OCacheEntry rightBucketEntry = diskCache.allocateNewPage(fileId);
leftBucketPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> newLeftBucket = new OSBTreeBucket<K, V>(leftBucketPointer.getDataPointer(), splitLeaf, keySerializer,
keyTypes, valueSerializer, getTrackMode());
newLeftBucket.addAll(leftEntries);
if (splitLeaf)
newLeftBucket.setRightSibling(rightBucketEntry.getPageIndex());
logPageChanges(newLeftBucket, fileId, leftBucketEntry.getPageIndex(), true);
leftBucketEntry.markDirty();
} finally {
leftBucketPointer.releaseExclusiveLock();
diskCache.release(leftBucketEntry);
}
OCachePointer rightBucketPointer = rightBucketEntry.getCachePointer();
rightBucketPointer.acquireExclusiveLock();
try {
OSBTreeBucket<K, V> newRightBucket = new OSBTreeBucket<K, V>(rightBucketPointer.getDataPointer(), splitLeaf,
keySerializer, keyTypes, valueSerializer, getTrackMode());
newRightBucket.addAll(rightEntries);
if (splitLeaf)
newRightBucket.setLeftSibling(leftBucketEntry.getPageIndex());
logPageChanges(newRightBucket, fileId, rightBucketEntry.getPageIndex(), true);
rightBucketEntry.markDirty();
} finally {
rightBucketPointer.releaseExclusiveLock();
diskCache.release(rightBucketEntry);
}
bucketToSplit = new OSBTreeBucket<K, V>(bucketPointer.getDataPointer(), false, keySerializer, keyTypes, valueSerializer,
getTrackMode());
bucketToSplit.setTreeSize(treeSize);
bucketToSplit.setKeySerializerId(keySerializeId);
bucketToSplit.setValueSerializerId(valueSerializerId);
bucketToSplit.setValuesFreeListFirstIndex(freeListPage);
bucketToSplit.addEntry(0,
new OSBTreeBucket.SBTreeEntry<K, V>(leftBucketEntry.getPageIndex(), rightBucketEntry.getPageIndex(), separationKey,
null), true);
logPageChanges(bucketToSplit, fileId, pageIndex, false);
ArrayList<Long> resultPath = new ArrayList<Long>(path.subList(0, path.size() - 1));
if (comparator.compare(keyToInsert, separationKey) < 0) {
resultPath.add(leftBucketEntry.getPageIndex());
return new BucketSearchResult(keyIndex, resultPath);
}
resultPath.add(rightBucketEntry.getPageIndex());
if (splitLeaf)
return new BucketSearchResult(keyIndex - indexToSplit, resultPath);
return new BucketSearchResult(keyIndex - indexToSplit - 1, resultPath);
}
} finally {
bucketEntry.markDirty();
bucketPointer.releaseExclusiveLock();
diskCache.release(bucketEntry);
}
}
private BucketSearchResult findBucket(K key, PartialSearchMode partialSearchMode) throws IOException {
long pageIndex = ROOT_INDEX;
final ArrayList<Long> path = new ArrayList<Long>();
if (!(keySize == 1 || ((OCompositeKey) key).getKeys().size() == keySize || partialSearchMode.equals(PartialSearchMode.NONE))) {
final OCompositeKey fullKey = new OCompositeKey((Comparable<? super K>) key);
int itemsToAdd = keySize - fullKey.getKeys().size();
final Comparable<?> keyItem;
if (partialSearchMode.equals(PartialSearchMode.HIGHEST_BOUNDARY))
keyItem = ALWAYS_GREATER_KEY;
else
keyItem = ALWAYS_LESS_KEY;
for (int i = 0; i < itemsToAdd; i++)
fullKey.addKey(keyItem);
key = (K) fullKey;
}
while (true) {
path.add(pageIndex);
final OCacheEntry bucketEntry = diskCache.load(fileId, pageIndex, false);
final OCachePointer bucketPointer = bucketEntry.getCachePointer();
final OSBTreeBucket.SBTreeEntry<K, V> entry;
try {
final OSBTreeBucket<K, V> keyBucket = new OSBTreeBucket<K, V>(bucketPointer.getDataPointer(), keySerializer, keyTypes,
valueSerializer, ODurablePage.TrackMode.NONE);
final int index = keyBucket.find(key);
if (keyBucket.isLeaf())
return new BucketSearchResult(index, path);
if (index >= 0)
entry = keyBucket.getEntry(index);
else {
final int insertionIndex = -index - 1;
if (insertionIndex >= keyBucket.size())
entry = keyBucket.getEntry(insertionIndex - 1);
else
entry = keyBucket.getEntry(insertionIndex);
}
} finally {
diskCache.release(bucketEntry);
}
if (comparator.compare(key, entry.key) >= 0)
pageIndex = entry.rightChild;
else
pageIndex = entry.leftChild;
}
}
private V readValue(OSBTreeValue<V> sbTreeValue) throws IOException {
if (!sbTreeValue.isLink())
return sbTreeValue.getValue();
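// Large values are stored as a chain of OSBTreeValuePage pages; walk the chain and
// concatenate the raw bytes before deserializing the value.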
OCacheEntry cacheEntry = diskCache.load(fileId, sbTreeValue.getLink(), false);
OCachePointer cachePointer = cacheEntry.getCachePointer();
OSBTreeValuePage valuePage = new OSBTreeValuePage(cachePointer.getDataPointer(), ODurablePage.TrackMode.NONE, false);
int totalSize = valuePage.getSize();
int currentSize = 0;
byte[] value = new byte[totalSize];
while (currentSize < totalSize) {
currentSize = valuePage.readBinaryContent(value, currentSize);
long nextPage = valuePage.getNextPage();
if (nextPage >= 0) {
diskCache.release(cacheEntry);
cacheEntry = diskCache.load(fileId, nextPage, false);
cachePointer = cacheEntry.getCachePointer();
valuePage = new OSBTreeValuePage(cachePointer.getDataPointer(), ODurablePage.TrackMode.NONE, false);
}
}
diskCache.release(cacheEntry);
return valueSerializer.deserializeNative(value, 0);
}
private Map.Entry<K, V> convertToMapEntry(OSBTreeBucket.SBTreeEntry<K, V> treeEntry) throws IOException {
final K key = treeEntry.key;
final V value = readValue(treeEntry.value);
return new Map.Entry<K, V>() {
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return value;
}
@Override
public V setValue(V value) {
throw new UnsupportedOperationException("setValue");
}
};
}
private static class BucketSearchResult {
private final int itemIndex;
private final ArrayList<Long> path;
private BucketSearchResult(int itemIndex, ArrayList<Long> path) {
this.itemIndex = itemIndex;
this.path = path;
}
public long getLastPathItem() {
return path.get(path.size() - 1);
}
}
/**
* Indicates search behavior when {@link OCompositeKey} keys containing fewer internal keys
* than the index defines are used: whether the lowest or the highest partially matched key
* should be used.
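* For example, assuming a composite index over two fields, searching the partial key (a)
* with HIGHEST_BOUNDARY behaves like searching the full key (a, ALWAYS_GREATER_KEY), and
* with LOWEST_BOUNDARY like (a, ALWAYS_LESS_KEY) (see the padding logic in findBucket).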
*/
private static enum PartialSearchMode {
/**
* Any partially matched key will be used as search result.
*/
NONE,
/**
* The biggest partially matched key will be used as search result.
*/
HIGHEST_BOUNDARY,
/**
* The smallest partially matched key will be used as search result.
*/
LOWEST_BOUNDARY
}
private static final class PagePathItemUnit {
private final long pageIndex;
private final int itemIndex;
private PagePathItemUnit(long pageIndex, int itemIndex) {
this.pageIndex = pageIndex;
this.itemIndex = itemIndex;
}
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_sbtree_local_OSBTree.java |
2,822 | public class BrazilianStemTokenFilterFactory extends AbstractTokenFilterFactory {
private final CharArraySet exclusions;
@Inject
public BrazilianStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
this.exclusions = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new BrazilianStemFilter(new SetKeywordMarkerFilter(tokenStream, exclusions));
}
} | 0true
| src_main_java_org_elasticsearch_index_analysis_BrazilianStemTokenFilterFactory.java |
1,443 | public static class ShardRestoreStatus {
private State state;
private String nodeId;
private String reason;
private ShardRestoreStatus() {
}
/**
* Constructs a new shard restore status in initializing state on the given node
*
* @param nodeId node id
*/
public ShardRestoreStatus(String nodeId) {
this(nodeId, State.INIT);
}
/**
* Constructs a new shard restore status with the specified state on the given node
*
* @param nodeId node id
* @param state restore state
*/
public ShardRestoreStatus(String nodeId, State state) {
this(nodeId, state, null);
}
/**
* Constructs a new shard restore status with the specified state and failure reason on the given node
*
* @param nodeId node id
* @param state restore state
* @param reason failure reason
*/
public ShardRestoreStatus(String nodeId, State state, String reason) {
this.nodeId = nodeId;
this.state = state;
this.reason = reason;
}
/**
* Returns current state
*
* @return current state
*/
public State state() {
return state;
}
/**
* Returns node id of the node where the shard is being restored
*
* @return node id
*/
public String nodeId() {
return nodeId;
}
/**
* Returns failure reason
*
* @return failure reason
*/
public String reason() {
return reason;
}
/**
* Reads restore status from stream input
*
* @param in stream input
* @return restore status
* @throws IOException
*/
public static ShardRestoreStatus readShardRestoreStatus(StreamInput in) throws IOException {
ShardRestoreStatus shardSnapshotStatus = new ShardRestoreStatus();
shardSnapshotStatus.readFrom(in);
return shardSnapshotStatus;
}
/**
* Reads restore status from stream input
*
* @param in stream input
* @throws IOException
*/
public void readFrom(StreamInput in) throws IOException {
nodeId = in.readOptionalString();
state = State.fromValue(in.readByte());
reason = in.readOptionalString();
}
/**
* Writes restore status to stream output
*
* @param out stream output
* @throws IOException
*/
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(nodeId);
out.writeByte(state.value);
out.writeOptionalString(reason);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ShardRestoreStatus status = (ShardRestoreStatus) o;
if (nodeId != null ? !nodeId.equals(status.nodeId) : status.nodeId != null) return false;
if (reason != null ? !reason.equals(status.reason) : status.reason != null) return false;
if (state != status.state) return false;
return true;
}
@Override
public int hashCode() {
int result = state != null ? state.hashCode() : 0;
result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0);
result = 31 * result + (reason != null ? reason.hashCode() : 0);
return result;
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_RestoreMetaData.java |
837 | private static class AppendFunction implements IFunction<String, String> {
private String add;
private AppendFunction(String add) {
this.add = add;
}
@Override
public String apply(String input) {
return input + add;
}
} | 0true
| hazelcast_src_test_java_com_hazelcast_concurrent_atomicreference_AtomicReferenceTest.java |
1,445 | public class SnapshotId implements Serializable, Streamable {
private String repository;
private String snapshot;
// Caching hash code
private int hashCode;
private SnapshotId() {
}
/**
* Constructs new snapshot id
*
* @param repository repository name
* @param snapshot snapshot name
*/
public SnapshotId(String repository, String snapshot) {
this.repository = repository;
this.snapshot = snapshot;
this.hashCode = computeHashCode();
}
/**
* Returns repository name
*
* @return repository name
*/
public String getRepository() {
return repository;
}
/**
* Returns snapshot name
*
* @return snapshot name
*/
public String getSnapshot() {
return snapshot;
}
@Override
public String toString() {
return repository + ":" + snapshot;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null) return false;
SnapshotId snapshotId = (SnapshotId) o;
return snapshot.equals(snapshotId.snapshot) && repository.equals(snapshotId.repository);
}
@Override
public int hashCode() {
return hashCode;
}
private int computeHashCode() {
int result = repository != null ? repository.hashCode() : 0;
result = 31 * result + snapshot.hashCode();
return result;
}
/**
* Reads snapshot id from stream input
*
* @param in stream input
* @return snapshot id
* @throws IOException
*/
public static SnapshotId readSnapshotId(StreamInput in) throws IOException {
SnapshotId snapshot = new SnapshotId();
snapshot.readFrom(in);
return snapshot;
}
/**
* {@inheritDoc}
*/
@Override
public void readFrom(StreamInput in) throws IOException {
repository = in.readString();
snapshot = in.readString();
hashCode = computeHashCode();
}
/**
* {@inheritDoc}
*/
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(repository);
out.writeString(snapshot);
}
} | 0true
| src_main_java_org_elasticsearch_cluster_metadata_SnapshotId.java |
790 | new ConstructorFunction<String, LongWrapper>() {
public LongWrapper createNew(String key) {
return new LongWrapper();
}
}; | 0true
| hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_AtomicLongService.java |
1,627 | public class OConfigurableHooksManager implements ODatabaseLifecycleListener {
private List<OServerHookConfiguration> configuredHooks;
public OConfigurableHooksManager(final OServerConfiguration iCfg) {
configuredHooks = iCfg.hooks;
if (configuredHooks != null && !configuredHooks.isEmpty())
Orient.instance().addDbLifecycleListener(this);
}
@Override
public void onCreate(final ODatabase iDatabase) {
onOpen(iDatabase);
}
public void onOpen(ODatabase iDatabase) {
if (iDatabase instanceof ODatabaseComplex) {
final ODatabaseComplex<?> db = (ODatabaseComplex<?>) iDatabase;
for (OServerHookConfiguration hook : configuredHooks) {
try {
final ORecordHook.HOOK_POSITION pos = ORecordHook.HOOK_POSITION.valueOf(hook.position);
final ORecordHook h = (ORecordHook) Class.forName(hook.clazz).newInstance();
if (hook.parameters != null && hook.parameters.length > 0)
try {
final Method m = h.getClass().getDeclaredMethod("config", new Class[] { OServerParameterConfiguration[].class });
m.invoke(h, new Object[] { hook.parameters });
} catch (Exception e) {
OLogManager
.instance()
.warn(
this,
"[configure] Failed to configure hook '%s'. Parameters specified but hook don support parameters. Should have a method config with parameters OServerParameterConfiguration[] ",
hook.clazz);
}
db.registerHook(h, pos);
} catch (Exception e) {
e.printStackTrace();
OLogManager.instance().error(this, "[configure] Failed to configure hook '%s' due to the an error : ", hook.clazz,
e.getMessage());
}
}
}
}
public void onClose(ODatabase iDatabase) {
}
public String getName() {
return "HookRegisters";
}
} | 0true
| server_src_main_java_com_orientechnologies_orient_server_handler_OConfigurableHooksManager.java |
1,555 | public class VertexMap {
public static final String IDS = Tokens.makeNamespace(VertexMap.class) + ".ids";
public enum Counters {
VERTICES_PROCESSED
}
public static Configuration createConfiguration(final long... ids) {
final String[] idStrings = new String[ids.length];
for (int i = 0; i < ids.length; i++) {
idStrings[i] = String.valueOf(ids[i]);
}
final Configuration configuration = new EmptyConfiguration();
configuration.setStrings(IDS, idStrings);
return configuration;
}
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private Collection<Long> ids;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
//todo: make as list and double up repeats
this.ids = VertexMap.Map.getLongCollection(context.getConfiguration(), IDS, new HashSet<Long>());
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.ids.contains(value.getLongId())) {
value.startPath();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
} else {
value.clearPaths();
}
context.write(NullWritable.get(), value);
}
private static Collection<Long> getLongCollection(final Configuration conf, final String key, final Collection<Long> collection) {
for (final String value : conf.getStrings(key)) {
collection.add(Long.valueOf(value));
}
return collection;
}
}
} | 1no label
| titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_VertexMap.java |
2,825 | public class CJKFilterFactoryTests extends ElasticsearchTokenStreamTestCase {
private static final String RESOURCE = "org/elasticsearch/index/analysis/cjk_analysis.json";
@Test
public void testDefault() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_bigram");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" };
Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
@Test
public void testNoFlags() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_no_flags");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多く", "くの", "の学", "学生", "生が", "が試", "試験", "験に", "に落", "落ち", "ちた" };
Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
@Test
public void testHanOnly() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_only");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多", "く", "の", "学生", "が", "試験", "に", "落", "ち", "た" };
Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
@Test
public void testHanUnigramOnly() throws IOException {
AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromClassPath(RESOURCE);
TokenFilterFactory tokenFilter = analysisService.tokenFilter("cjk_han_unigram_only");
String source = "多くの学生が試験に落ちた。";
String[] expected = new String[]{"多", "く", "の", "学", "学生", "生", "が", "試", "試験", "験", "に", "落", "ち", "た" };
Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(source));
assertTokenStreamContents(tokenFilter.create(tokenizer), expected);
}
} | 0true
| src_test_java_org_elasticsearch_index_analysis_CJKFilterFactoryTests.java |
516 | public interface TimeSource {
long timeInMillis();
} | 0true
| common_src_main_java_org_broadleafcommerce_common_time_TimeSource.java |
116 | private static final class FindTypeVisitor extends Visitor {
private final IRegion region;
Tree.Type result;
private FindTypeVisitor(IRegion region) {
this.region = region;
}
@Override
public void visit(Tree.Type that) {
super.visit(that);
Integer start = that.getStartIndex();
Integer stop = that.getStopIndex();
if (start!=null && stop!=null &&
region.getOffset()<=start &&
region.getOffset()+region.getLength()>=stop+1) {
result = that;
}
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ExpandTypeProposal.java |
749 | public class CheckoutException extends BroadleafException {
private static final long serialVersionUID = 1L;
private CheckoutResponse checkoutResponse;
public CheckoutException() {
super();
}
public CheckoutException(String message, CheckoutSeed seed) {
super(message);
checkoutResponse = seed;
}
public CheckoutException(Throwable cause, CheckoutSeed seed) {
super(cause);
checkoutResponse = seed;
}
public CheckoutException(String message, Throwable cause, CheckoutSeed seed) {
super(message, cause);
checkoutResponse = seed;
}
public CheckoutResponse getCheckoutResponse() {
return checkoutResponse;
}
public void setCheckoutResponse(CheckoutResponse checkoutResponse) {
this.checkoutResponse = checkoutResponse;
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_checkout_service_exception_CheckoutException.java |
1,350 | private class NodeIndexStoreDeletedTransportHandler extends BaseTransportRequestHandler<NodeIndexStoreDeletedMessage> {
static final String ACTION = "cluster/nodeIndexStoreDeleted";
@Override
public NodeIndexStoreDeletedMessage newInstance() {
return new NodeIndexStoreDeletedMessage();
}
@Override
public void messageReceived(NodeIndexStoreDeletedMessage message, TransportChannel channel) throws Exception {
innerNodeIndexStoreDeleted(message.index, message.nodeId);
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
} | 0true
| src_main_java_org_elasticsearch_cluster_action_index_NodeIndexDeletedAction.java |
871 | @Service("blShippingOfferService")
public class ShippingOfferServiceImpl implements ShippingOfferService {
@Resource(name="blOfferService")
protected OfferService offerService;
public void reviewOffers(Order order) throws PricingException {
List<Offer> offers = offerService.buildOfferListForOrder(order);
offerService.applyFulfillmentGroupOffersToOrder(offers, order);
}
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_ShippingOfferServiceImpl.java |
520 | public class BLCArrayUtils {
/**
* Given an array and a typed predicate, determines if the array has an object that matches the condition of the
* predicate. The predicate should evaluate to true when a match occurs.
*
* @param array
* @param predicate
* @return whether or not the array contains an element that matches the predicate
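* <p>Illustrative usage (the array and predicate names here are hypothetical):
* {@code contains(names, new TypedPredicate<String>() { public boolean evaluate(String s) { return s.startsWith("A"); } })}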
*/
public static <T> boolean contains(T[] array, TypedPredicate<T> predicate) {
for (T o : array) {
if (predicate.evaluate(o)) {
return true;
}
}
return false;
}
/**
* Given an input array, will return an ArrayList representation of the array.
*
* @param array
* @return the ArrayList corresponding to the input array, or null if the input is null or empty
*/
public static <T> ArrayList<T> asList(T[] array) {
if (array == null || array.length == 0) {
return null;
}
ArrayList<T> list = new ArrayList<T>(array.length);
for (T e : array) {
list.add(e);
}
return list;
}
/**
* Similar to the CollectionUtils collect except that it works on an array instead of a Java Collection
*
* @param array
* @param transformer
* @return the transformed collection
*/
public static <T, O> ArrayList<T> collect(Object[] array, TypedTransformer<T> transformer) {
ArrayList<T> list = new ArrayList<T>(array.length);
for (Object o : array) {
list.add(transformer.transform(o));
}
return list;
}
/**
* The same as {@link #collect(Object[], TypedTransformer)} but returns a set.
*
* @param array
* @param transformer
* @return the transformed set
*/
public static <T, O> HashSet<T> collectSet(Object[] array, TypedTransformer<T> transformer) {
HashSet<T> set = new HashSet<T>(array.length);
for (Object o : array) {
set.add(transformer.transform(o));
}
return set;
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_util_BLCArrayUtils.java |
11 | class BasicCompletionProposal extends CompletionProposal {
static void addImportProposal(int offset, String prefix,
CeylonParseController cpc, List<ICompletionProposal> result,
Declaration dec, Scope scope) {
result.add(new BasicCompletionProposal(offset, prefix,
dec.getName(), escapeName(dec), dec, cpc));
}
static void addDocLinkProposal(int offset, String prefix,
CeylonParseController cpc, List<ICompletionProposal> result,
Declaration dec, Scope scope) {
//for doc links, propose both aliases and unaliased qualified form
//we don't need to do this in code b/c there is no fully-qualified form
String name = dec.getName();
String aliasedName = dec.getName(cpc.getRootNode().getUnit());
if (!name.equals(aliasedName)) {
result.add(new BasicCompletionProposal(offset, prefix,
aliasedName, aliasedName, dec, cpc));
}
result.add(new BasicCompletionProposal(offset, prefix,
name, getTextForDocLink(cpc, dec), dec, cpc));
}
private final CeylonParseController cpc;
private final Declaration declaration;
private BasicCompletionProposal(int offset, String prefix,
String desc, String text, Declaration dec,
CeylonParseController cpc) {
super(offset, prefix, getImageForDeclaration(dec),
desc, text);
this.cpc = cpc;
this.declaration = dec;
}
public String getAdditionalProposalInfo() {
return getDocumentationFor(cpc, declaration);
}
} | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_BasicCompletionProposal.java |
1,035 | protected static class TestConfig {
final public TestDoc doc;
final public String[] selectedFields;
final public boolean requestPositions;
final public boolean requestOffsets;
final public boolean requestPayloads;
public Class expectedException = null;
public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) {
this.doc = doc;
this.selectedFields = selectedFields;
this.requestPositions = requestPositions;
this.requestOffsets = requestOffsets;
this.requestPayloads = requestPayloads;
}
public TestConfig expectedException(Class exceptionClass) {
this.expectedException = exceptionClass;
return this;
}
@Override
public String toString() {
String requested = "";
if (requestOffsets) {
requested += "offsets,";
}
if (requestPositions) {
requested += "position,";
}
if (requestPayloads) {
requested += "payload,";
}
Locale aLocale = new Locale("en", "US");
return String.format(aLocale, "(doc: %s\n requested: %s, fields: %s)", doc, requested,
selectedFields == null ? "NULL" : Join.join(",", selectedFields));
}
} | 0true
| src_test_java_org_elasticsearch_action_termvector_AbstractTermVectorTests.java |
871 | OSystemVariableResolver.VAR_END, new OVariableParserListener() {
@Override
public Object resolve(final String iVariable) {
return iContext.getVariable(iVariable);
}
}); | 0true
| core_src_main_java_com_orientechnologies_orient_core_processor_block_OAbstractBlock.java |
3,016 | public class NoneQueryParserCache extends AbstractIndexComponent implements QueryParserCache {
@Inject
public NoneQueryParserCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
}
@Override
public Query get(QueryParserSettings queryString) {
return null;
}
@Override
public void put(QueryParserSettings queryString, Query query) {
}
@Override
public void clear() {
}
@Override
public void close() throws ElasticsearchException {
}
} | 0true
| src_main_java_org_elasticsearch_index_cache_query_parser_none_NoneQueryParserCache.java |
4 | @Test
public class OCompositeKeyTest {
@Test
public void testEqualSameKeys() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
compositeKey.addKey("b");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("a");
anotherCompositeKey.addKey("b");
assertTrue(compositeKey.equals(anotherCompositeKey));
assertTrue(compositeKey.hashCode() == anotherCompositeKey.hashCode());
}
@Test
public void testEqualNotSameKeys() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
compositeKey.addKey("b");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("a");
anotherCompositeKey.addKey("b");
anotherCompositeKey.addKey("c");
assertFalse(compositeKey.equals(anotherCompositeKey));
}
@Test
public void testEqualNull() {
final OCompositeKey compositeKey = new OCompositeKey();
assertFalse(compositeKey.equals(null));
}
@Test
public void testEqualSame() {
final OCompositeKey compositeKey = new OCompositeKey();
assertTrue(compositeKey.equals(compositeKey));
}
@Test
public void testEqualDiffClass() {
final OCompositeKey compositeKey = new OCompositeKey();
assertFalse(compositeKey.equals("1"));
}
@Test
public void testAddKeyComparable() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
assertEquals(compositeKey.getKeys().size(), 1);
assertTrue(compositeKey.getKeys().contains("a"));
}
@Test
public void testAddKeyComposite() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
final OCompositeKey compositeKeyToAdd = new OCompositeKey();
compositeKeyToAdd.addKey("a");
compositeKeyToAdd.addKey("b");
compositeKey.addKey(compositeKeyToAdd);
assertEquals(compositeKey.getKeys().size(), 3);
assertTrue(compositeKey.getKeys().contains("a"));
assertTrue(compositeKey.getKeys().contains("b"));
}
@Test
public void testCompareToSame() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
compositeKey.addKey("b");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("a");
anotherCompositeKey.addKey("b");
assertEquals(compositeKey.compareTo(anotherCompositeKey), 0);
}
@Test
public void testCompareToPartiallyOneCase() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
compositeKey.addKey("b");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("a");
anotherCompositeKey.addKey("b");
anotherCompositeKey.addKey("c");
assertEquals(compositeKey.compareTo(anotherCompositeKey), 0);
}
@Test
public void testCompareToPartiallySecondCase() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
compositeKey.addKey("b");
compositeKey.addKey("c");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("a");
anotherCompositeKey.addKey("b");
assertEquals(compositeKey.compareTo(anotherCompositeKey), 0);
}
@Test
public void testCompareToGT() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("b");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("a");
anotherCompositeKey.addKey("b");
assertEquals(compositeKey.compareTo(anotherCompositeKey), 1);
}
@Test
public void testCompareToLT() {
final OCompositeKey compositeKey = new OCompositeKey();
compositeKey.addKey("a");
compositeKey.addKey("b");
final OCompositeKey anotherCompositeKey = new OCompositeKey();
anotherCompositeKey.addKey("b");
assertEquals(compositeKey.compareTo(anotherCompositeKey), -1);
}
@Test
public void testCompareToSymmetryOne() {
final OCompositeKey compositeKeyOne = new OCompositeKey();
compositeKeyOne.addKey(1);
compositeKeyOne.addKey(2);
final OCompositeKey compositeKeyTwo = new OCompositeKey();
compositeKeyTwo.addKey(1);
compositeKeyTwo.addKey(3);
compositeKeyTwo.addKey(1);
assertEquals(compositeKeyOne.compareTo(compositeKeyTwo), -1);
assertEquals(compositeKeyTwo.compareTo(compositeKeyOne), 1);
}
@Test
public void testCompareToSymmetryTwo() {
final OCompositeKey compositeKeyOne = new OCompositeKey();
compositeKeyOne.addKey(1);
compositeKeyOne.addKey(2);
final OCompositeKey compositeKeyTwo = new OCompositeKey();
compositeKeyTwo.addKey(1);
compositeKeyTwo.addKey(2);
compositeKeyTwo.addKey(3);
assertEquals(compositeKeyOne.compareTo(compositeKeyTwo), 0);
assertEquals(compositeKeyTwo.compareTo(compositeKeyOne), 0);
}
} | 0true
| commons_src_test_java_com_orientechnologies_common_collection_OCompositeKeyTest.java |
1,585 | public class DynamicResultSet implements Serializable {
private static final long serialVersionUID = 1L;
private ClassMetadata classMetaData;
private Entity[] records;
private Integer pageSize;
private Integer startIndex;
private Integer totalRecords;
private Integer batchId;
public DynamicResultSet() {
//do nothing
}
public DynamicResultSet(ClassMetadata classMetaData, Entity[] records, Integer totalRecords) {
this.records = records;
this.classMetaData = classMetaData;
this.totalRecords = totalRecords;
}
public DynamicResultSet(Entity[] records, Integer totalRecords) {
this.records = records;
this.totalRecords = totalRecords;
}
public DynamicResultSet(ClassMetadata classMetaData) {
this.classMetaData = classMetaData;
}
public ClassMetadata getClassMetaData() {
return classMetaData;
}
public void setClassMetaData(ClassMetadata classMetaData) {
this.classMetaData = classMetaData;
}
public Integer getTotalRecords() {
return totalRecords;
}
public void setTotalRecords(Integer totalRecords) {
this.totalRecords = totalRecords;
}
public Entity[] getRecords() {
return records;
}
public void setRecords(Entity[] records) {
this.records = records;
}
public Integer getBatchId() {
return batchId;
}
public void setBatchId(Integer batchId) {
this.batchId = batchId;
}
public Integer getStartIndex() {
return startIndex;
}
public void setStartIndex(Integer startIndex) {
this.startIndex = startIndex;
}
public Integer getPageSize() {
return pageSize;
}
public void setPageSize(Integer pageSize) {
this.pageSize = pageSize;
}
} | 0true
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_DynamicResultSet.java |
610 | Runnable recreateIndexesTask = new Runnable() {
@Override
public void run() {
try {
// START IT IN BACKGROUND
newDb.setProperty(ODatabase.OPTIONS.SECURITY.toString(), Boolean.FALSE);
newDb.open("admin", "nopass");
ODatabaseRecordThreadLocal.INSTANCE.set(newDb);
try {
// DROP AND RE-CREATE 'INDEX' DATA-SEGMENT AND CLUSTER IF ANY
final int dataId = newDb.getStorage().getDataSegmentIdByName(OMetadataDefault.DATASEGMENT_INDEX_NAME);
if (dataId > -1)
newDb.getStorage().dropDataSegment(OMetadataDefault.DATASEGMENT_INDEX_NAME);
final int clusterId = newDb.getStorage().getClusterIdByName(OMetadataDefault.CLUSTER_INDEX_NAME);
if (clusterId > -1)
newDb.dropCluster(clusterId, false);
newDb.addDataSegment(OMetadataDefault.DATASEGMENT_INDEX_NAME, null);
newDb.getStorage().addCluster(OClusterLocal.TYPE, OMetadataDefault.CLUSTER_INDEX_NAME, null,
OMetadataDefault.DATASEGMENT_INDEX_NAME, true);
} catch (IllegalArgumentException ex) {
// OLD DATABASE: CREATE SEPARATE DATASEGMENT AND LET THE INDEX CLUSTER TO POINT TO IT
OLogManager.instance().info(this, "Creating 'index' data-segment to store all the index content...");
newDb.addDataSegment(OMetadataDefault.DATASEGMENT_INDEX_NAME, null);
final OCluster indexCluster = newDb.getStorage().getClusterById(
newDb.getStorage().getClusterIdByName(OMetadataDefault.CLUSTER_INDEX_NAME));
try {
indexCluster.set(ATTRIBUTES.DATASEGMENT, OMetadataDefault.DATASEGMENT_INDEX_NAME);
OLogManager.instance().info(this,
"Data-segment 'index' create correctly. Indexes will store content into this data-segment");
} catch (IOException e) {
OLogManager.instance().error(this, "Error changing data segment for cluster 'index'", e);
}
}
final Collection<ODocument> idxs = doc.field(CONFIG_INDEXES);
if (idxs == null) {
OLogManager.instance().warn(this, "List of indexes is empty.");
return;
}
int ok = 0;
int errors = 0;
for (ODocument idx : idxs) {
try {
String indexType = idx.field(OIndexInternal.CONFIG_TYPE);
String algorithm = idx.field(OIndexInternal.ALGORITHM);
String valueContainerAlgorithm = idx.field(OIndexInternal.VALUE_CONTAINER_ALGORITHM);
if (indexType == null) {
OLogManager.instance().error(this, "Index type is null, will process other record.");
errors++;
continue;
}
final OIndexInternal<?> index = OIndexes.createIndex(newDb, indexType, algorithm, valueContainerAlgorithm);
OIndexInternal.IndexMetadata indexMetadata = index.loadMetadata(idx);
OIndexDefinition indexDefinition = indexMetadata.getIndexDefinition();
if (indexDefinition == null || !indexDefinition.isAutomatic()) {
OLogManager.instance().info(this, "Index %s is not automatic index and will be added as is.",
indexMetadata.getName());
if (index.loadFromConfiguration(idx)) {
addIndexInternal(index);
setDirty();
save();
ok++;
} else {
getDatabase().unregisterListener(index.getInternal());
index.delete();
errors++;
}
OLogManager.instance().info(this, "Index %s was added in DB index list.", index.getName());
} else {
String indexName = indexMetadata.getName();
Set<String> clusters = indexMetadata.getClustersToIndex();
String type = indexMetadata.getType();
if (indexName != null && indexDefinition != null && clusters != null && !clusters.isEmpty() && type != null) {
OLogManager.instance().info(this, "Start creation of index %s", indexName);
if (algorithm.equals(ODefaultIndexFactory.SBTREE_ALGORITHM) || indexType.endsWith("HASH_INDEX"))
index.deleteWithoutIndexLoad(indexName);
index.create(indexName, indexDefinition, defaultClusterName, clusters, false, new OIndexRebuildOutputListener(
index));
index.setRebuildingFlag();
addIndexInternal(index);
OLogManager.instance().info(this, "Index %s was successfully created and rebuild is going to be started.",
indexName);
index.rebuild(new OIndexRebuildOutputListener(index));
index.flush();
setDirty();
save();
ok++;
OLogManager.instance().info(this, "Rebuild of %s index was successfully finished.", indexName);
} else {
errors++;
OLogManager.instance().error(
this,
"Information about index was restored incorrectly, following data were loaded : "
+ "index name - %s, index definition %s, clusters %s, type %s.", indexName, indexDefinition, clusters,
type);
}
}
} catch (Exception e) {
OLogManager.instance().error(this, "Error during addition of index %s", e, idx);
errors++;
}
}
rebuildCompleted = true;
newDb.close();
OLogManager.instance().info(this, "%d indexes were restored successfully, %d errors", ok, errors);
} catch (Exception e) {
OLogManager.instance().error(this, "Error when attempt to restore indexes after crash was performed.", e);
}
}
}; | 1no label
| core_src_main_java_com_orientechnologies_orient_core_index_OIndexManagerShared.java |
1,306 | public interface ClusterStateUpdateTask {
/**
* Update the cluster state based on the current state. Return the *same instance* if no state
* should be changed.
*/
ClusterState execute(ClusterState currentState) throws Exception;
/**
* A callback called when execute fails.
*/
void onFailure(String source, Throwable t);
} | 0true
| src_main_java_org_elasticsearch_cluster_ClusterStateUpdateTask.java |
508 | public class OClassDictionary {
private static final OClassDictionary instance = new OClassDictionary();
public Class<?> getClassByCode(final char iType) {
switch (iType) {
case '0':
return ODocument.class;
// case '1':
// return ORecordColumn.class;
case '2':
return ORecordFlat.class;
case '3':
return ORecordBytes.class;
case '4':
return OClass.class;
case '5':
return OProperty.class;
case '6':
return OUser.class;
case '7':
return OStorageConfiguration.class;
case '8':
return OStoragePhysicalClusterConfigurationLocal.class;
case '9':
return OStorageDataConfiguration.class;
case 'a':
return OStorageClusterHoleConfiguration.class;
case 'b':
return OStorageDataHoleConfiguration.class;
case 'c':
return OStorageSegmentConfiguration.class;
case 'd':
return OStorageFileConfiguration.class;
case 'f':
return OStoragePhysicalClusterConfigurationLocal.class;
}
throw new OConfigurationException("Unsupported record type: " + iType);
}
public Character getCodeByClass(final Class<?> iClass) {
if (iClass.equals(ODocument.class))
return '0';
// if (iClass.equals(ORecordColumn.class))
// return '1';
if (iClass.equals(ORecordFlat.class))
return '2';
if (iClass.equals(ORecordBytes.class))
return '3';
if (iClass.equals(OClass.class))
return '4';
if (iClass.equals(OProperty.class))
return '5';
if (iClass.equals(OUser.class))
return '6';
if (iClass.equals(OStorageConfiguration.class))
return '7';
if (iClass.equals(OStoragePhysicalClusterConfigurationLocal.class))
return '8';
if (iClass.equals(OStorageDataConfiguration.class))
return '9';
if (iClass.equals(OStorageClusterHoleConfiguration.class))
return 'a';
if (iClass.equals(OStorageDataHoleConfiguration.class))
return 'b';
if (iClass.equals(OStorageSegmentConfiguration.class))
return 'c';
if (iClass.equals(OStorageFileConfiguration.class))
return 'd';
if (iClass.equals(OStoragePhysicalClusterConfigurationLocal.class))
return 'f';
return null;
}
public static OClassDictionary instance() {
return instance;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_entity_OClassDictionary.java |
495 | private static class LinkRewriter implements FieldRewriter<ORID> {
private final OIndex<OIdentifiable> exportImportHashTable;
private LinkRewriter(OIndex<OIdentifiable> exportImportHashTable) {
this.exportImportHashTable = exportImportHashTable;
}
@Override
public ORID rewriteValue(ORID value) {
if (!value.isPersistent())
return null;
final OIdentifiable result = exportImportHashTable.get(value);
return result != null ? result.getIdentity() : null;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseImport.java |
1,777 | public class FieldManager {
private static final Log LOG = LogFactory.getLog(FieldManager.class);
public static final String MAPFIELDSEPARATOR = "---";
protected EntityConfiguration entityConfiguration;
protected DynamicEntityDao dynamicEntityDao;
protected List<SortableValue> middleFields = new ArrayList<SortableValue>(5);
public FieldManager(EntityConfiguration entityConfiguration, DynamicEntityDao dynamicEntityDao) {
this.entityConfiguration = entityConfiguration;
this.dynamicEntityDao = dynamicEntityDao;
}
public static Field getSingleField(Class<?> clazz, String fieldName) throws IllegalStateException {
try {
return clazz.getDeclaredField(fieldName);
} catch (NoSuchFieldException nsf) {
// Try superclass
if (clazz.getSuperclass() != null) {
return getSingleField(clazz.getSuperclass(), fieldName);
}
return null;
}
}
public Field getField(Class<?> clazz, String fieldName) throws IllegalStateException {
String[] tokens = fieldName.split("\\.");
Field field = null;
for (int j=0;j<tokens.length;j++) {
String propertyName = tokens[j];
field = getSingleField(clazz, propertyName);
if (field != null && j < tokens.length - 1) {
Class<?>[] entities = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(field.getType());
if (entities.length > 0) {
String peekAheadToken = tokens[j+1];
List<Class<?>> matchedClasses = new ArrayList<Class<?>>();
for (Class<?> entity : entities) {
Field peekAheadField = null;
try {
peekAheadField = entity.getDeclaredField(peekAheadToken);
} catch (NoSuchFieldException nsf) {
//do nothing
}
if (peekAheadField != null) {
matchedClasses.add(entity);
}
}
if (matchedClasses.size() > 1) {
LOG.warn("Found the property (" + peekAheadToken + ") in more than one class of an inheritance hierarchy. This may lead to unwanted behavior, as the system does not know which class was intended. Do not use the same property name in different levels of the inheritance hierarchy. Defaulting to the first class found (" + matchedClasses.get(0).getName() + ")");
}
if (getSingleField(matchedClasses.get(0), peekAheadToken) != null) {
clazz = matchedClasses.get(0);
PersistentClass persistentClass = dynamicEntityDao.getPersistentClass(clazz.getName());
if (persistentClass != null && matchedClasses.size() == 1 && clazz.isInterface()) {
try {
clazz = entityConfiguration.lookupEntityClass(field.getType().getName());
} catch (Exception e) {
// Do nothing - we'll use the matchedClass
}
}
} else {
clazz = field.getType();
}
} else {
//may be an embedded class - try the class directly
clazz = field.getType();
}
} else {
break;
}
}
if (field != null) {
field.setAccessible(true);
}
return field;
}
public Object getFieldValue(Object bean, String fieldName) throws IllegalAccessException, FieldNotAvailableException {
StringTokenizer tokens = new StringTokenizer(fieldName, ".");
Class<?> componentClass = bean.getClass();
Field field;
Object value = bean;
while (tokens.hasMoreTokens()) {
String fieldNamePart = tokens.nextToken();
String mapKey = null;
if (fieldNamePart.contains(FieldManager.MAPFIELDSEPARATOR)) {
mapKey = fieldNamePart.substring(fieldNamePart.indexOf(FieldManager.MAPFIELDSEPARATOR) + FieldManager.MAPFIELDSEPARATOR.length(), fieldNamePart.length());
fieldNamePart = fieldNamePart.substring(0, fieldNamePart.indexOf(FieldManager.MAPFIELDSEPARATOR));
}
field = getSingleField(componentClass, fieldNamePart);
if (field != null) {
field.setAccessible(true);
value = field.get(value);
if (value != null && mapKey != null) {
value = ((Map) value).get(mapKey);
}
if (value != null) {
componentClass = value.getClass();
} else {
break;
}
} else {
throw new FieldNotAvailableException("Unable to find field (" + fieldNamePart + ") on the class (" + componentClass + ")");
}
}
return value;
}
public Object setFieldValue(Object bean, String fieldName, Object newValue) throws IllegalAccessException, InstantiationException {
StringTokenizer tokens = new StringTokenizer(fieldName, ".");
Class<?> componentClass = bean.getClass();
Field field;
Object value = bean;
int count = tokens.countTokens();
int j=0;
StringBuilder sb = new StringBuilder();
while (tokens.hasMoreTokens()) {
String fieldNamePart = tokens.nextToken();
sb.append(fieldNamePart);
String mapKey = null;
if (fieldNamePart.contains(FieldManager.MAPFIELDSEPARATOR)) {
mapKey = fieldNamePart.substring(fieldNamePart.indexOf(FieldManager.MAPFIELDSEPARATOR) + FieldManager.MAPFIELDSEPARATOR.length(), fieldNamePart.length());
fieldNamePart = fieldNamePart.substring(0, fieldNamePart.indexOf(FieldManager.MAPFIELDSEPARATOR));
}
field = getSingleField(componentClass, fieldNamePart);
field.setAccessible(true);
if (j == count - 1) {
if (mapKey != null) {
Map map = (Map) field.get(value);
if (newValue == null) {
map.remove(mapKey);
} else {
map.put(mapKey, newValue);
}
} else {
field.set(value, newValue);
}
} else {
Object myValue = field.get(value);
if (myValue != null) {
componentClass = myValue.getClass();
value = myValue;
} else {
//consult the entity configuration manager to see if there is a user
//configured entity for this class
try {
Object newEntity = entityConfiguration.createEntityInstance(field.getType().getName());
SortableValue val = new SortableValue(bean, (Serializable) newEntity, j, sb.toString());
middleFields.add(val);
field.set(value, newEntity);
componentClass = newEntity.getClass();
value = newEntity;
} catch (Exception e) {
//Use the most extended type based on the field type
Class<?>[] entities = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(field.getType());
if (!ArrayUtils.isEmpty(entities)) {
Object newEntity = entities[0].newInstance();
SortableValue val = new SortableValue(bean, (Serializable) newEntity, j, sb.toString());
middleFields.add(val);
field.set(value, newEntity);
componentClass = newEntity.getClass();
value = newEntity;
LOG.info("Unable to find a reference to ("+field.getType().getName()+") in the EntityConfigurationManager. Using the most extended form of this class identified as ("+entities[0].getName()+")");
} else {
//Just use the field type
Object newEntity = field.getType().newInstance();
field.set(value, newEntity);
componentClass = newEntity.getClass();
value = newEntity;
LOG.info("Unable to find a reference to ("+field.getType().getName()+") in the EntityConfigurationManager. Using the type of this class.");
}
}
}
}
sb.append(".");
j++;
}
return value;
}
public Map<String, Serializable> persistMiddleEntities() throws InstantiationException, IllegalAccessException {
Map<String, Serializable> persistedEntities = new HashMap<String, Serializable>();
Collections.sort(middleFields);
for (SortableValue val : middleFields) {
Serializable s = dynamicEntityDao.merge(val.entity);
persistedEntities.put(val.getContainingPropertyName(), s);
setFieldValue(val.getBean(), val.getContainingPropertyName(), s);
}
return persistedEntities;
}
public EntityConfiguration getEntityConfiguration() {
return entityConfiguration;
}
private class SortableValue implements Comparable<SortableValue> {
private Integer pos;
private Serializable entity;
private Class<?> entityClass;
private String containingPropertyName;
private Object bean;
public SortableValue(Object bean, Serializable entity, Integer pos, String containingPropertyName) {
this.bean = bean;
this.entity = entity;
this.pos = pos;
this.entityClass = entity.getClass();
this.containingPropertyName = containingPropertyName;
}
public int compareTo(SortableValue o) {
return pos.compareTo(o.pos) * -1;
}
public String getContainingPropertyName() {
return containingPropertyName;
}
private Object getBean() {
return bean;
}
@Override
public int hashCode() {
int prime = 31;
int result = 1;
result = prime * result + getOuterType().hashCode();
result = prime * result + (entityClass == null ? 0 : entityClass.hashCode());
result = prime * result + (pos == null ? 0 : pos.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SortableValue other = (SortableValue) obj;
if (!getOuterType().equals(other.getOuterType()))
return false;
if (entityClass == null) {
if (other.entityClass != null)
return false;
} else if (!entityClass.equals(other.entityClass))
return false;
if (pos == null) {
if (other.pos != null)
return false;
} else if (!pos.equals(other.pos))
return false;
return true;
}
private FieldManager getOuterType() {
return FieldManager.this;
}
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_FieldManager.java |
3,600 | public abstract static class Builder<T extends Builder, Y extends NumberFieldMapper> extends AbstractFieldMapper.Builder<T, Y> {
protected int precisionStep = Defaults.PRECISION_STEP;
private Boolean ignoreMalformed;
private Boolean coerce;
public Builder(String name, FieldType fieldType) {
super(name, fieldType);
}
public T precisionStep(int precisionStep) {
this.precisionStep = precisionStep;
return builder;
}
public T ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return builder;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
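// an explicit per-field value wins; otherwise fall back to the index-level "index.mapping.ignore_malformed" setting, then the default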
if (ignoreMalformed != null) {
return new Explicit<Boolean>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<Boolean>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
}
return Defaults.IGNORE_MALFORMED;
}
public T coerce(boolean coerce) {
this.coerce = coerce;
return builder;
}
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<Boolean>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<Boolean>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
}
return Defaults.COERCE;
}
} | 0true
| src_main_java_org_elasticsearch_index_mapper_core_NumberFieldMapper.java |
359 | public class NodesStatsResponse extends NodesOperationResponse<NodeStats> implements ToXContent {
NodesStatsResponse() {
}
public NodesStatsResponse(ClusterName clusterName, NodeStats[] nodes) {
super(clusterName, nodes);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
nodes = new NodeStats[in.readVInt()];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = NodeStats.readNodeStats(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(nodes.length);
for (NodeStats node : nodes) {
node.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("cluster_name", getClusterName().value());
builder.startObject("nodes");
for (NodeStats nodeStats : this) {
builder.startObject(nodeStats.getNode().id(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("timestamp", nodeStats.getTimestamp());
nodeStats.toXContent(builder, params);
builder.endObject();
}
builder.endObject();
return builder;
}
@Override
public String toString() {
try {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_node_stats_NodesStatsResponse.java |
2,956 | public interface TokenFilterFactory {
String name();
TokenStream create(TokenStream tokenStream);
} | 0true
| src_main_java_org_elasticsearch_index_analysis_TokenFilterFactory.java |
206 | Callable<Object> response = new Callable<Object>() {
public Object call() throws Exception {
ORecordVersion result;
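// bind this call's session id to the thread-local while the response is read, then reset it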
try {
OStorageRemoteThreadLocal.INSTANCE.get().sessionId = sessionId;
beginResponse(network);
result = network.readVersion();
} finally {
endResponse(network);
OStorageRemoteThreadLocal.INSTANCE.get().sessionId = -1;
}
iCallback.call(iRid, result);
return null;
}
}; | 0true
| client_src_main_java_com_orientechnologies_orient_client_remote_OStorageRemote.java |
1,352 | public interface ZipCode {
public String getId();
public void setId(String id);
public Integer getZipcode();
public void setZipcode(Integer zipcode);
public String getZipState();
public void setZipState(String zipState);
public String getZipCity();
public void setZipCity(String zipCity);
public double getZipLongitude();
public void setZipLongitude(double zipLongitude);
public double getZipLatitude();
public void setZipLatitude(double zipLatitude);
} | 0true
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_store_domain_ZipCode.java |
1,112 | public class OSQLFunctionAverage extends OSQLFunctionMathAbstract {
public static final String NAME = "avg";
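// usable from OrientDB SQL, e.g.: SELECT avg(salary) FROM Employee (class and field names here are illustrative)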
private Number sum;
private int total = 0;
public OSQLFunctionAverage() {
super(NAME, 1, -1);
}
public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) {
if (iParameters.length == 1) {
if (iParameters[0] instanceof Number)
sum((Number) iParameters[0]);
else if (OMultiValue.isMultiValue(iParameters[0]))
for (Object n : OMultiValue.getMultiValueIterable(iParameters[0]))
sum((Number) n);
} else {
sum = null;
for (int i = 0; i < iParameters.length; ++i)
sum((Number) iParameters[i]);
}
return getResult();
}
protected void sum(Number value) {
if (value != null) {
total++;
if (sum == null)
// FIRST TIME
sum = value;
else
sum = OType.increment(sum, value);
}
}
public String getSyntax() {
return "Syntax error: avg(<field> [,<field>*])";
}
@Override
public Object getResult() {
if (returnDistributedResult()) {
final Map<String, Object> doc = new HashMap<String, Object>();
doc.put("sum", sum);
doc.put("total", total);
return doc;
} else {
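// note: integral sums use truncating integer division; BigDecimal division without a MathContext can throw ArithmeticException for non-terminating quotients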
if (sum instanceof Integer)
return sum.intValue() / total;
else if (sum instanceof Long)
return sum.longValue() / total;
else if (sum instanceof Float)
return sum.floatValue() / total;
else if (sum instanceof Double)
return sum.doubleValue() / total;
else if (sum instanceof BigDecimal)
return ((BigDecimal) sum).divide(new BigDecimal(total));
}
return null;
}
@SuppressWarnings("unchecked")
@Override
public Object mergeDistributedResult(List<Object> resultsToMerge) {
Number sum = null;
int total = 0;
for (Object iParameter : resultsToMerge) {
final Map<String, Object> item = (Map<String, Object>) iParameter;
if (sum == null)
sum = (Number) item.get("sum");
else
sum = OType.increment(sum, (Number) item.get("sum"));
total += (Integer) item.get("total");
}
if (sum instanceof Integer)
return sum.intValue() / total;
else if (sum instanceof Long)
return sum.longValue() / total;
else if (sum instanceof Float)
return sum.floatValue() / total;
else if (sum instanceof Double)
return sum.doubleValue() / total;
else if (sum instanceof BigDecimal)
return ((BigDecimal) sum).divide(new BigDecimal(total));
return null;
}
@Override
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_functions_math_OSQLFunctionAverage.java |
223 | private class PropertyPlaceholderConfigurerResolver implements PropertyPlaceholderHelper.PlaceholderResolver {
private final Properties props;
private PropertyPlaceholderConfigurerResolver(Properties props) {
this.props = props;
}
public String resolvePlaceholder(String placeholderName) {
return RuntimeEnvironmentPropertiesConfigurer.this.resolvePlaceholder(placeholderName, props, 1);
}
} | 0true
| common_src_main_java_org_broadleafcommerce_common_config_RuntimeEnvironmentPropertiesConfigurer.java |
2,856 | return new EdgeNGramTokenizer(version, reader, minGram, maxGram) {
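// delegate token-character classification to the factory's configured matcher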
@Override
protected boolean isTokenChar(int chr) {
return matcher.isTokenChar(chr);
}
}; | 0true
| src_main_java_org_elasticsearch_index_analysis_EdgeNGramTokenizerFactory.java |
564 | @Test
@SuppressWarnings("unchecked")
public class OCompositeIndexDefinitionTest {
private OCompositeIndexDefinition compositeIndex;
@BeforeMethod
public void beforeMethod() {
compositeIndex = new OCompositeIndexDefinition("testClass");
compositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "fOne", OType.INTEGER));
compositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "fTwo", OType.STRING));
}
@Test
public void testGetFields() {
final List<String> fields = compositeIndex.getFields();
Assert.assertEquals(fields.size(), 2);
Assert.assertEquals(fields.get(0), "fOne");
Assert.assertEquals(fields.get(1), "fTwo");
}
@Test
public void testCreateValueSuccessful() {
final Object result = compositeIndex.createValue(Arrays.asList("12", "test"));
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test")));
}
@Test
public void testCreateMapValueSuccessful() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
OPropertyMapIndexDefinition.INDEX_BY.KEY));
final Map<String, String> stringMap = new HashMap<String, String>();
stringMap.put("key1", "val1");
stringMap.put("key2", "val2");
final Object result = compositeIndexDefinition.createValue(12, stringMap);
final Collection<OCompositeKey> collectionResult = (Collection<OCompositeKey>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key1")));
Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key2")));
}
@Test
public void testCreateCollectionValueSuccessfulOne() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
final Object result = compositeIndexDefinition.createValue(12, Arrays.asList(1, 2));
final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
expectedResult.add(new OCompositeKey(12, 1));
expectedResult.add(new OCompositeKey(12, 2));
Assert.assertEquals(result, expectedResult);
}
@Test
public void testCreateCollectionValueSuccessfulTwo() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
final Object result = compositeIndexDefinition.createValue(Arrays.asList(Arrays.asList(1, 2), 12));
final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
expectedResult.add(new OCompositeKey(1, 12));
expectedResult.add(new OCompositeKey(2, 12));
Assert.assertEquals(result, expectedResult);
}
@Test
public void testCreateCollectionValueSuccessfulThree() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.STRING));
final Object result = compositeIndexDefinition.createValue(12, Arrays.asList(1, 2), "test");
final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
expectedResult.add(new OCompositeKey(12, 1, "test"));
expectedResult.add(new OCompositeKey(12, 2, "test"));
Assert.assertEquals(result, expectedResult);
}
@Test(expectedExceptions = OIndexException.class)
public void testCreateCollectionValueTwoCollections() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.createValue(Arrays.asList(1, 2), Arrays.asList(12));
}
@Test(expectedExceptions = NumberFormatException.class)
public void testCreateValueWrongParam() {
compositeIndex.createValue(Arrays.asList("1t2", "test"));
}
@Test
public void testCreateValueSuccessfulArrayParams() {
final Object result = compositeIndex.createValue("12", "test");
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test")));
}
@Test(expectedExceptions = NumberFormatException.class)
public void testCreateValueWrongParamArrayParams() {
compositeIndex.createValue("1t2", "test");
}
@Test
public void testCreateValueDefinitionsMoreThanParams() {
compositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "fThree", OType.STRING));
final Object result = compositeIndex.createValue("12", "test");
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test")));
}
@Test
public void testCreateValueIndexItemWithTwoParams() {
final OCompositeIndexDefinition anotherCompositeIndex = new OCompositeIndexDefinition("testClass");
anotherCompositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "f11", OType.STRING));
anotherCompositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "f22", OType.STRING));
compositeIndex.addIndex(anotherCompositeIndex);
final Object result = compositeIndex.createValue("12", "test", "tset");
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test", "tset")));
}
@Test
public void testDocumentToIndexSuccessful() {
final ODocument document = new ODocument();
document.field("fOne", 12);
document.field("fTwo", "test");
final Object result = compositeIndex.getDocumentValueToIndex(document);
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test")));
}
@Test
public void testDocumentToIndexMapValueSuccessful() {
final ODocument document = new ODocument();
final Map<String, String> stringMap = new HashMap<String, String>();
stringMap.put("key1", "val1");
stringMap.put("key2", "val2");
document.field("fOne", 12);
document.field("fTwo", stringMap);
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
OPropertyMapIndexDefinition.INDEX_BY.KEY));
final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
final Collection<OCompositeKey> collectionResult = (Collection<OCompositeKey>) result;
Assert.assertEquals(collectionResult.size(), 2);
Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key1")));
Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key2")));
}
@Test
public void testDocumentToIndexCollectionValueSuccessfulOne() {
final ODocument document = new ODocument();
document.field("fOne", 12);
document.field("fTwo", Arrays.asList(1, 2));
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
expectedResult.add(new OCompositeKey(12, 1));
expectedResult.add(new OCompositeKey(12, 2));
Assert.assertEquals(result, expectedResult);
}
@Test
public void testDocumentToIndexCollectionValueSuccessfulTwo() {
final ODocument document = new ODocument();
document.field("fOne", 12);
document.field("fTwo", Arrays.asList(1, 2));
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
expectedResult.add(new OCompositeKey(1, 12));
expectedResult.add(new OCompositeKey(2, 12));
Assert.assertEquals(result, expectedResult);
}
@Test
public void testDocumentToIndexCollectionValueSuccessfulThree() {
final ODocument document = new ODocument();
document.field("fOne", 12);
document.field("fTwo", Arrays.asList(1, 2));
document.field("fThree", "test");
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.STRING));
final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
expectedResult.add(new OCompositeKey(12, 1, "test"));
expectedResult.add(new OCompositeKey(12, 2, "test"));
Assert.assertEquals(result, expectedResult);
}
@Test(expectedExceptions = OException.class)
public void testDocumentToIndexCollectionValueTwoCollections() {
final ODocument document = new ODocument();
document.field("fOne", Arrays.asList(12));
document.field("fTwo", Arrays.asList(1, 2));
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
compositeIndexDefinition.getDocumentValueToIndex(document);
}
@Test(expectedExceptions = NumberFormatException.class)
public void testDocumentToIndexWrongField() {
final ODocument document = new ODocument();
document.field("fOne", "1t2");
document.field("fTwo", "test");
compositeIndex.getDocumentValueToIndex(document);
}
@Test
public void testGetParamCount() {
final int result = compositeIndex.getParamCount();
Assert.assertEquals(result, 2);
}
@Test
public void testGetTypes() {
final OType[] result = compositeIndex.getTypes();
Assert.assertEquals(result.length, 2);
Assert.assertEquals(result[0], OType.INTEGER);
Assert.assertEquals(result[1], OType.STRING);
}
@Test
public void testEmptyIndexReload() {
final ODatabaseDocumentTx database = new ODatabaseDocumentTx("memory:compositetestone");
database.create();
final OCompositeIndexDefinition emptyCompositeIndex = new OCompositeIndexDefinition("testClass");
emptyCompositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "fOne", OType.INTEGER));
emptyCompositeIndex.addIndex(new OPropertyIndexDefinition("testClass", "fTwo", OType.STRING));
final ODocument docToStore = emptyCompositeIndex.toStream();
database.save(docToStore);
final ODocument docToLoad = database.load(docToStore.getIdentity());
final OCompositeIndexDefinition result = new OCompositeIndexDefinition();
result.fromStream(docToLoad);
database.drop();
Assert.assertEquals(result, emptyCompositeIndex);
}
@Test
public void testIndexReload() {
final ODocument docToStore = compositeIndex.toStream();
final OCompositeIndexDefinition result = new OCompositeIndexDefinition();
result.fromStream(docToStore);
Assert.assertEquals(result, compositeIndex);
}
@Test
public void testClassOnlyConstructor() {
final ODatabaseDocumentTx database = new ODatabaseDocumentTx("memory:compositetesttwo");
database.create();
final OCompositeIndexDefinition emptyCompositeIndex = new OCompositeIndexDefinition("testClass", Arrays.asList(
new OPropertyIndexDefinition("testClass", "fOne", OType.INTEGER), new OPropertyIndexDefinition("testClass", "fTwo",
OType.STRING)));
final OCompositeIndexDefinition emptyCompositeIndexTwo = new OCompositeIndexDefinition("testClass");
emptyCompositeIndexTwo.addIndex(new OPropertyIndexDefinition("testClass", "fOne", OType.INTEGER));
emptyCompositeIndexTwo.addIndex(new OPropertyIndexDefinition("testClass", "fTwo", OType.STRING));
Assert.assertEquals(emptyCompositeIndex, emptyCompositeIndexTwo);
final ODocument docToStore = emptyCompositeIndex.toStream();
database.save(docToStore);
final ODocument docToLoad = database.load(docToStore.getIdentity());
final OCompositeIndexDefinition result = new OCompositeIndexDefinition();
result.fromStream(docToLoad);
database.drop();
Assert.assertEquals(result, emptyCompositeIndexTwo);
}
public void testProcessChangeListEventsOne() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
final ODocument doc = new ODocument();
doc.unsetDirty();
Assert.assertFalse(doc.isDirty());
final OTrackedList<String> trackedList = new OTrackedList<String>(doc);
final List<OMultiValueChangeEvent<Integer, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Integer, String>>();
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
firedEvents.add(event);
}
});
trackedList.add("l1");
trackedList.add("l2");
trackedList.add("l3");
trackedList.remove("l2");
Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
for (OMultiValueChangeEvent<Integer, String> multiValueChangeEvent : firedEvents)
compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
Assert.assertEquals(keysToRemove.size(), 0);
Assert.assertEquals(keysToAdd.size(), 2);
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l1", 3)));
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l3", 3)));
}
public void testProcessChangeListEventsTwo() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
final ODocument doc = new ODocument();
doc.unsetDirty();
Assert.assertFalse(doc.isDirty());
final OTrackedList<String> trackedList = new OTrackedList<String>(doc);
final List<OMultiValueChangeEvent<Integer, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Integer, String>>();
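// the initial mutations below happen before the listener is registered and are intentionally not captured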
trackedList.add("l1");
trackedList.add("l2");
trackedList.add("l3");
trackedList.remove("l2");
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
firedEvents.add(event);
}
});
trackedList.add("l4");
trackedList.remove("l1");
Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
for (OMultiValueChangeEvent<Integer, String> multiValueChangeEvent : firedEvents)
compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
Assert.assertEquals(keysToRemove.size(), 1);
Assert.assertEquals(keysToAdd.size(), 1);
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l4", 3)));
Assert.assertTrue(keysToRemove.containsKey(new OCompositeKey(2, "l1", 3)));
}
public void testProcessChangeSetEventsOne() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
final ODocument doc = new ODocument();
doc.unsetDirty();
Assert.assertFalse(doc.isDirty());
final OTrackedSet<String> trackedSet = new OTrackedSet<String>(doc);
final List<OMultiValueChangeEvent<String, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<String, String>>();
trackedSet.addChangeListener(new OMultiValueChangeListener<String, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<String, String> event) {
firedEvents.add(event);
}
});
trackedSet.add("l1");
trackedSet.add("l2");
trackedSet.add("l3");
trackedSet.remove("l2");
Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
for (OMultiValueChangeEvent<String, String> multiValueChangeEvent : firedEvents)
compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
Assert.assertEquals(keysToRemove.size(), 0);
Assert.assertEquals(keysToAdd.size(), 2);
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l1", 3)));
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l3", 3)));
}
public void testProcessChangeSetEventsTwo() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
final ODocument doc = new ODocument();
doc.unsetDirty();
Assert.assertFalse(doc.isDirty());
final OTrackedSet<String> trackedSet = new OTrackedSet<String>(doc);
final List<OMultiValueChangeEvent<String, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<String, String>>();
trackedSet.add("l1");
trackedSet.add("l2");
trackedSet.add("l3");
trackedSet.remove("l2");
trackedSet.addChangeListener(new OMultiValueChangeListener<String, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<String, String> event) {
firedEvents.add(event);
}
});
trackedSet.add("l4");
trackedSet.remove("l1");
Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
for (OMultiValueChangeEvent<String, String> multiValueChangeEvent : firedEvents)
compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
Assert.assertEquals(keysToRemove.size(), 1);
Assert.assertEquals(keysToAdd.size(), 1);
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l4", 3)));
Assert.assertTrue(keysToRemove.containsKey(new OCompositeKey(2, "l1", 3)));
}
public void testProcessChangeKeyMapEventsOne() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
OPropertyMapIndexDefinition.INDEX_BY.KEY));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
final ODocument doc = new ODocument();
doc.unsetDirty();
Assert.assertFalse(doc.isDirty());
final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc);
final List<OMultiValueChangeEvent<Object, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Object, String>>();
trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
firedEvents.add(event);
}
});
trackedMap.put("k1", "v1");
trackedMap.put("k2", "v2");
trackedMap.put("k3", "v3");
trackedMap.remove("k2");
Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
for (OMultiValueChangeEvent<Object, String> multiValueChangeEvent : firedEvents)
compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
Assert.assertEquals(keysToRemove.size(), 0);
Assert.assertEquals(keysToAdd.size(), 2);
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "k1", 3)));
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "k3", 3)));
}
public void testProcessChangeKeyMapEventsTwo() {
final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
OPropertyMapIndexDefinition.INDEX_BY.KEY));
compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
final ODocument doc = new ODocument();
doc.unsetDirty();
Assert.assertFalse(doc.isDirty());
final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc);
trackedMap.put("k1", "v1");
trackedMap.put("k2", "v2");
trackedMap.put("k3", "v3");
trackedMap.remove("k2");
final List<OMultiValueChangeEvent<Object, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Object, String>>();
trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
firedEvents.add(event);
}
});
trackedMap.put("k4", "v4");
trackedMap.remove("k1");
Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
for (OMultiValueChangeEvent<Object, String> multiValueChangeEvent : firedEvents)
compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
Assert.assertEquals(keysToRemove.size(), 1);
Assert.assertEquals(keysToAdd.size(), 1);
Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "k4", 3)));
Assert.assertTrue(keysToRemove.containsKey(new OCompositeKey(2, "k1", 3)));
}
@Test
public void testClassName() {
Assert.assertEquals("testClass", compositeIndex.getClassName());
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_index_OCompositeIndexDefinitionTest.java |
190 | public class RequestDTOImpl implements RequestDTO, Serializable {
private static final long serialVersionUID = 1L;
@AdminPresentation(friendlyName = "RequestDTOImpl_Request_URI")
private String requestURI;
@AdminPresentation(friendlyName = "RequestDTOImpl_Full_Url")
private String fullUrlWithQueryString;
@AdminPresentation(friendlyName = "RequestDTOImpl_Is_Secure")
private Boolean secure;
public RequestDTOImpl(HttpServletRequest request) {
requestURI = request.getRequestURI();
fullUrlWithQueryString = request.getRequestURL().toString();
secure = ("HTTPS".equalsIgnoreCase(request.getScheme()) || request.isSecure());
}
public RequestDTOImpl(WebRequest request) {
// Page level targeting does not work for WebRequest.
secure = request.isSecure();
}
/**
* @return the request URI, not including the protocol, domain, or query string
*/
public String getRequestURI() {
return requestURI;
}
/**
* @return Returns the URL and parameters.
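* Note: the "UrL" capitalization in the method name below is a long-standing misspelling; {@link #getFullUrlWithQueryString()} returns the same value.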
*/
public String getFullUrLWithQueryString() {
return fullUrlWithQueryString;
}
/**
* @return true if this request came in through HTTPS
*/
public Boolean isSecure() {
return secure;
}
public String getFullUrlWithQueryString() {
return fullUrlWithQueryString;
}
public void setFullUrlWithQueryString(String fullUrlWithQueryString) {
this.fullUrlWithQueryString = fullUrlWithQueryString;
}
public Boolean getSecure() {
return secure;
}
public void setSecure(Boolean secure) {
this.secure = secure;
}
public void setRequestURI(String requestURI) {
this.requestURI = requestURI;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_RequestDTOImpl.java |
3,084 | static class Flush {
public static enum Type {
/**
* A flush that causes a new writer to be created.
*/
NEW_WRITER,
/**
* A flush that just commits the writer, without cleaning the translog.
*/
COMMIT,
/**
* A flush that does a commit, as well as clears the translog.
*/
COMMIT_TRANSLOG
}
private Type type = Type.COMMIT_TRANSLOG;
private boolean force = false;
/**
* Should the flush operation wait if there is an ongoing flush operation.
*/
private boolean waitIfOngoing = false;
public Type type() {
return this.type;
}
/**
* Sets the type of flush to execute.
*/
public Flush type(Type type) {
this.type = type;
return this;
}
public boolean force() {
return this.force;
}
public Flush force(boolean force) {
this.force = force;
return this;
}
public boolean waitIfOngoing() {
return this.waitIfOngoing;
}
public Flush waitIfOngoing(boolean waitIfOngoing) {
this.waitIfOngoing = waitIfOngoing;
return this;
}
@Override
public String toString() {
return "type[" + type + "], force[" + force + "]";
}
} | 0true
| src_main_java_org_elasticsearch_index_engine_Engine.java |
650 | public class WorkflowTest extends BaseTest {
static {
getModuleContexts().add("bl-applicationContext-test-module.xml");
}
protected List<Activity<ProcessContext>> activities;
@BeforeTest
public void setup() {
activities = ((SequenceProcessor)getContext().getBean("blCheckoutWorkflow")).getActivities();
}
@Test
public void testMergedOrderedActivities() {
Assert.assertEquals(activities.get(0).getClass(), TestPassThroughActivity.class);
Assert.assertEquals(activities.get(0).getOrder(), 100);
Assert.assertEquals(activities.get(4).getClass(), TestPassThroughActivity.class);
Assert.assertEquals(activities.get(4).getOrder(), 3000);
}
@Test
public void testFrameworkOrderingChanged() {
TotalActivity totalActivity = (TotalActivity)getContext().getBean("blTotalActivity");
Assert.assertEquals(totalActivity.getOrder(), 8080);
}
@Test
public void testDetectedModuleActivity() {
List<ModuleActivity> moduleActivities = ((SequenceProcessor)getContext().getBean("blCheckoutWorkflow")).getModuleActivities();
Assert.assertEquals(moduleActivities.size(), 1);
Assert.assertEquals(moduleActivities.get(0).getModuleName(), "integration");
}
@Test
public void testNonExplicitOrdering() {
Assert.assertEquals(activities.get(activities.size() - 1).getClass(), TestExampleModuleActivity.class);
Assert.assertEquals(activities.get(activities.size() - 1).getOrder(), Ordered.LOWEST_PRECEDENCE);
}
/**
* Tests that a merged activity can have the same order as a framework activity and come after it
*/
@Test
public void testSameOrderingConfiguredActivity() {
Assert.assertEquals(activities.get(7).getClass(), TestRollbackActivity.class);
}
@Test
public void testInBetweenActivity() {
Assert.assertEquals(activities.get(4).getClass(), TestPassThroughActivity.class);
}
} | 0true
| integration_src_test_java_org_broadleafcommerce_common_workflow_WorkflowTest.java |
350 | @RunWith(HazelcastSerialClassRunner.class)
@Category(NightlyTest.class)
public class MapMemoryUsageStressTest extends HazelcastTestSupport {
private HazelcastInstance client;
@Before
public void launchHazelcastServer() {
Hazelcast.newHazelcastInstance();
ClientConfig config = new ClientConfig();
config.setGroupConfig(new GroupConfig("dev", "dev-pass"));
config.getNetworkConfig().addAddress("127.0.0.1");
client = HazelcastClient.newHazelcastClient(config);
}
@After
public void shutdownHazelcastServer() {
Hazelcast.shutdownAll();
}
@Test
public void voidCacher() throws Exception {
final AtomicInteger counter = new AtomicInteger(200000);
final AtomicInteger errors = new AtomicInteger();
Thread[] threads = new Thread[8];
for (int k = 0; k < threads.length; k++) {
StressThread stressThread = new StressThread(counter, errors);
threads[k] = stressThread;
stressThread.start();
}
assertJoinable(TimeUnit.MINUTES.toSeconds(10), threads);
assertEquals(0, errors.get());
assertTrue(counter.get() <= 0);
}
private class StressThread extends Thread {
private final AtomicInteger counter;
private final AtomicInteger errors;
public StressThread(AtomicInteger counter, AtomicInteger errors) {
this.counter = counter;
this.errors = errors;
}
public void run() {
try {
for(;;){
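// create, populate, clear, and destroy a distinct map per counter value until the shared counter is exhausted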
int index = counter.decrementAndGet();
if(index<=0){
return;
}
IMap<Object, Object> map = client.getMap("juka" + index);
map.set("aaaa", "bbbb");
map.clear();
map.destroy();
if(index % 1000 == 0){
System.out.println("At: "+index);
}
}
} catch (Throwable t) {
errors.incrementAndGet();
t.printStackTrace();
}
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_MapMemoryUsageStressTest.java |
66 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_FLD_GROUP")
@Cache(usage= CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blCMSElements")
public class FieldGroupImpl implements FieldGroup {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "FieldGroupId")
@GenericGenerator(
name="FieldGroupId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="FieldGroupImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.cms.field.domain.FieldGroupImpl")
}
)
@Column(name = "FLD_GROUP_ID")
protected Long id;
@Column (name = "NAME")
protected String name;
@Column (name = "INIT_COLLAPSED_FLAG")
protected Boolean initCollapsedFlag = false;
@OneToMany(mappedBy = "fieldGroup", targetEntity = FieldDefinitionImpl.class, cascade = {CascadeType.ALL})
@Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blCMSElements")
@OrderBy("fieldOrder")
@BatchSize(size = 20)
protected List<FieldDefinition> fieldDefinitions;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public Boolean getInitCollapsedFlag() {
return initCollapsedFlag;
}
@Override
public void setInitCollapsedFlag(Boolean initCollapsedFlag) {
this.initCollapsedFlag = initCollapsedFlag;
}
@Override
public List<FieldDefinition> getFieldDefinitions() {
return fieldDefinitions;
}
@Override
public void setFieldDefinitions(List<FieldDefinition> fieldDefinitions) {
this.fieldDefinitions = fieldDefinitions;
}
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_field_domain_FieldGroupImpl.java |
1,501 | NodeSettingsService service = new NodeSettingsService(settingsBuilder().build()) {
@Override
public void addListener(Listener listener) {
assertNull("addListener was called twice while only one time was expected", listeners[0]);
listeners[0] = listener;
}
}; | 0true
| src_test_java_org_elasticsearch_cluster_routing_allocation_BalanceConfigurationTests.java |
320 | {
@Override
public void returnXAResource( String rmName, XAResource rmXares )
{
}
}; | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_JOTMTransactionManager.java |
2,765 | @ChannelHandler.Sharable
public class HttpRequestHandler extends SimpleChannelUpstreamHandler {
private final NettyHttpServerTransport serverTransport;
public HttpRequestHandler(NettyHttpServerTransport serverTransport) {
this.serverTransport = serverTransport;
}
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
HttpRequest request = (HttpRequest) e.getMessage();
// the netty HTTP handling always copies over the buffer to its own buffer, either in NioWorker internally
// when reading, or using a cumulation buffer
serverTransport.dispatchRequest(new NettyHttpRequest(request, e.getChannel()), new NettyHttpChannel(serverTransport, e.getChannel(), request));
super.messageReceived(ctx, e);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
serverTransport.exceptionCaught(ctx, e);
}
} | 1no label
| src_main_java_org_elasticsearch_http_netty_HttpRequestHandler.java |
78 | @SuppressWarnings("serial")
static final class MapReduceMappingsToDoubleTask<K,V>
extends BulkTask<K,V,Double> {
final ObjectByObjectToDouble<? super K, ? super V> transformer;
final DoubleByDoubleToDouble reducer;
final double basis;
double result;
MapReduceMappingsToDoubleTask<K,V> rights, nextRight;
MapReduceMappingsToDoubleTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceMappingsToDoubleTask<K,V> nextRight,
ObjectByObjectToDouble<? super K, ? super V> transformer,
double basis,
DoubleByDoubleToDouble reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Double getRawResult() { return result; }
public final void compute() {
final ObjectByObjectToDouble<? super K, ? super V> transformer;
final DoubleByDoubleToDouble reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
double r = this.basis;
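// split the remaining index range in half while the batch allows, forking the right half as a new subtask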
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceMappingsToDoubleTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p.key, p.val));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceMappingsToDoubleTask<K,V>
t = (MapReduceMappingsToDoubleTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
} | 0true
| src_main_java_jsr166e_ConcurrentHashMapV8.java |
1,868 | injector.callInContext(new ContextualCallable<Void>() {
public Void call(InternalContext context) throws ErrorsException {
injectMembers(instance, errors, context);
return null;
}
}); | 0true
| src_main_java_org_elasticsearch_common_inject_MembersInjectorImpl.java |
1,987 | assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
assertEquals("value" + (size1 - 1), testMapStore.getStore().get("key"));
}
}); | 0true
| hazelcast_src_test_java_com_hazelcast_map_mapstore_MapStoreTest.java |
2,040 | public class ClearBackupOperation extends AbstractNamedOperation implements BackupOperation, DataSerializable {
MapService mapService;
RecordStore recordStore;
public ClearBackupOperation() {
}
public ClearBackupOperation(String name) {
super(name);
}
@Override
public void beforeRun() throws Exception {
mapService = getService();
recordStore = mapService.getRecordStore(getPartitionId(), name);
}
public void run() {
recordStore.clear();
}
@Override
public String toString() {
return "ClearBackupOperation{" +
'}';
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_map_operation_ClearBackupOperation.java |
1,761 | @Component("blPersistenceManager")
@Scope("prototype")
public class PersistenceManagerImpl implements InspectHelper, PersistenceManager, ApplicationContextAware {
private static final Log LOG = LogFactory.getLog(PersistenceManagerImpl.class);
@Resource(name="blDynamicEntityDao")
protected DynamicEntityDao dynamicEntityDao;
@Resource(name="blCustomPersistenceHandlers")
protected List<CustomPersistenceHandler> customPersistenceHandlers = new ArrayList<CustomPersistenceHandler>();
@Resource(name="blCustomPersistenceHandlerFilters")
protected List<CustomPersistenceHandlerFilter> customPersistenceHandlerFilters = new ArrayList<CustomPersistenceHandlerFilter>();
@Resource(name="blTargetEntityManagers")
protected Map<String, String> targetEntityManagers = new HashMap<String, String>();
@Resource(name="blAdminSecurityRemoteService")
protected SecurityVerifier adminRemoteSecurityService;
@Resource(name="blPersistenceModules")
protected PersistenceModule[] modules;
protected TargetModeType targetMode;
protected ApplicationContext applicationContext;
@PostConstruct
public void postConstruct() {
for (PersistenceModule module : modules) {
module.setPersistenceManager(this);
}
}
// public void close() throws Exception {
// //do nothing
// }
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
@Override
public Class<?>[] getAllPolymorphicEntitiesFromCeiling(Class<?> ceilingClass) {
return dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(ceilingClass);
}
@Override
public Class<?>[] getUpDownInheritance(String testClassname) throws ClassNotFoundException {
return getUpDownInheritance(Class.forName(testClassname));
}
@Override
public Class<?>[] getUpDownInheritance(Class<?> testClass) {
Class<?>[] pEntities = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(testClass);
Class<?> topConcreteClass = pEntities[pEntities.length - 1];
List<Class<?>> temp = new ArrayList<Class<?>>(pEntities.length);
temp.addAll(Arrays.asList(pEntities));
Collections.reverse(temp);
boolean eof = false;
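// climb the superclass chain until a class unknown to Hibernate (no PersistentClass) is reached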
while (!eof) {
Class<?> superClass = topConcreteClass.getSuperclass();
PersistentClass persistentClass = dynamicEntityDao.getPersistentClass(superClass.getName());
if (persistentClass == null) {
eof = true;
} else {
temp.add(0, superClass);
topConcreteClass = superClass;
}
}
return temp.toArray(new Class<?>[temp.size()]);
}
@Override
public Class<?>[] getPolymorphicEntities(String ceilingEntityFullyQualifiedClassname) throws ClassNotFoundException {
Class<?>[] entities = getAllPolymorphicEntitiesFromCeiling(Class.forName(ceilingEntityFullyQualifiedClassname));
return entities;
}
@Override
public Map<String, FieldMetadata> getSimpleMergedProperties(String entityName, PersistencePerspective persistencePerspective) {
return dynamicEntityDao.getSimpleMergedProperties(entityName, persistencePerspective);
}
@Override
public ClassMetadata getMergedClassMetadata(final Class<?>[] entities, Map<MergedPropertyType, Map<String, FieldMetadata>> mergedProperties) {
ClassMetadata classMetadata = new ClassMetadata();
classMetadata.setPolymorphicEntities(dynamicEntityDao.getClassTree(entities));
List<Property> propertiesList = new ArrayList<Property>();
for (PersistenceModule module : modules) {
module.extractProperties(entities, mergedProperties, propertiesList);
}
/*
* Insert inherited fields whose order has been specified
*/
for (int i = 0; i < entities.length - 1; i++) {
for (Property myProperty : propertiesList) {
if (myProperty.getMetadata().getInheritedFromType().equals(entities[i].getName()) && myProperty.getMetadata().getOrder() != null) {
for (Property property : propertiesList) {
if (!property.getMetadata().getInheritedFromType().equals(entities[i].getName()) && property.getMetadata().getOrder() != null && property.getMetadata().getOrder() >= myProperty.getMetadata().getOrder()) {
property.getMetadata().setOrder(property.getMetadata().getOrder() + 1);
}
}
}
}
}
Property[] properties = new Property[propertiesList.size()];
properties = propertiesList.toArray(properties);
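// sort by tab order, group order, field order, friendly name, then name; missing values sort last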
Arrays.sort(properties, new Comparator<Property>() {
@Override
public int compare(Property o1, Property o2) {
Integer tabOrder1 = o1.getMetadata().getTabOrder() == null ? 99999 : o1.getMetadata().getTabOrder();
Integer tabOrder2 = o2.getMetadata().getTabOrder() == null ? 99999 : o2.getMetadata().getTabOrder();
Integer groupOrder1 = null;
Integer groupOrder2 = null;
if (o1.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata b1 = (BasicFieldMetadata) o1.getMetadata();
groupOrder1 = b1.getGroupOrder();
}
groupOrder1 = groupOrder1 == null ? 99999 : groupOrder1;
if (o2.getMetadata() instanceof BasicFieldMetadata) {
BasicFieldMetadata b2 = (BasicFieldMetadata) o2.getMetadata();
groupOrder2 = b2.getGroupOrder();
}
groupOrder2 = groupOrder2 == null ? 99999 : groupOrder2;
Integer fieldOrder1 = o1.getMetadata().getOrder() == null ? 99999 : o1.getMetadata().getOrder();
Integer fieldOrder2 = o2.getMetadata().getOrder() == null ? 99999 : o2.getMetadata().getOrder();
String friendlyName1 = o1.getMetadata().getFriendlyName() == null ? "zzzz" : o1.getMetadata().getFriendlyName();
String friendlyName2 = o2.getMetadata().getFriendlyName() == null ? "zzzz" : o2.getMetadata().getFriendlyName();
String name1 = o1.getName() == null ? "zzzzz" : o1.getName();
String name2 = o2.getName() == null ? "zzzzz" : o2.getName();
return new CompareToBuilder()
.append(tabOrder1, tabOrder2)
.append(groupOrder1, groupOrder2)
.append(fieldOrder1, fieldOrder2)
.append(friendlyName1, friendlyName2)
.append(name1, name2)
.toComparison();
}
});
classMetadata.setProperties(properties);
classMetadata.setCurrencyCode(Money.defaultCurrency().getCurrencyCode());
return classMetadata;
}
@Override
public DynamicResultSet inspect(PersistencePackage persistencePackage) throws ServiceException, ClassNotFoundException {
// check to see if there is a custom handler registered
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleInspect(persistencePackage)) {
if (!handler.willHandleSecurity(persistencePackage)) {
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.INSPECT);
}
DynamicResultSet results = handler.inspect(persistencePackage, dynamicEntityDao, this);
return results;
}
}
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.INSPECT);
Class<?>[] entities = getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname());
Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties = new HashMap<MergedPropertyType, Map<String, FieldMetadata>>();
for (PersistenceModule module : modules) {
module.updateMergedProperties(persistencePackage, allMergedProperties);
}
ClassMetadata mergedMetadata = getMergedClassMetadata(entities, allMergedProperties);
DynamicResultSet results = new DynamicResultSet(mergedMetadata);
return results;
}
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
//check to see if there is a custom handler registered
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleFetch(persistencePackage)) {
if (!handler.willHandleSecurity(persistencePackage)) {
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.FETCH);
}
DynamicResultSet results = handler.fetch(persistencePackage, cto, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
return postFetch(results, persistencePackage, cto);
}
}
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.FETCH);
PersistenceModule myModule = getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getFetchType());
try {
return postFetch(myModule.fetch(persistencePackage, cto), persistencePackage, cto);
} catch (ServiceException e) {
if (e.getCause() instanceof NoPossibleResultsException) {
DynamicResultSet drs = new DynamicResultSet(null, new Entity[] {}, 0);
return postFetch(drs, persistencePackage, cto);
}
throw e;
}
}
protected DynamicResultSet postFetch(DynamicResultSet resultSet, PersistencePackage persistencePackage,
CriteriaTransferObject cto)
throws ServiceException {
// Expose the start index so that we can utilize when building the UI
resultSet.setStartIndex(cto.getFirstResult());
resultSet.setPageSize(cto.getMaxResults());
return resultSet;
}
@Override
public Entity add(PersistencePackage persistencePackage) throws ServiceException {
//check to see if there is a custom handler registered
//execute the root PersistencePackage
Entity response;
checkRoot: {
//if there is a validation exception in the root check, let it bubble, as we need a valid, persisted
//entity to execute the subPackage code later
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleAdd(persistencePackage)) {
if (!handler.willHandleSecurity(persistencePackage)) {
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
}
response = handler.add(persistencePackage, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
break checkRoot;
}
}
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
PersistenceModule myModule = getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getAddType());
response = myModule.add(persistencePackage);
}
if (!MapUtils.isEmpty(persistencePackage.getSubPackages())) {
// Once the entity has been saved, we can utilize its id for the subsequent dynamic forms
Class<?> entityClass;
try {
entityClass = Class.forName(response.getType()[0]);
} catch (ClassNotFoundException e) {
throw new ServiceException(e);
}
Map<String, Object> idMetadata = getDynamicEntityDao().getIdMetadata(entityClass);
String idProperty = (String) idMetadata.get("name");
String idVal = response.findProperty(idProperty).getValue();
Map<String, List<String>> subPackageValidationErrors = new HashMap<String, List<String>>();
for (Map.Entry<String,PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
Entity subResponse;
try {
subPackage.getValue().setCustomCriteria(new String[]{subPackage.getValue().getCustomCriteria()[0], idVal});
//Run through any subPackages -- add up any validation errors
checkHandler: {
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleAdd(subPackage.getValue())) {
if (!handler.willHandleSecurity(subPackage.getValue())) {
adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
}
subResponse = handler.add(subPackage.getValue(), dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
subPackage.getValue().setEntity(subResponse);
break checkHandler;
}
}
adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
PersistenceModule subModule = getCompatibleModule(subPackage.getValue().getPersistencePerspective().getOperationTypes().getAddType());
subResponse = subModule.add(persistencePackage);
subPackage.getValue().setEntity(subResponse);
}
} catch (ValidationException e) {
subPackage.getValue().setEntity(e.getEntity());
}
}
//Build up validation errors in all of the subpackages, even those that might not have thrown ValidationExceptions
for (Map.Entry<String, PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
for (Map.Entry<String, List<String>> error : subPackage.getValue().getEntity().getValidationErrors().entrySet()) {
subPackageValidationErrors.put(subPackage.getKey() + DynamicEntityFormInfo.FIELD_SEPARATOR + error.getKey(), error.getValue());
}
}
response.getValidationErrors().putAll(subPackageValidationErrors);
}
if (response.isValidationFailure()) {
throw new ValidationException(response, "The entity has failed validation");
}
return postAdd(response, persistencePackage);
}
protected Entity postAdd(Entity entity, PersistencePackage persistencePackage) throws ServiceException {
//do nothing
return entity;
}
@Override
public Entity update(PersistencePackage persistencePackage) throws ServiceException {
//check to see if there is a custom handler registered
//execute the root PersistencePackage
Entity response;
try {
checkRoot: {
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleUpdate(persistencePackage)) {
if (!handler.willHandleSecurity(persistencePackage)) {
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
}
response = handler.update(persistencePackage, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
break checkRoot;
}
}
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
PersistenceModule myModule = getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getUpdateType());
response = myModule.update(persistencePackage);
}
} catch (ValidationException e) {
response = e.getEntity();
}
Map<String, List<String>> subPackageValidationErrors = new HashMap<String, List<String>>();
for (Map.Entry<String,PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
try {
//Run through any subPackages -- add up any validation errors
checkHandler: {
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleUpdate(subPackage.getValue())) {
if (!handler.willHandleSecurity(subPackage.getValue())) {
adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
}
Entity subResponse = handler.update(subPackage.getValue(), dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
subPackage.getValue().setEntity(subResponse);
break checkHandler;
}
}
adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
PersistenceModule subModule = getCompatibleModule(subPackage.getValue().getPersistencePerspective().getOperationTypes().getUpdateType());
Entity subResponse = subModule.update(persistencePackage);
subPackage.getValue().setEntity(subResponse);
}
} catch (ValidationException e) {
subPackage.getValue().setEntity(e.getEntity());
}
}
//Build up validation errors in all of the subpackages, even those that might not have thrown ValidationExceptions
for (Map.Entry<String, PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
for (Map.Entry<String, List<String>> error : subPackage.getValue().getEntity().getValidationErrors().entrySet()) {
subPackageValidationErrors.put(subPackage.getKey() + DynamicEntityFormInfo.FIELD_SEPARATOR + error.getKey(), error.getValue());
}
}
response.getValidationErrors().putAll(subPackageValidationErrors);
if (response.isValidationFailure()) {
throw new ValidationException(response, "The entity has failed validation");
}
return postUpdate(response, persistencePackage);
}
protected Entity postUpdate(Entity entity, PersistencePackage persistencePackage) throws ServiceException {
//do nothing
return entity;
}
@Override
public void remove(PersistencePackage persistencePackage) throws ServiceException {
//check to see if there is a custom handler registered
for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
if (handler.canHandleRemove(persistencePackage)) {
if (!handler.willHandleSecurity(persistencePackage)) {
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.REMOVE);
}
handler.remove(persistencePackage, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
return;
}
}
adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.REMOVE);
PersistenceModule myModule = getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getRemoveType());
myModule.remove(persistencePackage);
}
@Override
public PersistenceModule getCompatibleModule(OperationType operationType) {
PersistenceModule myModule = null;
for (PersistenceModule module : modules) {
if (module.isCompatible(operationType)) {
myModule = module;
break;
}
}
if (myModule == null) {
LOG.error("Unable to find a compatible remote service module for the operation type: " + operationType);
throw new RuntimeException("Unable to find a compatible remote service module for the operation type: " + operationType);
}
return myModule;
}
@Override
public DynamicEntityDao getDynamicEntityDao() {
return dynamicEntityDao;
}
@Override
public void setDynamicEntityDao(DynamicEntityDao dynamicEntityDao) {
this.dynamicEntityDao = dynamicEntityDao;
}
@Override
public Map<String, String> getTargetEntityManagers() {
return targetEntityManagers;
}
@Override
public void setTargetEntityManagers(Map<String, String> targetEntityManagers) {
this.targetEntityManagers = targetEntityManagers;
}
@Override
public TargetModeType getTargetMode() {
return targetMode;
}
@Override
public void setTargetMode(TargetModeType targetMode) {
String targetManagerRef = targetEntityManagers.get(targetMode.getType());
EntityManager targetManager = (EntityManager) applicationContext.getBean(targetManagerRef);
if (targetManager == null) {
throw new RuntimeException("Unable to find a target entity manager registered with the key: " + targetMode + ". Did you add an entity manager with this key to the targetEntityManagers property?");
}
dynamicEntityDao.setStandardEntityManager(targetManager);
this.targetMode = targetMode;
}
@Override
public List<CustomPersistenceHandler> getCustomPersistenceHandlers() {
List<CustomPersistenceHandler> cloned = new ArrayList<CustomPersistenceHandler>();
cloned.addAll(customPersistenceHandlers);
if (getCustomPersistenceHandlerFilters() != null) {
for (CustomPersistenceHandlerFilter filter : getCustomPersistenceHandlerFilters()) {
Iterator<CustomPersistenceHandler> itr = cloned.iterator();
while (itr.hasNext()) {
CustomPersistenceHandler handler = itr.next();
if (!filter.shouldUseHandler(handler.getClass().getName())) {
itr.remove();
}
}
}
}
Collections.sort(cloned, new Comparator<CustomPersistenceHandler>() {
@Override
public int compare(CustomPersistenceHandler o1, CustomPersistenceHandler o2) {
return new Integer(o1.getOrder()).compareTo(new Integer(o2.getOrder()));
}
});
return cloned;
}
@Override
public void setCustomPersistenceHandlers(List<CustomPersistenceHandler> customPersistenceHandlers) {
this.customPersistenceHandlers = customPersistenceHandlers;
}
public SecurityVerifier getAdminRemoteSecurityService() {
return adminRemoteSecurityService;
}
public void setAdminRemoteSecurityService(AdminSecurityServiceRemote adminRemoteSecurityService) {
this.adminRemoteSecurityService = adminRemoteSecurityService;
}
public List<CustomPersistenceHandlerFilter> getCustomPersistenceHandlerFilters() {
return customPersistenceHandlerFilters;
}
public void setCustomPersistenceHandlerFilters(List<CustomPersistenceHandlerFilter> customPersistenceHandlerFilters) {
this.customPersistenceHandlerFilters = customPersistenceHandlerFilters;
}
public PersistenceModule[] getModules() {
return modules;
}
public void setModules(PersistenceModule[] modules) {
this.modules = modules;
}
} | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_PersistenceManagerImpl.java |
1,554 | public interface AllocationCommand {
/**
* Factory to create {@link AllocationCommand}s
* @param <T> Type of {@link AllocationCommand}s created by this {@link Factory}
*/
interface Factory<T extends AllocationCommand> {
/**
* Reads an {@link AllocationCommand} of type <code>T</code> from a {@link StreamInput}
* @param in {@link StreamInput} to read the {@link AllocationCommand} from
* @return {@link AllocationCommand} read from the {@link StreamInput}
* @throws IOException if something happens during reading
*/
T readFrom(StreamInput in) throws IOException;
/**
* Writes an {@link AllocationCommand} to a {@link StreamOutput}
* @param command {@link AllocationCommand} to write
* @param out {@link StreamOutput} to write the {@link AllocationCommand} to
* @throws IOException if something happens during writing the command
*/
void writeTo(T command, StreamOutput out) throws IOException;
/**
* Reads an {@link AllocationCommand} of type <code>T</code> from a {@link XContentParser}
* @param parser {@link XContentParser} to use
* @return {@link AllocationCommand} read
* @throws IOException if something happens during reading
*/
T fromXContent(XContentParser parser) throws IOException;
/**
* Writes an {@link AllocationCommand} using an {@link XContentBuilder}
* @param command {@link AllocationCommand} to write
* @param builder {@link XContentBuilder} to use
* @param params parameters to use when writing the command
* @throws IOException if something happens during writing the command
*/
void toXContent(T command, XContentBuilder builder, ToXContent.Params params) throws IOException;
}
/**
* Get the name of the command
* @return name of the command
*/
String name();
/**
* Executes the command on a {@link RoutingAllocation} setup
* @param allocation {@link RoutingAllocation} to modify
* @throws org.elasticsearch.ElasticsearchException if something happens during reconfiguration
*/
void execute(RoutingAllocation allocation) throws ElasticsearchException;
} | 0true
| src_main_java_org_elasticsearch_cluster_routing_allocation_command_AllocationCommand.java |
1,331 | public class ODirtyPage {
public final String fileName;
public final long pageIndex;
private final OLogSequenceNumber lsn;
public ODirtyPage(String fileName, long pageIndex, OLogSequenceNumber lsn) {
this.fileName = fileName;
this.pageIndex = pageIndex;
this.lsn = lsn;
}
public String getFileName() {
return fileName;
}
public long getPageIndex() {
return pageIndex;
}
public OLogSequenceNumber getLsn() {
return lsn;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
ODirtyPage that = (ODirtyPage) o;
if (pageIndex != that.pageIndex)
return false;
if (!fileName.equals(that.fileName))
return false;
if (!lsn.equals(that.lsn))
return false;
return true;
}
@Override
public int hashCode() {
int result = fileName.hashCode();
result = 31 * result + (int) (pageIndex ^ (pageIndex >>> 32));
result = 31 * result + lsn.hashCode();
return result;
}
@Override
public String toString() {
return "ODirtyPage{" + "fileName='" + fileName + '\'' + ", pageIndex=" + pageIndex + ", lsn=" + lsn + '}';
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_ODirtyPage.java |
6,303 | public static final class ElasticsearchMockDirectoryWrapper extends MockDirectoryWrapper {
private final ESLogger logger;
private final boolean failOnClose;
public ElasticsearchMockDirectoryWrapper(Random random, Directory delegate, ESLogger logger, boolean failOnClose) {
super(random, delegate);
this.logger = logger;
this.failOnClose = failOnClose;
}
@Override
public void close() throws IOException {
try {
super.close();
} catch (RuntimeException ex) {
if (failOnClose) {
throw ex;
}
// we catch the exception on close to properly close shards even if there are open files
// the test framework will call closeWithRuntimeException after the test exits to fail
// on unclosed files.
logger.debug("MockDirectoryWrapper#close() threw exception", ex);
}
}
public void closeWithRuntimeException() throws IOException {
super.close(); // force fail if open files etc. called in tear down of ElasticsearchIntegrationTest
}
} | 1no label
| src_test_java_org_elasticsearch_test_store_MockDirectoryHelper.java |
1,152 | public class PaymentInfoDataProvider {
@DataProvider(name = "basicPaymentInfo")
public static Object[][] provideBasicSalesPaymentInfo() {
PaymentInfo sop = new PaymentInfoImpl();
sop.setAmount(new Money(BigDecimal.valueOf(10.99)));
sop.setReferenceNumber("987654321");
sop.setType(PaymentInfoType.CREDIT_CARD);
return new Object[][] { { sop } };
}
} | 0true
| integration_src_test_java_org_broadleafcommerce_core_payment_PaymentInfoDataProvider.java |
650 | public class GetIndexTemplatesRequest extends MasterNodeReadOperationRequest<GetIndexTemplatesRequest> {
private String[] names;
public GetIndexTemplatesRequest() {
}
public GetIndexTemplatesRequest(String... names) {
this.names = names;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (names == null) {
validationException = addValidationError("names is null or empty", validationException);
} else {
for (String name : names) {
if (name == null || !Strings.hasText(name)) {
validationException = addValidationError("name is missing", validationException);
}
}
}
return validationException;
}
/**
* Sets the names of the index templates.
*/
public GetIndexTemplatesRequest names(String... names) {
this.names = names;
return this;
}
/**
* The names of the index templates.
*/
public String[] names() {
return this.names;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
names = in.readStringArray();
readLocal(in, Version.V_1_0_0_RC2);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(names);
writeLocal(out, Version.V_1_0_0_RC2);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_template_get_GetIndexTemplatesRequest.java |
275 | public class IntegrityValidatorTest
{
@Test
public void shouldValidateUniquenessIndexes() throws Exception
{
// Given
NeoStore store = mock( NeoStore.class );
IndexingService indexes = mock(IndexingService.class);
IntegrityValidator validator = new IntegrityValidator(store, indexes);
doThrow( new ConstraintVerificationFailedKernelException( null, new RuntimeException() ))
.when( indexes ).validateIndex( 2l );
UniquenessConstraintRule record = uniquenessConstraintRule( 1l, 1, 1, 2l );
// When
try
{
validator.validateSchemaRule( record );
fail("Should have thrown integrity error.");
}
catch(XAException e)
{
assertThat(e.errorCode, equalTo(XAException.XA_RBINTEGRITY));
}
}
@Test
public void deletingNodeWithRelationshipsIsNotAllowed() throws Exception
{
// Given
NeoStore store = mock( NeoStore.class );
IndexingService indexes = mock(IndexingService.class);
IntegrityValidator validator = new IntegrityValidator(store, indexes );
NodeRecord record = new NodeRecord( 1l, 1l, -1l );
record.setInUse( false );
// When
try
{
validator.validateNodeRecord( record );
fail("Should have thrown integrity error.");
}
catch(XAException e)
{
assertThat(e.errorCode, equalTo(XAException.XA_RBINTEGRITY));
}
}
@Test
public void transactionsStartedBeforeAConstraintWasCreatedAreDisallowed() throws Exception
{
// Given
NeoStore store = mock( NeoStore.class );
IndexingService indexes = mock(IndexingService.class);
when(store.getLatestConstraintIntroducingTx()).thenReturn( 10l );
IntegrityValidator validator = new IntegrityValidator( store, indexes );
// When
try
{
validator.validateTransactionStartKnowledge( 1 );
fail("Should have thrown integrity error.");
}
catch(XAException e)
{
assertThat(e.errorCode, equalTo(XAException.XA_RBINTEGRITY));
}
}
} | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_nioneo_xa_IntegrityValidatorTest.java |
1,121 | public class OSQLFunctionDecode extends OSQLFunctionAbstract {
public static final String NAME = "decode";
/**
* Get the date at construction to have the same date for all the iteration.
*/
public OSQLFunctionDecode() {
super(NAME, 2, 2);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters, OCommandContext iContext) {
final String candidate = iParameters[0].toString();
final String format = iParameters[1].toString();
if(OSQLFunctionEncode.FORMAT_BASE64.equalsIgnoreCase(format)){
return OBase64Utils.decode(candidate);
}else{
throw new OException("unknowned format :"+format);
}
}
@Override
public String getSyntax() {
return "Syntax error: decode(<binaryfield>, <format>)";
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_functions_misc_OSQLFunctionDecode.java |
529 | public class OTransactionBlockedException extends OTransactionException {
private static final long serialVersionUID = 2347493191705052402L;
public OTransactionBlockedException(String message, Throwable cause) {
super(message, cause);
}
public OTransactionBlockedException(String message) {
super(message);
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_exception_OTransactionBlockedException.java |
1,062 | public class MapConfig {
public final static int MIN_BACKUP_COUNT = 0;
public final static int DEFAULT_BACKUP_COUNT = 1;
public final static int MAX_BACKUP_COUNT = 6;
public final static int MIN_EVICTION_PERCENTAGE = 0;
public final static int DEFAULT_EVICTION_PERCENTAGE = 25;
public final static int MAX_EVICTION_PERCENTAGE = 100;
public final static int DEFAULT_TTL_SECONDS = 0;
public final static int DEFAULT_MAX_IDLE_SECONDS = 0;
public final static int DEFAULT_MAX_SIZE = Integer.MAX_VALUE;
public final static EvictionPolicy DEFAULT_EVICTION_POLICY = EvictionPolicy.NONE;
public final static String DEFAULT_MAP_MERGE_POLICY = PutIfAbsentMapMergePolicy.class.getName();
public final static InMemoryFormat DEFAULT_IN_MEMORY_FORMAT = InMemoryFormat.BINARY;
private String name = null;
private int backupCount = DEFAULT_BACKUP_COUNT;
private int asyncBackupCount = MIN_BACKUP_COUNT;
private int evictionPercentage = DEFAULT_EVICTION_PERCENTAGE;
private int timeToLiveSeconds = DEFAULT_TTL_SECONDS;
private int maxIdleSeconds = DEFAULT_MAX_IDLE_SECONDS;
private MaxSizeConfig maxSizeConfig = new MaxSizeConfig();
private EvictionPolicy evictionPolicy = DEFAULT_EVICTION_POLICY;
private MapStoreConfig mapStoreConfig = null;
private NearCacheConfig nearCacheConfig = null;
private boolean readBackupData = false;
private boolean optimizeQueries = false;
private String mergePolicy = DEFAULT_MAP_MERGE_POLICY;
private InMemoryFormat inMemoryFormat = DEFAULT_IN_MEMORY_FORMAT;
private WanReplicationRef wanReplicationRef;
private List<EntryListenerConfig> listenerConfigs;
private List<MapIndexConfig> mapIndexConfigs;
private boolean statisticsEnabled = true;
private PartitioningStrategyConfig partitioningStrategyConfig;
private MapConfigReadOnly readOnly;
public enum EvictionPolicy {
LRU, LFU, NONE
}
public MapConfig(String name) {
this.name = name;
}
public MapConfig() {
}
public MapConfig(MapConfig config) {
this.name = config.name;
this.backupCount = config.backupCount;
this.asyncBackupCount = config.asyncBackupCount;
this.evictionPercentage = config.evictionPercentage;
this.timeToLiveSeconds = config.timeToLiveSeconds;
this.maxIdleSeconds = config.maxIdleSeconds;
this.maxSizeConfig = config.maxSizeConfig != null ? new MaxSizeConfig(config.maxSizeConfig) : null;
this.evictionPolicy = config.evictionPolicy;
this.inMemoryFormat = config.inMemoryFormat;
this.mapStoreConfig = config.mapStoreConfig != null ? new MapStoreConfig(config.mapStoreConfig) : null;
this.nearCacheConfig = config.nearCacheConfig != null ? new NearCacheConfig(config.nearCacheConfig) : null;
this.readBackupData = config.readBackupData;
this.optimizeQueries = config.optimizeQueries;
this.statisticsEnabled = config.statisticsEnabled;
this.mergePolicy = config.mergePolicy;
this.wanReplicationRef = config.wanReplicationRef != null ? new WanReplicationRef(config.wanReplicationRef) : null;
this.listenerConfigs = new ArrayList<EntryListenerConfig>(config.getEntryListenerConfigs());
this.mapIndexConfigs = new ArrayList<MapIndexConfig>(config.getMapIndexConfigs());
this.partitioningStrategyConfig = config.partitioningStrategyConfig != null
? new PartitioningStrategyConfig(config.getPartitioningStrategyConfig()) : null;
}
public MapConfigReadOnly getAsReadOnly(){
if (readOnly == null){
readOnly = new MapConfigReadOnly(this);
}
return readOnly;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public MapConfig setName(String name) {
this.name = name;
return this;
}
/**
* @return data type that will be used for storing records.
*/
public InMemoryFormat getInMemoryFormat() {
return inMemoryFormat;
}
/**
* Data type that will be used for storing records.
* Possible values:
* BINARY (default): keys and values will be stored as binary data
* OBJECT : values will be stored in their object forms
* OFFHEAP : values will be stored in non-heap region of JVM
*
* @param inMemoryFormat the record type to set
* @throws IllegalArgumentException if inMemoryFormat is null.
*/
public MapConfig setInMemoryFormat(InMemoryFormat inMemoryFormat) {
this.inMemoryFormat = isNotNull(inMemoryFormat,"inMemoryFormat");
return this;
}
/**
* @return the backupCount
* @see #getAsyncBackupCount()
*/
public int getBackupCount() {
return backupCount;
}
/**
* Number of synchronous backups. If 1 is set as the backup-count for example,
* then all entries of the map will be copied to another JVM for
* fail-safety. 0 means no sync backup.
*
* @param backupCount the backupCount to set
* @see #setAsyncBackupCount(int)
*/
public MapConfig setBackupCount(final int backupCount) {
if (backupCount < MIN_BACKUP_COUNT) {
throw new IllegalArgumentException("map backup count must be equal to or bigger than "
+ MIN_BACKUP_COUNT);
}
if ((backupCount + this.asyncBackupCount) > MAX_BACKUP_COUNT) {
throw new IllegalArgumentException("total (sync + async) map backup count must be less than "
+ MAX_BACKUP_COUNT);
}
this.backupCount = backupCount;
return this;
}
/**
* @return the asyncBackupCount
* @see #setBackupCount(int)
*/
public int getAsyncBackupCount() {
return asyncBackupCount;
}
/**
* Number of asynchronous backups.
* 0 means no backup.
*
* @param asyncBackupCount the asyncBackupCount to set
* @see #setBackupCount(int)
*/
public MapConfig setAsyncBackupCount(final int asyncBackupCount) {
if (asyncBackupCount < MIN_BACKUP_COUNT) {
throw new IllegalArgumentException("map async backup count must be equal to or bigger than "
+ MIN_BACKUP_COUNT);
}
if ((this.backupCount + asyncBackupCount) > MAX_BACKUP_COUNT) {
throw new IllegalArgumentException("total (sync + async) map backup count must be less than "
+ MAX_BACKUP_COUNT);
}
this.asyncBackupCount = asyncBackupCount;
return this;
}
public int getTotalBackupCount() {
return backupCount + asyncBackupCount;
}
/**
* @return the evictionPercentage
*/
public int getEvictionPercentage() {
return evictionPercentage;
}
/**
* When max. size is reached, specified percentage of the map will be evicted.
* Any integer between 0 and 100 is allowed.
* If 25 is set for example, 25% of the entries will get evicted.
*
* @param evictionPercentage the evictionPercentage to set
* @throws IllegalArgumentException if evictionPercentage is not in the 0-100 range.
*/
public MapConfig setEvictionPercentage(final int evictionPercentage) {
if (evictionPercentage < MIN_EVICTION_PERCENTAGE) {
throw new IllegalArgumentException("eviction percentage must be greater or equal than 0");
}
if (evictionPercentage > MAX_EVICTION_PERCENTAGE) {
throw new IllegalArgumentException("eviction percentage must be smaller or equal than 100");
}
this.evictionPercentage = evictionPercentage;
return this;
}
/**
* @return the timeToLiveSeconds
*/
public int getTimeToLiveSeconds() {
return timeToLiveSeconds;
}
/**
* Maximum number of seconds for each entry to stay in the map. Entries that are
* older than timeToLiveSeconds will get automatically evicted from the map.
* Updates on the entry don't change the eviction time.
* Any integer between 0 and Integer.MAX_VALUE.
* 0 means infinite. Default is 0.
*
* @param timeToLiveSeconds the timeToLiveSeconds to set
*/
public MapConfig setTimeToLiveSeconds(int timeToLiveSeconds) {
this.timeToLiveSeconds = timeToLiveSeconds;
return this;
}
/**
* @return the maxIdleSeconds
*/
public int getMaxIdleSeconds() {
return maxIdleSeconds;
}
/**
* Maximum number of seconds for each entry to stay idle in the map. Entries that are
* idle(not touched) for more than maxIdleSeconds will get
* automatically evicted from the map. Entry is touched if get, put or
* containsKey is called.
* Any integer between 0 and Integer.MAX_VALUE.
* 0 means infinite. Default is 0.
*
* @param maxIdleSeconds the maxIdleSeconds to set
*/
public MapConfig setMaxIdleSeconds(int maxIdleSeconds) {
this.maxIdleSeconds = maxIdleSeconds;
return this;
}
public MaxSizeConfig getMaxSizeConfig() {
return maxSizeConfig;
}
public MapConfig setMaxSizeConfig(MaxSizeConfig maxSizeConfig) {
this.maxSizeConfig = maxSizeConfig;
return this;
}
/**
* @return the evictionPolicy
*/
public EvictionPolicy getEvictionPolicy() {
return evictionPolicy;
}
/**
* @param evictionPolicy the evictionPolicy to set
*/
public MapConfig setEvictionPolicy(EvictionPolicy evictionPolicy) {
this.evictionPolicy = evictionPolicy;
return this;
}
/**
* Returns the map store configuration
*
* @return the mapStoreConfig
*/
public MapStoreConfig getMapStoreConfig() {
return mapStoreConfig;
}
/**
* Sets the mapStore configuration
*
* @param mapStoreConfig the mapStoreConfig to set
*/
public MapConfig setMapStoreConfig(MapStoreConfig mapStoreConfig) {
this.mapStoreConfig = mapStoreConfig;
return this;
}
public NearCacheConfig getNearCacheConfig() {
return nearCacheConfig;
}
public MapConfig setNearCacheConfig(NearCacheConfig nearCacheConfig) {
this.nearCacheConfig = nearCacheConfig;
return this;
}
public String getMergePolicy() {
return mergePolicy;
}
public MapConfig setMergePolicy(String mergePolicy) {
this.mergePolicy = mergePolicy;
return this;
}
public boolean isStatisticsEnabled() {
return statisticsEnabled;
}
public MapConfig setStatisticsEnabled(boolean statisticsEnabled) {
this.statisticsEnabled = statisticsEnabled;
return this;
}
public boolean isReadBackupData() {
return readBackupData;
}
public MapConfig setReadBackupData(boolean readBackupData) {
this.readBackupData = readBackupData;
return this;
}
public WanReplicationRef getWanReplicationRef() {
return wanReplicationRef;
}
public MapConfig setWanReplicationRef(WanReplicationRef wanReplicationRef) {
this.wanReplicationRef = wanReplicationRef;
return this;
}
public MapConfig addEntryListenerConfig(EntryListenerConfig listenerConfig) {
getEntryListenerConfigs().add(listenerConfig);
return this;
}
public List<EntryListenerConfig> getEntryListenerConfigs() {
if (listenerConfigs == null) {
listenerConfigs = new ArrayList<EntryListenerConfig>();
}
return listenerConfigs;
}
public MapConfig setEntryListenerConfigs(List<EntryListenerConfig> listenerConfigs) {
this.listenerConfigs = listenerConfigs;
return this;
}
public MapConfig addMapIndexConfig(MapIndexConfig mapIndexConfig) {
getMapIndexConfigs().add(mapIndexConfig);
return this;
}
public List<MapIndexConfig> getMapIndexConfigs() {
if (mapIndexConfigs == null) {
mapIndexConfigs = new ArrayList<MapIndexConfig>();
}
return mapIndexConfigs;
}
public MapConfig setMapIndexConfigs(List<MapIndexConfig> mapIndexConfigs) {
this.mapIndexConfigs = mapIndexConfigs;
return this;
}
public PartitioningStrategyConfig getPartitioningStrategyConfig() {
return partitioningStrategyConfig;
}
public MapConfig setPartitioningStrategyConfig(PartitioningStrategyConfig partitioningStrategyConfig) {
this.partitioningStrategyConfig = partitioningStrategyConfig;
return this;
}
public boolean isNearCacheEnabled() {
return nearCacheConfig != null;
}
public boolean isOptimizeQueries() {
return optimizeQueries;
}
public MapConfig setOptimizeQueries(boolean optimizeQueries) {
this.optimizeQueries = optimizeQueries;
return this;
}
public boolean isCompatible(MapConfig other) {
if (this == other) {
return true;
}
return other != null &&
(this.name != null ? this.name.equals(other.name) : other.name == null) &&
this.backupCount == other.backupCount &&
this.asyncBackupCount == other.asyncBackupCount &&
this.evictionPercentage == other.evictionPercentage &&
this.maxIdleSeconds == other.maxIdleSeconds &&
(this.maxSizeConfig.getSize() == other.maxSizeConfig.getSize() ||
(Math.min(maxSizeConfig.getSize(), other.maxSizeConfig.getSize()) == 0
&& Math.max(maxSizeConfig.getSize(), other.maxSizeConfig.getSize()) == Integer.MAX_VALUE)) &&
this.timeToLiveSeconds == other.timeToLiveSeconds &&
this.readBackupData == other.readBackupData;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + this.backupCount;
result = prime * result + this.asyncBackupCount;
result = prime * result + this.evictionPercentage;
result = prime
* result
+ ((this.evictionPolicy == null) ? 0 : this.evictionPolicy
.hashCode());
result = prime
* result
+ ((this.mapStoreConfig == null) ? 0 : this.mapStoreConfig
.hashCode());
result = prime * result + this.maxIdleSeconds;
result = prime * result + this.maxSizeConfig.getSize();
result = prime
* result
+ ((this.mergePolicy == null) ? 0 : this.mergePolicy.hashCode());
result = prime * result
+ ((this.name == null) ? 0 : this.name.hashCode());
result = prime
* result
+ ((this.nearCacheConfig == null) ? 0 : this.nearCacheConfig
.hashCode());
result = prime * result + this.timeToLiveSeconds;
result = prime * result + (this.readBackupData ? 1231 : 1237);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof MapConfig)) {
return false;
}
MapConfig other = (MapConfig) obj;
return
(this.name != null ? this.name.equals(other.name) : other.name == null) &&
this.backupCount == other.backupCount &&
this.asyncBackupCount == other.asyncBackupCount &&
this.evictionPercentage == other.evictionPercentage &&
this.maxIdleSeconds == other.maxIdleSeconds &&
this.maxSizeConfig.getSize() == other.maxSizeConfig.getSize() &&
this.timeToLiveSeconds == other.timeToLiveSeconds &&
this.readBackupData == other.readBackupData &&
(this.mergePolicy != null ? this.mergePolicy.equals(other.mergePolicy) : other.mergePolicy == null) &&
(this.inMemoryFormat != null ? this.inMemoryFormat.equals(other.inMemoryFormat) : other.inMemoryFormat == null) &&
(this.evictionPolicy != null ? this.evictionPolicy.equals(other.evictionPolicy)
: other.evictionPolicy == null) &&
(this.mapStoreConfig != null ? this.mapStoreConfig.equals(other.mapStoreConfig)
: other.mapStoreConfig == null) &&
(this.nearCacheConfig != null ? this.nearCacheConfig.equals(other.nearCacheConfig)
: other.nearCacheConfig == null);
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("MapConfig");
sb.append("{name='").append(name).append('\'');
sb.append(", inMemoryFormat=").append(inMemoryFormat).append('\'');
sb.append(", backupCount=").append(backupCount);
sb.append(", asyncBackupCount=").append(asyncBackupCount);
sb.append(", timeToLiveSeconds=").append(timeToLiveSeconds);
sb.append(", maxIdleSeconds=").append(maxIdleSeconds);
sb.append(", evictionPolicy='").append(evictionPolicy).append('\'');
sb.append(", evictionPercentage=").append(evictionPercentage);
sb.append(", maxSizeConfig=").append(maxSizeConfig);
sb.append(", readBackupData=").append(readBackupData);
sb.append(", nearCacheConfig=").append(nearCacheConfig);
sb.append(", mapStoreConfig=").append(mapStoreConfig);
sb.append(", mergePolicyConfig='").append(mergePolicy).append('\'');
sb.append(", wanReplicationRef=").append(wanReplicationRef);
sb.append(", listenerConfigs=").append(listenerConfigs);
sb.append(", mapIndexConfigs=").append(mapIndexConfigs);
sb.append('}');
return sb.toString();
}
} | 1no label
| hazelcast_src_main_java_com_hazelcast_config_MapConfig.java |