Unnamed: 0 (int64, values 0–6.45k) | func (string, lengths 37–161k) | target (class label, 2 classes) | project (string, lengths 33–167)
---|---|---|---|
20 |
static final class CompletionNode {
final Completion completion;
volatile CompletionNode next;
CompletionNode(Completion completion) { this.completion = completion; }
}
| 0true | src_main_java_jsr166e_CompletableFuture.java |
598 |
interface ValuesResultListener {
boolean addResult(OIdentifiable identifiable);
}
| 0true | core_src_main_java_com_orientechnologies_orient_core_index_OIndexEngine.java |
2,234 |
class CustomBoostFactorWeight extends Weight {
final Weight subQueryWeight;
public CustomBoostFactorWeight(Weight subQueryWeight) throws IOException {
this.subQueryWeight = subQueryWeight;
}
public Query getQuery() {
return FunctionScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = subQueryWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm, float topLevelBoost) {
subQueryWeight.normalize(norm, topLevelBoost * getBoost());
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
// we ignore scoreDocsInOrder parameter, because we need to score in
// order if documents are scored with a script. The
// ShardLookup depends on in order scoring.
Scorer subQueryScorer = subQueryWeight.scorer(context, true, false, acceptDocs);
if (subQueryScorer == null) {
return null;
}
function.setNextReader(context);
return new CustomBoostFactorScorer(this, subQueryScorer, function, maxBoost, combineFunction);
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
Explanation subQueryExpl = subQueryWeight.explain(context, doc);
if (!subQueryExpl.isMatch()) {
return subQueryExpl;
}
function.setNextReader(context);
Explanation functionExplanation = function.explainScore(doc, subQueryExpl);
return combineFunction.explain(getBoost(), subQueryExpl, functionExplanation, maxBoost);
}
}
| 1no label | src_main_java_org_elasticsearch_common_lucene_search_function_FunctionScoreQuery.java |
5,307 |
public static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@Override
public LongTerms readResult(StreamInput in) throws IOException {
LongTerms buckets = new LongTerms();
buckets.readFrom(in);
return buckets;
}
};
| 1no label | src_main_java_org_elasticsearch_search_aggregations_bucket_terms_LongTerms.java |
150 |
{
public long generate( XaDataSource dataSource, int identifier ) throws XAException
{
return dataSource.getLastCommittedTxId() + 1;
}
public int getCurrentMasterId()
{
return XaLogicalLog.MASTER_ID_REPRESENTING_NO_MASTER;
}
public int getMyId()
{
return XaLogicalLog.MASTER_ID_REPRESENTING_NO_MASTER;
}
@Override
public void committed( XaDataSource dataSource, int identifier, long txId, Integer externalAuthor )
{
}
};
| 0true | community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_TxIdGenerator.java |
456 |
public class OSBTreeIndexRIDContainerSerializationPerformanceTest {
public static final int CYCLE_COUNT = 20000;
private static final int WARMUP_CYCLE_COUNT = 30000;
public static final ODirectMemoryPointer POINTER = new ODirectMemoryPointer(2048l);
public static void main(String[] args) throws InterruptedException {
ODatabaseDocumentTx db = new ODatabaseDocumentTx("plocal:target/testdb/OSBTreeRIDSetTest");
if (db.exists()) {
db.open("admin", "admin");
db.drop();
}
db.create();
ODatabaseRecordThreadLocal.INSTANCE.set(db);
Set<OIdentifiable> data = new HashSet<OIdentifiable>(8);
data.add(new ORecordId("#77:12"));
data.add(new ORecordId("#77:13"));
data.add(new ORecordId("#77:14"));
data.add(new ORecordId("#77:15"));
data.add(new ORecordId("#77:16"));
for (int i = 0; i < WARMUP_CYCLE_COUNT; i++) {
cycle(data);
}
System.gc();
Thread.sleep(1000);
long time = System.currentTimeMillis();
for (int i = 0; i < CYCLE_COUNT; i++) {
cycle(data);
}
time = System.currentTimeMillis() - time;
System.out.println("Time: " + time + "ms.");
System.out.println("Throughput: " + (((double) CYCLE_COUNT) * 1000 / time) + " rec/sec.");
}
private static void cycle(Set<OIdentifiable> data) {
final OIndexRIDContainer valueContainer = new OIndexRIDContainer("ValueContainerPerformanceTest");
valueContainer.addAll(data);
OStreamSerializerSBTreeIndexRIDContainer.INSTANCE.serializeInDirectMemory(valueContainer, POINTER, 0l);
}
}
| 0true | core_src_test_java_com_orientechnologies_orient_core_db_record_ridset_sbtree_OSBTreeIndexRIDContainerSerializationPerformanceTest.java |
185 |
@Test
public class OMultiKeyTest {
@Test
public void testEqualsDifferentSize() {
final OMultiKey multiKey = new OMultiKey(Collections.singletonList("a"));
final OMultiKey anotherMultiKey = new OMultiKey(Arrays.asList(new String[]{"a", "b"}));
assertFalse(multiKey.equals(anotherMultiKey));
}
@Test
public void testEqualsDifferentItems() {
final OMultiKey multiKey = new OMultiKey(Arrays.asList(new String[]{"b", "c"}));
final OMultiKey anotherMultiKey = new OMultiKey(Arrays.asList(new String[]{"a", "b"}));
assertFalse(multiKey.equals(anotherMultiKey));
}
@Test
public void testEqualsTheSame() {
final OMultiKey multiKey = new OMultiKey(Collections.singletonList("a"));
assertTrue(multiKey.equals(multiKey));
}
@Test
public void testEqualsNull() {
final OMultiKey multiKey = new OMultiKey(Collections.singletonList("a"));
assertFalse(multiKey.equals(null));
}
@Test
public void testEqualsDifferentClass() {
final OMultiKey multiKey = new OMultiKey(Collections.singletonList("a"));
assertFalse(multiKey.equals("a"));
}
@Test
public void testEmptyKeyEquals() {
final Map<OMultiKey, Object> multiKeyMap = new HashMap<OMultiKey, Object>();
final OMultiKey multiKey = new OMultiKey(Collections.emptyList());
multiKeyMap.put(multiKey, new Object());
final OMultiKey anotherMultiKey = new OMultiKey(Collections.emptyList());
final Object mapResult = multiKeyMap.get(anotherMultiKey);
assertNotNull(mapResult);
}
@Test
public void testOneKeyMap() {
final Map<OMultiKey, Object> multiKeyMap = new HashMap<OMultiKey, Object>();
final OMultiKey multiKey = new OMultiKey(Collections.singletonList("a"));
multiKeyMap.put(multiKey, new Object());
final OMultiKey anotherMultiKey = new OMultiKey(Collections.singletonList("a"));
final Object mapResult = multiKeyMap.get(anotherMultiKey);
assertNotNull(mapResult);
}
@Test
public void testOneKeyNotInMap() {
final Map<OMultiKey, Object> multiKeyMap = new HashMap<OMultiKey, Object>();
final OMultiKey multiKey = new OMultiKey(Collections.singletonList("a"));
multiKeyMap.put(multiKey, new Object());
final OMultiKey anotherMultiKey = new OMultiKey(Collections.singletonList("b"));
final Object mapResult = multiKeyMap.get(anotherMultiKey);
assertNull(mapResult);
}
@Test
public void testTwoKeyMap() {
final Map<OMultiKey, Object> multiKeyMap = new HashMap<OMultiKey, Object>();
final OMultiKey multiKey = new OMultiKey(Arrays.asList(new String[]{"a", "b"}));
multiKeyMap.put(multiKey, new Object());
final OMultiKey anotherMultiKey = new OMultiKey(Arrays.asList(new String[]{"a", "b"}));
final Object mapResult = multiKeyMap.get(anotherMultiKey);
assertNotNull(mapResult);
}
@Test
public void testTwoKeyMapReordered() {
final Map<OMultiKey, Object> multiKeyMap = new HashMap<OMultiKey, Object>();
final OMultiKey multiKey = new OMultiKey(Arrays.asList(new String[]{"a", "b"}));
multiKeyMap.put(multiKey, new Object());
final OMultiKey anotherMultiKey = new OMultiKey(Arrays.asList(new String[]{"b", "a"}));
final Object mapResult = multiKeyMap.get(anotherMultiKey);
assertNotNull(mapResult);
}
}
| 0true | commons_src_test_java_com_orientechnologies_common_util_OMultiKeyTest.java |
735 |
public class IndexDeleteByQueryRequest extends IndexReplicationOperationRequest<IndexDeleteByQueryRequest> {
private BytesReference source;
private String[] types = Strings.EMPTY_ARRAY;
@Nullable
private Set<String> routing;
@Nullable
private String[] filteringAliases;
IndexDeleteByQueryRequest(DeleteByQueryRequest request, String index, @Nullable Set<String> routing, @Nullable String[] filteringAliases) {
this.index = index;
this.timeout = request.timeout();
this.source = request.source();
this.types = request.types();
this.replicationType = request.replicationType();
this.consistencyLevel = request.consistencyLevel();
this.routing = routing;
this.filteringAliases = filteringAliases;
}
IndexDeleteByQueryRequest() {
}
BytesReference source() {
return source;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
if (source == null) {
validationException = addValidationError("source is missing", validationException);
}
return validationException;
}
Set<String> routing() {
return this.routing;
}
String[] types() {
return this.types;
}
String[] filteringAliases() {
return filteringAliases;
}
public IndexDeleteByQueryRequest timeout(TimeValue timeout) {
this.timeout = timeout;
return this;
}
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
source = in.readBytesReference();
int typesSize = in.readVInt();
if (typesSize > 0) {
types = new String[typesSize];
for (int i = 0; i < typesSize; i++) {
types[i] = in.readString();
}
}
int routingSize = in.readVInt();
if (routingSize > 0) {
routing = new HashSet<String>(routingSize);
for (int i = 0; i < routingSize; i++) {
routing.add(in.readString());
}
}
int aliasesSize = in.readVInt();
if (aliasesSize > 0) {
filteringAliases = new String[aliasesSize];
for (int i = 0; i < aliasesSize; i++) {
filteringAliases[i] = in.readString();
}
}
}
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBytesReference(source);
out.writeVInt(types.length);
for (String type : types) {
out.writeString(type);
}
if (routing != null) {
out.writeVInt(routing.size());
for (String r : routing) {
out.writeString(r);
}
} else {
out.writeVInt(0);
}
if (filteringAliases != null) {
out.writeVInt(filteringAliases.length);
for (String alias : filteringAliases) {
out.writeString(alias);
}
} else {
out.writeVInt(0);
}
}
}
| 0true | src_main_java_org_elasticsearch_action_deletebyquery_IndexDeleteByQueryRequest.java |
1,303 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_FIELD")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region = "blStandardElements")
public class FieldImpl implements Field,Serializable {
/**
*
*/
private static final long serialVersionUID = 2915813511754425605L;
@Id
@GeneratedValue(generator = "FieldId")
@GenericGenerator(
name="FieldId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="FieldImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.search.domain.FieldImpl")
}
)
@Column(name = "FIELD_ID")
@AdminPresentation(friendlyName = "FieldImpl_ID", group = "FieldImpl_descrpition",visibility=VisibilityEnum.HIDDEN_ALL)
protected Long id;
// This is a broadleaf enumeration
@AdminPresentation(friendlyName = "FieldImpl_EntityType", group = "FieldImpl_descrpition", order = 2, prominent = true)
@Column(name = "ENTITY_TYPE", nullable = false)
@Index(name="ENTITY_TYPE_INDEX", columnNames={"ENTITY_TYPE"})
protected String entityType;
@Column(name = "PROPERTY_NAME", nullable = false)
@AdminPresentation(friendlyName = "FieldImpl_propertyName", group = "FieldImpl_descrpition", order = 1, prominent = true)
protected String propertyName;
@Column(name = "ABBREVIATION")
@AdminPresentation(friendlyName = "FieldImpl_abbreviation", group = "FieldImpl_descrpition", order = 3, prominent = true)
protected String abbreviation;
@Column(name = "SEARCHABLE")
@AdminPresentation(friendlyName = "FieldImpl_searchable", group = "FieldImpl_descrpition", order = 4, prominent = true)
protected Boolean searchable = false;
// This is a broadleaf enumeration
@Column(name = "FACET_FIELD_TYPE")
@AdminPresentation(friendlyName = "FieldImpl_facetFieldType", group = "FieldImpl_descrpition", excluded = true)
protected String facetFieldType;
// This is a broadleaf enumeration
@ElementCollection
@CollectionTable(name="BLC_FIELD_SEARCH_TYPES", joinColumns=@JoinColumn(name="FIELD_ID"))
@Column(name="SEARCHABLE_FIELD_TYPE")
@Cascade(value={org.hibernate.annotations.CascadeType.MERGE, org.hibernate.annotations.CascadeType.PERSIST})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
protected List<String> searchableFieldTypes = new ArrayList<String>();
@Column(name = "TRANSLATABLE")
@AdminPresentation(friendlyName = "FieldImpl_translatable", group = "FieldImpl_description")
protected Boolean translatable = false;
@Override
public String getQualifiedFieldName() {
return getEntityType().getFriendlyType() + "." + propertyName;
}
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public FieldEntity getEntityType() {
return FieldEntity.getInstance(entityType);
}
@Override
public void setEntityType(FieldEntity entityType) {
this.entityType = entityType.getType();
}
@Override
public String getPropertyName() {
return propertyName;
}
@Override
public void setPropertyName(String propertyName) {
this.propertyName = propertyName;
}
@Override
public String getAbbreviation() {
return abbreviation;
}
@Override
public void setAbbreviation(String abbreviation) {
this.abbreviation = abbreviation;
}
@Override
public Boolean getSearchable() {
return searchable;
}
@Override
public void setSearchable(Boolean searchable) {
this.searchable = searchable;
}
@Override
public FieldType getFacetFieldType() {
return FieldType.getInstance(facetFieldType);
}
@Override
public void setFacetFieldType(FieldType facetFieldType) {
this.facetFieldType = facetFieldType == null ? null : facetFieldType.getType();
}
@Override
public List<FieldType> getSearchableFieldTypes() {
List<FieldType> fieldTypes = new ArrayList<FieldType>();
for (String fieldType : searchableFieldTypes) {
fieldTypes.add(FieldType.getInstance(fieldType));
}
return fieldTypes;
}
@Override
public void setSearchableFieldTypes(List<FieldType> searchableFieldTypes) {
List<String> fieldTypes = new ArrayList<String>();
for (FieldType fieldType : searchableFieldTypes) {
fieldTypes.add(fieldType.getType());
}
this.searchableFieldTypes = fieldTypes;
}
@Override
public Boolean getTranslatable() {
return translatable == null ? false : translatable;
}
@Override
public void setTranslatable(Boolean translatable) {
this.translatable = translatable;
}
@Override
public List<SearchConfig> getSearchConfigs() {
throw new UnsupportedOperationException("The default Field implementation does not support search configs");
}
@Override
public void setSearchConfigs(List<SearchConfig> searchConfigs) {
throw new UnsupportedOperationException("The default Field implementation does not support search configs");
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Field other = (Field) obj;
return getEntityType().getType().equals(other.getEntityType().getType()) && getPropertyName().equals(other.getPropertyName());
}
}
| 1no label | core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_domain_FieldImpl.java |
2,159 |
public class TxnSetOperation extends BasePutOperation implements MapTxnOperation {
private long version;
private transient boolean shouldBackup;
private String ownerUuid;
public TxnSetOperation() {
}
public TxnSetOperation(String name, Data dataKey, Data value, long version) {
super(name, dataKey, value);
this.version = version;
}
public TxnSetOperation(String name, Data dataKey, Data value, long version, long ttl) {
super(name, dataKey, value);
this.version = version;
this.ttl = ttl;
}
@Override
public boolean shouldWait() {
return !recordStore.canAcquireLock(dataKey, ownerUuid, getThreadId());
}
@Override
public void run() {
recordStore.unlock(dataKey, ownerUuid, getThreadId());
Record record = recordStore.getRecord(dataKey);
if (record == null || version == record.getVersion()) {
recordStore.set(dataKey, dataValue, ttl);
shouldBackup = true;
}
}
public long getVersion() {
return version;
}
public void setVersion(long version) {
this.version = version;
}
@Override
public void setOwnerUuid(String ownerUuid) {
this.ownerUuid = ownerUuid;
}
@Override
public Object getResponse() {
return Boolean.TRUE;
}
public boolean shouldNotify() {
return true;
}
public Operation getBackupOperation() {
RecordInfo replicationInfo = mapService.createRecordInfo(recordStore.getRecord(dataKey));
return new PutBackupOperation(name, dataKey, dataValue, replicationInfo, true);
}
public void onWaitExpire() {
final ResponseHandler responseHandler = getResponseHandler();
responseHandler.sendResponse(false);
}
@Override
public boolean shouldBackup() {
return shouldBackup;
}
public WaitNotifyKey getNotifiedKey() {
return getWaitKey();
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeLong(version);
out.writeUTF(ownerUuid);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
version = in.readLong();
ownerUuid = in.readUTF();
}
}
| 1no label | hazelcast_src_main_java_com_hazelcast_map_tx_TxnSetOperation.java |
711 |
constructors[COLLECTION_ADD_ALL] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new CollectionAddAllRequest();
}
};
| 0true | hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java |
578 |
public interface ClusterService extends CoreService {
MemberImpl getMember(Address address);
MemberImpl getMember(String uuid);
Collection<MemberImpl> getMemberList();
Collection<Member> getMembers();
Address getMasterAddress();
boolean isMaster();
Address getThisAddress();
int getSize();
long getClusterTime();
}
| 0true | hazelcast_src_main_java_com_hazelcast_cluster_ClusterService.java |
389 |
new Thread(){
public void run() {
mm.forceUnlock(key);
forceUnlock.countDown();
}
}.start();
| 0true | hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapLockTest.java |
2,104 |
public class ReplaceOperation extends BasePutOperation {
private boolean successful = false;
public ReplaceOperation(String name, Data dataKey, Data value) {
super(name, dataKey, value);
}
public ReplaceOperation() {
}
public void run() {
final Object oldValue = recordStore.replace(dataKey, dataValue);
dataOldValue = mapService.toData(oldValue);
successful = oldValue != null;
}
public boolean shouldBackup() {
return successful;
}
public void afterRun() {
if (successful)
super.afterRun();
}
@Override
public String toString() {
return "ReplaceOperation{" + name + "}";
}
@Override
public Object getResponse() {
return dataOldValue;
}
}
| 1no label | hazelcast_src_main_java_com_hazelcast_map_operation_ReplaceOperation.java |
88 |
private final class ClientPacketProcessor implements Runnable {
final ClientPacket packet;
private ClientPacketProcessor(ClientPacket packet) {
this.packet = packet;
}
@Override
public void run() {
Connection conn = packet.getConn();
ClientEndpoint endpoint = getEndpoint(conn);
ClientRequest request = null;
try {
request = loadRequest();
if (request == null) {
handlePacketWithNullRequest();
} else if (request instanceof AuthenticationRequest) {
endpoint = createEndpoint(conn);
if (endpoint != null) {
processRequest(endpoint, request);
} else {
handleEndpointNotCreatedConnectionNotAlive();
}
} else if (endpoint == null) {
handleMissingEndpoint(conn);
} else if (endpoint.isAuthenticated()) {
processRequest(endpoint, request);
} else {
handleAuthenticationFailure(conn, endpoint, request);
}
} catch (Throwable e) {
handleProcessingFailure(endpoint, request, e);
}
}
private ClientRequest loadRequest() {
Data data = packet.getData();
return serializationService.toObject(data);
}
private void handleEndpointNotCreatedConnectionNotAlive() {
logger.warning("Dropped: " + packet + " -> endpoint not created for AuthenticationRequest, "
+ "connection not alive");
}
private void handlePacketWithNullRequest() {
logger.warning("Dropped: " + packet + " -> null request");
}
private void handleMissingEndpoint(Connection conn) {
if (conn.live()) {
logger.severe("Dropping: " + packet + " -> no endpoint found for live connection.");
} else {
if (logger.isFinestEnabled()) {
logger.finest("Dropping: " + packet + " -> no endpoint found for dead connection.");
}
}
}
private void handleProcessingFailure(ClientEndpoint endpoint, ClientRequest request, Throwable e) {
Level level = nodeEngine.isActive() ? Level.SEVERE : Level.FINEST;
if (logger.isLoggable(level)) {
if (request == null) {
logger.log(level, e.getMessage(), e);
} else {
logger.log(level, "While executing request: " + request + " -> " + e.getMessage(), e);
}
}
if (request != null && endpoint != null) {
endpoint.sendResponse(e, request.getCallId());
}
}
private void processRequest(ClientEndpoint endpoint, ClientRequest request) throws Exception {
request.setEndpoint(endpoint);
initService(request);
request.setClientEngine(ClientEngineImpl.this);
checkPermissions(endpoint, request);
request.process();
}
private void checkPermissions(ClientEndpoint endpoint, ClientRequest request) {
SecurityContext securityContext = getSecurityContext();
if (securityContext != null) {
Permission permission = request.getRequiredPermission();
if (permission != null) {
securityContext.checkPermission(endpoint.getSubject(), permission);
}
}
}
private void initService(ClientRequest request) {
String serviceName = request.getServiceName();
if (serviceName == null) {
return;
}
Object service = nodeEngine.getService(serviceName);
if (service == null) {
if (nodeEngine.isActive()) {
throw new IllegalArgumentException("No service registered with name: " + serviceName);
}
throw new HazelcastInstanceNotActiveException();
}
request.setService(service);
}
private void handleAuthenticationFailure(Connection conn, ClientEndpoint endpoint, ClientRequest request) {
Exception exception;
if (nodeEngine.isActive()) {
String message = "Client " + conn + " must authenticate before any operation.";
logger.severe(message);
exception = new AuthenticationException(message);
} else {
exception = new HazelcastInstanceNotActiveException();
}
endpoint.sendResponse(exception, request.getCallId());
removeEndpoint(conn);
}
}
| 1no label | hazelcast_src_main_java_com_hazelcast_client_ClientEngineImpl.java |
369 |
public interface OLazyObjectMapInterface<TYPE> extends Map<Object, Object> {
public void setConvertToRecord(boolean convertToRecord);
public boolean isConverted();
}
| 0true | core_src_main_java_com_orientechnologies_orient_core_db_object_OLazyObjectMapInterface.java |
6,271 |
public class IsTrueAssertion extends Assertion {
private static final ESLogger logger = Loggers.getLogger(IsTrueAssertion.class);
public IsTrueAssertion(String field) {
super(field, true);
}
@Override
protected void doAssert(Object actualValue, Object expectedValue) {
logger.trace("assert that [{}] has a true value", actualValue);
String errorMessage = errorMessage();
assertThat(errorMessage, actualValue, notNullValue());
String actualString = actualValue.toString();
assertThat(errorMessage, actualString, not(equalTo("")));
assertThat(errorMessage, actualString, not(equalToIgnoringCase(Boolean.FALSE.toString())));
assertThat(errorMessage, actualString, not(equalTo("0")));
}
private String errorMessage() {
return "field [" + getField() + "] doesn't have a true value";
}
}
| 1no label | src_test_java_org_elasticsearch_test_rest_section_IsTrueAssertion.java |
126 |
{
@Override
public boolean accept( LogEntry item )
{
return item instanceof LogEntry.Done;
}
};
| 0true | community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestApplyTransactions.java |
3,200 |
public class IndexFieldDataService extends AbstractIndexComponent {
private static final String DISABLED_FORMAT = "disabled";
private static final String DOC_VALUES_FORMAT = "doc_values";
private static final String ARRAY_FORMAT = "array";
private static final String PAGED_BYTES_FORMAT = "paged_bytes";
private static final String FST_FORMAT = "fst";
private static final String COMPRESSED_FORMAT = "compressed";
private final static ImmutableMap<String, IndexFieldData.Builder> buildersByType;
private final static ImmutableMap<String, IndexFieldData.Builder> docValuesBuildersByType;
private final static ImmutableMap<Tuple<String, String>, IndexFieldData.Builder> buildersByTypeAndFormat;
private final CircuitBreakerService circuitBreakerService;
static {
buildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()
.put("string", new PagedBytesIndexFieldData.Builder())
.put("float", new FloatArrayIndexFieldData.Builder())
.put("double", new DoubleArrayIndexFieldData.Builder())
.put("byte", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BYTE))
.put("short", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.SHORT))
.put("int", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.INT))
.put("long", new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.LONG))
.put("geo_point", new GeoPointDoubleArrayIndexFieldData.Builder())
.immutableMap();
docValuesBuildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()
.put("string", new DocValuesIndexFieldData.Builder())
.put("float", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put("double", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put("byte", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
.put("short", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put("int", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put("long", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put("geo_point", new GeoPointBinaryDVIndexFieldData.Builder())
.immutableMap();
buildersByTypeAndFormat = MapBuilder.<Tuple<String, String>, IndexFieldData.Builder>newMapBuilder()
.put(Tuple.tuple("string", PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
.put(Tuple.tuple("string", FST_FORMAT), new FSTBytesIndexFieldData.Builder())
.put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple("string", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("float", ARRAY_FORMAT), new FloatArrayIndexFieldData.Builder())
.put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put(Tuple.tuple("float", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("double", ARRAY_FORMAT), new DoubleArrayIndexFieldData.Builder())
.put(Tuple.tuple("double", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put(Tuple.tuple("double", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("byte", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.BYTE))
.put(Tuple.tuple("byte", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
.put(Tuple.tuple("byte", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("short", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.SHORT))
.put(Tuple.tuple("short", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put(Tuple.tuple("short", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("int", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.INT))
.put(Tuple.tuple("int", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put(Tuple.tuple("int", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("long", ARRAY_FORMAT), new PackedArrayIndexFieldData.Builder().setNumericType(IndexNumericFieldData.NumericType.LONG))
.put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put(Tuple.tuple("long", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointDoubleArrayIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new GeoPointBinaryDVIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", COMPRESSED_FORMAT), new GeoPointCompressedIndexFieldData.Builder())
.immutableMap();
}
private final IndicesFieldDataCache indicesFieldDataCache;
private final ConcurrentMap<String, IndexFieldData<?>> loadedFieldData = ConcurrentCollections.newConcurrentMap();
private final Map<String, IndexFieldDataCache> fieldDataCaches = Maps.newHashMap(); // no need for concurrency support, always used under lock
IndexService indexService;
// public for testing
public IndexFieldDataService(Index index, CircuitBreakerService circuitBreakerService) {
this(index, ImmutableSettings.Builder.EMPTY_SETTINGS, new IndicesFieldDataCache(ImmutableSettings.Builder.EMPTY_SETTINGS), circuitBreakerService);
}
@Inject
public IndexFieldDataService(Index index, @IndexSettings Settings indexSettings, IndicesFieldDataCache indicesFieldDataCache,
CircuitBreakerService circuitBreakerService) {
super(index, indexSettings);
this.indicesFieldDataCache = indicesFieldDataCache;
this.circuitBreakerService = circuitBreakerService;
}
// we need to "inject" the index service to not create cyclic dep
public void setIndexService(IndexService indexService) {
this.indexService = indexService;
}
public void clear() {
synchronized (loadedFieldData) {
for (IndexFieldData<?> fieldData : loadedFieldData.values()) {
fieldData.clear();
}
loadedFieldData.clear();
for (IndexFieldDataCache cache : fieldDataCaches.values()) {
cache.clear();
}
fieldDataCaches.clear();
}
}
public void clearField(String fieldName) {
synchronized (loadedFieldData) {
IndexFieldData<?> fieldData = loadedFieldData.remove(fieldName);
if (fieldData != null) {
fieldData.clear();
}
IndexFieldDataCache cache = fieldDataCaches.remove(fieldName);
if (cache != null) {
cache.clear();
}
}
}
public void clear(IndexReader reader) {
synchronized (loadedFieldData) {
for (IndexFieldData<?> indexFieldData : loadedFieldData.values()) {
indexFieldData.clear(reader);
}
for (IndexFieldDataCache cache : fieldDataCaches.values()) {
cache.clear(reader);
}
}
}
public void onMappingUpdate() {
// synchronize to make sure to not miss field data instances that are being loaded
synchronized (loadedFieldData) {
// important: do not clear fieldDataCaches: the cache may be reused
loadedFieldData.clear();
}
}
public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper<?> mapper) {
final FieldMapper.Names fieldNames = mapper.names();
final FieldDataType type = mapper.fieldDataType();
final boolean docValues = mapper.hasDocValues();
IndexFieldData<?> fieldData = loadedFieldData.get(fieldNames.indexName());
if (fieldData == null) {
synchronized (loadedFieldData) {
fieldData = loadedFieldData.get(fieldNames.indexName());
if (fieldData == null) {
IndexFieldData.Builder builder = null;
String format = type.getFormat(indexSettings);
if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format");
format = null;
}
if (format != null) {
builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
if (builder == null) {
logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default");
}
}
if (builder == null && docValues) {
builder = docValuesBuildersByType.get(type.getType());
}
if (builder == null) {
builder = buildersByType.get(type.getType());
}
if (builder == null) {
throw new ElasticsearchIllegalArgumentException("failed to find field data builder for field " + fieldNames.fullName() + ", and type " + type.getType());
}
IndexFieldDataCache cache = fieldDataCaches.get(fieldNames.indexName());
if (cache == null) {
// we default to node level cache, which in turn defaults to be unbounded
// this means changing the node level settings is simple, just set the bounds there
String cacheType = type.getSettings().get("cache", indexSettings.get("index.fielddata.cache", "node"));
if ("resident".equals(cacheType)) {
cache = new IndexFieldDataCache.Resident(indexService, fieldNames, type);
} else if ("soft".equals(cacheType)) {
cache = new IndexFieldDataCache.Soft(indexService, fieldNames, type);
} else if ("node".equals(cacheType)) {
cache = indicesFieldDataCache.buildIndexFieldDataCache(indexService, index, fieldNames, type);
} else {
throw new ElasticsearchIllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldNames.fullName() + "]");
}
fieldDataCaches.put(fieldNames.indexName(), cache);
}
fieldData = builder.build(index, indexSettings, mapper, cache, circuitBreakerService);
loadedFieldData.put(fieldNames.indexName(), fieldData);
}
}
}
return (IFD) fieldData;
}
}
| 1no label | src_main_java_org_elasticsearch_index_fielddata_IndexFieldDataService.java |
5,310 |
public class StringTerms extends InternalTerms {
public static final InternalAggregation.Type TYPE = new Type("terms", "sterms");
public static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@Override
public StringTerms readResult(StreamInput in) throws IOException {
StringTerms buckets = new StringTerms();
buckets.readFrom(in);
return buckets;
}
};
public static void registerStreams() {
AggregationStreams.registerStream(STREAM, TYPE.stream());
}
public static class Bucket extends InternalTerms.Bucket {
final BytesRef termBytes;
public Bucket(BytesRef term, long docCount, InternalAggregations aggregations) {
super(docCount, aggregations);
this.termBytes = term;
}
@Override
public String getKey() {
return termBytes.utf8ToString();
}
@Override
public Text getKeyAsText() {
return new BytesText(new BytesArray(termBytes));
}
@Override
public Number getKeyAsNumber() {
// this method is needed for scripted numeric faceting
return Double.parseDouble(termBytes.utf8ToString());
}
@Override
int compareTerm(Terms.Bucket other) {
return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
}
}
StringTerms() {} // for serialization
public StringTerms(String name, InternalOrder order, int requiredSize, long minDocCount, Collection<InternalTerms.Bucket> buckets) {
super(name, order, requiredSize, minDocCount, buckets);
}
@Override
public Type type() {
return TYPE;
}
@Override
public void readFrom(StreamInput in) throws IOException {
this.name = in.readString();
this.order = InternalOrder.Streams.readOrder(in);
this.requiredSize = readSize(in);
this.minDocCount = in.readVLong();
int size = in.readVInt();
List<InternalTerms.Bucket> buckets = new ArrayList<InternalTerms.Bucket>(size);
for (int i = 0; i < size; i++) {
buckets.add(new Bucket(in.readBytesRef(), in.readVLong(), InternalAggregations.readAggregations(in)));
}
this.buckets = buckets;
this.bucketMap = null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
InternalOrder.Streams.writeOrder(order, out);
writeSize(requiredSize, out);
out.writeVLong(minDocCount);
out.writeVInt(buckets.size());
for (InternalTerms.Bucket bucket : buckets) {
out.writeBytesRef(((Bucket) bucket).termBytes);
out.writeVLong(bucket.getDocCount());
((InternalAggregations) bucket.getAggregations()).writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.startArray(CommonFields.BUCKETS);
for (InternalTerms.Bucket bucket : buckets) {
builder.startObject();
builder.field(CommonFields.KEY, ((Bucket) bucket).termBytes);
builder.field(CommonFields.DOC_COUNT, bucket.getDocCount());
((InternalAggregations) bucket.getAggregations()).toXContentInternal(builder, params);
builder.endObject();
}
builder.endArray();
builder.endObject();
return builder;
}
}
| 1no label | src_main_java_org_elasticsearch_search_aggregations_bucket_terms_StringTerms.java |
1,085 |
public class IndexAliasesTests extends ElasticsearchIntegrationTest {
@Test
public void testAliases() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> aliasing index [test] with [alias1]");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1"));
logger.info("--> indexing against [alias1], should work now");
IndexResponse indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"))).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test"));
logger.info("--> creating index [test_x]");
createIndex("test_x");
ensureGreen();
logger.info("--> remove [alias1], Aliasing index [test_x] with [alias1]");
assertAcked(admin().indices().prepareAliases().removeAlias("test", "alias1").addAlias("test_x", "alias1"));
logger.info("--> indexing against [alias1], should work against [test_x]");
indexResponse = client().index(indexRequest("alias1").type("type1").id("1").source(source("1", "test"))).actionGet();
assertThat(indexResponse.getIndex(), equalTo("test_x"));
}
@Test
public void testFailedFilter() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
try {
logger.info("--> aliasing index [test] with [alias1] and filter [t]");
admin().indices().prepareAliases().addAlias("test", "alias1", "{ t }").get();
fail();
} catch (Exception e) {
// all is well
}
}
@Test
public void testFilteringAliases() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> aliasing index [test] with [alias1] and filter [user:kimchy]");
FilterBuilder filter = termFilter("user", "kimchy");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", filter));
// For now just making sure that filter was stored with the alias
logger.info("--> making sure that filter was stored with alias [alias1] and filter [user:kimchy]");
ClusterState clusterState = admin().cluster().prepareState().get().getState();
IndexMetaData indexMd = clusterState.metaData().index("test");
assertThat(indexMd.aliases().get("alias1").filter().string(), equalTo("{\"term\":{\"user\":\"kimchy\"}}"));
}
@Test
public void testEmptyFilter() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> aliasing index [test] with [alias1] and empty filter");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1", "{}"));
}
@Test
public void testSearchingFilteringAliasesSingleIndex() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> adding filtering aliases to index [test]");
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias1"));
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias2"));
assertAcked(admin().indices().prepareAliases().addAlias("test", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test", "bars", termFilter("name", "bar")));
assertAcked(admin().indices().prepareAliases().addAlias("test", "tests", termFilter("name", "test")));
logger.info("--> indexing against [test]");
client().index(indexRequest("test").type("type1").id("1").source(source("1", "foo test")).refresh(true)).actionGet();
client().index(indexRequest("test").type("type1").id("2").source(source("2", "bar test")).refresh(true)).actionGet();
client().index(indexRequest("test").type("type1").id("3").source(source("3", "baz test")).refresh(true)).actionGet();
client().index(indexRequest("test").type("type1").id("4").source(source("4", "something else")).refresh(true)).actionGet();
logger.info("--> checking single filtering alias search");
SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1");
logger.info("--> checking single filtering alias wildcard search");
searchResponse = client().prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3");
logger.info("--> checking single filtering alias search with sort");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_uid", SortOrder.ASC).get();
assertHits(searchResponse.getHits(), "1", "2", "3");
logger.info("--> checking single filtering alias search with global facets");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addFacet(FacetBuilders.termsFacet("test").field("name").global(true))
.get();
assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(4));
logger.info("--> checking single filtering alias search with global facets and sort");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addFacet(FacetBuilders.termsFacet("test").field("name").global(true))
.addSort("_uid", SortOrder.ASC).get();
assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(4));
logger.info("--> checking single filtering alias search with non-global facets");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addFacet(FacetBuilders.termsFacet("test").field("name").global(false))
.addSort("_uid", SortOrder.ASC).get();
assertThat(((TermsFacet) searchResponse.getFacets().facet("test")).getEntries().size(), equalTo(2));
searchResponse = client().prepareSearch("foos", "bars").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2");
logger.info("--> checking single non-filtering alias search");
searchResponse = client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
logger.info("--> checking non-filtering alias and filtering alias search");
searchResponse = client().prepareSearch("alias1", "foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
logger.info("--> checking index and filtering alias search");
searchResponse = client().prepareSearch("test", "foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
logger.info("--> checking index and alias wildcard search");
searchResponse = client().prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");
}
@Test
public void testSearchingFilteringAliasesTwoIndices() throws Exception {
logger.info("--> creating index [test1]");
createIndex("test1");
logger.info("--> creating index [test2]");
createIndex("test2");
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "bars", termFilter("name", "bar")));
logger.info("--> adding filtering aliases to index [test2]");
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "foos", termFilter("name", "foo")));
logger.info("--> indexing against [test1]");
client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"))).get();
client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"))).get();
client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"))).get();
client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"))).get();
logger.info("--> indexing against [test2]");
client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"))).get();
client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"))).get();
client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"))).get();
client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"))).get();
refresh();
logger.info("--> checking filtering alias for two indices");
SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "5");
assertThat(client().prepareCount("foos").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(2L));
logger.info("--> checking filtering alias for one index");
searchResponse = client().prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "2");
assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L));
logger.info("--> checking filtering alias for two indices and one complete index");
searchResponse = client().prepareSearch("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5");
assertThat(client().prepareCount("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
logger.info("--> checking filtering alias for two indices and non-filtering alias for one index");
searchResponse = client().prepareSearch("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5");
assertThat(client().prepareCount("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
assertThat(searchResponse.getHits().totalHits(), equalTo(8L));
assertThat(client().prepareCount("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(8L));
logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get();
assertHits(searchResponse.getHits(), "4", "8");
assertThat(client().prepareCount("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get().getCount(), equalTo(2L));
}
@Test
public void testSearchingFilteringAliasesMultipleIndices() throws Exception {
logger.info("--> creating indices");
createIndex("test1", "test2", "test3");
ensureGreen();
logger.info("--> adding aliases to indices");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "alias12"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "alias12"));
logger.info("--> adding filtering aliases to indices");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "filter1", termFilter("name", "test1")));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "filter23", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test3", "filter23", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "filter13", termFilter("name", "baz")));
assertAcked(admin().indices().prepareAliases().addAlias("test3", "filter13", termFilter("name", "baz")));
logger.info("--> indexing against [test1]");
client().index(indexRequest("test1").type("type1").id("11").source(source("11", "foo test1"))).get();
client().index(indexRequest("test1").type("type1").id("12").source(source("12", "bar test1"))).get();
client().index(indexRequest("test1").type("type1").id("13").source(source("13", "baz test1"))).get();
client().index(indexRequest("test2").type("type1").id("21").source(source("21", "foo test2"))).get();
client().index(indexRequest("test2").type("type1").id("22").source(source("22", "bar test2"))).get();
client().index(indexRequest("test2").type("type1").id("23").source(source("23", "baz test2"))).get();
client().index(indexRequest("test3").type("type1").id("31").source(source("31", "foo test3"))).get();
client().index(indexRequest("test3").type("type1").id("32").source(source("32", "bar test3"))).get();
client().index(indexRequest("test3").type("type1").id("33").source(source("33", "baz test3"))).get();
refresh();
logger.info("--> checking filtering alias for multiple indices");
SearchResponse searchResponse = client().prepareSearch("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "21", "31", "13", "33");
assertThat(client().prepareCount("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(4L));
searchResponse = client().prepareSearch("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "21", "31", "11", "12", "13");
assertThat(client().prepareCount("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(5L));
searchResponse = client().prepareSearch("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "11", "12", "13", "33");
assertThat(client().prepareCount("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(4L));
searchResponse = client().prepareSearch("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "11", "12", "13", "21", "31", "33");
assertThat(client().prepareCount("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(6L));
searchResponse = client().prepareSearch("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "21", "22", "23", "31", "13", "33");
assertThat(client().prepareCount("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(6L));
searchResponse = client().prepareSearch("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "11", "12", "13", "21", "22", "23", "31", "33");
assertThat(client().prepareCount("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(8L));
}
@Test
public void testDeletingByQueryFilteringAliases() throws Exception {
logger.info("--> creating index [test1] and [test2");
createIndex("test1", "test2");
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "bars", termFilter("name", "bar")));
assertAcked(admin().indices().prepareAliases().addAlias("test1", "tests", termFilter("name", "test")));
logger.info("--> adding filtering aliases to index [test2]");
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTests"));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "foos", termFilter("name", "foo")));
assertAcked(admin().indices().prepareAliases().addAlias("test2", "tests", termFilter("name", "test")));
logger.info("--> indexing against [test1]");
client().index(indexRequest("test1").type("type1").id("1").source(source("1", "foo test"))).get();
client().index(indexRequest("test1").type("type1").id("2").source(source("2", "bar test"))).get();
client().index(indexRequest("test1").type("type1").id("3").source(source("3", "baz test"))).get();
client().index(indexRequest("test1").type("type1").id("4").source(source("4", "something else"))).get();
logger.info("--> indexing against [test2]");
client().index(indexRequest("test2").type("type1").id("5").source(source("5", "foo test"))).get();
client().index(indexRequest("test2").type("type1").id("6").source(source("6", "bar test"))).get();
client().index(indexRequest("test2").type("type1").id("7").source(source("7", "baz test"))).get();
client().index(indexRequest("test2").type("type1").id("8").source(source("8", "something else"))).get();
refresh();
logger.info("--> checking counts before delete");
assertThat(client().prepareCount("bars").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(1L));
logger.info("--> delete by query from a single alias");
client().prepareDeleteByQuery("bars").setQuery(QueryBuilders.termQuery("name", "test")).get();
logger.info("--> verify that only one record was deleted");
assertThat(client().prepareCount("test1").setQuery(QueryBuilders.matchAllQuery()).get().getCount(), equalTo(3L));
logger.info("--> delete by query from an aliases pointing to two indices");
client().prepareDeleteByQuery("foos").setQuery(QueryBuilders.matchAllQuery()).get();
logger.info("--> verify that proper records were deleted");
SearchResponse searchResponse = client().prepareSearch("aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "3", "4", "6", "7", "8");
logger.info("--> delete by query from an aliases and an index");
client().prepareDeleteByQuery("tests", "test2").setQuery(QueryBuilders.matchAllQuery()).get();
logger.info("--> verify that proper records were deleted");
searchResponse = client().prepareSearch("aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "4");
}
@Test
public void testDeleteAliases() throws Exception {
logger.info("--> creating index [test1] and [test2]");
createIndex("test1", "test2");
ensureGreen();
logger.info("--> adding filtering aliases to index [test1]");
assertAcked(admin().indices().prepareAliases().addAlias("test1", "aliasToTest1")
.addAlias("test1", "aliasToTests")
.addAlias("test1", "foos", termFilter("name", "foo"))
.addAlias("test1", "bars", termFilter("name", "bar"))
.addAlias("test1", "tests", termFilter("name", "test")));
logger.info("--> adding filtering aliases to index [test2]");
assertAcked(admin().indices().prepareAliases().addAlias("test2", "aliasToTest2")
.addAlias("test2", "aliasToTests")
.addAlias("test2", "foos", termFilter("name", "foo"))
.addAlias("test2", "tests", termFilter("name", "test")));
String[] indices = {"test1", "test2"};
String[] aliases = {"aliasToTest1", "foos", "bars", "tests", "aliasToTest2", "aliasToTests"};
admin().indices().prepareAliases().removeAlias(indices, aliases).get();
AliasesExistResponse response = admin().indices().prepareAliasesExist(aliases).get();
assertThat(response.exists(), equalTo(false));
}
@Test
public void testWaitForAliasCreationMultipleShards() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
for (int i = 0; i < 10; i++) {
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias" + i));
client().index(indexRequest("alias" + i).type("type1").id("1").source(source("1", "test"))).get();
}
}
@Test
public void testWaitForAliasCreationSingleShard() throws Exception {
logger.info("--> creating index [test]");
assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.numberOfReplicas", 0).put("index.numberOfShards", 1))).get());
ensureGreen();
for (int i = 0; i < 10; i++) {
assertAcked(admin().indices().prepareAliases().addAlias("test", "alias" + i));
client().index(indexRequest("alias" + i).type("type1").id("1").source(source("1", "test"))).get();
}
}
@Test
public void testWaitForAliasSimultaneousUpdate() throws Exception {
final int aliasCount = 10;
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
ExecutorService executor = Executors.newFixedThreadPool(aliasCount);
for (int i = 0; i < aliasCount; i++) {
final String aliasName = "alias" + i;
executor.submit(new Runnable() {
@Override
public void run() {
assertAcked(admin().indices().prepareAliases().addAlias("test", aliasName));
client().index(indexRequest(aliasName).type("type1").id("1").source(source("1", "test"))).actionGet();
}
});
}
executor.shutdown();
boolean done = executor.awaitTermination(10, TimeUnit.SECONDS);
if (!done) {
executor.shutdownNow();
}
assertThat(done, equalTo(true));
}
@Test
public void testSameAlias() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> creating alias1 ");
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1")));
TimeValue timeout = TimeValue.timeValueSeconds(2);
logger.info("--> recreating alias1 ");
StopWatch stopWatch = new StopWatch();
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1").setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> modifying alias1 to have a filter");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> recreating alias1 with the same filter");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "foo")).setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> recreating alias1 with a different filter");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().addAlias("test", "alias1", termFilter("name", "bar")).setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
logger.info("--> verify that filter was updated");
AliasMetaData aliasMetaData = cluster().clusterService().state().metaData().aliases().get("alias1").get("test");
assertThat(aliasMetaData.getFilter().toString(), equalTo("{\"term\":{\"name\":\"bar\"}}"));
logger.info("--> deleting alias1");
stopWatch.start();
assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1").setTimeout(timeout)));
assertThat(stopWatch.stop().lastTaskTime().millis(), lessThan(timeout.millis()));
}
@Test(expected = AliasesMissingException.class)
public void testIndicesRemoveNonExistingAliasResponds404() throws Exception {
logger.info("--> creating index [test]");
createIndex("test");
ensureGreen();
logger.info("--> deleting alias1 which does not exist");
assertAcked((admin().indices().prepareAliases().removeAlias("test", "alias1")));
}
@Test
public void testIndicesGetAliases() throws Exception {
Settings indexSettings = ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
.build();
logger.info("--> creating indices [foobar, test, test123, foobarbaz, bazbar]");
assertAcked(prepareCreate("foobar").setSettings(indexSettings));
assertAcked(prepareCreate("test").setSettings(indexSettings));
assertAcked(prepareCreate("test123").setSettings(indexSettings));
assertAcked(prepareCreate("foobarbaz").setSettings(indexSettings));
assertAcked(prepareCreate("bazbar").setSettings(indexSettings));
ensureGreen();
logger.info("--> creating aliases [alias1, alias2]");
assertAcked(admin().indices().prepareAliases().addAlias("foobar", "alias1").addAlias("foobar", "alias2"));
logger.info("--> getting alias1");
GetAliasesResponse getResponse = admin().indices().prepareGetAliases("alias1").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist("alias1").get();
assertThat(existsResponse.exists(), equalTo(true));
logger.info("--> getting all aliases that start with alias*");
getResponse = admin().indices().prepareGetAliases("alias*").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias2"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(1), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias1"));
assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());
existsResponse = admin().indices().prepareAliasesExist("alias*").get();
assertThat(existsResponse.exists(), equalTo(true));
logger.info("--> creating aliases [bar, baz, foo]");
assertAcked(admin().indices().prepareAliases()
.addAlias("bazbar", "bar")
.addAlias("bazbar", "bac", termFilter("field", "value"))
.addAlias("foobar", "foo"));
assertAcked(admin().indices().prepareAliases()
.addAliasAction(new AliasAction(AliasAction.Type.ADD, "foobar", "bac").routing("bla")));
logger.info("--> getting bar and baz for index bazbar");
getResponse = admin().indices().prepareGetAliases("bar", "bac").addIndices("bazbar").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
existsResponse = admin().indices().prepareAliasesExist("bar", "bac")
.addIndices("bazbar").get();
assertThat(existsResponse.exists(), equalTo(true));
logger.info("--> getting *b* for index baz*");
getResponse = admin().indices().prepareGetAliases("*b*").addIndices("baz*").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
existsResponse = admin().indices().prepareAliasesExist("*b*")
.addIndices("baz*").get();
assertThat(existsResponse.exists(), equalTo(true));
logger.info("--> getting *b* for index *bar");
getResponse = admin().indices().prepareGetAliases("b*").addIndices("*bar").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(2));
assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
assertThat(getResponse.getAliases().get("bazbar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("bazbar").get(0).alias(), equalTo("bac"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("term"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("field"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getFilter().string(), containsString("value"));
assertThat(getResponse.getAliases().get("bazbar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(0).getSearchRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1), notNullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).alias(), equalTo("bar"));
assertThat(getResponse.getAliases().get("bazbar").get(1).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("bazbar").get(1).getSearchRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("bac"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), equalTo("bla"));
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), equalTo("bla"));
existsResponse = admin().indices().prepareAliasesExist("b*")
.addIndices("*bar").get();
assertThat(existsResponse.exists(), equalTo(true));
logger.info("--> getting f* for index *bar");
getResponse = admin().indices().prepareGetAliases("f*").addIndices("*bar").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
existsResponse = admin().indices().prepareAliasesExist("f*")
.addIndices("*bar").get();
assertThat(existsResponse.exists(), equalTo(true));
// alias at work
logger.info("--> getting f* for index *bac");
getResponse = admin().indices().prepareGetAliases("foo").addIndices("*bac").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
existsResponse = admin().indices().prepareAliasesExist("foo")
.addIndices("*bac").get();
assertThat(existsResponse.exists(), equalTo(true));
logger.info("--> getting foo for index foobar");
getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
existsResponse = admin().indices().prepareAliasesExist("foo")
.addIndices("foobar").get();
assertThat(existsResponse.exists(), equalTo(true));
// alias at work again
logger.info("--> getting * for index *bac");
getResponse = admin().indices().prepareGetAliases("*").addIndices("*bac").get();
assertThat(getResponse, notNullValue());
assertThat(getResponse.getAliases().size(), equalTo(2));
assertThat(getResponse.getAliases().get("foobar").size(), equalTo(4));
assertThat(getResponse.getAliases().get("bazbar").size(), equalTo(2));
existsResponse = admin().indices().prepareAliasesExist("*")
.addIndices("*bac").get();
assertThat(existsResponse.exists(), equalTo(true));
assertAcked(admin().indices().prepareAliases()
.removeAlias("foobar", "foo"));
getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get();
assertThat(getResponse.getAliases().isEmpty(), equalTo(true));
existsResponse = admin().indices().prepareAliasesExist("foo").addIndices("foobar").get();
assertThat(existsResponse.exists(), equalTo(false));
}
@Test(expected = IndexMissingException.class)
public void testAddAliasNullIndex() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction(null, "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
public void testAddAliasEmptyIndex() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
public void testAddAliasNullAlias() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", null)).get();
}
@Test(expected = ActionRequestValidationException.class)
public void testAddAliasEmptyAlias() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", "")).get();
}
@Test
public void testAddAliasNullAliasNullIndex() {
try {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction(null, null)).get();
assertTrue("Should throw " + ActionRequestValidationException.class.getSimpleName(), false);
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(1));
}
}
@Test
public void testAddAliasEmptyAliasEmptyIndex() {
try {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "")).get();
assertTrue("Should throw " + ActionRequestValidationException.class.getSimpleName(), false);
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasNullIndex() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction(null, "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasEmptyIndex() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("", "alias1")).get();
}
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasNullAlias() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", null)).get();
}
@Test(expected = ActionRequestValidationException.class)
public void testRemoveAliasEmptyAlias() {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", "")).get();
}
@Test
public void testRemoveAliasNullAliasNullIndex() {
try {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction(null, null)).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}
@Test
public void testRemoveAliasEmptyAliasEmptyIndex() {
try {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "")).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}
@Test
public void testGetAllAliasesWorks() {
createIndex("index1");
createIndex("index2");
ensureYellow();
assertAcked(admin().indices().prepareAliases().addAlias("index1", "alias1").addAlias("index2", "alias2"));
GetAliasesResponse response = admin().indices().prepareGetAliases().get();
assertThat(response.getAliases(), hasKey("index1"));
assertThat(response.getAliases(), hasKey("index1"));
}
private void assertHits(SearchHits hits, String... ids) {
assertThat(hits.totalHits(), equalTo((long) ids.length));
Set<String> hitIds = newHashSet();
for (SearchHit hit : hits.getHits()) {
hitIds.add(hit.id());
}
assertThat(hitIds, containsInAnyOrder(ids));
}
private String source(String id, String nameValue) {
return "{ \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" }";
}
}
| 0true
|
src_test_java_org_elasticsearch_aliases_IndexAliasesTests.java
|
400 |
static enum EvictionPolicy {
NONE, LRU, LFU
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_nearcache_ClientNearCache.java
|
292 |
public interface OScriptFormatter {
public String getFunctionDefinition(OFunction iFunction);
public String getFunctionInvoke(OFunction iFunction, final Object[] iArgs);
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_script_formatter_OScriptFormatter.java
|
95 |
class TemplateVisitor extends Visitor {
Tree.StringTemplate result;
@Override
public void visit(Tree.StringTemplate that) {
if (that.getStartIndex()<=node.getStartIndex() &&
that.getStopIndex()>=node.getStopIndex()) {
result = that;
}
super.visit(that);
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ConvertToConcatenationProposal.java
|
603 |
public interface OIndexInternal<T> extends OIndex<T>, Iterable<Entry<Object, T>>, ODatabaseListener {
public static final String CONFIG_KEYTYPE = "keyType";
public static final String CONFIG_AUTOMATIC = "automatic";
public static final String CONFIG_TYPE = "type";
public static final String ALGORITHM = "algorithm";
public static final String VALUE_CONTAINER_ALGORITHM = "valueContainerAlgorithm";
public static final String CONFIG_NAME = "name";
public static final String INDEX_DEFINITION = "indexDefinition";
public static final String INDEX_DEFINITION_CLASS = "indexDefinitionClass";
/**
* Loads the index from the given configuration.
*
* @param iConfig
* ODocument instance containing the configuration
*
*/
public boolean loadFromConfiguration(ODocument iConfig);
/**
* Saves the index configuration to disk.
*
* @return The configuration as an ODocument instance
* @see #getConfiguration()
*/
public ODocument updateConfiguration();
/**
* Add given cluster to the list of clusters that should be automatically indexed.
*
* @param iClusterName
* Cluster to add.
* @return Current index instance.
*/
public OIndex<T> addCluster(final String iClusterName);
/**
* Remove given cluster from the list of clusters that should be automatically indexed.
*
* @param iClusterName
* Cluster to remove.
* @return Current index instance.
*/
public OIndex<T> removeCluster(final String iClusterName);
/**
* Indicates whether the given index can be used to calculate the result of
* {@link com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquality} operators.
*
* @return {@code true} if the given index can be used to calculate the result of
* {@link com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquality} operators.
*
*/
public boolean canBeUsedInEqualityOperators();
public boolean hasRangeQuerySupport();
/**
* Prohibit index modifications. Only index read commands are allowed after this call.
*
* @param throwException
* If <code>true</code>, an {@link com.orientechnologies.common.concur.lock.OModificationOperationProhibitedException}
* is thrown when a write command is performed.
*/
public void freeze(boolean throwException);
/**
* Allows index modifications again. Called after the {@link #freeze(boolean)} command.
*/
public void release();
/**
* Indicates that several index changes are going to be seen as a single unit from the user's point of view. This command is
* used in conjunction with the {@link #freeze(boolean)} command.
*/
public void acquireModificationLock();
/**
* Indicates that the group of index changes that was to be seen as a single unit from the user's point of view has been completed.
*/
public void releaseModificationLock();
public IndexMetadata loadMetadata(ODocument iConfig);
public void setRebuildingFlag();
public void close();
public String getAlgorithm();
public void preCommit();
void addTxOperation(ODocument operationDocument);
public void commit();
public void postCommit();
public final class IndexMetadata {
private final String name;
private final OIndexDefinition indexDefinition;
private final Set<String> clustersToIndex;
private final String type;
private final String algorithm;
private final String valueContainerAlgorithm;
public IndexMetadata(String name, OIndexDefinition indexDefinition, Set<String> clustersToIndex, String type, String algorithm,
String valueContainerAlgorithm) {
this.name = name;
this.indexDefinition = indexDefinition;
this.clustersToIndex = clustersToIndex;
this.type = type;
this.algorithm = algorithm;
this.valueContainerAlgorithm = valueContainerAlgorithm;
}
public String getName() {
return name;
}
public OIndexDefinition getIndexDefinition() {
return indexDefinition;
}
public Set<String> getClustersToIndex() {
return clustersToIndex;
}
public String getType() {
return type;
}
public String getAlgorithm() {
return algorithm;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
IndexMetadata that = (IndexMetadata) o;
if (algorithm != null ? !algorithm.equals(that.algorithm) : that.algorithm != null)
return false;
if (!clustersToIndex.equals(that.clustersToIndex))
return false;
if (indexDefinition != null ? !indexDefinition.equals(that.indexDefinition) : that.indexDefinition != null)
return false;
if (!name.equals(that.name))
return false;
if (!type.equals(that.type))
return false;
return true;
}
@Override
public int hashCode() {
int result = name.hashCode();
result = 31 * result + (indexDefinition != null ? indexDefinition.hashCode() : 0);
result = 31 * result + clustersToIndex.hashCode();
result = 31 * result + type.hashCode();
result = 31 * result + (algorithm != null ? algorithm.hashCode() : 0);
return result;
}
public String getValueContainerAlgorithm() {
return valueContainerAlgorithm;
}
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_index_OIndexInternal.java
|
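The freeze/release and acquireModificationLock/releaseModificationLock pairs documented above form a small lifecycle protocol around index maintenance. The sketch below pairs those calls from the caller's side; the helper class, its method names, and the Runnable arguments are illustrative assumptions, only the OIndexInternal calls come from the interface itself, and the import path is inferred from the record's file path.
import com.orientechnologies.orient.core.index.OIndexInternal;
// Hypothetical helper, not OrientDB code: pairs the lifecycle calls documented above.
public class IndexFreezeExample {
/** Runs read-only work (e.g. a backup) while index writes are prohibited. */
public static void withFrozenIndex(OIndexInternal<?> index, Runnable readOnlyWork) {
index.freeze(true); // write commands now fail until release()
try {
readOnlyWork.run();
} finally {
index.release(); // allow modifications again
}
}
/** Applies several index changes so that users observe them as a single unit. */
public static void withModificationLock(OIndexInternal<?> index, Runnable changes) {
index.acquireModificationLock();
try {
changes.run();
} finally {
index.releaseModificationLock();
}
}
}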
694 |
constructors[LIST_ADD_ALL] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new ListAddAllRequest();
}
};
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java
|
110 |
public interface PageItemCriteria extends QuantityBasedRule {
/**
* Returns the parent <code>Page</code> to which this
* item criteria belongs.
*
* @return the parent <code>Page</code>
*/
@Nonnull
public Page getPage();
/**
* Sets the parent <code>Page</code>.
* @param page
*/
public void setPage(@Nonnull Page page);
/**
* Builds a copy of this item. Used by the content management system when an
* item is edited.
*
* @return a copy of this item
*/
@Nonnull
public PageItemCriteria cloneEntity();
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageItemCriteria.java
|
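The cloneEntity() method above exists so the content management system can copy an item criteria when a page is edited. The sketch below shows that copy-on-edit pattern from the caller's side; the helper class and method are hypothetical, only cloneEntity() and setPage(Page) come from the interface, and the import packages are inferred from the record's file path.
import java.util.ArrayList;
import java.util.List;
import org.broadleafcommerce.cms.page.domain.Page;
import org.broadleafcommerce.cms.page.domain.PageItemCriteria;
// Hypothetical helper, not Broadleaf code: clones criteria for an edited (draft) page.
public class PageEditExample {
/** Copies each criteria of the page being edited and re-points the copies at the draft page. */
public static List<PageItemCriteria> copyCriteriaForEdit(Iterable<PageItemCriteria> original, Page draft) {
List<PageItemCriteria> copies = new ArrayList<PageItemCriteria>();
for (PageItemCriteria criteria : original) {
PageItemCriteria copy = criteria.cloneEntity(); // copy per the contract documented above
copy.setPage(draft); // attach the copy to the draft page
copies.add(copy);
}
return copies;
}
}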
6,017 |
PriorityQueue<Correction> corrections = new PriorityQueue<Correction>(maxNumCorrections) {
@Override
protected boolean lessThan(Correction a, Correction b) {
return a.score < b.score;
}
};
| 1no label
|
src_main_java_org_elasticsearch_search_suggest_phrase_CandidateScorer.java
|
177 |
@Component("blPageURLProcessor")
public class PageURLProcessor implements URLProcessor {
private static final Log LOG = LogFactory.getLog(PageURLProcessor.class);
@Resource(name = "blPageService")
private PageService pageService;
@Resource(name = "blStaticAssetService")
private StaticAssetService staticAssetService;
private static final String PAGE_ATTRIBUTE_NAME = "BLC_PAGE";
public static final String BLC_RULE_MAP_PARAM = "blRuleMap";
// The following attribute is set in BroadleafProcessURLFilter
public static final String REQUEST_DTO = "blRequestDTO";
/**
* Implementors of this interface will return true if they are able to process the
* passed in request.
*
* Implementors of this method will need to rely on the BroadleafRequestContext class
* which provides access to the current sandbox, locale, request, and response via a
* threadlocal context
*
* @see BroadleafRequestContext
*
* @return true if this URLProcessor is able to process the passed in request
*/
@Override
public boolean canProcessURL(String key) {
BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
PageDTO p = pageService.findPageByURI(context.getSandbox(), context.getLocale(), key, buildMvelParameters(context.getRequest()), context.isSecure());
context.getRequest().setAttribute(PAGE_ATTRIBUTE_NAME, p);
return (p != null);
}
/**
* Determines if the requestURI for the passed in request matches a custom content
* managed page. If so, the request is forwarded to the correct page template.
*
* The page object will be stored in the request attribute "BLC_PAGE".
*
* @param key The URI to process
*
* @return false if the url could not be processed
*
* @throws java.io.IOException
* @throws javax.servlet.ServletException
*/
public boolean processURL(String key) throws IOException, ServletException {
BroadleafRequestContext context = BroadleafRequestContext.getBroadleafRequestContext();
PageDTO p = (PageDTO) context.getRequest().getAttribute(PAGE_ATTRIBUTE_NAME);
if (p == null) {
p = pageService.findPageByURI(context.getSandbox(), context.getLocale(), key, buildMvelParameters(context.getRequest()), context.isSecure());
}
if (p != null) {
String templateJSPPath = p.getTemplatePath();
if (LOG.isDebugEnabled()) {
LOG.debug("Forwarding to page: " + templateJSPPath);
}
context.getRequest().setAttribute(PAGE_ATTRIBUTE_NAME, p);
RequestDispatcher rd = context.getRequest().getRequestDispatcher(templateJSPPath);
rd.forward(context.getRequest(), context.getResponse());
return true;
}
return false;
}
/**
* MVEL is used to process the content targeting rules.
*
* @param request the current HTTP request
* @return the parameter map handed to the MVEL rule evaluation
*/
private Map<String,Object> buildMvelParameters(HttpServletRequest request) {
TimeDTO timeDto = new TimeDTO(SystemTime.asCalendar());
RequestDTO requestDto = (RequestDTO) request.getAttribute(REQUEST_DTO);
Map<String, Object> mvelParameters = new HashMap<String, Object>();
mvelParameters.put("time", timeDto);
mvelParameters.put("request", requestDto);
Map<String,Object> blcRuleMap = (Map<String,Object>) request.getAttribute(BLC_RULE_MAP_PARAM);
if (blcRuleMap != null) {
for (String mapKey : blcRuleMap.keySet()) {
mvelParameters.put(mapKey, blcRuleMap.get(mapKey));
}
}
return mvelParameters;
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_web_PageURLProcessor.java
|
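The Javadoc above describes a two-step contract: canProcessURL(key) probes whether the URI maps to a managed page (caching the PageDTO as the BLC_PAGE request attribute), and processURL(key) forwards to that page's template. The caller-side sketch below exercises that contract; the dispatcher class is a hypothetical example rather than Broadleaf code, and the import package is inferred from the record's file path.
import java.io.IOException;
import javax.servlet.ServletException;
import org.broadleafcommerce.cms.web.PageURLProcessor;
// Hypothetical caller, not Broadleaf code: exercises canProcessURL/processURL in order.
public class CmsPageDispatchExample {
private final PageURLProcessor pageUrlProcessor; // e.g. the "blPageURLProcessor" bean
public CmsPageDispatchExample(PageURLProcessor pageUrlProcessor) {
this.pageUrlProcessor = pageUrlProcessor;
}
/** Returns true when the URI maps to a managed page and the request was forwarded to its template. */
public boolean dispatch(String requestURI) throws IOException, ServletException {
if (pageUrlProcessor.canProcessURL(requestURI)) { // looks up the PageDTO and stores it as BLC_PAGE
return pageUrlProcessor.processURL(requestURI); // forwards to the page's template path
}
return false;
}
}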
1,177 |
clientBootstrap.setPipelineFactory(new ChannelPipelineFactory() {
@Override
public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(clientHandler);
}
});
| 0true
|
src_test_java_org_elasticsearch_benchmark_transport_netty_NettyEchoBenchmark.java
|
406 |
@Embeddable
public class ArchiveStatus implements Serializable {
@Column(name = "ARCHIVED")
@AdminPresentation(friendlyName = "archived", visibility = VisibilityEnum.HIDDEN_ALL, group = "ArchiveStatus")
protected Character archived = 'N';
public Character getArchived() {
return archived;
}
public void setArchived(Character archived) {
this.archived = archived;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_persistence_ArchiveStatus.java
|
157 |
public class ConcurrentLinkedDeque<E>
extends AbstractCollection<E>
implements Deque<E>, java.io.Serializable {
/*
* This is an implementation of a concurrent lock-free deque
* supporting interior removes but not interior insertions, as
* required to support the entire Deque interface.
*
* We extend the techniques developed for ConcurrentLinkedQueue and
* LinkedTransferQueue (see the internal docs for those classes).
* Understanding the ConcurrentLinkedQueue implementation is a
* prerequisite for understanding the implementation of this class.
*
* The data structure is a symmetrical doubly-linked "GC-robust"
* linked list of nodes. We minimize the number of volatile writes
* using two techniques: advancing multiple hops with a single CAS
* and mixing volatile and non-volatile writes of the same memory
* locations.
*
* A node contains the expected E ("item") and links to predecessor
* ("prev") and successor ("next") nodes:
*
* class Node<E> { volatile Node<E> prev, next; volatile E item; }
*
* A node p is considered "live" if it contains a non-null item
* (p.item != null). When an item is CASed to null, the item is
* atomically logically deleted from the collection.
*
* At any time, there is precisely one "first" node with a null
* prev reference that terminates any chain of prev references
* starting at a live node. Similarly there is precisely one
* "last" node terminating any chain of next references starting at
* a live node. The "first" and "last" nodes may or may not be live.
* The "first" and "last" nodes are always mutually reachable.
*
* A new element is added atomically by CASing the null prev or
* next reference in the first or last node to a fresh node
* containing the element. The element's node atomically becomes
* "live" at that point.
*
* A node is considered "active" if it is a live node, or the
* first or last node. Active nodes cannot be unlinked.
*
* A "self-link" is a next or prev reference that is the same node:
* p.prev == p or p.next == p
* Self-links are used in the node unlinking process. Active nodes
* never have self-links.
*
* A node p is active if and only if:
*
* p.item != null ||
* (p.prev == null && p.next != p) ||
* (p.next == null && p.prev != p)
*
* The deque object has two node references, "head" and "tail".
* The head and tail are only approximations to the first and last
* nodes of the deque. The first node can always be found by
* following prev pointers from head; likewise for tail. However,
* it is permissible for head and tail to be referring to deleted
* nodes that have been unlinked and so may not be reachable from
* any live node.
*
* There are 3 stages of node deletion:
* "logical deletion", "unlinking", and "gc-unlinking".
*
* 1. "logical deletion" by CASing item to null atomically removes
* the element from the collection, and makes the containing node
* eligible for unlinking.
*
* 2. "unlinking" makes a deleted node unreachable from active
* nodes, and thus eventually reclaimable by GC. Unlinked nodes
* may remain reachable indefinitely from an iterator.
*
* Physical node unlinking is merely an optimization (albeit a
* critical one), and so can be performed at our convenience. At
* any time, the set of live nodes maintained by prev and next
* links are identical, that is, the live nodes found via next
* links from the first node is equal to the elements found via
* prev links from the last node. However, this is not true for
* nodes that have already been logically deleted - such nodes may
* be reachable in one direction only.
*
* 3. "gc-unlinking" takes unlinking further by making active
* nodes unreachable from deleted nodes, making it easier for the
* GC to reclaim future deleted nodes. This step makes the data
* structure "gc-robust", as first described in detail by Boehm
* (http://portal.acm.org/citation.cfm?doid=503272.503282).
*
* GC-unlinked nodes may remain reachable indefinitely from an
* iterator, but unlike unlinked nodes, are never reachable from
* head or tail.
*
* Making the data structure GC-robust will eliminate the risk of
* unbounded memory retention with conservative GCs and is likely
* to improve performance with generational GCs.
*
* When a node is dequeued at either end, e.g. via poll(), we would
* like to break any references from the node to active nodes. We
* develop further the use of self-links that was very effective in
* other concurrent collection classes. The idea is to replace
* prev and next pointers with special values that are interpreted
* to mean off-the-list-at-one-end. These are approximations, but
* good enough to preserve the properties we want in our
* traversals, e.g. we guarantee that a traversal will never visit
* the same element twice, but we don't guarantee whether a
* traversal that runs out of elements will be able to see more
* elements later after enqueues at that end. Doing gc-unlinking
* safely is particularly tricky, since any node can be in use
* indefinitely (for example by an iterator). We must ensure that
* the nodes pointed at by head/tail never get gc-unlinked, since
* head/tail are needed to get "back on track" by other nodes that
* are gc-unlinked. gc-unlinking accounts for much of the
* implementation complexity.
*
* Since neither unlinking nor gc-unlinking are necessary for
* correctness, there are many implementation choices regarding
* frequency (eagerness) of these operations. Since volatile
* reads are likely to be much cheaper than CASes, saving CASes by
* unlinking multiple adjacent nodes at a time may be a win.
* gc-unlinking can be performed rarely and still be effective,
* since it is most important that long chains of deleted nodes
* are occasionally broken.
*
* The actual representation we use is that p.next == p means to
* goto the first node (which in turn is reached by following prev
* pointers from head), and p.next == null && p.prev == p means
* that the iteration is at an end and that p is a (static final)
* dummy node, NEXT_TERMINATOR, and not the last active node.
* Finishing the iteration when encountering such a TERMINATOR is
* good enough for read-only traversals, so such traversals can use
* p.next == null as the termination condition. When we need to
* find the last (active) node, for enqueueing a new node, we need
* to check whether we have reached a TERMINATOR node; if so,
* restart traversal from tail.
*
* The implementation is completely directionally symmetrical,
* except that most public methods that iterate through the list
* follow next pointers ("forward" direction).
*
* We believe (without full proof) that all single-element deque
* operations (e.g., addFirst, peekLast, pollLast) are linearizable
* (see Herlihy and Shavit's book). However, some combinations of
* operations are known not to be linearizable. In particular,
* when an addFirst(A) is racing with pollFirst() removing B, it is
* possible for an observer iterating over the elements to observe
* A B C and subsequently observe A C, even though no interior
* removes are ever performed. Nevertheless, iterators behave
* reasonably, providing the "weakly consistent" guarantees.
*
* Empirically, microbenchmarks suggest that this class adds about
* 40% overhead relative to ConcurrentLinkedQueue, which feels as
* good as we can hope for.
*/
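/*
* Editorial illustration (not part of the original jsr166e source): the public
* API built on the machinery described above behaves like any other Deque, e.g.
*
* ConcurrentLinkedDeque<String> d = new ConcurrentLinkedDeque<String>();
* d.addFirst("a"); // linkFirst: CAS the first node's prev to the new node
* d.addLast("b"); // linkLast: CAS the last node's next to the new node
* String x = d.pollFirst(); // "a": item CASed to null (logical delete), then unlink(p)
* boolean r = d.removeLastOccurrence("b"); // true: same logical-delete + unlink path
*/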
private static final long serialVersionUID = 876323262645176354L;
/**
* A node from which the first node on list (that is, the unique node p
* with p.prev == null && p.next != p) can be reached in O(1) time.
* Invariants:
* - the first node is always O(1) reachable from head via prev links
* - all live nodes are reachable from the first node via succ()
* - head != null
* - (tmp = head).next != tmp || tmp != head
* - head is never gc-unlinked (but may be unlinked)
* Non-invariants:
* - head.item may or may not be null
* - head may not be reachable from the first or last node, or from tail
*/
private transient volatile Node<E> head;
/**
* A node from which the last node on list (that is, the unique node p
* with p.next == null && p.prev != p) can be reached in O(1) time.
* Invariants:
* - the last node is always O(1) reachable from tail via next links
* - all live nodes are reachable from the last node via pred()
* - tail != null
* - tail is never gc-unlinked (but may be unlinked)
* Non-invariants:
* - tail.item may or may not be null
* - tail may not be reachable from the first or last node, or from head
*/
private transient volatile Node<E> tail;
private static final Node<Object> PREV_TERMINATOR, NEXT_TERMINATOR;
@SuppressWarnings("unchecked")
Node<E> prevTerminator() {
return (Node<E>) PREV_TERMINATOR;
}
@SuppressWarnings("unchecked")
Node<E> nextTerminator() {
return (Node<E>) NEXT_TERMINATOR;
}
static final class Node<E> {
volatile Node<E> prev;
volatile E item;
volatile Node<E> next;
Node() { // default constructor for NEXT_TERMINATOR, PREV_TERMINATOR
}
/**
* Constructs a new node. Uses relaxed write because item can
* only be seen after publication via casNext or casPrev.
*/
Node(E item) {
UNSAFE.putObject(this, itemOffset, item);
}
boolean casItem(E cmp, E val) {
return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val);
}
void lazySetNext(Node<E> val) {
UNSAFE.putOrderedObject(this, nextOffset, val);
}
boolean casNext(Node<E> cmp, Node<E> val) {
return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val);
}
void lazySetPrev(Node<E> val) {
UNSAFE.putOrderedObject(this, prevOffset, val);
}
boolean casPrev(Node<E> cmp, Node<E> val) {
return UNSAFE.compareAndSwapObject(this, prevOffset, cmp, val);
}
// Unsafe mechanics
private static final sun.misc.Unsafe UNSAFE;
private static final long prevOffset;
private static final long itemOffset;
private static final long nextOffset;
static {
try {
UNSAFE = getUnsafe();
Class<?> k = Node.class;
prevOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("prev"));
itemOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("item"));
nextOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("next"));
} catch (Exception e) {
throw new Error(e);
}
}
}
/**
* Links e as first element.
*/
private void linkFirst(E e) {
checkNotNull(e);
final Node<E> newNode = new Node<E>(e);
restartFromHead:
for (;;)
for (Node<E> h = head, p = h, q;;) {
if ((q = p.prev) != null &&
(q = (p = q).prev) != null)
// Check for head updates every other hop.
// If p == q, we are sure to follow head instead.
p = (h != (h = head)) ? h : q;
else if (p.next == p) // PREV_TERMINATOR
continue restartFromHead;
else {
// p is first node
newNode.lazySetNext(p); // CAS piggyback
if (p.casPrev(null, newNode)) {
// Successful CAS is the linearization point
// for e to become an element of this deque,
// and for newNode to become "live".
if (p != h) // hop two nodes at a time
casHead(h, newNode); // Failure is OK.
return;
}
// Lost CAS race to another thread; re-read prev
}
}
}
/**
* Links e as last element.
*/
private void linkLast(E e) {
checkNotNull(e);
final Node<E> newNode = new Node<E>(e);
restartFromTail:
for (;;)
for (Node<E> t = tail, p = t, q;;) {
if ((q = p.next) != null &&
(q = (p = q).next) != null)
// Check for tail updates every other hop.
// If p == q, we are sure to follow tail instead.
p = (t != (t = tail)) ? t : q;
else if (p.prev == p) // NEXT_TERMINATOR
continue restartFromTail;
else {
// p is last node
newNode.lazySetPrev(p); // CAS piggyback
if (p.casNext(null, newNode)) {
// Successful CAS is the linearization point
// for e to become an element of this deque,
// and for newNode to become "live".
if (p != t) // hop two nodes at a time
casTail(t, newNode); // Failure is OK.
return;
}
// Lost CAS race to another thread; re-read next
}
}
}
private static final int HOPS = 2;
/**
* Unlinks non-null node x.
*/
void unlink(Node<E> x) {
// assert x != null;
// assert x.item == null;
// assert x != PREV_TERMINATOR;
// assert x != NEXT_TERMINATOR;
final Node<E> prev = x.prev;
final Node<E> next = x.next;
if (prev == null) {
unlinkFirst(x, next);
} else if (next == null) {
unlinkLast(x, prev);
} else {
// Unlink interior node.
//
// This is the common case, since a series of polls at the
// same end will be "interior" removes, except perhaps for
// the first one, since end nodes cannot be unlinked.
//
// At any time, all active nodes are mutually reachable by
// following a sequence of either next or prev pointers.
//
// Our strategy is to find the unique active predecessor
// and successor of x. Try to fix up their links so that
// they point to each other, leaving x unreachable from
// active nodes. If successful, and if x has no live
// predecessor/successor, we additionally try to gc-unlink,
// leaving active nodes unreachable from x, by rechecking
// that the status of predecessor and successor are
// unchanged and ensuring that x is not reachable from
// tail/head, before setting x's prev/next links to their
// logical approximate replacements, self/TERMINATOR.
Node<E> activePred, activeSucc;
boolean isFirst, isLast;
int hops = 1;
// Find active predecessor
for (Node<E> p = prev; ; ++hops) {
if (p.item != null) {
activePred = p;
isFirst = false;
break;
}
Node<E> q = p.prev;
if (q == null) {
if (p.next == p)
return;
activePred = p;
isFirst = true;
break;
}
else if (p == q)
return;
else
p = q;
}
// Find active successor
for (Node<E> p = next; ; ++hops) {
if (p.item != null) {
activeSucc = p;
isLast = false;
break;
}
Node<E> q = p.next;
if (q == null) {
if (p.prev == p)
return;
activeSucc = p;
isLast = true;
break;
}
else if (p == q)
return;
else
p = q;
}
// TODO: better HOP heuristics
if (hops < HOPS
// always squeeze out interior deleted nodes
&& (isFirst | isLast))
return;
// Squeeze out deleted nodes between activePred and
// activeSucc, including x.
skipDeletedSuccessors(activePred);
skipDeletedPredecessors(activeSucc);
// Try to gc-unlink, if possible
if ((isFirst | isLast) &&
// Recheck expected state of predecessor and successor
(activePred.next == activeSucc) &&
(activeSucc.prev == activePred) &&
(isFirst ? activePred.prev == null : activePred.item != null) &&
(isLast ? activeSucc.next == null : activeSucc.item != null)) {
updateHead(); // Ensure x is not reachable from head
updateTail(); // Ensure x is not reachable from tail
// Finally, actually gc-unlink
x.lazySetPrev(isFirst ? prevTerminator() : x);
x.lazySetNext(isLast ? nextTerminator() : x);
}
}
}
/**
* Unlinks non-null first node.
*/
private void unlinkFirst(Node<E> first, Node<E> next) {
// assert first != null;
// assert next != null;
// assert first.item == null;
for (Node<E> o = null, p = next, q;;) {
if (p.item != null || (q = p.next) == null) {
if (o != null && p.prev != p && first.casNext(next, p)) {
skipDeletedPredecessors(p);
if (first.prev == null &&
(p.next == null || p.item != null) &&
p.prev == first) {
updateHead(); // Ensure o is not reachable from head
updateTail(); // Ensure o is not reachable from tail
// Finally, actually gc-unlink
o.lazySetNext(o);
o.lazySetPrev(prevTerminator());
}
}
return;
}
else if (p == q)
return;
else {
o = p;
p = q;
}
}
}
/**
* Unlinks non-null last node.
*/
private void unlinkLast(Node<E> last, Node<E> prev) {
// assert last != null;
// assert prev != null;
// assert last.item == null;
for (Node<E> o = null, p = prev, q;;) {
if (p.item != null || (q = p.prev) == null) {
if (o != null && p.next != p && last.casPrev(prev, p)) {
skipDeletedSuccessors(p);
if (last.next == null &&
(p.prev == null || p.item != null) &&
p.next == last) {
updateHead(); // Ensure o is not reachable from head
updateTail(); // Ensure o is not reachable from tail
// Finally, actually gc-unlink
o.lazySetPrev(o);
o.lazySetNext(nextTerminator());
}
}
return;
}
else if (p == q)
return;
else {
o = p;
p = q;
}
}
}
/**
* Guarantees that any node which was unlinked before a call to
* this method will be unreachable from head after it returns.
* Does not guarantee to eliminate slack, only that head will
* point to a node that was active while this method was running.
*/
private final void updateHead() {
// Either head already points to an active node, or we keep
// trying to cas it to the first node until it does.
Node<E> h, p, q;
restartFromHead:
while ((h = head).item == null && (p = h.prev) != null) {
for (;;) {
if ((q = p.prev) == null ||
(q = (p = q).prev) == null) {
// It is possible that p is PREV_TERMINATOR,
// but if so, the CAS is guaranteed to fail.
if (casHead(h, p))
return;
else
continue restartFromHead;
}
else if (h != head)
continue restartFromHead;
else
p = q;
}
}
}
/**
* Guarantees that any node which was unlinked before a call to
* this method will be unreachable from tail after it returns.
* Does not guarantee to eliminate slack, only that tail will
* point to a node that was active while this method was running.
*/
private final void updateTail() {
// Either tail already points to an active node, or we keep
// trying to cas it to the last node until it does.
Node<E> t, p, q;
restartFromTail:
while ((t = tail).item == null && (p = t.next) != null) {
for (;;) {
if ((q = p.next) == null ||
(q = (p = q).next) == null) {
// It is possible that p is NEXT_TERMINATOR,
// but if so, the CAS is guaranteed to fail.
if (casTail(t, p))
return;
else
continue restartFromTail;
}
else if (t != tail)
continue restartFromTail;
else
p = q;
}
}
}
private void skipDeletedPredecessors(Node<E> x) {
whileActive:
do {
Node<E> prev = x.prev;
// assert prev != null;
// assert x != NEXT_TERMINATOR;
// assert x != PREV_TERMINATOR;
Node<E> p = prev;
findActive:
for (;;) {
if (p.item != null)
break findActive;
Node<E> q = p.prev;
if (q == null) {
if (p.next == p)
continue whileActive;
break findActive;
}
else if (p == q)
continue whileActive;
else
p = q;
}
// found active CAS target
if (prev == p || x.casPrev(prev, p))
return;
} while (x.item != null || x.next == null);
}
private void skipDeletedSuccessors(Node<E> x) {
whileActive:
do {
Node<E> next = x.next;
// assert next != null;
// assert x != NEXT_TERMINATOR;
// assert x != PREV_TERMINATOR;
Node<E> p = next;
findActive:
for (;;) {
if (p.item != null)
break findActive;
Node<E> q = p.next;
if (q == null) {
if (p.prev == p)
continue whileActive;
break findActive;
}
else if (p == q)
continue whileActive;
else
p = q;
}
// found active CAS target
if (next == p || x.casNext(next, p))
return;
} while (x.item != null || x.prev == null);
}
/**
* Returns the successor of p, or the first node if p.next has been
* linked to self, which will only be true if traversing with a
* stale pointer that is now off the list.
*/
final Node<E> succ(Node<E> p) {
// TODO: should we skip deleted nodes here?
Node<E> q = p.next;
return (p == q) ? first() : q;
}
/**
* Returns the predecessor of p, or the last node if p.prev has been
* linked to self, which will only be true if traversing with a
* stale pointer that is now off the list.
*/
final Node<E> pred(Node<E> p) {
Node<E> q = p.prev;
return (p == q) ? last() : q;
}
/**
* Returns the first node, the unique node p for which:
* p.prev == null && p.next != p
* The returned node may or may not be logically deleted.
* Guarantees that head is set to the returned node.
*/
Node<E> first() {
restartFromHead:
for (;;)
for (Node<E> h = head, p = h, q;;) {
if ((q = p.prev) != null &&
(q = (p = q).prev) != null)
// Check for head updates every other hop.
// If p == q, we are sure to follow head instead.
p = (h != (h = head)) ? h : q;
else if (p == h
// It is possible that p is PREV_TERMINATOR,
// but if so, the CAS is guaranteed to fail.
|| casHead(h, p))
return p;
else
continue restartFromHead;
}
}
/**
* Returns the last node, the unique node p for which:
* p.next == null && p.prev != p
* The returned node may or may not be logically deleted.
* Guarantees that tail is set to the returned node.
*/
Node<E> last() {
restartFromTail:
for (;;)
for (Node<E> t = tail, p = t, q;;) {
if ((q = p.next) != null &&
(q = (p = q).next) != null)
// Check for tail updates every other hop.
// If p == q, we are sure to follow tail instead.
p = (t != (t = tail)) ? t : q;
else if (p == t
// It is possible that p is NEXT_TERMINATOR,
// but if so, the CAS is guaranteed to fail.
|| casTail(t, p))
return p;
else
continue restartFromTail;
}
}
// Minor convenience utilities
/**
* Throws NullPointerException if argument is null.
*
* @param v the element
*/
private static void checkNotNull(Object v) {
if (v == null)
throw new NullPointerException();
}
/**
* Returns element unless it is null, in which case throws
* NoSuchElementException.
*
* @param v the element
* @return the element
*/
private E screenNullResult(E v) {
if (v == null)
throw new NoSuchElementException();
return v;
}
/**
* Creates an array list and fills it with elements of this list.
* Used by toArray.
*
* @return the array list
*/
private ArrayList<E> toArrayList() {
ArrayList<E> list = new ArrayList<E>();
for (Node<E> p = first(); p != null; p = succ(p)) {
E item = p.item;
if (item != null)
list.add(item);
}
return list;
}
/**
* Constructs an empty deque.
*/
public ConcurrentLinkedDeque() {
head = tail = new Node<E>(null);
}
/**
* Constructs a deque initially containing the elements of
* the given collection, added in traversal order of the
* collection's iterator.
*
* @param c the collection of elements to initially contain
* @throws NullPointerException if the specified collection or any
* of its elements are null
*/
public ConcurrentLinkedDeque(Collection<? extends E> c) {
// Copy c into a private chain of Nodes
Node<E> h = null, t = null;
for (E e : c) {
checkNotNull(e);
Node<E> newNode = new Node<E>(e);
if (h == null)
h = t = newNode;
else {
t.lazySetNext(newNode);
newNode.lazySetPrev(t);
t = newNode;
}
}
initHeadTail(h, t);
}
/**
* Initializes head and tail, ensuring invariants hold.
*/
private void initHeadTail(Node<E> h, Node<E> t) {
if (h == t) {
if (h == null)
h = t = new Node<E>(null);
else {
// Avoid edge case of a single Node with non-null item.
Node<E> newNode = new Node<E>(null);
t.lazySetNext(newNode);
newNode.lazySetPrev(t);
t = newNode;
}
}
head = h;
tail = t;
}
/**
* Inserts the specified element at the front of this deque.
* As the deque is unbounded, this method will never throw
* {@link IllegalStateException}.
*
* @throws NullPointerException if the specified element is null
*/
public void addFirst(E e) {
linkFirst(e);
}
/**
* Inserts the specified element at the end of this deque.
* As the deque is unbounded, this method will never throw
* {@link IllegalStateException}.
*
* <p>This method is equivalent to {@link #add}.
*
* @throws NullPointerException if the specified element is null
*/
public void addLast(E e) {
linkLast(e);
}
/**
* Inserts the specified element at the front of this deque.
* As the deque is unbounded, this method will never return {@code false}.
*
* @return {@code true} (as specified by {@link Deque#offerFirst})
* @throws NullPointerException if the specified element is null
*/
public boolean offerFirst(E e) {
linkFirst(e);
return true;
}
/**
* Inserts the specified element at the end of this deque.
* As the deque is unbounded, this method will never return {@code false}.
*
* <p>This method is equivalent to {@link #add}.
*
* @return {@code true} (as specified by {@link Deque#offerLast})
* @throws NullPointerException if the specified element is null
*/
public boolean offerLast(E e) {
linkLast(e);
return true;
}
public E peekFirst() {
for (Node<E> p = first(); p != null; p = succ(p)) {
E item = p.item;
if (item != null)
return item;
}
return null;
}
public E peekLast() {
for (Node<E> p = last(); p != null; p = pred(p)) {
E item = p.item;
if (item != null)
return item;
}
return null;
}
/**
* @throws NoSuchElementException {@inheritDoc}
*/
public E getFirst() {
return screenNullResult(peekFirst());
}
/**
* @throws NoSuchElementException {@inheritDoc}
*/
public E getLast() {
return screenNullResult(peekLast());
}
public E pollFirst() {
for (Node<E> p = first(); p != null; p = succ(p)) {
E item = p.item;
if (item != null && p.casItem(item, null)) {
unlink(p);
return item;
}
}
return null;
}
public E pollLast() {
for (Node<E> p = last(); p != null; p = pred(p)) {
E item = p.item;
if (item != null && p.casItem(item, null)) {
unlink(p);
return item;
}
}
return null;
}
/**
* @throws NoSuchElementException {@inheritDoc}
*/
public E removeFirst() {
return screenNullResult(pollFirst());
}
/**
* @throws NoSuchElementException {@inheritDoc}
*/
public E removeLast() {
return screenNullResult(pollLast());
}
// *** Queue and stack methods ***
/**
* Inserts the specified element at the tail of this deque.
* As the deque is unbounded, this method will never return {@code false}.
*
* @return {@code true} (as specified by {@link Queue#offer})
* @throws NullPointerException if the specified element is null
*/
public boolean offer(E e) {
return offerLast(e);
}
/**
* Inserts the specified element at the tail of this deque.
* As the deque is unbounded, this method will never throw
* {@link IllegalStateException} or return {@code false}.
*
* @return {@code true} (as specified by {@link Collection#add})
* @throws NullPointerException if the specified element is null
*/
public boolean add(E e) {
return offerLast(e);
}
public E poll() { return pollFirst(); }
public E remove() { return removeFirst(); }
public E peek() { return peekFirst(); }
public E element() { return getFirst(); }
public void push(E e) { addFirst(e); }
public E pop() { return removeFirst(); }
/**
* Removes the first element {@code e} such that
* {@code o.equals(e)}, if such an element exists in this deque.
* If the deque does not contain the element, it is unchanged.
*
* @param o element to be removed from this deque, if present
* @return {@code true} if the deque contained the specified element
* @throws NullPointerException if the specified element is null
*/
public boolean removeFirstOccurrence(Object o) {
checkNotNull(o);
for (Node<E> p = first(); p != null; p = succ(p)) {
E item = p.item;
if (item != null && o.equals(item) && p.casItem(item, null)) {
unlink(p);
return true;
}
}
return false;
}
/**
* Removes the last element {@code e} such that
* {@code o.equals(e)}, if such an element exists in this deque.
* If the deque does not contain the element, it is unchanged.
*
* @param o element to be removed from this deque, if present
* @return {@code true} if the deque contained the specified element
* @throws NullPointerException if the specified element is null
*/
public boolean removeLastOccurrence(Object o) {
checkNotNull(o);
for (Node<E> p = last(); p != null; p = pred(p)) {
E item = p.item;
if (item != null && o.equals(item) && p.casItem(item, null)) {
unlink(p);
return true;
}
}
return false;
}
/**
* Returns {@code true} if this deque contains at least one
* element {@code e} such that {@code o.equals(e)}.
*
* @param o element whose presence in this deque is to be tested
* @return {@code true} if this deque contains the specified element
*/
public boolean contains(Object o) {
if (o == null) return false;
for (Node<E> p = first(); p != null; p = succ(p)) {
E item = p.item;
if (item != null && o.equals(item))
return true;
}
return false;
}
/**
* Returns {@code true} if this collection contains no elements.
*
* @return {@code true} if this collection contains no elements
*/
public boolean isEmpty() {
return peekFirst() == null;
}
/**
* Returns the number of elements in this deque. If this deque
* contains more than {@code Integer.MAX_VALUE} elements, it
* returns {@code Integer.MAX_VALUE}.
*
* <p>Beware that, unlike in most collections, this method is
* <em>NOT</em> a constant-time operation. Because of the
* asynchronous nature of these deques, determining the current
* number of elements requires traversing them all to count them.
* Additionally, it is possible for the size to change during
* execution of this method, in which case the returned result
* will be inaccurate. Thus, this method is typically not very
* useful in concurrent applications.
*
* @return the number of elements in this deque
*/
public int size() {
int count = 0;
for (Node<E> p = first(); p != null; p = succ(p))
if (p.item != null)
// Collection.size() spec says to max out
if (++count == Integer.MAX_VALUE)
break;
return count;
}
/**
* Removes the first element {@code e} such that
* {@code o.equals(e)}, if such an element exists in this deque.
* If the deque does not contain the element, it is unchanged.
*
* @param o element to be removed from this deque, if present
* @return {@code true} if the deque contained the specified element
* @throws NullPointerException if the specified element is null
*/
public boolean remove(Object o) {
return removeFirstOccurrence(o);
}
/**
* Appends all of the elements in the specified collection to the end of
* this deque, in the order that they are returned by the specified
* collection's iterator. Attempts to {@code addAll} of a deque to
* itself result in {@code IllegalArgumentException}.
*
* @param c the elements to be inserted into this deque
* @return {@code true} if this deque changed as a result of the call
* @throws NullPointerException if the specified collection or any
* of its elements are null
* @throws IllegalArgumentException if the collection is this deque
*/
public boolean addAll(Collection<? extends E> c) {
if (c == this)
// As historically specified in AbstractQueue#addAll
throw new IllegalArgumentException();
// Copy c into a private chain of Nodes
Node<E> beginningOfTheEnd = null, last = null;
for (E e : c) {
checkNotNull(e);
Node<E> newNode = new Node<E>(e);
if (beginningOfTheEnd == null)
beginningOfTheEnd = last = newNode;
else {
last.lazySetNext(newNode);
newNode.lazySetPrev(last);
last = newNode;
}
}
if (beginningOfTheEnd == null)
return false;
// Atomically append the chain at the tail of this collection
restartFromTail:
for (;;)
for (Node<E> t = tail, p = t, q;;) {
if ((q = p.next) != null &&
(q = (p = q).next) != null)
// Check for tail updates every other hop.
// If p == q, we are sure to follow tail instead.
p = (t != (t = tail)) ? t : q;
else if (p.prev == p) // NEXT_TERMINATOR
continue restartFromTail;
else {
// p is last node
beginningOfTheEnd.lazySetPrev(p); // CAS piggyback
if (p.casNext(null, beginningOfTheEnd)) {
// Successful CAS is the linearization point
// for all elements to be added to this deque.
if (!casTail(t, last)) {
// Try a little harder to update tail,
// since we may be adding many elements.
t = tail;
if (last.next == null)
casTail(t, last);
}
return true;
}
// Lost CAS race to another thread; re-read next
}
}
}
/**
* Removes all of the elements from this deque.
*/
public void clear() {
while (pollFirst() != null)
;
}
/**
* Returns an array containing all of the elements in this deque, in
* proper sequence (from first to last element).
*
* <p>The returned array will be "safe" in that no references to it are
* maintained by this deque. (In other words, this method must allocate
* a new array). The caller is thus free to modify the returned array.
*
* <p>This method acts as bridge between array-based and collection-based
* APIs.
*
* @return an array containing all of the elements in this deque
*/
public Object[] toArray() {
return toArrayList().toArray();
}
/**
* Returns an array containing all of the elements in this deque,
* in proper sequence (from first to last element); the runtime
* type of the returned array is that of the specified array. If
* the deque fits in the specified array, it is returned therein.
* Otherwise, a new array is allocated with the runtime type of
* the specified array and the size of this deque.
*
* <p>If this deque fits in the specified array with room to spare
* (i.e., the array has more elements than this deque), the element in
* the array immediately following the end of the deque is set to
* {@code null}.
*
* <p>Like the {@link #toArray()} method, this method acts as
* bridge between array-based and collection-based APIs. Further,
* this method allows precise control over the runtime type of the
* output array, and may, under certain circumstances, be used to
* save allocation costs.
*
* <p>Suppose {@code x} is a deque known to contain only strings.
* The following code can be used to dump the deque into a newly
* allocated array of {@code String}:
*
* <pre> {@code String[] y = x.toArray(new String[0]);}</pre>
*
* Note that {@code toArray(new Object[0])} is identical in function to
* {@code toArray()}.
*
* @param a the array into which the elements of the deque are to
* be stored, if it is big enough; otherwise, a new array of the
* same runtime type is allocated for this purpose
* @return an array containing all of the elements in this deque
* @throws ArrayStoreException if the runtime type of the specified array
* is not a supertype of the runtime type of every element in
* this deque
* @throws NullPointerException if the specified array is null
*/
public <T> T[] toArray(T[] a) {
return toArrayList().toArray(a);
}
/**
* Returns an iterator over the elements in this deque in proper sequence.
* The elements will be returned in order from first (head) to last (tail).
*
* <p>The returned iterator is a "weakly consistent" iterator that
* will never throw {@link java.util.ConcurrentModificationException
* ConcurrentModificationException}, and guarantees to traverse
* elements as they existed upon construction of the iterator, and
* may (but is not guaranteed to) reflect any modifications
* subsequent to construction.
*
* @return an iterator over the elements in this deque in proper sequence
*/
public Iterator<E> iterator() {
return new Itr();
}
/**
* Returns an iterator over the elements in this deque in reverse
* sequential order. The elements will be returned in order from
* last (tail) to first (head).
*
* <p>The returned iterator is a "weakly consistent" iterator that
* will never throw {@link java.util.ConcurrentModificationException
* ConcurrentModificationException}, and guarantees to traverse
* elements as they existed upon construction of the iterator, and
* may (but is not guaranteed to) reflect any modifications
* subsequent to construction.
*
* @return an iterator over the elements in this deque in reverse order
*/
public Iterator<E> descendingIterator() {
return new DescendingItr();
}
private abstract class AbstractItr implements Iterator<E> {
/**
* Next node to return item for.
*/
private Node<E> nextNode;
/**
* nextItem holds on to item fields because once we claim
* that an element exists in hasNext(), we must return it in
* the following next() call even if it was in the process of
* being removed when hasNext() was called.
*/
private E nextItem;
/**
* Node returned by most recent call to next. Needed by remove.
* Reset to null if this element is deleted by a call to remove.
*/
private Node<E> lastRet;
abstract Node<E> startNode();
abstract Node<E> nextNode(Node<E> p);
AbstractItr() {
advance();
}
/**
* Sets nextNode and nextItem to next valid node, or to null
* if no such.
*/
private void advance() {
lastRet = nextNode;
Node<E> p = (nextNode == null) ? startNode() : nextNode(nextNode);
for (;; p = nextNode(p)) {
if (p == null) {
// p might be active end or TERMINATOR node; both are OK
nextNode = null;
nextItem = null;
break;
}
E item = p.item;
if (item != null) {
nextNode = p;
nextItem = item;
break;
}
}
}
public boolean hasNext() {
return nextItem != null;
}
public E next() {
E item = nextItem;
if (item == null) throw new NoSuchElementException();
advance();
return item;
}
public void remove() {
Node<E> l = lastRet;
if (l == null) throw new IllegalStateException();
l.item = null;
unlink(l);
lastRet = null;
}
}
/** Forward iterator */
private class Itr extends AbstractItr {
Node<E> startNode() { return first(); }
Node<E> nextNode(Node<E> p) { return succ(p); }
}
/** Descending iterator */
private class DescendingItr extends AbstractItr {
Node<E> startNode() { return last(); }
Node<E> nextNode(Node<E> p) { return pred(p); }
}
/**
* Saves the state to a stream (that is, serializes it).
*
* @serialData All of the elements (each an {@code E}) in
* the proper order, followed by a null
* @param s the stream
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
// Write out any hidden stuff
s.defaultWriteObject();
// Write out all elements in the proper order.
for (Node<E> p = first(); p != null; p = succ(p)) {
E item = p.item;
if (item != null)
s.writeObject(item);
}
// Use trailing null as sentinel
s.writeObject(null);
}
/**
* Reconstitutes the instance from a stream (that is, deserializes it).
* @param s the stream
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
// Read in elements until trailing null sentinel found
Node<E> h = null, t = null;
Object item;
while ((item = s.readObject()) != null) {
@SuppressWarnings("unchecked")
Node<E> newNode = new Node<E>((E) item);
if (h == null)
h = t = newNode;
else {
t.lazySetNext(newNode);
newNode.lazySetPrev(t);
t = newNode;
}
}
initHeadTail(h, t);
}
private boolean casHead(Node<E> cmp, Node<E> val) {
return UNSAFE.compareAndSwapObject(this, headOffset, cmp, val);
}
private boolean casTail(Node<E> cmp, Node<E> val) {
return UNSAFE.compareAndSwapObject(this, tailOffset, cmp, val);
}
// Unsafe mechanics
private static final sun.misc.Unsafe UNSAFE;
private static final long headOffset;
private static final long tailOffset;
static {
PREV_TERMINATOR = new Node<Object>();
PREV_TERMINATOR.next = PREV_TERMINATOR;
NEXT_TERMINATOR = new Node<Object>();
NEXT_TERMINATOR.prev = NEXT_TERMINATOR;
try {
UNSAFE = getUnsafe();
Class<?> k = ConcurrentLinkedDeque.class;
headOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("head"));
tailOffset = UNSAFE.objectFieldOffset
(k.getDeclaredField("tail"));
} catch (Exception e) {
throw new Error(e);
}
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
* Replace with a simple call to Unsafe.getUnsafe when integrating
* into a jdk.
*
* @return a sun.misc.Unsafe
*/
static sun.misc.Unsafe getUnsafe() {
try {
return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException tryReflectionInstead) {}
try {
return java.security.AccessController.doPrivileged
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
} catch (java.security.PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics",
e.getCause());
}
}
}
| 0true
|
src_main_java_jsr166y_ConcurrentLinkedDeque.java
|
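A minimal usage sketch of the deque API shown above. It relies only on the standard Deque/Collection methods documented in the row; the jsr166y backport and java.util.concurrent.ConcurrentLinkedDeque (JDK 7+) expose the same surface, and the JDK class is used here so the snippet compiles without the backport jar.
import java.util.Arrays;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedDeque;
public class DequeUsageSketch {
    public static void main(String[] args) {
        ConcurrentLinkedDeque<String> deque = new ConcurrentLinkedDeque<String>();
        deque.offer("b");                          // appended at the tail
        deque.push("a");                           // pushed at the head
        deque.addAll(Arrays.asList("c", "d"));     // bulk append at the tail
        for (String s : deque) {                   // weakly consistent forward traversal: a, b, c, d
            System.out.println(s);
        }
        Iterator<String> it = deque.descendingIterator();
        while (it.hasNext()) {                     // reverse traversal: d, c, b, a
            System.out.println(it.next());
        }
        System.out.println(deque.pollFirst());                 // "a"
        System.out.println(deque.removeFirstOccurrence("c"));  // true
        System.out.println(deque.size());                      // 2 -- O(n), as the javadoc above warns
    }
}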
148 |
public class TestXaLogicalLogFiles {
@Test
public void shouldDetectLegacyLogs() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when( fs.fileExists( new File( "logical_log.active" ) )).thenReturn( false );
when(fs.fileExists(new File("logical_log"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(false);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.LEGACY_WITHOUT_LOG_ROTATION));
}
@Test
public void shouldDetectNoActiveFile() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(false);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(false);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.NO_ACTIVE_FILE));
}
@Test
public void shouldDetectLog1Active() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(true);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(false);
StoreChannel fc = mockedStoreChannel( XaLogicalLogTokens.LOG1 );
when(fs.open(eq(new File("logical_log.active")), anyString())).thenReturn( fc );
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.LOG_1_ACTIVE));
}
@Test
public void shouldDetectLog2Active() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(true);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(true);
StoreChannel fc = mockedStoreChannel( XaLogicalLogTokens.LOG2 );
when(fs.open(eq(new File("logical_log.active")), anyString())).thenReturn(fc);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.LOG_2_ACTIVE));
}
@Test
public void shouldDetectCleanShutdown() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(true);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(false);
StoreChannel fc = mockedStoreChannel( XaLogicalLogTokens.CLEAN );
when(fs.open(eq(new File("logical_log.active")), anyString())).thenReturn(fc);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.CLEAN));
}
@Test
public void shouldDetectDualLog1() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(true);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(true);
StoreChannel fc = mockedStoreChannel( XaLogicalLogTokens.LOG1 );
when(fs.open(eq(new File("logical_log.active")), anyString())).thenReturn(fc);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.DUAL_LOGS_LOG_1_ACTIVE));
}
@Test
public void shouldDetectDualLog2() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(true);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(true);
StoreChannel fc = mockedStoreChannel( XaLogicalLogTokens.LOG2 );
when(fs.open(eq(new File("logical_log.active")), anyString())).thenReturn(fc);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
assertThat(files.determineState(), is(XaLogicalLogFiles.State.DUAL_LOGS_LOG_2_ACTIVE));
}
@Test(expected=IllegalStateException.class)
public void shouldThrowIllegalStateExceptionOnUnrecognizedActiveContent() throws Exception
{
FileSystemAbstraction fs = mock(FileSystemAbstraction.class);
when(fs.fileExists(new File("logical_log.active"))).thenReturn(true);
when(fs.fileExists(new File("logical_log"))).thenReturn(false);
when(fs.fileExists(new File("logical_log.1"))).thenReturn(true);
when(fs.fileExists(new File("logical_log.2"))).thenReturn(true);
StoreChannel fc = mockedStoreChannel( ';' );
when(fs.open(eq(new File("logical_log.active")), anyString())).thenReturn(fc);
XaLogicalLogFiles files = new XaLogicalLogFiles(new File("logical_log"), fs);
files.determineState();
}
private StoreChannel mockedStoreChannel( char c ) throws IOException
{
return new MockedFileChannel(ByteBuffer.allocate(4).putChar(c).array());
}
private static class MockedFileChannel extends StoreFileChannel
{
private ByteBuffer bs;
public MockedFileChannel(byte [] bs) {
super( (FileChannel) null );
this.bs = ByteBuffer.wrap(bs);
}
@Override
public long position() throws IOException
{
return bs.position();
}
@Override
public int read(ByteBuffer buffer) throws IOException
{
int start = bs.position();
buffer.put(bs);
return bs.position() - start;
}
@Override
public void close() throws IOException
{
}
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestXaLogicalLogFiles.java
|
1,324 |
@ClusterScope(scope=Scope.TEST, numNodes=0)
public class NoMasterNodeTests extends ElasticsearchIntegrationTest {
@Test
public void testNoMasterActions() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")
.put("action.auto_create_index", false)
.put("discovery.zen.minimum_master_nodes", 2)
.put("discovery.zen.ping_timeout", "200ms")
.put("discovery.initial_state_timeout", "500ms")
.put("index.number_of_shards", 1)
.build();
TimeValue timeout = TimeValue.timeValueMillis(200);
cluster().startNode(settings);
// start a second node, create an index, and then shut it down so we have no master block
cluster().startNode(settings);
createIndex("test");
client().admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
cluster().stopRandomNode();
assertThat(awaitBusy(new Predicate<Object>() {
public boolean apply(Object o) {
ClusterState state = client().admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
return state.blocks().hasGlobalBlock(Discovery.NO_MASTER_BLOCK);
}
}), equalTo(true));
try {
client().prepareGet("test", "type1", "1").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
client().prepareMultiGet().add("test", "type1", "1").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
PercolateSourceBuilder percolateSource = new PercolateSourceBuilder();
percolateSource.percolateDocument().setDoc(new HashMap());
client().preparePercolate()
.setIndices("test").setDocumentType("type1")
.setSource(percolateSource).execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
long now = System.currentTimeMillis();
try {
client().prepareUpdate("test", "type1", "1").setScript("test script").setTimeout(timeout).execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50));
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
client().admin().indices().prepareAnalyze("test", "this is a test").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
try {
client().prepareCount("test").execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
now = System.currentTimeMillis();
try {
client().prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject().endObject()).setTimeout(timeout).execute().actionGet();
fail("Expected ClusterBlockException");
} catch (ClusterBlockException e) {
assertThat(System.currentTimeMillis() - now, greaterThan(timeout.millis() - 50));
assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_NoMasterNodeTests.java
|
101 |
public class OException extends RuntimeException {
private static final long serialVersionUID = 3882447822497861424L;
public OException() {
}
public OException(final String message) {
super(message);
}
public OException(final Throwable cause) {
super(cause);
}
public OException(final String message, final Throwable cause) {
super(message, cause);
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_exception_OException.java
|
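A minimal sketch of how this exception type is typically used: wrapping a checked, lower-level cause in the unchecked OException via the (String, Throwable) constructor. The file-loading method and path are hypothetical, and OException is assumed to be on the classpath.
public class OExceptionUsageSketch {
    static byte[] loadRecord(String path) {
        try {
            return java.nio.file.Files.readAllBytes(java.nio.file.Paths.get(path));
        } catch (java.io.IOException e) {
            // Preserve both a descriptive message and the original stack trace
            throw new OException("Cannot load record from " + path, e);
        }
    }
}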
337 |
public class KCVSConfiguration implements ConcurrentWriteConfiguration {
private final BackendOperation.TransactionalProvider txProvider;
private final TimestampProvider times;
private final KeyColumnValueStore store;
private final String identifier;
private final StaticBuffer rowKey;
private final StandardSerializer serializer;
private Duration maxOperationWaitTime = new StandardDuration(10000L, TimeUnit.MILLISECONDS);
public KCVSConfiguration(BackendOperation.TransactionalProvider txProvider, TimestampProvider times,
KeyColumnValueStore store, String identifier) throws BackendException {
Preconditions.checkArgument(txProvider!=null && store!=null && times!=null);
Preconditions.checkArgument(StringUtils.isNotBlank(identifier));
this.txProvider = txProvider;
this.times = times;
this.store = store;
this.identifier = identifier;
this.rowKey = string2StaticBuffer(this.identifier);
this.serializer = new StandardSerializer();
}
public void setMaxOperationWaitTime(Duration waitTime) {
Preconditions.checkArgument(ZeroDuration.INSTANCE.compareTo(waitTime) < 0,
"Wait time must be nonnegative: %s", waitTime);
this.maxOperationWaitTime = waitTime;
}
/**
* Reads the configuration property for this StoreManager
*
* @param key Key identifying the configuration property
* @return Value stored for the key or null if the configuration property has not (yet) been defined.
* @throws com.thinkaurelius.titan.diskstorage.BackendException
*/
@Override
public <O> O get(final String key, final Class<O> datatype) {
StaticBuffer column = string2StaticBuffer(key);
final KeySliceQuery query = new KeySliceQuery(rowKey,column, BufferUtil.nextBiggerBuffer(column));
StaticBuffer result = BackendOperation.execute(new BackendOperation.Transactional<StaticBuffer>() {
@Override
public StaticBuffer call(StoreTransaction txh) throws BackendException {
List<Entry> entries = store.getSlice(query,txh);
if (entries.isEmpty()) return null;
return entries.get(0).getValueAs(StaticBuffer.STATIC_FACTORY);
}
@Override
public String toString() {
return "getConfiguration";
}
}, txProvider, times, maxOperationWaitTime);
if (result==null) return null;
return staticBuffer2Object(result, datatype);
}
public<O> void set(String key, O value, O expectedValue) {
set(key,value,expectedValue,true);
}
/**
* Sets a configuration property for this StoreManager.
*
* @param key Key identifying the configuration property
* @param value Value to be stored for the key
* @throws com.thinkaurelius.titan.diskstorage.BackendException
*/
@Override
public <O> void set(String key, O value) {
set(key,value,null,false);
}
public <O> void set(String key, O value, O expectedValue, final boolean checkExpectedValue) {
final StaticBuffer column = string2StaticBuffer(key);
final List<Entry> additions;
final List<StaticBuffer> deletions;
if (value!=null) { //Addition
additions = new ArrayList<Entry>(1);
deletions = KeyColumnValueStore.NO_DELETIONS;
StaticBuffer val = object2StaticBuffer(value);
additions.add(StaticArrayEntry.of(column, val));
} else { //Deletion
additions = KeyColumnValueStore.NO_ADDITIONS;
deletions = Lists.newArrayList(column);
}
final StaticBuffer expectedValueBuffer;
if (checkExpectedValue && expectedValue!=null) {
expectedValueBuffer = object2StaticBuffer(expectedValue);
} else {
expectedValueBuffer = null;
}
BackendOperation.execute(new BackendOperation.Transactional<Boolean>() {
@Override
public Boolean call(StoreTransaction txh) throws BackendException {
if (checkExpectedValue)
store.acquireLock(rowKey,column,expectedValueBuffer,txh);
store.mutate(rowKey, additions, deletions, txh);
return true;
}
@Override
public String toString() {
return "setConfiguration";
}
}, txProvider, times, maxOperationWaitTime);
}
@Override
public void remove(String key) {
set(key,null);
}
@Override
public WriteConfiguration copy() {
throw new UnsupportedOperationException();
}
private Map<String,Object> toMap() {
Map<String,Object> entries = Maps.newHashMap();
List<Entry> result = BackendOperation.execute(new BackendOperation.Transactional<List<Entry>>() {
@Override
public List<Entry> call(StoreTransaction txh) throws BackendException {
return store.getSlice(new KeySliceQuery(rowKey, BufferUtil.zeroBuffer(128), BufferUtil.oneBuffer(128)),txh);
}
@Override
public String toString() {
return "setConfiguration";
}
},txProvider, times, maxOperationWaitTime);
for (Entry entry : result) {
String key = staticBuffer2String(entry.getColumnAs(StaticBuffer.STATIC_FACTORY));
Object value = staticBuffer2Object(entry.getValueAs(StaticBuffer.STATIC_FACTORY), Object.class);
entries.put(key,value);
}
return entries;
}
public ReadConfiguration asReadConfiguration() {
final Map<String,Object> entries = toMap();
return new ReadConfiguration() {
@Override
public <O> O get(String key, Class<O> datatype) {
Preconditions.checkArgument(!entries.containsKey(key) || datatype.isAssignableFrom(entries.get(key).getClass()));
return (O)entries.get(key);
}
@Override
public Iterable<String> getKeys(final String prefix) {
return Lists.newArrayList(Iterables.filter(entries.keySet(),new Predicate<String>() {
@Override
public boolean apply(@Nullable String s) {
assert s!=null;
return StringUtils.isBlank(prefix) || s.startsWith(prefix);
}
}));
}
@Override
public void close() {
//Do nothing
}
};
}
@Override
public Iterable<String> getKeys(String prefix) {
return asReadConfiguration().getKeys(prefix);
}
@Override
public void close() {
try {
store.close();
txProvider.close();
} catch (BackendException e) {
throw new TitanException("Could not close configuration store",e);
}
}
private StaticBuffer string2StaticBuffer(final String s) {
ByteBuffer out = ByteBuffer.wrap(s.getBytes(Charset.forName("UTF-8")));
return StaticArrayBuffer.of(out);
}
private String staticBuffer2String(final StaticBuffer s) {
return new String(s.as(StaticBuffer.ARRAY_FACTORY),Charset.forName("UTF-8"));
}
private<O> StaticBuffer object2StaticBuffer(final O value) {
DataOutput out = serializer.getDataOutput(128);
out.writeClassAndObject(value);
return out.getStaticBuffer();
}
private<O> O staticBuffer2Object(final StaticBuffer s, Class<O> datatype) {
Object value = serializer.readClassAndObject(s.asReadBuffer());
Preconditions.checkArgument(datatype.isInstance(value),"Could not deserialize to [%s], got: %s",datatype,value);
return (O)value;
}
}
| 1no label
|
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_configuration_backend_KCVSConfiguration.java
|
363 |
public interface TranslationDao {
/**
* Persists the given translation
* @param translation
* @return the saved translation
*/
public Translation save(Translation translation);
/**
* Creates an empty translation instance that is not persisted to the database
*
* @return the unsaved, empty translation
*/
public Translation create();
/**
* Deletes the given translation
*
* @param translation
*/
public void delete(Translation translation);
/**
* Returns a map that holds the following data for the given entity:
* "name" --> idProperty (the name of the id property, always a String)
* "type" --> idProperty's type (usually either Long or String)
*
* @param entity
* @return the id property's metadata
*/
public Map<String, Object> getIdPropertyMetadata(TranslatedEntity entity);
/**
* Reads a translation by its own primary key
*
* @param translationId
* @return the translation
*/
public Translation readTranslationById(Long translationId);
/**
* Reads all translations for a given field
*
* @param entity
* @param entityId
* @param fieldName
* @return the list of translations
*/
public List<Translation> readTranslations(TranslatedEntity entity, String entityId, String fieldName);
/**
* Reads a translation for the requested parameters. Returns null if there is no translation found
*
* @param entity
* @param entityId
* @param fieldName
* @param localeCode
* @return the translation
*/
public Translation readTranslation(TranslatedEntity entity, String entityId, String fieldName, String localeCode);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_i18n_dao_TranslationDao.java
|
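A hedged caller sketch for the DAO contract above, using only the methods it declares. The sketch is assumed to live in the same package/module as the DAO, so imports for Translation, TranslatedEntity and TranslationDao are omitted, and the dao instance is assumed to be injected by the surrounding Spring context.
public class TranslationLookupSketch {
    private final TranslationDao translationDao; // assumed to be injected
    public TranslationLookupSketch(TranslationDao translationDao) {
        this.translationDao = translationDao;
    }
    /** Returns true if a translation already exists for the given field and locale. */
    public boolean hasTranslation(TranslatedEntity entity, String entityId, String fieldName, String localeCode) {
        return translationDao.readTranslation(entity, entityId, fieldName, localeCode) != null;
    }
    /** Creates an unsaved translation instance and immediately persists it. */
    public Translation createAndSave() {
        Translation translation = translationDao.create();
        return translationDao.save(translation);
    }
}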
399 |
public class CreateSnapshotRequest extends MasterNodeOperationRequest<CreateSnapshotRequest> {
private String snapshot;
private String repository;
private String[] indices = EMPTY_ARRAY;
private IndicesOptions indicesOptions = IndicesOptions.strict();
private boolean partial = false;
private Settings settings = EMPTY_SETTINGS;
private boolean includeGlobalState = true;
private boolean waitForCompletion;
CreateSnapshotRequest() {
}
/**
* Constructs a new put repository request with the provided snapshot and repository names
*
* @param repository repository name
* @param snapshot snapshot name
*/
public CreateSnapshotRequest(String repository, String snapshot) {
this.snapshot = snapshot;
this.repository = repository;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (snapshot == null) {
validationException = addValidationError("snapshot is missing", validationException);
}
if (repository == null) {
validationException = addValidationError("repository is missing", validationException);
}
if (indices == null) {
validationException = addValidationError("indices is null", validationException);
}
for (String index : indices) {
if (index == null) {
validationException = addValidationError("index is null", validationException);
break;
}
}
if (indicesOptions == null) {
validationException = addValidationError("indicesOptions is null", validationException);
}
if (settings == null) {
validationException = addValidationError("settings is null", validationException);
}
return validationException;
}
/**
* Sets the snapshot name
*
* @param snapshot snapshot name
*/
public CreateSnapshotRequest snapshot(String snapshot) {
this.snapshot = snapshot;
return this;
}
/**
* The snapshot name
*
* @return snapshot name
*/
public String snapshot() {
return this.snapshot;
}
/**
* Sets repository name
*
* @param repository name
* @return this request
*/
public CreateSnapshotRequest repository(String repository) {
this.repository = repository;
return this;
}
/**
* Returns repository name
*
* @return repository name
*/
public String repository() {
return this.repository;
}
/**
* Sets a list of indices that should be included into the snapshot
* <p/>
     * The list of indices supports multi-index syntax. For example: "+test*", "-test42" will snapshot all indices with
     * prefix "test" except index "test42". Aliases are supported. An empty list or {"_all"} will snapshot all open
* indices in the cluster.
*
* @param indices
* @return this request
*/
public CreateSnapshotRequest indices(String... indices) {
this.indices = indices;
return this;
}
/**
* Sets a list of indices that should be included into the snapshot
* <p/>
     * The list of indices supports multi-index syntax. For example: "+test*", "-test42" will snapshot all indices with
     * prefix "test" except index "test42". Aliases are supported. An empty list or {"_all"} will snapshot all open
* indices in the cluster.
*
* @param indices
* @return this request
*/
public CreateSnapshotRequest indices(List<String> indices) {
this.indices = indices.toArray(new String[indices.size()]);
return this;
}
/**
* Returns a list of indices that should be included into the snapshot
*
* @return list of indices
*/
public String[] indices() {
return indices;
}
/**
* Specifies the indices options. Like what type of requested indices to ignore. For example indices that don't exist.
*
* @return the desired behaviour regarding indices options
*/
public IndicesOptions indicesOptions() {
return indicesOptions;
}
/**
* Specifies the indices options. Like what type of requested indices to ignore. For example indices that don't exist.
*
* @param indicesOptions the desired behaviour regarding indices options
* @return this request
*/
public CreateSnapshotRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
     * Returns true if indices with unavailable shards should be partially snapshotted.
     *
     * @return true if indices with unavailable shards should be partially snapshotted
*/
public boolean partial() {
return partial;
}
/**
* Set to true to allow indices with unavailable shards to be partially snapshotted.
*
     * @param partial true if indices with unavailable shards should be partially snapshotted.
* @return this request
*/
public CreateSnapshotRequest partial(boolean partial) {
this.partial = partial;
return this;
}
/**
* If set to true the request should wait for the snapshot completion before returning.
*
     * @param waitForCompletion true if the request should wait for the snapshot completion before returning
* @return this request
*/
public CreateSnapshotRequest waitForCompletion(boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
return this;
}
/**
* Returns true if the request should wait for the snapshot completion before returning
*
* @return true if the request should wait for completion
*/
public boolean waitForCompletion() {
return waitForCompletion;
}
/**
* Sets repository-specific snapshot settings.
* <p/>
* See repository documentation for more information.
*
* @param settings repository-specific snapshot settings
* @return this request
*/
public CreateSnapshotRequest settings(Settings settings) {
this.settings = settings;
return this;
}
/**
* Sets repository-specific snapshot settings.
* <p/>
* See repository documentation for more information.
*
* @param settings repository-specific snapshot settings
* @return this request
*/
public CreateSnapshotRequest settings(Settings.Builder settings) {
this.settings = settings.build();
return this;
}
/**
* Sets repository-specific snapshot settings in JSON, YAML or properties format
* <p/>
* See repository documentation for more information.
*
* @param source repository-specific snapshot settings
* @return this request
*/
public CreateSnapshotRequest settings(String source) {
this.settings = ImmutableSettings.settingsBuilder().loadFromSource(source).build();
return this;
}
/**
* Sets repository-specific snapshot settings.
* <p/>
* See repository documentation for more information.
*
* @param source repository-specific snapshot settings
* @return this request
*/
public CreateSnapshotRequest settings(Map<String, Object> source) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
settings(builder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
return this;
}
/**
* Returns repository-specific snapshot settings
*
* @return repository-specific snapshot settings
*/
public Settings settings() {
return this.settings;
}
/**
* Set to true if global state should be stored as part of the snapshot
*
* @param includeGlobalState true if global state should be stored
* @return this request
*/
public CreateSnapshotRequest includeGlobalState(boolean includeGlobalState) {
this.includeGlobalState = includeGlobalState;
return this;
}
/**
* Returns true if global state should be stored as part of the snapshot
*
* @return true if global state should be stored as part of the snapshot
*/
public boolean includeGlobalState() {
return includeGlobalState;
}
/**
* Parses snapshot definition.
*
* @param source snapshot definition
* @return this request
*/
public CreateSnapshotRequest source(XContentBuilder source) {
return source(source.bytes());
}
/**
* Parses snapshot definition.
*
* @param source snapshot definition
* @return this request
*/
public CreateSnapshotRequest source(Map source) {
boolean ignoreUnavailable = IndicesOptions.lenient().ignoreUnavailable();
boolean allowNoIndices = IndicesOptions.lenient().allowNoIndices();
boolean expandWildcardsOpen = IndicesOptions.lenient().expandWildcardsOpen();
boolean expandWildcardsClosed = IndicesOptions.lenient().expandWildcardsClosed();
for (Map.Entry<String, Object> entry : ((Map<String, Object>) source).entrySet()) {
String name = entry.getKey();
if (name.equals("indices")) {
if (entry.getValue() instanceof String) {
indices(Strings.splitStringByCommaToArray((String) entry.getValue()));
} else if (entry.getValue() instanceof ArrayList) {
indices((ArrayList<String>) entry.getValue());
} else {
throw new ElasticsearchIllegalArgumentException("malformed indices section, should be an array of strings");
}
} else if (name.equals("ignore_unavailable") || name.equals("ignoreUnavailable")) {
ignoreUnavailable = nodeBooleanValue(entry.getValue());
} else if (name.equals("allow_no_indices") || name.equals("allowNoIndices")) {
allowNoIndices = nodeBooleanValue(entry.getValue());
} else if (name.equals("expand_wildcards_open") || name.equals("expandWildcardsOpen")) {
expandWildcardsOpen = nodeBooleanValue(entry.getValue());
} else if (name.equals("expand_wildcards_closed") || name.equals("expandWildcardsClosed")) {
expandWildcardsClosed = nodeBooleanValue(entry.getValue());
} else if (name.equals("partial")) {
partial(nodeBooleanValue(entry.getValue()));
} else if (name.equals("settings")) {
if (!(entry.getValue() instanceof Map)) {
throw new ElasticsearchIllegalArgumentException("malformed settings section, should indices an inner object");
}
settings((Map<String, Object>) entry.getValue());
} else if (name.equals("include_global_state")) {
includeGlobalState = nodeBooleanValue(entry.getValue());
}
}
indicesOptions(IndicesOptions.fromOptions(ignoreUnavailable, allowNoIndices, expandWildcardsOpen, expandWildcardsClosed));
return this;
}
/**
* Parses snapshot definition. JSON, YAML and properties formats are supported
*
* @param source snapshot definition
* @return this request
*/
public CreateSnapshotRequest source(String source) {
if (hasLength(source)) {
try {
return source(XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
} catch (Exception e) {
throw new ElasticsearchIllegalArgumentException("failed to parse repository source [" + source + "]", e);
}
}
return this;
}
/**
* Parses snapshot definition. JSON, YAML and properties formats are supported
*
* @param source snapshot definition
* @return this request
*/
public CreateSnapshotRequest source(byte[] source) {
return source(source, 0, source.length);
}
/**
* Parses snapshot definition. JSON, YAML and properties formats are supported
*
* @param source snapshot definition
* @param offset offset
* @param length length
* @return this request
*/
public CreateSnapshotRequest source(byte[] source, int offset, int length) {
if (length > 0) {
try {
return source(XContentFactory.xContent(source, offset, length).createParser(source, offset, length).mapOrderedAndClose());
} catch (IOException e) {
throw new ElasticsearchIllegalArgumentException("failed to parse repository source", e);
}
}
return this;
}
/**
* Parses snapshot definition. JSON, YAML and properties formats are supported
*
* @param source snapshot definition
* @return this request
*/
public CreateSnapshotRequest source(BytesReference source) {
try {
return source(XContentFactory.xContent(source).createParser(source).mapOrderedAndClose());
} catch (IOException e) {
throw new ElasticsearchIllegalArgumentException("failed to parse snapshot source", e);
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
snapshot = in.readString();
repository = in.readString();
indices = in.readStringArray();
indicesOptions = IndicesOptions.readIndicesOptions(in);
settings = readSettingsFromStream(in);
includeGlobalState = in.readBoolean();
waitForCompletion = in.readBoolean();
partial = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(snapshot);
out.writeString(repository);
out.writeStringArray(indices);
indicesOptions.writeIndicesOptions(out);
writeSettingsToStream(settings, out);
out.writeBoolean(includeGlobalState);
out.writeBoolean(waitForCompletion);
out.writeBoolean(partial);
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_cluster_snapshots_create_CreateSnapshotRequest.java
|
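A short sketch of assembling the request above through its fluent setters. The repository name, snapshot name and index pattern are placeholders; sending the request through a client is omitted, and the import paths are assumed from the Elasticsearch 1.x source layout.
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.support.IndicesOptions;
public class CreateSnapshotRequestSketch {
    static CreateSnapshotRequest buildRequest() {
        CreateSnapshotRequest request = new CreateSnapshotRequest("my_backup_repo", "snapshot_1") // placeholder names
                .indices("logs-*")                        // multi-index pattern, see the indices() javadoc
                .indicesOptions(IndicesOptions.lenient())
                .partial(false)                           // fail instead of partially snapshotting unavailable shards
                .includeGlobalState(true)
                .waitForCompletion(true);
        assert request.validate() == null;                // a well-formed request yields no validation errors
        return request;
    }
}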
381 |
public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder<ClusterRerouteRequest, ClusterRerouteResponse, ClusterRerouteRequestBuilder> {
public ClusterRerouteRequestBuilder(ClusterAdminClient clusterClient) {
super((InternalClusterAdminClient) clusterClient, new ClusterRerouteRequest());
}
/**
* Adds allocation commands to be applied to the cluster. Note, can be empty, in which case
* will simply run a simple "reroute".
*/
public ClusterRerouteRequestBuilder add(AllocationCommand... commands) {
request.add(commands);
return this;
}
/**
* Sets a dry run flag (defaults to <tt>false</tt>) allowing to run the commands without
* actually applying them to the cluster state, and getting the resulting cluster state back.
*/
public ClusterRerouteRequestBuilder setDryRun(boolean dryRun) {
request.dryRun(dryRun);
return this;
}
/**
* Sets the source for the request
*/
public ClusterRerouteRequestBuilder setSource(BytesReference source) throws Exception {
request.source(source);
return this;
}
@Override
protected void doExecute(ActionListener<ClusterRerouteResponse> listener) {
((ClusterAdminClient) client).reroute(request, listener);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_reroute_ClusterRerouteRequestBuilder.java
|
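A hedged sketch of driving the builder above. The ClusterAdminClient and the concrete AllocationCommand are assumed to be created elsewhere in the application, and the import paths are assumed from the Elasticsearch 1.x source layout.
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
import org.elasticsearch.client.ClusterAdminClient;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
public class ClusterRerouteSketch {
    static ClusterRerouteResponse dryRunReroute(ClusterAdminClient clusterAdminClient, AllocationCommand command) {
        return new ClusterRerouteRequestBuilder(clusterAdminClient)
                .setDryRun(true)   // compute the resulting cluster state without applying it
                .add(command)      // e.g. a move or allocate command built elsewhere
                .execute()
                .actionGet();
    }
}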
1,484 |
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private boolean isVertex;
private Closure<Boolean> closure;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
try {
this.closure = (Closure<Boolean>) engine.eval(context.getConfiguration().get(CLOSURE));
} catch (final ScriptException e) {
throw new IOException(e.getMessage(), e);
}
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.isVertex) {
if (value.hasPaths() && !this.closure.call(value)) {
value.clearPaths();
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_FILTERED, 1L);
}
} else {
long counter = 0;
for (final Edge e : value.getEdges(Direction.BOTH)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths() && !this.closure.call(edge)) {
edge.clearPaths();
counter++;
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_FILTERED, counter);
}
context.write(NullWritable.get(), value);
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_filter_FilterMap.java
|
83 |
@SuppressWarnings("serial")
static final class MapReduceValuesToIntTask<K,V>
extends BulkTask<K,V,Integer> {
final ObjectToInt<? super V> transformer;
final IntByIntToInt reducer;
final int basis;
int result;
MapReduceValuesToIntTask<K,V> rights, nextRight;
MapReduceValuesToIntTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceValuesToIntTask<K,V> nextRight,
ObjectToInt<? super V> transformer,
int basis,
IntByIntToInt reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Integer getRawResult() { return result; }
public final void compute() {
final ObjectToInt<? super V> transformer;
final IntByIntToInt reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
int r = this.basis;
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceValuesToIntTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p.val));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceValuesToIntTask<K,V>
t = (MapReduceValuesToIntTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
421 |
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
if (firedEvents.get(0).equals(event))
firedEvents.remove(0);
else
Assert.fail();
}
});
| 0true
|
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedListTest.java
|
2,927 |
public class PreBuiltTokenizerFactoryFactory implements TokenizerFactoryFactory {
private final TokenizerFactory tokenizerFactory;
public PreBuiltTokenizerFactoryFactory(TokenizerFactory tokenizerFactory) {
this.tokenizerFactory = tokenizerFactory;
}
@Override
public TokenizerFactory create(String name, Settings settings) {
Version indexVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
if (!Version.CURRENT.equals(indexVersion)) {
TokenizerFactory versionedTokenizerFactory = PreBuiltTokenizers.valueOf(name.toUpperCase(Locale.ROOT)).getTokenizerFactory(indexVersion);
return versionedTokenizerFactory;
}
return tokenizerFactory;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_analysis_PreBuiltTokenizerFactoryFactory.java
|
987 |
private class TransportHandler extends BaseTransportRequestHandler<Request> {
@Override
public Request newInstance() {
return newRequestInstance();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void messageReceived(final Request request, final TransportChannel channel) throws Exception {
// no need for a threaded listener, since we just send a response
request.listenerThreaded(false);
execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send error response for action [" + transportAction + "] and request [" + request + "]", e1);
}
}
});
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_replication_TransportIndicesReplicationOperationAction.java
|
1,123 |
public class NativeScriptExamplesPlugin extends AbstractPlugin {
@Override
public String name() {
return "native-script-example";
}
@Override
public String description() {
return "Native script examples";
}
public void onModule(ScriptModule module) {
module.registerScript(NativeNaiveTFIDFScoreScript.NATIVE_NAIVE_TFIDF_SCRIPT_SCORE, NativeNaiveTFIDFScoreScript.Factory.class);
module.registerScript(NativeConstantForLoopScoreScript.NATIVE_CONSTANT_FOR_LOOP_SCRIPT_SCORE, NativeConstantForLoopScoreScript.Factory.class);
module.registerScript(NativeConstantScoreScript.NATIVE_CONSTANT_SCRIPT_SCORE, NativeConstantScoreScript.Factory.class);
module.registerScript(NativePayloadSumScoreScript.NATIVE_PAYLOAD_SUM_SCRIPT_SCORE, NativePayloadSumScoreScript.Factory.class);
module.registerScript(NativePayloadSumNoRecordScoreScript.NATIVE_PAYLOAD_SUM_NO_RECORD_SCRIPT_SCORE, NativePayloadSumNoRecordScoreScript.Factory.class);
}
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_scripts_score_plugin_NativeScriptExamplesPlugin.java
|
539 |
public class SortedListFactoryBean extends ListFactoryBean {
@Override
protected List createInstance() {
List response = super.createInstance();
Collections.sort(response, new Comparator<Ordered>() {
@Override
public int compare(Ordered o1, Ordered o2) {
return new Integer(o1.getOrder()).compareTo(o2.getOrder());
}
});
return response;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_SortedListFactoryBean.java
|
4,195 |
private class SnapshotContext extends Context {
private final Store store;
private final IndexShardSnapshotStatus snapshotStatus;
/**
* Constructs new context
*
* @param snapshotId snapshot id
* @param shardId shard to be snapshotted
* @param snapshotStatus snapshot status to report progress
*/
public SnapshotContext(SnapshotId snapshotId, ShardId shardId, IndexShardSnapshotStatus snapshotStatus) {
super(snapshotId, shardId);
store = indicesService.indexServiceSafe(shardId.getIndex()).shardInjectorSafe(shardId.id()).getInstance(Store.class);
this.snapshotStatus = snapshotStatus;
}
/**
* Create snapshot from index commit point
*
* @param snapshotIndexCommit
*/
public void snapshot(SnapshotIndexCommit snapshotIndexCommit) {
logger.debug("[{}] [{}] snapshot to [{}] ...", shardId, snapshotId, repositoryName);
final ImmutableMap<String, BlobMetaData> blobs;
try {
blobs = blobContainer.listBlobs();
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e);
}
long generation = findLatestFileNameGeneration(blobs);
BlobStoreIndexShardSnapshots snapshots = buildBlobStoreIndexShardSnapshots(blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.STARTED);
final CountDownLatch indexLatch = new CountDownLatch(snapshotIndexCommit.getFiles().length);
final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();
final List<BlobStoreIndexShardSnapshot.FileInfo> indexCommitPointFiles = newArrayList();
int indexNumberOfFiles = 0;
long indexTotalFilesSize = 0;
for (String fileName : snapshotIndexCommit.getFiles()) {
if (snapshotStatus.aborted()) {
logger.debug("[{}] [{}] Aborted on the file [{}], exiting", shardId, snapshotId, fileName);
throw new IndexShardSnapshotFailedException(shardId, "Aborted");
}
logger.trace("[{}] [{}] Processing [{}]", shardId, snapshotId, fileName);
final StoreFileMetaData md;
try {
md = store.metaData(fileName);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to get store file metadata", e);
}
boolean snapshotRequired = false;
// TODO: For now segment files are copied on each commit because segment files don't have checksum
// if (snapshot.indexChanged() && fileName.equals(snapshotIndexCommit.getSegmentsFileName())) {
// snapshotRequired = true; // we want to always snapshot the segment file if the index changed
// }
BlobStoreIndexShardSnapshot.FileInfo fileInfo = snapshots.findPhysicalIndexFile(fileName);
if (fileInfo == null || !fileInfo.isSame(md) || !snapshotFileExistsInBlobs(fileInfo, blobs)) {
                    // commit point file does not exist in any commit point, or has a different length, or does not fully exist in the listed blobs
snapshotRequired = true;
}
if (snapshotRequired) {
indexNumberOfFiles++;
indexTotalFilesSize += md.length();
// create a new FileInfo
try {
BlobStoreIndexShardSnapshot.FileInfo snapshotFileInfo = new BlobStoreIndexShardSnapshot.FileInfo(fileNameFromGeneration(++generation), fileName, md.length(), chunkSize, md.checksum());
indexCommitPointFiles.add(snapshotFileInfo);
snapshotFile(snapshotFileInfo, indexLatch, failures);
} catch (IOException e) {
failures.add(e);
}
} else {
indexCommitPointFiles.add(fileInfo);
indexLatch.countDown();
}
}
snapshotStatus.files(indexNumberOfFiles, indexTotalFilesSize);
snapshotStatus.indexVersion(snapshotIndexCommit.getGeneration());
try {
indexLatch.await();
} catch (InterruptedException e) {
failures.add(e);
Thread.currentThread().interrupt();
}
if (!failures.isEmpty()) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to perform snapshot (index files)", failures.get(0));
}
// now create and write the commit point
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.FINALIZE);
String commitPointName = snapshotBlobName(snapshotId);
BlobStoreIndexShardSnapshot snapshot = new BlobStoreIndexShardSnapshot(snapshotId.getSnapshot(), snapshotIndexCommit.getGeneration(), indexCommitPointFiles);
try {
byte[] snapshotData = writeSnapshot(snapshot);
logger.trace("[{}] [{}] writing shard snapshot file", shardId, snapshotId);
blobContainer.writeBlob(commitPointName, new BytesStreamInput(snapshotData, false), snapshotData.length);
} catch (IOException e) {
throw new IndexShardSnapshotFailedException(shardId, "Failed to write commit point", e);
}
// delete all files that are not referenced by any commit point
// build a new BlobStoreIndexShardSnapshot, that includes this one and all the saved ones
List<BlobStoreIndexShardSnapshot> newSnapshotsList = Lists.newArrayList();
newSnapshotsList.add(snapshot);
for (BlobStoreIndexShardSnapshot point : snapshots) {
newSnapshotsList.add(point);
}
cleanup(newSnapshotsList, blobs);
snapshotStatus.updateStage(IndexShardSnapshotStatus.Stage.DONE);
}
/**
* Snapshot individual file
* <p/>
* This is asynchronous method. Upon completion of the operation latch is getting counted down and any failures are
* added to the {@code failures} list
*
* @param fileInfo file to be snapshotted
* @param latch latch that should be counted down once file is snapshoted
* @param failures thread-safe list of failures
* @throws IOException
*/
private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo, final CountDownLatch latch, final List<Throwable> failures) throws IOException {
final AtomicLong counter = new AtomicLong(fileInfo.numberOfParts());
for (long i = 0; i < fileInfo.numberOfParts(); i++) {
IndexInput indexInput = null;
try {
indexInput = store.openInputRaw(fileInfo.physicalName(), IOContext.READONCE);
indexInput.seek(i * fileInfo.partBytes());
InputStreamIndexInput inputStreamIndexInput = new ThreadSafeInputStreamIndexInput(indexInput, fileInfo.partBytes());
final IndexInput fIndexInput = indexInput;
long size = inputStreamIndexInput.actualSizeToRead();
InputStream inputStream;
if (snapshotRateLimiter != null) {
inputStream = new RateLimitingInputStream(inputStreamIndexInput, snapshotRateLimiter, snapshotThrottleListener);
} else {
inputStream = inputStreamIndexInput;
}
blobContainer.writeBlob(fileInfo.partName(i), inputStream, size, new ImmutableBlobContainer.WriterListener() {
@Override
public void onCompleted() {
IOUtils.closeWhileHandlingException(fIndexInput);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
@Override
public void onFailure(Throwable t) {
IOUtils.closeWhileHandlingException(fIndexInput);
failures.add(t);
if (counter.decrementAndGet() == 0) {
latch.countDown();
}
}
});
} catch (Throwable e) {
IOUtils.closeWhileHandlingException(indexInput);
failures.add(e);
latch.countDown();
}
}
}
/**
* Checks if snapshot file already exists in the list of blobs
*
* @param fileInfo file to check
* @param blobs list of blobs
* @return true if file exists in the list of blobs
*/
private boolean snapshotFileExistsInBlobs(BlobStoreIndexShardSnapshot.FileInfo fileInfo, ImmutableMap<String, BlobMetaData> blobs) {
BlobMetaData blobMetaData = blobs.get(fileInfo.name());
if (blobMetaData != null) {
return blobMetaData.length() == fileInfo.length();
} else if (blobs.containsKey(fileInfo.partName(0))) {
// multi part file sum up the size and check
int part = 0;
long totalSize = 0;
while (true) {
blobMetaData = blobs.get(fileInfo.partName(part++));
if (blobMetaData == null) {
break;
}
totalSize += blobMetaData.length();
}
return totalSize == fileInfo.length();
}
// no file, not exact and not multipart
return false;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_snapshots_blobstore_BlobStoreIndexShardRepository.java
|
301 |
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ClientMapBasicTest {
static HazelcastInstance client;
static HazelcastInstance server;
@BeforeClass
public static void init() {
server = Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
}
@AfterClass
public static void destroy() {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testClientGetMap() {
assertNotNull( client.getMap(randomString()) );
}
@Test
public void testGetName() {
String mapName = randomString();
final IMap map = client.getMap(mapName);
assertEquals(mapName, map.getName());
}
@Test
public void testSize_whenEmpty() {
final IMap map = client.getMap(randomString());
assertEquals(0, map.size());
}
@Test
public void testSize() {
final IMap map = client.getMap(randomString());
map.put("key", "val");
assertEquals(1, map.size());
}
@Test
public void testSize_withMultiKeyPuts() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
map.put(key, newValue);
assertEquals(1, map.size());
}
@Test
public void testIsEmpty_whenEmpty() {
final IMap map = client.getMap(randomString());
assertTrue(map.isEmpty());
}
@Test
public void testIsEmpty_whenNotEmpty() {
final IMap map = client.getMap(randomString());
map.put("key", "val");
assertFalse(map.isEmpty());
}
@Test
public void testIsEmpty_afterPutRemove() {
final IMap map = client.getMap(randomString());
final Object key = "key";
map.put(key, "val");
map.remove(key);
assertTrue(map.isEmpty());
}
@Test(expected = NullPointerException.class)
public void testPut_whenKeyNull() {
final IMap map = client.getMap(randomString());
final Object val = "Val";
map.put(null, val);
}
@Test(expected = HazelcastSerializationException.class)
public void testPut_whenValueNull() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
map.put(key, null);
}
@Test
public void testPut() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Val";
final Object result = map.put(key, value);
assertNull(result);
assertEquals(value, map.get(key));
}
@Test
public void testPut_whenKeyExists() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
final Object result = map.put(key, newValue);
assertEquals(oldValue, result);
assertEquals(newValue, map.get(key));
}
@Test
public void testPutTTL() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
final Object result = map.put(key, value, 5, TimeUnit.MINUTES);
assertNull(result);
assertEquals(value, map.get(key));
}
@Test
public void testPutTTL_whenKeyExists() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
final Object result = map.put(key, newValue, 5, TimeUnit.MINUTES);
assertEquals(oldValue, result);
assertEquals(newValue, map.get(key));
}
@Test
public void testPutTTL_AfterExpire() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
final Object result = map.put(key, value, 1, TimeUnit.SECONDS);
assertNull(result);
sleepSeconds(2);
assertEquals(null, map.get(key));
}
@Test
public void testPutTTL_AfterExpireWhenKeyExists() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
final Object result = map.put(key, newValue, 1, TimeUnit.SECONDS);
assertEquals(oldValue, result);
sleepSeconds(2);
assertEquals(null, map.get(key));
}
@Test
public void testPutAsync() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Val";
Future result = map.putAsync(key, value);
assertEquals(null, result.get());
assertEquals(value, map.get(key));
}
@Test
public void testPutAsync_whenKeyExists() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
Future result = map.putAsync(key, newValue);
assertEquals(oldValue, result.get());
assertEquals(newValue, map.get(key));
}
@Test(expected = NullPointerException.class)
public void testPutAsync_withKeyNull() throws Exception {
final IMap map = client.getMap(randomString());
final Object val = "Val";
map.putAsync(null, val);
}
@Test(expected = HazelcastSerializationException.class)
public void testPutAsync_withValueNull() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "key";
map.putAsync(key, null);
}
@Test
public void testPutAsyncTTL() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Val";
Future result = map.putAsync(key, value, 5, TimeUnit.MINUTES);
assertEquals(null, result.get());
assertEquals(value, map.get(key));
}
@Test
public void testPutAsyncTTL_whenKeyExists() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
Future result = map.putAsync(key, newValue, 5, TimeUnit.MINUTES);
assertEquals(oldValue, result.get());
assertEquals(newValue, map.get(key));
}
@Test
public void testPutAsyncTTL_afterExpire() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Val";
Future result = map.putAsync(key, value, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(null, result.get());
assertEquals(null, map.get(key));
}
@Test
public void testPutAsyncTTL_afterExpireWhenKeyExists() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
Future result = map.putAsync(key, newValue, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(oldValue, result.get());
assertEquals(null, map.get(key));
}
@Test
public void testTryPut_whenNotLocked() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
boolean result = map.tryPut(key, value, 1, TimeUnit.SECONDS);
assertTrue(result);
assertEquals(value, map.get(key));
}
@Test
public void testTryPut_whenKeyPresentAndNotLocked() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "Val";
map.put(key, oldValue);
boolean result = map.tryPut(key, newValue, 1, TimeUnit.SECONDS);
assertTrue(result);
assertEquals(newValue, map.get(key));
}
@Test(expected = NullPointerException.class)
public void testPutIfAbsent_whenKeyNull() throws Exception {
final IMap map = client.getMap(randomString());
final Object value = "Value";
map.putIfAbsent(null, value);
}
@Test(expected = HazelcastSerializationException.class)
public void testPutIfAbsent_whenValueNull() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "key";
map.putIfAbsent(key, null);
}
@Test
public void testPutIfAbsent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
Object result = map.putIfAbsent(key, value);
assertEquals(null, result);
assertEquals(value, map.get(key));
}
@Test
public void testPutIfAbsent_whenKeyPresent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
map.put(key, value);
Object result = map.putIfAbsent(key, value);
assertEquals(value, result);
assertEquals(value, map.get(key));
}
@Test
public void testPutIfAbsentNewValue_whenKeyPresent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
final Object newValue = "newValue";
map.put(key, value);
Object result = map.putIfAbsent(key, newValue);
assertEquals(value, result);
assertEquals(value, map.get(key));
}
@Test
public void testPutIfAbsentTTL() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
Object result = map.putIfAbsent(key, value, 5, TimeUnit.MINUTES);
assertEquals(null, result);
assertEquals(value, map.get(key));
}
@Test
public void testPutIfAbsentTTL_whenExpire() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
final Object result = map.putIfAbsent(key, value, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(null, result);
assertEquals(null, map.get(key));
}
@Test
public void testPutIfAbsentTTL_whenKeyPresentAfterExpire() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
map.put(key, value);
final Object result = map.putIfAbsent(key, value, 1, TimeUnit.SECONDS);
assertEquals(value, result);
assertEquals(value, map.get(key));
}
@Test
public void testPutIfAbsentTTL_whenKeyPresent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
map.put(key, value);
final Object result = map.putIfAbsent(key, value, 5, TimeUnit.MINUTES);
assertEquals(value, result);
assertEquals(value, map.get(key));
}
@Test
public void testPutIfAbsentNewValueTTL_whenKeyPresent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
final Object newValue = "newValue";
map.put(key, value);
final Object result = map.putIfAbsent(key, newValue, 5, TimeUnit.MINUTES);
assertEquals(value, result);
assertEquals(value, map.get(key));
}
@Test
public void testClear_whenEmpty() throws Exception {
final IMap map = client.getMap(randomString());
map.clear();
assertTrue(map.isEmpty());
}
@Test
public void testClear() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
map.put(key, value);
map.clear();
assertTrue(map.isEmpty());
}
@Test
public void testContainsKey_whenKeyAbsent() {
final IMap map = client.getMap(randomString());
assertFalse(map.containsKey("NOT_THERE"));
}
@Test(expected = NullPointerException.class)
public void testContainsKey_whenKeyNull() {
final IMap map = client.getMap(randomString());
map.containsKey(null);
}
@Test
public void testContainsKey_whenKeyPresent() {
final IMap map = client.getMap(randomString());
final Object key = "key";
map.put(key, "val");
assertTrue(map.containsKey(key));
}
@Test
public void testContainsValue_whenValueAbsent() {
final IMap map = client.getMap(randomString());
assertFalse(map.containsValue("NOT_THERE"));
}
@Test(expected = HazelcastSerializationException.class)
public void testContainsValue_whenValueNull() {
final IMap map = client.getMap(randomString());
map.containsValue(null);
}
@Test
public void testContainsValue_whenValuePresent() {
final IMap map = client.getMap(randomString());
final Object key = "key";
final Object value = "value";
map.put(key, value);
assertTrue(map.containsValue(value));
}
@Test
public void testContainsValue_whenMultiValuePresent() {
final IMap map = client.getMap(randomString());
final Object value = "value";
map.put("key1", value);
map.put("key2", value);
assertTrue(map.containsValue(value));
}
@Test
public void testGet_whenKeyPresent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object val = "Val";
map.put(key, val);
assertEquals(val, map.get(key));
}
@Test
public void testGet_whenKeyAbsent() {
final IMap map = client.getMap(randomString());
assertEquals(null, map.get("NOT_THERE"));
}
@Test(expected = NullPointerException.class)
public void testGet_whenKeyNull() {
final IMap map = client.getMap(randomString());
map.get(null);
}
@Test
public void testGetAsync_whenKeyPresent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object val = "Val";
map.put(key, val);
Future result = map.getAsync(key);
assertEquals(val, result.get());
}
@Test
public void testGetAsync_whenKeyAbsent() throws Exception {
final IMap map = client.getMap(randomString());
Future result = map.getAsync("NOT_THERE");
assertEquals(null, result.get());
}
@Test(expected = NullPointerException.class)
public void testGetAsync_whenKeyNull() throws Exception {
final IMap map = client.getMap(randomString());
map.getAsync(null);
}
@Test
public void testMapSet() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object val = "Val";
map.set(key, val);
assertEquals(val, map.get(key));
}
@Test
public void testMapSet_whenKeyPresent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "Val";
final Object newValue = "newValue";
map.set(key, oldValue);
map.set(key, newValue);
assertEquals(newValue, map.get(key));
}
@Test
    public void testMapSetTTL() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object val = "Val";
map.set(key, val, 5, TimeUnit.MINUTES);
assertEquals(val, map.get(key));
}
@Test
    public void testMapSetTTL_whenExpired() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object val = "Val";
map.set(key, val, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(null, map.get(key));
}
@Test
    public void testMapSetTTL_whenReplacingKeyAndExpired() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object newValue = "newValue";
final Object oldValue = "oldvalue";
map.set(key, oldValue);
map.set(key, newValue, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(null, map.get(key));
}
@Test
public void testRemove_WhenKeyAbsent() {
final IMap map = client.getMap(randomString());
assertNull(map.remove("NOT_THERE"));
}
@Test(expected = NullPointerException.class)
public void testRemove_WhenKeyNull() {
final IMap map = client.getMap(randomString());
assertNull(map.remove(null));
}
@Test
public void testRemove_WhenKeyPresent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
assertEquals(value, map.remove(key));
assertNull(map.get(key));
}
@Test
public void testRemoveKeyValue_WhenPresent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
assertTrue(map.remove(key, value));
assertNull(map.get(key));
}
@Test
public void testRemoveKeyValue_WhenValueAbsent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
assertFalse(map.remove(key, "NOT_THERE"));
assertEquals(value, map.get(key));
}
@Test
public void testRemoveKeyValue_WhenKeyAbsent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
assertFalse(map.remove("NOT_THERE", value));
}
@Test
public void testRemoveAsync() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
Future result = map.removeAsync(key);
assertEquals(value, result.get());
assertEquals(null, map.get(key));
}
@Test
public void testRemoveAsync_whenKeyNotPresent() throws Exception {
final IMap map = client.getMap(randomString());
Future result = map.removeAsync("NOT_THERE");
assertEquals(null, result.get());
}
@Test(expected = NullPointerException.class)
public void testRemoveAsync_whenKeyNull() throws Exception {
final IMap map = client.getMap(randomString());
map.removeAsync(null);
}
@Test
public void testTryRemove_WhenKeyPresentAndNotLocked() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
boolean result = map.tryRemove(key, 1, TimeUnit.SECONDS);
assertTrue(result);
assertNull(map.get(key));
}
@Test
public void testTryRemove_WhenKeyAbsentAndNotLocked() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
boolean result = map.tryRemove(key, 1, TimeUnit.SECONDS);
assertFalse(result);
}
@Test(expected = NullPointerException.class)
public void testDelete_whenKeyNull() {
final IMap map = client.getMap(randomString());
map.delete(null);
}
@Test
public void testDelete_whenKeyPresent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
map.delete(key);
assertEquals(0, map.size());
}
@Test
public void testDelete_whenKeyAbsent() {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
map.delete("NOT_THERE");
assertEquals(1, map.size());
}
@Test
public void testEvict_whenKeyAbsent() throws InterruptedException {
final IMap map = client.getMap(randomString());
boolean result = map.evict("NOT_THERE");
assertFalse( result );
}
@Test(expected = HazelcastSerializationException.class)
public void testEvict_whenKeyNull() throws InterruptedException {
final IMap map = client.getMap(randomString());
map.evict(null);
}
@Test
public void testEvict() throws InterruptedException {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.put(key, value);
boolean result = map.evict(key);
assertTrue(result);
assertEquals(null, map.get(key));
}
@Test
public void testPutAll() {
final int max = 100;
final IMap map = client.getMap(randomString());
final Map expected = new HashMap();
for (int i = 0; i < max; i++) {
expected.put(i, i);
}
map.putAll(expected);
for(Object key : expected.keySet()){
Object value = map.get(key);
Object expectedValue = expected.get(key);
assertEquals(expectedValue, value);
}
}
@Test
public void testGetAll() {
final int max = 100;
final IMap map = client.getMap(randomString());
final Map expected = new HashMap();
for (int i = 0; i < max; i++) {
map.put(i, i);
expected.put(i, i);
}
Map result = map.getAll(expected.keySet());
for(Object key : expected.keySet()){
Object value = result.get(key);
Object expectedValue = expected.get(key);
assertEquals(expectedValue, value);
}
}
    @Test
    public void testGetAll_whenMapEmpty() {
final int max = 10;
final IMap map = client.getMap(randomString());
final Map expected = new HashMap();
for (int i = 0; i < max; i++) {
expected.put(i, i);
}
Map result = map.getAll(expected.keySet());
assertTrue(result.isEmpty());
}
@Test
public void testReplace_whenKeyValueAbsent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
assertNull(map.replace(key, value));
assertNull(map.get(key));
}
@Test
public void testReplace() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "value";
final Object newValue = "NewValue";
map.put(key, oldValue);
final Object result = map.replace(key, newValue);
assertEquals(oldValue, result);
assertEquals(newValue, map.get(key));
}
@Test
public void testReplaceKeyValue() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
final Object newValue = "NewValue";
map.put(key, value);
final boolean result = map.replace(key, value, newValue);
assertTrue(result);
assertEquals(newValue, map.get(key));
}
@Test
public void testReplaceKeyValue_whenValueAbsent() throws Exception {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
final Object newValue = "NewValue";
map.put(key, value);
final boolean result = map.replace(key, "NOT_THERE", newValue);
assertFalse(result);
assertEquals(value, map.get(key));
}
@Test
public void testPutTransient() throws InterruptedException {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.putTransient(key, value, 5, TimeUnit.MINUTES);
assertEquals(value, map.get(key));
}
@Test
public void testPutTransient_whenExpire() throws InterruptedException {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "value";
map.putTransient(key, value, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(null, map.get(key));
}
@Test
public void testPutTransient_whenKeyPresent() throws InterruptedException {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "newValue";
map.put(key, oldValue);
map.putTransient(key, newValue, 5, TimeUnit.MINUTES);
assertEquals(newValue, map.get(key));
}
@Test
public void testPutTransient_whenKeyPresentAfterExpire() throws InterruptedException {
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object oldValue = "oldValue";
final Object newValue = "newValue";
map.put(key, oldValue);
map.putTransient(key, newValue, 1, TimeUnit.SECONDS);
sleepSeconds(2);
assertEquals(null, map.get(key));
}
@Test
public void testGetEntryView_whenKeyAbsent(){
final IMap map = client.getMap(randomString());
final EntryView view = map.getEntryView("NOT_THERE");
assertEquals(null, view);
}
@Test
public void testGetEntryView(){
final IMap map = client.getMap(randomString());
final Object key = "Key";
final Object value = "Value";
map.put(key, value);
final EntryView view = map.getEntryView(key);
assertEquals(key, view.getKey());
assertEquals(value, view.getValue());
}
@Test
public void testKeySet_whenEmpty() {
final IMap map = client.getMap(randomString());
final Set keySet = map.keySet();
assertTrue(keySet.isEmpty());
}
@Test
public void testKeySet() {
final int max = 81;
final IMap map = client.getMap(randomString());
final Set expected = new TreeSet();
for (int key = 0; key < max; key++) {
Object value = key+"value";
expected.add(key);
map.put(key, value);
}
final Set keySet = map.keySet();
assertEquals(expected, keySet);
}
@Test
public void testKeySet_withPredicate() {
final int max = 44;
final IMap map = client.getMap(randomString());
final Set expected = new TreeSet();
for (int key = 0; key < max; key++) {
Object value = key+"value";
map.put(key, value);
}
expected.add(4);
final Set keySet = map.keySet(new SqlPredicate("this == 4value"));
assertEquals(expected, keySet);
}
@Test
public void testValues_whenEmpty() {
final IMap map = client.getMap(randomString());
final Collection values = map.values();
assertTrue(values.isEmpty());
}
@Test
public void testValues() {
final int max = 23;
final IMap map = client.getMap(randomString());
final Set expected = new TreeSet();
for (int key = 0; key < max; key++) {
Object value = key+"value";
expected.add(value);
map.put(key, value);
}
final Collection collection = map.values();
final Set resultSet = new TreeSet(collection);
assertEquals(expected, resultSet);
}
@Test
    public void testValues_withPredicate() {
        final int max = 27;
        final IMap map = client.getMap(randomString());
        final Set expected = new TreeSet();
        for (int key = 0; key < max; key++) {
            Object value = key+"value";
            map.put(key, value);
        }
        expected.add("4value");
        final Collection values = map.values(new SqlPredicate("this == 4value"));
        assertEquals(expected, new TreeSet(values));
    }
@Test
public void testEntrySet_whenEmpty() {
final IMap map = client.getMap(randomString());
Set<Map.Entry> entrySet = map.entrySet();
assertTrue(entrySet.isEmpty());
}
@Test
public void testEntrySet() {
final int max = 34;
final IMap map = client.getMap(randomString());
final Map expected = new HashMap();
for (int key = 0; key < max; key++) {
Object value = key+"value";
expected.put(key, value);
map.put(key, value);
}
Set<Map.Entry> entrySet = map.entrySet();
for(Map.Entry entry : entrySet){
Object value = entry.getValue();
Object key = entry.getKey();
Object expectedValue = expected.get(key);
assertEquals(expectedValue, value);
}
}
@Test
public void testEntrySet_withPredicate() {
final int max = 44;
final IMap map = client.getMap(randomString());
final Map expected = new HashMap();
for (int key = 0; key < max; key++) {
Object value = key+"value";
expected.put(key, value);
map.put(key, value);
}
final Set<Map.Entry> entrySet = map.entrySet(new SqlPredicate("this == 1value"));
Map.Entry entry = entrySet.iterator().next();
assertEquals(1, entry.getKey());
assertEquals("1value", entry.getValue());
assertEquals(1, entrySet.size());
}
@Test
public void testMapStatistics_withClientOperations() {
final String mapName = randomString();
final LocalMapStats serverMapStats = server.getMap(mapName).getLocalMapStats();
final IMap map = client.getMap(mapName);
final int operationCount = 1123;
for (int i = 0; i < operationCount; i++) {
map.put(i, i);
map.get(i);
map.remove(i);
}
assertEquals("put count", operationCount, serverMapStats.getPutOperationCount());
assertEquals("get count", operationCount, serverMapStats.getGetOperationCount());
assertEquals("remove count", operationCount, serverMapStats.getRemoveOperationCount());
assertTrue("put latency", 0 < serverMapStats.getTotalPutLatency());
assertTrue("get latency", 0 < serverMapStats.getTotalGetLatency());
assertTrue("remove latency", 0 < serverMapStats.getTotalRemoveLatency());
}
@Test(expected = UnsupportedOperationException.class)
public void testAddLocalEntryListener(){
final IMap map = client.getMap(randomString());
map.addLocalEntryListener(new DumEntryListener());
}
@Test(expected = UnsupportedOperationException.class)
public void testAddLocalEntryListener_WithPredicate(){
final IMap map = client.getMap(randomString());
map.addLocalEntryListener(new DumEntryListener(), new DumPredicate(), true);
}
@Test(expected = UnsupportedOperationException.class)
public void testAddLocalEntryListener_WithPredicateAndKey(){
final IMap map = client.getMap(randomString());
map.addLocalEntryListener(new DumEntryListener(), new DumPredicate(), "Key", true);
}
@Test(expected = UnsupportedOperationException.class)
public void testLocalKeySet(){
final IMap map = client.getMap(randomString());
map.localKeySet();
}
@Test(expected = UnsupportedOperationException.class)
public void testLocalKeySet_WithPredicate(){
final IMap map = client.getMap(randomString());
map.localKeySet(new DumPredicate());
}
static class DumEntryListener implements EntryListener {
public void entryAdded(EntryEvent event) {
}
public void entryRemoved(EntryEvent event) {
}
public void entryUpdated(EntryEvent event) {
}
public void entryEvicted(EntryEvent event) {
}
}
static class DumPredicate implements Predicate {
public boolean apply(Map.Entry mapEntry) {
return false;
}
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapBasicTest.java
|
1,131 |
public class OrderItemType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, OrderItemType> TYPES = new LinkedHashMap<String, OrderItemType>();
public static final OrderItemType BASIC = new OrderItemType("org.broadleafcommerce.core.order.domain.OrderItem", "Basic Order Item");
public static final OrderItemType DISCRETE = new OrderItemType("org.broadleafcommerce.core.order.domain.DiscreteOrderItem", "Discrete Order Item");
public static final OrderItemType EXTERNALLY_PRICED = new OrderItemType("org.broadleafcommerce.core.order.domain.DynamicPriceDiscreteOrderItem", "Externally Priced Discrete Order Item");
public static final OrderItemType BUNDLE = new OrderItemType("org.broadleafcommerce.core.order.domain.BundleOrderItem", "Bundle Order Item");
public static final OrderItemType GIFTWRAP = new OrderItemType("org.broadleafcommerce.core.order.domain.GiftWrapOrderItem", "Gift Wrap Order Item");
public static OrderItemType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public OrderItemType() {
//do nothing
}
public OrderItemType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OrderItemType other = (OrderItemType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
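    // Hedged usage sketch, not part of the original source: the TYPES registry populated in setType()
    // lets a persisted discriminator string be resolved back to its constant, e.g.
    // OrderItemType.getInstance("org.broadleafcommerce.core.order.domain.BundleOrderItem") returns
    // BUNDLE, while an unknown string simply yields null.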
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_service_type_OrderItemType.java
|
5,262 |
public class RangeParser implements Aggregator.Parser {
@Override
public String type() {
return InternalRange.TYPE.name();
}
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceConfig<NumericValuesSource> config = new ValuesSourceConfig<NumericValuesSource>(NumericValuesSource.class);
String field = null;
List<RangeAggregator.Range> ranges = null;
String script = null;
String scriptLang = null;
Map<String, Object> scriptParams = null;
boolean keyed = false;
boolean assumeSorted = false;
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("field".equals(currentFieldName)) {
field = parser.text();
} else if ("script".equals(currentFieldName)) {
script = parser.text();
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("ranges".equals(currentFieldName)) {
ranges = new ArrayList<RangeAggregator.Range>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
double from = Double.NEGATIVE_INFINITY;
String fromAsStr = null;
double to = Double.POSITIVE_INFINITY;
String toAsStr = null;
String key = null;
String toOrFromOrKey = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
toOrFromOrKey = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("from".equals(toOrFromOrKey)) {
from = parser.doubleValue();
} else if ("to".equals(toOrFromOrKey)) {
to = parser.doubleValue();
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("from".equals(toOrFromOrKey)) {
fromAsStr = parser.text();
} else if ("to".equals(toOrFromOrKey)) {
toAsStr = parser.text();
} else if ("key".equals(toOrFromOrKey)) {
key = parser.text();
}
}
}
ranges.add(new RangeAggregator.Range(key, from, fromAsStr, to, toAsStr));
}
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if ("keyed".equals(currentFieldName)) {
keyed = parser.booleanValue();
} else if ("script_values_sorted".equals(currentFieldName)) {
assumeSorted = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].");
}
}
if (ranges == null) {
throw new SearchParseException(context, "Missing [ranges] in ranges aggregator [" + aggregationName + "]");
}
if (script != null) {
config.script(context.scriptService().search(context.lookup(), scriptLang, script, scriptParams));
}
if (!assumeSorted) {
// we need values to be sorted and unique for efficiency
config.ensureSorted(true);
}
if (field == null) {
return new RangeAggregator.Factory(aggregationName, config, InternalRange.FACTORY, ranges, keyed);
}
FieldMapper<?> mapper = context.smartNameFieldMapper(field);
if (mapper == null) {
config.unmapped(true);
return new RangeAggregator.Factory(aggregationName, config, InternalRange.FACTORY, ranges, keyed);
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
config.fieldContext(new FieldContext(field, indexFieldData));
return new RangeAggregator.Factory(aggregationName, config, InternalRange.FACTORY, ranges, keyed);
}
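    // Hedged example (an assumption, for illustration only): a request body this parser is written to
    // handle looks roughly like
    //   "price_ranges" : {
    //     "range" : {
    //       "field" : "price",
    //       "keyed" : true,
    //       "ranges" : [ { "to" : 10 }, { "key" : "mid", "from" : 10, "to" : 100 }, { "from" : 100 } ]
    //     }
    //   }
    // A missing "from" or "to" defaults to -Infinity / +Infinity, and "key" names the bucket.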
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_bucket_range_RangeParser.java
|
139 |
@Test
public class DateSerializerTest {
private final static int FIELD_SIZE = 8;
private final Date OBJECT = new Date();
private ODateSerializer dateSerializer;
private final byte[] stream = new byte[FIELD_SIZE];
@BeforeClass
public void beforeClass() {
dateSerializer = new ODateSerializer();
}
public void testFieldSize() {
Assert.assertEquals(dateSerializer.getObjectSize(OBJECT), FIELD_SIZE);
}
public void testSerialize() {
dateSerializer.serialize(OBJECT, stream, 0);
Calendar calendar = Calendar.getInstance();
calendar.setTime(OBJECT);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
Assert.assertEquals(dateSerializer.deserialize(stream, 0), calendar.getTime());
}
public void testSerializeNative() {
dateSerializer.serializeNative(OBJECT, stream, 0);
Calendar calendar = Calendar.getInstance();
calendar.setTime(OBJECT);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
Assert.assertEquals(dateSerializer.deserializeNative(stream, 0), calendar.getTime());
}
public void testNativeDirectMemoryCompatibility() {
dateSerializer.serializeNative(OBJECT, stream, 0);
Calendar calendar = Calendar.getInstance();
calendar.setTime(OBJECT);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream);
try {
Assert.assertEquals(dateSerializer.deserializeFromDirectMemory(pointer, 0), calendar.getTime());
} finally {
pointer.free();
}
}
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_serialization_types_DateSerializerTest.java
|
621 |
public class PrepareMergeOperation extends AbstractClusterOperation {
private Address newTargetAddress;
public PrepareMergeOperation() {
}
public PrepareMergeOperation(Address newTargetAddress) {
this.newTargetAddress = newTargetAddress;
}
@Override
public void run() {
final Address caller = getCallerAddress();
final NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
final Node node = nodeEngine.getNode();
final Address masterAddress = node.getMasterAddress();
final ILogger logger = node.loggingService.getLogger(this.getClass().getName());
boolean local = caller == null;
if (!local && !caller.equals(masterAddress)) {
logger.warning("Prepare-merge instruction sent from non-master endpoint: " + caller);
return;
}
logger.warning("Preparing to merge... Waiting for merge instruction...");
node.getClusterService().prepareToMerge(newTargetAddress);
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
public Object getResponse() {
return Boolean.TRUE;
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
newTargetAddress = new Address();
newTargetAddress.readData(in);
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
newTargetAddress.writeData(out);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_cluster_PrepareMergeOperation.java
|
203 |
public abstract class ClientAbstractSelectionHandler implements SelectionHandler, Runnable {
protected final ILogger logger;
protected final SocketChannelWrapper socketChannel;
protected final ClientConnection connection;
protected final ClientConnectionManagerImpl connectionManager;
protected final IOSelector ioSelector;
private SelectionKey sk;
public ClientAbstractSelectionHandler(final ClientConnection connection, IOSelector ioSelector) {
this.connection = connection;
this.ioSelector = ioSelector;
this.socketChannel = connection.getSocketChannelWrapper();
this.connectionManager = connection.getConnectionManager();
this.logger = Logger.getLogger(getClass().getName());
}
protected void shutdown() {
}
final void handleSocketException(Throwable e) {
if (sk != null) {
sk.cancel();
}
connection.close(e);
StringBuilder sb = new StringBuilder();
sb.append(Thread.currentThread().getName());
sb.append(" Closing socket to endpoint ");
sb.append(connection.getEndPoint());
sb.append(", Cause:").append(e);
logger.warning(sb.toString());
}
final void registerOp(final int operation) {
try {
if (!connection.live()) {
return;
}
if (sk == null) {
sk = socketChannel.keyFor(ioSelector.getSelector());
}
if (sk == null) {
sk = socketChannel.register(ioSelector.getSelector(), operation, this);
} else {
sk.interestOps(sk.interestOps() | operation);
if (sk.attachment() != this) {
sk.attach(this);
}
}
} catch (Throwable e) {
handleSocketException(e);
}
}
public void register() {
ioSelector.addTask(this);
ioSelector.wakeup();
}
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientAbstractSelectionHandler.java
|
99 |
@SuppressWarnings("serial")
static final class SearchValuesTask<K,V,U>
extends BulkTask<K,V,U> {
final Fun<? super V, ? extends U> searchFunction;
final AtomicReference<U> result;
SearchValuesTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
Fun<? super V, ? extends U> searchFunction,
AtomicReference<U> result) {
super(p, b, i, f, t);
this.searchFunction = searchFunction; this.result = result;
}
public final U getRawResult() { return result.get(); }
public final void compute() {
final Fun<? super V, ? extends U> searchFunction;
final AtomicReference<U> result;
if ((searchFunction = this.searchFunction) != null &&
(result = this.result) != null) {
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
if (result.get() != null)
return;
addToPendingCount(1);
new SearchValuesTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
searchFunction, result).fork();
}
while (result.get() == null) {
U u;
Node<K,V> p;
if ((p = advance()) == null) {
propagateCompletion();
break;
}
if ((u = searchFunction.apply(p.val)) != null) {
if (result.compareAndSet(null, u))
quietlyCompleteRoot();
break;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
313 |
public class StandardConfigLocations {
private static final Log LOG = LogFactory.getLog(StandardConfigLocations.class);
public static final String EXTRACONFIGLOCATIONSKEY = "extra.config.locations";
public static final int ALLCONTEXTTYPE = 0;
public static final int WEBCONTEXTTYPE = 1;
public static final int SERVICECONTEXTTYPE = 2;
public static final int TESTCONTEXTTYPE = 3;
public static final int APPCONTEXTTYPE = 4;
public static String[] retrieveAll(int contextType) throws IOException {
String[] response;
BufferedReader reader = null;
try {
reader = new BufferedReader(new InputStreamReader(StandardConfigLocations.class.getResourceAsStream("StandardConfigLocations.txt")));
ArrayList<String> items = new ArrayList<String>();
boolean eof = false;
while (!eof) {
String temp = reader.readLine();
if (temp == null) {
eof = true;
} else {
addContextFile(contextType, items, temp);
}
}
String extraConfigFiles = System.getProperty(EXTRACONFIGLOCATIONSKEY);
if (extraConfigFiles != null) {
String[] files = extraConfigFiles.split(" ");
for (String file : files) {
addContextFile(contextType, items, file);
}
}
response = new String[]{};
response = items.toArray(response);
} finally {
if (reader != null) {
                try { reader.close(); } catch (Throwable e) {
                    LOG.error("Unable to close the standard configuration locations reader", e);
}
}
}
return response;
}
private static void addContextFile(int contextType, ArrayList<String> items, String temp) {
if (!temp.startsWith("#") && temp.trim().length() > 0 && StandardConfigLocations.class.getClassLoader().getResource(temp.trim()) != null) {
if (
contextType == ALLCONTEXTTYPE ||
((contextType == WEBCONTEXTTYPE || contextType == APPCONTEXTTYPE) && temp.indexOf("-web-") >= 0) ||
((contextType == SERVICECONTEXTTYPE || contextType == TESTCONTEXTTYPE || contextType == APPCONTEXTTYPE) && temp.indexOf("-web-") < 0 && temp.indexOf("-test") < 0 && temp.indexOf("-admin-") < 0) ||
((contextType == SERVICECONTEXTTYPE || contextType == TESTCONTEXTTYPE || contextType == APPCONTEXTTYPE) && temp.indexOf("-admin-applicationContext-persistence") >= 0) ||
(contextType == TESTCONTEXTTYPE && (temp.indexOf("-test") >= 0 || temp.indexOf("-admin-") >= 0 || temp.indexOf("-web-") >= 0))
){
items.add(temp.trim());
}
}
}
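    // Worked example (file names are assumptions, for illustration only): with WEBCONTEXTTYPE a
    // classpath entry such as "bl-framework-web-applicationContext.xml" is accepted because it
    // contains "-web-", while "bl-framework-applicationContext-test.xml" is not; with
    // TESTCONTEXTTYPE both would be accepted, provided the resource actually exists on the classpath.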
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_StandardConfigLocations.java
|
38 |
@Test
public class OMVRBTreeNonCompositeTest {
protected OMVRBTree<Double, Double> tree;
@BeforeMethod
public void beforeMethod() throws Exception {
tree = new OMVRBTreeMemory<Double, Double>(4, 0.5f);
for (double i = 1; i < 10; i++) {
tree.put(i, i);
}
}
@Test
public void testGetEntry() {
assertEquals(tree.get(1.0), 1.0);
assertEquals(tree.get(3.0), 3.0);
assertEquals(tree.get(7.0), 7.0);
assertEquals(tree.get(9.0), 9.0);
assertNull(tree.get(10.0));
}
@Test
public void testSubMapInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.subMap(2.0, true, 7.0, true);
assertEquals(navigableMap.size(), 6);
for (double i = 2; i <= 7; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testSubMapFromInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.subMap(2.0, true, 7.0, false);
assertEquals(navigableMap.size(), 5);
for (double i = 2; i < 7; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testSubMapToInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.subMap(2.0, false, 7.0, true);
assertEquals(navigableMap.size(), 5);
for (double i = 3; i <= 7; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testSubMapNonInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.subMap(2.0, false, 7.0, false);
assertEquals(navigableMap.size(), 4);
for (double i = 3; i < 7; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testTailMapInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.tailMap(2.0, true);
assertEquals(navigableMap.size(), 8);
for (double i = 2; i <= 9; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testTailMapNonInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.tailMap(2.0, false);
assertEquals(navigableMap.size(), 7);
for (double i = 3; i <= 9; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testHeadMapInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.headMap(7.0, true);
assertEquals(navigableMap.size(), 7);
for (double i = 1; i <= 7; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testHeadMapNonInclusive() {
final ONavigableMap<Double, Double> navigableMap = tree.headMap(7.0, false);
assertEquals(navigableMap.size(), 6);
for (double i = 1; i < 7; i++) {
assertTrue(navigableMap.containsKey(i));
}
}
@Test
public void testGetCeilingEntryKeyExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getCeilingEntry(4.0, OMVRBTree.PartialSearchMode.NONE);
assertEquals(entry.getKey(), 4.0);
entry = tree.getCeilingEntry(4.0, OMVRBTree.PartialSearchMode.HIGHEST_BOUNDARY);
assertEquals(entry.getKey(), 4.0);
entry = tree.getCeilingEntry(4.0, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
assertEquals(entry.getKey(), 4.0);
}
@Test
public void testGetCeilingEntryKeyNotExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getCeilingEntry(4.3, OMVRBTree.PartialSearchMode.NONE);
assertEquals(entry.getKey(), 5.0);
entry = tree.getCeilingEntry(4.3, OMVRBTree.PartialSearchMode.HIGHEST_BOUNDARY);
assertEquals(entry.getKey(), 5.0);
entry = tree.getCeilingEntry(4.3, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
assertEquals(entry.getKey(), 5.0);
entry = tree.getCeilingEntry(20.0, OMVRBTree.PartialSearchMode.NONE);
assertNull(entry);
entry = tree.getCeilingEntry(-1.0, OMVRBTree.PartialSearchMode.NONE);
assertEquals(entry.getKey(), 1.0);
}
@Test
public void testGetFloorEntryKeyExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getFloorEntry(4.0, OMVRBTree.PartialSearchMode.NONE);
assertEquals(entry.getKey(), 4.0);
entry = tree.getFloorEntry(4.0, OMVRBTree.PartialSearchMode.HIGHEST_BOUNDARY);
assertEquals(entry.getKey(), 4.0);
}
@Test
public void testGetFloorEntryKeyNotExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getFloorEntry(4.3, OMVRBTree.PartialSearchMode.NONE);
assertEquals(entry.getKey(), 4.0);
entry = tree.getFloorEntry(4.3, OMVRBTree.PartialSearchMode.HIGHEST_BOUNDARY);
assertEquals(entry.getKey(), 4.0);
entry = tree.getFloorEntry(4.3, OMVRBTree.PartialSearchMode.LOWEST_BOUNDARY);
assertEquals(entry.getKey(), 4.0);
entry = tree.getFloorEntry(20.0, OMVRBTree.PartialSearchMode.NONE);
assertEquals(entry.getKey(), 9.0);
entry = tree.getFloorEntry(-1.0, OMVRBTree.PartialSearchMode.NONE);
assertNull(entry);
}
@Test
public void testHigherEntryKeyExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getHigherEntry(4.0);
assertEquals(entry.getKey(), 5.0);
}
@Test
public void testHigherEntryKeyNotExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getHigherEntry(4.5);
assertEquals(entry.getKey(), 5.0);
}
@Test
public void testHigherEntryNullResult() {
OMVRBTreeEntry<Double, Double> entry = tree.getHigherEntry(12.0);
assertNull(entry);
}
@Test
public void testLowerEntryNullResult() {
OMVRBTreeEntry<Double, Double> entry = tree.getLowerEntry(0.0);
assertNull(entry);
}
@Test
public void testLowerEntryKeyExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getLowerEntry(4.0);
assertEquals(entry.getKey(), 3.0);
}
@Test
public void testLowerEntryKeyNotExist() {
OMVRBTreeEntry<Double, Double> entry = tree.getLowerEntry(4.5);
assertEquals(entry.getKey(), 4.0);
}
}
| 0true
|
core_src_test_java_com_orientechnologies_common_collection_OMVRBTreeNonCompositeTest.java
|
1,370 |
public class ClusterBlocks {
public static final ClusterBlocks EMPTY_CLUSTER_BLOCK = new ClusterBlocks(ImmutableSet.<ClusterBlock>of(), ImmutableMap.<String, ImmutableSet<ClusterBlock>>of());
private final ImmutableSet<ClusterBlock> global;
private final ImmutableMap<String, ImmutableSet<ClusterBlock>> indicesBlocks;
private final ImmutableLevelHolder[] levelHolders;
ClusterBlocks(ImmutableSet<ClusterBlock> global, ImmutableMap<String, ImmutableSet<ClusterBlock>> indicesBlocks) {
this.global = global;
this.indicesBlocks = indicesBlocks;
levelHolders = new ImmutableLevelHolder[ClusterBlockLevel.values().length];
for (ClusterBlockLevel level : ClusterBlockLevel.values()) {
ImmutableSet.Builder<ClusterBlock> globalBuilder = ImmutableSet.builder();
for (ClusterBlock block : global) {
if (block.contains(level)) {
globalBuilder.add(block);
}
}
ImmutableMap.Builder<String, ImmutableSet<ClusterBlock>> indicesBuilder = ImmutableMap.builder();
for (Map.Entry<String, ImmutableSet<ClusterBlock>> entry : indicesBlocks.entrySet()) {
ImmutableSet.Builder<ClusterBlock> indexBuilder = ImmutableSet.builder();
for (ClusterBlock block : entry.getValue()) {
if (block.contains(level)) {
indexBuilder.add(block);
}
}
indicesBuilder.put(entry.getKey(), indexBuilder.build());
}
levelHolders[level.id()] = new ImmutableLevelHolder(globalBuilder.build(), indicesBuilder.build());
}
}
public ImmutableSet<ClusterBlock> global() {
return global;
}
public ImmutableMap<String, ImmutableSet<ClusterBlock>> indices() {
return indicesBlocks;
}
public ImmutableSet<ClusterBlock> global(ClusterBlockLevel level) {
return levelHolders[level.id()].global();
}
public ImmutableMap<String, ImmutableSet<ClusterBlock>> indices(ClusterBlockLevel level) {
return levelHolders[level.id()].indices();
}
/**
     * Returns <tt>true</tt> if one of the global blocks has its disable state persistence flag set.
*/
public boolean disableStatePersistence() {
for (ClusterBlock clusterBlock : global) {
if (clusterBlock.disableStatePersistence()) {
return true;
}
}
return false;
}
public boolean hasGlobalBlock(ClusterBlock block) {
return global.contains(block);
}
/**
* Is there a global block with the provided status?
*/
public boolean hasGlobalBlock(RestStatus status) {
for (ClusterBlock clusterBlock : global) {
if (clusterBlock.status().equals(status)) {
return true;
}
}
return false;
}
public boolean hasIndexBlock(String index, ClusterBlock block) {
return indicesBlocks.containsKey(index) && indicesBlocks.get(index).contains(block);
}
public void globalBlockedRaiseException(ClusterBlockLevel level) throws ClusterBlockException {
ClusterBlockException blockException = globalBlockedException(level);
if (blockException != null) {
throw blockException;
}
}
public ClusterBlockException globalBlockedException(ClusterBlockLevel level) {
if (global(level).isEmpty()) {
return null;
}
return new ClusterBlockException(ImmutableSet.copyOf(global(level)));
}
public void indexBlockedRaiseException(ClusterBlockLevel level, String index) throws ClusterBlockException {
ClusterBlockException blockException = indexBlockedException(level, index);
if (blockException != null) {
throw blockException;
}
}
public ClusterBlockException indexBlockedException(ClusterBlockLevel level, String index) {
if (!indexBlocked(level, index)) {
return null;
}
ImmutableSet.Builder<ClusterBlock> builder = ImmutableSet.builder();
builder.addAll(global(level));
ImmutableSet<ClusterBlock> indexBlocks = indices(level).get(index);
if (indexBlocks != null) {
builder.addAll(indexBlocks);
}
return new ClusterBlockException(builder.build());
}
public boolean indexBlocked(ClusterBlockLevel level, String index) {
if (!global(level).isEmpty()) {
return true;
}
ImmutableSet<ClusterBlock> indexBlocks = indices(level).get(index);
if (indexBlocks != null && !indexBlocks.isEmpty()) {
return true;
}
return false;
}
public ClusterBlockException indicesBlockedException(ClusterBlockLevel level, String[] indices) {
boolean indexIsBlocked = false;
for (String index : indices) {
if (indexBlocked(level, index)) {
indexIsBlocked = true;
}
}
if (!indexIsBlocked) {
return null;
}
ImmutableSet.Builder<ClusterBlock> builder = ImmutableSet.builder();
builder.addAll(global(level));
for (String index : indices) {
ImmutableSet<ClusterBlock> indexBlocks = indices(level).get(index);
if (indexBlocks != null) {
builder.addAll(indexBlocks);
}
}
return new ClusterBlockException(builder.build());
}
static class ImmutableLevelHolder {
static final ImmutableLevelHolder EMPTY = new ImmutableLevelHolder(ImmutableSet.<ClusterBlock>of(), ImmutableMap.<String, ImmutableSet<ClusterBlock>>of());
private final ImmutableSet<ClusterBlock> global;
private final ImmutableMap<String, ImmutableSet<ClusterBlock>> indices;
ImmutableLevelHolder(ImmutableSet<ClusterBlock> global, ImmutableMap<String, ImmutableSet<ClusterBlock>> indices) {
this.global = global;
this.indices = indices;
}
public ImmutableSet<ClusterBlock> global() {
return global;
}
public ImmutableMap<String, ImmutableSet<ClusterBlock>> indices() {
return indices;
}
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private Set<ClusterBlock> global = Sets.newHashSet();
private Map<String, Set<ClusterBlock>> indices = Maps.newHashMap();
public Builder() {
}
public Builder blocks(ClusterBlocks blocks) {
global.addAll(blocks.global());
for (Map.Entry<String, ImmutableSet<ClusterBlock>> entry : blocks.indices().entrySet()) {
if (!indices.containsKey(entry.getKey())) {
indices.put(entry.getKey(), Sets.<ClusterBlock>newHashSet());
}
indices.get(entry.getKey()).addAll(entry.getValue());
}
return this;
}
public Builder addBlocks(IndexMetaData indexMetaData) {
if (indexMetaData.state() == IndexMetaData.State.CLOSE) {
addIndexBlock(indexMetaData.index(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK);
}
if (indexMetaData.settings().getAsBoolean(IndexMetaData.SETTING_READ_ONLY, false)) {
addIndexBlock(indexMetaData.index(), IndexMetaData.INDEX_READ_ONLY_BLOCK);
}
if (indexMetaData.settings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_READ, false)) {
addIndexBlock(indexMetaData.index(), IndexMetaData.INDEX_READ_BLOCK);
}
if (indexMetaData.settings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_WRITE, false)) {
addIndexBlock(indexMetaData.index(), IndexMetaData.INDEX_WRITE_BLOCK);
}
if (indexMetaData.settings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_METADATA, false)) {
addIndexBlock(indexMetaData.index(), IndexMetaData.INDEX_METADATA_BLOCK);
}
return this;
}
public Builder addGlobalBlock(ClusterBlock block) {
global.add(block);
return this;
}
public Builder removeGlobalBlock(ClusterBlock block) {
global.remove(block);
return this;
}
public Builder addIndexBlock(String index, ClusterBlock block) {
if (!indices.containsKey(index)) {
indices.put(index, Sets.<ClusterBlock>newHashSet());
}
indices.get(index).add(block);
return this;
}
public Builder removeIndexBlocks(String index) {
if (!indices.containsKey(index)) {
return this;
}
indices.remove(index);
return this;
}
public Builder removeIndexBlock(String index, ClusterBlock block) {
if (!indices.containsKey(index)) {
return this;
}
indices.get(index).remove(block);
if (indices.get(index).isEmpty()) {
indices.remove(index);
}
return this;
}
public ClusterBlocks build() {
ImmutableMap.Builder<String, ImmutableSet<ClusterBlock>> indicesBuilder = ImmutableMap.builder();
for (Map.Entry<String, Set<ClusterBlock>> entry : indices.entrySet()) {
indicesBuilder.put(entry.getKey(), ImmutableSet.copyOf(entry.getValue()));
}
return new ClusterBlocks(ImmutableSet.copyOf(global), indicesBuilder.build());
}
public static ClusterBlocks readClusterBlocks(StreamInput in) throws IOException {
ImmutableSet<ClusterBlock> global = readBlockSet(in);
ImmutableMap.Builder<String, ImmutableSet<ClusterBlock>> indicesBuilder = ImmutableMap.builder();
int size = in.readVInt();
for (int j = 0; j < size; j++) {
indicesBuilder.put(in.readString().intern(), readBlockSet(in));
}
return new ClusterBlocks(global, indicesBuilder.build());
}
public static void writeClusterBlocks(ClusterBlocks blocks, StreamOutput out) throws IOException {
writeBlockSet(blocks.global(), out);
out.writeVInt(blocks.indices().size());
for (Map.Entry<String, ImmutableSet<ClusterBlock>> entry : blocks.indices().entrySet()) {
out.writeString(entry.getKey());
writeBlockSet(entry.getValue(), out);
}
}
private static void writeBlockSet(ImmutableSet<ClusterBlock> blocks, StreamOutput out) throws IOException {
out.writeVInt(blocks.size());
for (ClusterBlock block : blocks) {
block.writeTo(out);
}
}
private static ImmutableSet<ClusterBlock> readBlockSet(StreamInput in) throws IOException {
ImmutableSet.Builder<ClusterBlock> builder = ImmutableSet.builder();
int size = in.readVInt();
for (int i = 0; i < size; i++) {
builder.add(ClusterBlock.readClusterBlock(in));
}
return builder.build();
}
}
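    // Hedged usage sketch, not part of the original class; the block instances and index name below
    // are placeholders:
    //   ClusterBlocks blocks = ClusterBlocks.builder()
    //           .addGlobalBlock(someGlobalBlock)
    //           .addIndexBlock("my_index", someIndexBlock)
    //           .build();
    //   blocks.indexBlockedRaiseException(ClusterBlockLevel.WRITE, "my_index");
    // The last call throws a ClusterBlockException when a matching block exists at the WRITE level.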
}
| 1no label
|
src_main_java_org_elasticsearch_cluster_block_ClusterBlocks.java
|
938 |
class ShardTransportHandler extends BaseTransportRequestHandler<ShardRequest> {
@Override
public ShardRequest newInstance() {
return newShardRequest();
}
@Override
public String executor() {
return executor;
}
@Override
public void messageReceived(final ShardRequest request, final TransportChannel channel) throws Exception {
channel.sendResponse(shardOperation(request));
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_broadcast_TransportBroadcastOperationAction.java
|
1,641 |
@Component("blDefaultFieldMetadataProvider")
@Scope("prototype")
public class DefaultFieldMetadataProvider extends BasicFieldMetadataProvider {
private static final Log LOG = LogFactory.getLog(DefaultFieldMetadataProvider.class);
@Override
public FieldProviderResponse addMetadata(AddMetadataRequest addMetadataRequest, Map<String, FieldMetadata> metadata) {
Map<String, Object> idMetadata = addMetadataRequest.getDynamicEntityDao().getIdMetadata(addMetadataRequest.getTargetClass());
if (idMetadata != null) {
String idField = (String) idMetadata.get("name");
boolean processField;
//allow id fields without AdminPresentation annotation to pass through
processField = idField.equals(addMetadataRequest.getRequestedField().getName());
if (!processField) {
List<String> propertyNames = addMetadataRequest.getDynamicEntityDao().getPropertyNames(
addMetadataRequest.getTargetClass());
if (!CollectionUtils.isEmpty(propertyNames)) {
List<org.hibernate.type.Type> propertyTypes = addMetadataRequest.getDynamicEntityDao().getPropertyTypes(
addMetadataRequest.getTargetClass());
int index = propertyNames.indexOf(addMetadataRequest.getRequestedField().getName());
if (index >= 0) {
Type myType = propertyTypes.get(index);
//allow OneToOne, ManyToOne and Embeddable fields to pass through
processField = myType.isCollectionType() || myType.isAssociationType() ||
myType.isComponentType() || myType.isEntityType();
}
}
}
if (processField) {
FieldInfo info = buildFieldInfo(addMetadataRequest.getRequestedField());
BasicFieldMetadata basicMetadata = new BasicFieldMetadata();
basicMetadata.setName(addMetadataRequest.getRequestedField().getName());
basicMetadata.setExcluded(false);
metadata.put(addMetadataRequest.getRequestedField().getName(), basicMetadata);
setClassOwnership(addMetadataRequest.getParentClass(), addMetadataRequest.getTargetClass(), metadata, info);
return FieldProviderResponse.HANDLED;
}
}
return FieldProviderResponse.NOT_HANDLED;
}
public void overrideExclusionsFromXml(OverrideViaXmlRequest overrideViaXmlRequest, Map<String, FieldMetadata> metadata) {
//override any and all exclusions derived from xml
Map<String, FieldMetadataOverride> overrides = getTargetedOverride(overrideViaXmlRequest.getDynamicEntityDao(), overrideViaXmlRequest.getRequestedConfigKey(),
overrideViaXmlRequest.getRequestedCeilingEntity());
if (overrides != null) {
for (String propertyName : overrides.keySet()) {
final FieldMetadataOverride localMetadata = overrides.get(propertyName);
Boolean excluded = localMetadata.getExcluded();
for (String key : metadata.keySet()) {
String testKey = overrideViaXmlRequest.getPrefix() + key;
if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && excluded != null &&
excluded) {
FieldMetadata fieldMetadata = metadata.get(key);
if (LOG.isDebugEnabled()) {
LOG.debug("setExclusionsBasedOnParents:Excluding " + key +
"because an override annotation declared "+ testKey + " to be excluded");
}
fieldMetadata.setExcluded(true);
continue;
}
if ((testKey.startsWith(propertyName + ".") || testKey.equals(propertyName)) && excluded != null &&
!excluded) {
FieldMetadata fieldMetadata = metadata.get(key);
if (!overrideViaXmlRequest.getParentExcluded()) {
if (LOG.isDebugEnabled()) {
LOG.debug("setExclusionsBasedOnParents:Showing " + key +
"because an override annotation declared " + testKey + " to not be excluded");
}
fieldMetadata.setExcluded(false);
}
}
}
}
}
}
@Override
public FieldProviderResponse addMetadataFromMappingData(AddMetadataFromMappingDataRequest addMetadataFromMappingDataRequest,
FieldMetadata metadata) {
BasicFieldMetadata fieldMetadata = (BasicFieldMetadata) metadata;
fieldMetadata.setFieldType(addMetadataFromMappingDataRequest.getType());
fieldMetadata.setSecondaryType(addMetadataFromMappingDataRequest.getSecondaryType());
if (addMetadataFromMappingDataRequest.getRequestedEntityType() != null &&
!addMetadataFromMappingDataRequest.getRequestedEntityType().isCollectionType()) {
Column column = null;
for (Property property : addMetadataFromMappingDataRequest.getComponentProperties()) {
if (property.getName().equals(addMetadataFromMappingDataRequest.getPropertyName())) {
column = (Column) property.getColumnIterator().next();
break;
}
}
if (column != null) {
fieldMetadata.setLength(column.getLength());
fieldMetadata.setScale(column.getScale());
fieldMetadata.setPrecision(column.getPrecision());
fieldMetadata.setRequired(!column.isNullable());
fieldMetadata.setUnique(column.isUnique());
}
fieldMetadata.setForeignKeyCollection(false);
} else {
fieldMetadata.setForeignKeyCollection(true);
}
fieldMetadata.setMutable(true);
fieldMetadata.setMergedPropertyType(addMetadataFromMappingDataRequest.getMergedPropertyType());
if (SupportedFieldType.BROADLEAF_ENUMERATION.equals(addMetadataFromMappingDataRequest.getType())) {
try {
setupBroadleafEnumeration(fieldMetadata.getBroadleafEnumeration(), fieldMetadata,
addMetadataFromMappingDataRequest.getDynamicEntityDao());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
return FieldProviderResponse.HANDLED;
}
@Override
public FieldProviderResponse addMetadataFromFieldType(AddMetadataFromFieldTypeRequest addMetadataFromFieldTypeRequest,
Map<String, FieldMetadata> metadata) {
if (addMetadataFromFieldTypeRequest.getPresentationAttribute() != null) {
if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.UNKNOWN &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.BOOLEAN &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.INTEGER &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.DATE &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.STRING &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.MONEY &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.DECIMAL &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.FOREIGN_KEY &&
addMetadataFromFieldTypeRequest.getExplicitType() != SupportedFieldType.ADDITIONAL_FOREIGN_KEY
) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
.getMetadata().getFieldMetadata(addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
addMetadataFromFieldTypeRequest.getExplicitType(), addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(), addMetadataFromFieldTypeRequest.
getMergedPropertyType(), addMetadataFromFieldTypeRequest.getDynamicEntityDao()));
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.BOOLEAN
||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Boolean.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Character.class)
) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
.getMetadata().getFieldMetadata(addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.BOOLEAN, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()));
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.INTEGER
||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Byte.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Short.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Integer.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Long.class)
) {
if (addMetadataFromFieldTypeRequest.getRequestedPropertyName().equals(addMetadataFromFieldTypeRequest.getIdProperty())) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(
addMetadataFromFieldTypeRequest.getPrefix(), addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.ID, SupportedFieldType.INTEGER, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(), addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(), addMetadataFromFieldTypeRequest.getDynamicEntityDao()));
} else {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(addMetadataFromFieldTypeRequest
.getPrefix(), addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.INTEGER, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(), addMetadataFromFieldTypeRequest.
getPresentationAttribute(), addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()));
}
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.DATE
||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Calendar.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Date.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Timestamp.class)
) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
.getMetadata().getFieldMetadata(addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.DATE, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.STRING
||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(String.class)
) {
if (addMetadataFromFieldTypeRequest.getRequestedPropertyName().equals(addMetadataFromFieldTypeRequest.getIdProperty())) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(
addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.ID, SupportedFieldType.STRING,
addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()));
} else {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(
addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.STRING, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()));
}
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.MONEY
||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Money.class)
) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(
addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.MONEY, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.DECIMAL
||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(Double.class) ||
addMetadataFromFieldTypeRequest.getReturnedClass().equals(BigDecimal.class)
) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(
addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.DECIMAL, addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.FOREIGN_KEY
||
addMetadataFromFieldTypeRequest.getForeignField() != null &&
addMetadataFromFieldTypeRequest.isPropertyForeignKey()
) {
ClassMetadata foreignMetadata;
String foreignKeyClass;
String lookupDisplayProperty;
if (addMetadataFromFieldTypeRequest.getForeignField() == null) {
Class<?>[] entities = addMetadataFromFieldTypeRequest.getDynamicEntityDao().
getAllPolymorphicEntitiesFromCeiling(addMetadataFromFieldTypeRequest.getType().getReturnedClass());
foreignMetadata = addMetadataFromFieldTypeRequest.getDynamicEntityDao().getSessionFactory().getClassMetadata(entities
[entities.length - 1]);
foreignKeyClass = entities[entities.length - 1].getName();
lookupDisplayProperty = ((BasicFieldMetadata) addMetadataFromFieldTypeRequest.
getPresentationAttribute()).getLookupDisplayProperty();
if (StringUtils.isEmpty(lookupDisplayProperty) &&
AdminMainEntity.class.isAssignableFrom(entities[entities.length - 1])) {
lookupDisplayProperty = AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY;
}
if (StringUtils.isEmpty(lookupDisplayProperty)) {
lookupDisplayProperty = "name";
}
} else {
try {
foreignMetadata = addMetadataFromFieldTypeRequest.getDynamicEntityDao().getSessionFactory().
getClassMetadata(Class.forName(addMetadataFromFieldTypeRequest.getForeignField()
.getForeignKeyClass()));
foreignKeyClass = addMetadataFromFieldTypeRequest.getForeignField().getForeignKeyClass();
lookupDisplayProperty = addMetadataFromFieldTypeRequest.getForeignField().getDisplayValueProperty();
if (StringUtils.isEmpty(lookupDisplayProperty) &&
AdminMainEntity.class.isAssignableFrom(Class.forName(foreignKeyClass))) {
lookupDisplayProperty = AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY;
}
if (StringUtils.isEmpty(lookupDisplayProperty)) {
lookupDisplayProperty = "name";
}
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
Class<?> foreignResponseType = foreignMetadata.getIdentifierType().getReturnedClass();
if (foreignResponseType.equals(String.class)) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().
getFieldMetadata(addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.FOREIGN_KEY, SupportedFieldType.STRING,
addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
} else {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(), addMetadataFromFieldTypeRequest
.getDynamicEntityDao().getMetadata().getFieldMetadata(addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.FOREIGN_KEY, SupportedFieldType.INTEGER,
addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
}
((BasicFieldMetadata) metadata.get(addMetadataFromFieldTypeRequest.getRequestedPropertyName())).
setForeignKeyProperty(foreignMetadata.getIdentifierPropertyName());
((BasicFieldMetadata) metadata.get(addMetadataFromFieldTypeRequest.getRequestedPropertyName()))
.setForeignKeyClass(foreignKeyClass);
((BasicFieldMetadata) metadata.get(addMetadataFromFieldTypeRequest.getRequestedPropertyName())).
setForeignKeyDisplayValueProperty(lookupDisplayProperty);
} else if (
addMetadataFromFieldTypeRequest.getExplicitType() != null &&
addMetadataFromFieldTypeRequest.getExplicitType() == SupportedFieldType.ADDITIONAL_FOREIGN_KEY
||
addMetadataFromFieldTypeRequest.getAdditionalForeignFields() != null &&
addMetadataFromFieldTypeRequest.getAdditionalForeignKeyIndexPosition() >= 0
) {
if (!addMetadataFromFieldTypeRequest.getType().isEntityType()) {
throw new IllegalArgumentException("Only ManyToOne and OneToOne fields can be marked as a " +
"SupportedFieldType of ADDITIONAL_FOREIGN_KEY");
}
ClassMetadata foreignMetadata;
String foreignKeyClass;
String lookupDisplayProperty;
if (addMetadataFromFieldTypeRequest.getAdditionalForeignKeyIndexPosition() < 0) {
Class<?>[] entities = addMetadataFromFieldTypeRequest.getDynamicEntityDao().getAllPolymorphicEntitiesFromCeiling
(addMetadataFromFieldTypeRequest.getType().getReturnedClass());
foreignMetadata = addMetadataFromFieldTypeRequest.getDynamicEntityDao().getSessionFactory().
getClassMetadata(entities[entities.length - 1]);
foreignKeyClass = entities[entities.length - 1].getName();
lookupDisplayProperty = ((BasicFieldMetadata) addMetadataFromFieldTypeRequest.getPresentationAttribute()).
getLookupDisplayProperty();
if (StringUtils.isEmpty(lookupDisplayProperty) &&
AdminMainEntity.class.isAssignableFrom(entities[entities.length - 1])) {
lookupDisplayProperty = AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY;
}
if (StringUtils.isEmpty(lookupDisplayProperty)) {
lookupDisplayProperty = "name";
}
} else {
try {
foreignMetadata = addMetadataFromFieldTypeRequest.getDynamicEntityDao().getSessionFactory().
getClassMetadata(Class.forName(addMetadataFromFieldTypeRequest.getAdditionalForeignFields()
[addMetadataFromFieldTypeRequest.getAdditionalForeignKeyIndexPosition()].getForeignKeyClass()));
foreignKeyClass = addMetadataFromFieldTypeRequest.getAdditionalForeignFields()[
addMetadataFromFieldTypeRequest.getAdditionalForeignKeyIndexPosition()].getForeignKeyClass();
lookupDisplayProperty = addMetadataFromFieldTypeRequest.getAdditionalForeignFields()[
addMetadataFromFieldTypeRequest.getAdditionalForeignKeyIndexPosition()].getDisplayValueProperty();
if (StringUtils.isEmpty(lookupDisplayProperty) && AdminMainEntity.class.isAssignableFrom(Class.forName(foreignKeyClass))) {
lookupDisplayProperty = AdminMainEntity.MAIN_ENTITY_NAME_PROPERTY;
}
if (StringUtils.isEmpty(lookupDisplayProperty)) {
lookupDisplayProperty = "name";
}
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
Class<?> foreignResponseType = foreignMetadata.getIdentifierType().getReturnedClass();
if (foreignResponseType.equals(String.class)) {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().getFieldMetadata(
addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.ADDITIONAL_FOREIGN_KEY,
SupportedFieldType.STRING,
addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
} else {
metadata.put(addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao().getMetadata().
getFieldMetadata(addMetadataFromFieldTypeRequest.getPrefix(),
addMetadataFromFieldTypeRequest.getRequestedPropertyName(),
addMetadataFromFieldTypeRequest.getComponentProperties(),
SupportedFieldType.ADDITIONAL_FOREIGN_KEY, SupportedFieldType.INTEGER,
addMetadataFromFieldTypeRequest.getType(),
addMetadataFromFieldTypeRequest.getTargetClass(),
addMetadataFromFieldTypeRequest.getPresentationAttribute(),
addMetadataFromFieldTypeRequest.getMergedPropertyType(),
addMetadataFromFieldTypeRequest.getDynamicEntityDao()
)
);
}
((BasicFieldMetadata) metadata.get(addMetadataFromFieldTypeRequest.getRequestedPropertyName())).
setForeignKeyProperty(foreignMetadata.getIdentifierPropertyName());
((BasicFieldMetadata) metadata.get(addMetadataFromFieldTypeRequest.getRequestedPropertyName())).
setForeignKeyClass(foreignKeyClass);
((BasicFieldMetadata) metadata.get(addMetadataFromFieldTypeRequest.getRequestedPropertyName())).
setForeignKeyDisplayValueProperty(lookupDisplayProperty);
}
//return type not supported - just skip this property
return FieldProviderResponse.HANDLED;
}
return FieldProviderResponse.NOT_HANDLED;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_dao_provider_metadata_DefaultFieldMetadataProvider.java
|
2,163 |
public abstract class MatchDocIdSet extends DocIdSet implements Bits {
private final int maxDoc;
private final Bits acceptDocs;
protected MatchDocIdSet(int maxDoc, @Nullable Bits acceptDocs) {
this.maxDoc = maxDoc;
this.acceptDocs = acceptDocs;
}
/**
* Does this document match?
*/
protected abstract boolean matchDoc(int doc);
@Override
public DocIdSetIterator iterator() throws IOException {
if (acceptDocs == null) {
return new NoAcceptDocsIterator(maxDoc);
} else if (acceptDocs instanceof FixedBitSet) {
return new FixedBitSetIterator(((DocIdSet) acceptDocs).iterator());
} else {
return new BothIterator(maxDoc, acceptDocs);
}
}
@Override
public Bits bits() throws IOException {
return this;
}
@Override
public boolean get(int index) {
return matchDoc(index);
}
@Override
public int length() {
return maxDoc;
}
class NoAcceptDocsIterator extends DocIdSetIterator {
private final int maxDoc;
private int doc = -1;
NoAcceptDocsIterator(int maxDoc) {
this.maxDoc = maxDoc;
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() {
do {
doc++;
if (doc >= maxDoc) {
return doc = NO_MORE_DOCS;
}
} while (!matchDoc(doc));
return doc;
}
@Override
public int advance(int target) {
for (doc = target; doc < maxDoc; doc++) {
if (matchDoc(doc)) {
return doc;
}
}
return doc = NO_MORE_DOCS;
}
@Override
public long cost() {
return maxDoc;
}
}
class FixedBitSetIterator extends FilteredDocIdSetIterator {
FixedBitSetIterator(DocIdSetIterator innerIter) {
super(innerIter);
}
@Override
protected boolean match(int doc) {
return matchDoc(doc);
}
}
class BothIterator extends DocIdSetIterator {
private final int maxDoc;
private final Bits acceptDocs;
private int doc = -1;
BothIterator(int maxDoc, Bits acceptDocs) {
this.maxDoc = maxDoc;
this.acceptDocs = acceptDocs;
}
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() {
do {
doc++;
if (doc >= maxDoc) {
return doc = NO_MORE_DOCS;
}
} while (!(matchDoc(doc) && acceptDocs.get(doc)));
return doc;
}
@Override
public int advance(int target) {
for (doc = target; doc < maxDoc; doc++) {
if (matchDoc(doc) && acceptDocs.get(doc)) {
return doc;
}
}
return doc = NO_MORE_DOCS;
}
@Override
public long cost() {
return maxDoc;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_docset_MatchDocIdSet.java
|
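Illustration only (not part of the dataset rows): a concrete MatchDocIdSet needs nothing beyond a matchDoc implementation, since iteration and acceptDocs handling are inherited from the abstract class above. The EvenDocIdSet name and its acceptance rule are hypothetical.

import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;

// Hypothetical filter: keeps only even-numbered doc IDs; a real filter would consult per-document data.
public class EvenDocIdSet extends MatchDocIdSet {

    public EvenDocIdSet(int maxDoc, Bits acceptDocs) {
        super(maxDoc, acceptDocs);
    }

    @Override
    protected boolean matchDoc(int doc) {
        return (doc & 1) == 0;
    }
}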
56 |
public final class DiffApplyingPrimitiveIntIterator extends AbstractPrimitiveIntIterator
{
private enum Phase
{
FILTERED_SOURCE
{
@Override
void computeNext( DiffApplyingPrimitiveIntIterator self )
{
self.computeNextFromSourceAndFilter();
}
},
ADDED_ELEMENTS
{
@Override
void computeNext( DiffApplyingPrimitiveIntIterator self )
{
self.computeNextFromAddedElements();
}
},
NO_ADDED_ELEMENTS
{
@Override
void computeNext( DiffApplyingPrimitiveIntIterator self )
{
self.endReached();
}
};
abstract void computeNext( DiffApplyingPrimitiveIntIterator self );
}
private final PrimitiveIntIterator source;
private final Iterator<?> addedElementsIterator;
private final Set<?> addedElements;
private final Set<?> removedElements;
Phase phase;
public DiffApplyingPrimitiveIntIterator( PrimitiveIntIterator source,
Set<?> addedElements, Set<?> removedElements )
{
this.source = source;
this.addedElements = addedElements;
this.addedElementsIterator = addedElements.iterator();
this.removedElements = removedElements;
phase = Phase.FILTERED_SOURCE;
computeNext();
}
@Override
protected void computeNext()
{
phase.computeNext( this );
}
private void computeNextFromSourceAndFilter()
{
for ( boolean hasNext = source.hasNext(); hasNext; hasNext = source.hasNext() )
{
int value = source.next();
next( value );
if ( !removedElements.contains( value ) && !addedElements.contains( value ) )
{
return;
}
}
transitionToAddedElements();
}
private void transitionToAddedElements()
{
phase = !addedElementsIterator.hasNext() ? Phase.NO_ADDED_ELEMENTS : Phase.ADDED_ELEMENTS;
computeNext();
}
private void computeNextFromAddedElements()
{
if ( addedElementsIterator.hasNext() )
{
next( (Integer) addedElementsIterator.next() );
}
else
{
endReached();
}
}
}
| 1no label
|
community_kernel_src_main_java_org_neo4j_kernel_impl_util_DiffApplyingPrimitiveIntIterator.java
|
25 |
static final class OrCompletion extends Completion {
final CompletableFuture<?> src;
final CompletableFuture<?> snd;
final CompletableFuture<Object> dst;
OrCompletion(CompletableFuture<?> src,
CompletableFuture<?> snd,
CompletableFuture<Object> dst) {
this.src = src; this.snd = snd; this.dst = dst;
}
public final void run() {
final CompletableFuture<?> a;
final CompletableFuture<?> b;
final CompletableFuture<Object> dst;
Object r, t; Throwable ex;
if ((dst = this.dst) != null &&
(((a = this.src) != null && (r = a.result) != null) ||
((b = this.snd) != null && (r = b.result) != null)) &&
compareAndSet(0, 1)) {
if (r instanceof AltResult) {
ex = ((AltResult)r).ex;
t = null;
}
else {
ex = null;
t = r;
}
dst.internalComplete(t, ex);
}
}
private static final long serialVersionUID = 5232453952276885070L;
}
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
193 |
public class GeneralCLibrary implements CLibrary {
public static native Pointer memmove(Pointer dest, Pointer src, NativeLong len);
static {
Native.register(Platform.C_LIBRARY_NAME);
}
@Override
public void memoryMove(long src, long dest, long len) {
memmove(new Pointer(dest), new Pointer(src), new NativeLong(len));
}
}
| 0true
|
nativeos_src_main_java_com_orientechnologies_nio_GeneralCLibrary.java
|
99 |
public class Precision extends AbstractDecimal {
public static final int DECIMALS = 6;
public static final Precision MIN_VALUE = new Precision(minDoubleValue(DECIMALS));
public static final Precision MAX_VALUE = new Precision(maxDoubleValue(DECIMALS));
private Precision() {}
public Precision(double value) {
super(value, DECIMALS);
}
private Precision(long format) {
super(format, DECIMALS);
}
public static class PrecisionSerializer extends AbstractDecimalSerializer<Precision> {
public PrecisionSerializer() {
super(DECIMALS, Precision.class);
}
@Override
protected Precision construct(long format, int decimals) {
assert decimals==DECIMALS;
return new Precision(format);
}
}
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Precision.java
|
65 |
public class RFC2104HMAC {
private RFC2104HMAC(){}
public static String calculateRFC2104HMAC(String data, String key)
throws SignatureException {
String result;
try {
SecretKeySpec signingKey = new SecretKeySpec(stringToBytes(key),
SIGNATURE_METHOD);
Mac mac = Mac.getInstance(SIGNATURE_METHOD);
mac.init(signingKey);
byte[] rawSignature = mac.doFinal(stringToBytes(data));
result = bytesToString(encode(rawSignature));
result = result.trim();
} catch (Exception e) {
throw new SignatureException("Failed to generate HMAC : "
+ e.getMessage());
}
return result;
}
}
| 0true
|
hazelcast-cloud_src_main_java_com_hazelcast_aws_security_RFC2104HMAC.java
|
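Usage sketch only, with a made-up request string and key; the helper is assumed to live in the com.hazelcast.aws.security package implied by the file path above.

import java.security.SignatureException;
import com.hazelcast.aws.security.RFC2104HMAC;

public class RFC2104HMACExample {
    public static void main(String[] args) throws SignatureException {
        // Hypothetical canonical request string, e.g. for signing an AWS-style query.
        String data = "GET\nexample.com\n/\nAction=DescribeInstances";
        String secretKey = "not-a-real-secret";
        // Returns the encoded HMAC signature, or throws SignatureException if signing fails.
        String signature = RFC2104HMAC.calculateRFC2104HMAC(data, secretKey);
        System.out.println("signature=" + signature);
    }
}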
607 |
public class BroadleafRequestInterceptor implements WebRequestInterceptor {
@Resource(name = "blRequestProcessor")
protected BroadleafRequestProcessor requestProcessor;
@Override
public void preHandle(WebRequest request) throws Exception {
requestProcessor.process(request);
}
@Override
public void postHandle(WebRequest request, ModelMap model) throws Exception {
//unimplemented
}
@Override
public void afterCompletion(WebRequest request, Exception ex) throws Exception {
requestProcessor.postProcess(request);
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_web_BroadleafRequestInterceptor.java
|
118 |
public class ClientPortableFactory implements PortableFactory {
@Override
public Portable create(int classId) {
switch (classId) {
case ClientPortableHook.GENERIC_ERROR:
return new GenericError();
case ClientPortableHook.AUTH:
return new AuthenticationRequest();
case ClientPortableHook.PRINCIPAL:
return new ClientPrincipal();
case ClientPortableHook.GET_DISTRIBUTED_OBJECT_INFO:
return new GetDistributedObjectsRequest();
case ClientPortableHook.DISTRIBUTED_OBJECT_INFO:
return new DistributedObjectInfo();
case ClientPortableHook.CREATE_PROXY:
return new ClientCreateRequest();
case ClientPortableHook.DESTROY_PROXY:
return new ClientDestroyRequest();
case ClientPortableHook.LISTENER:
return new DistributedObjectListenerRequest();
case ClientPortableHook.MEMBERSHIP_LISTENER:
return new AddMembershipListenerRequest();
case ClientPortableHook.CLIENT_PING:
return new ClientPingRequest();
case ClientPortableHook.GET_PARTITIONS:
return new GetPartitionsRequest();
case ClientPortableHook.REMOVE_LISTENER:
return new RemoveDistributedObjectListenerRequest();
default:
return null;
}
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_ClientPortableFactory.java
|
227 |
XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
@Override
protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
assertThat(fields.length, equalTo(1));
assertThat(docids.length, equalTo(1));
String[][] contents = new String[1][1];
contents[0][0] = text;
return contents;
}
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
};
| 0true
|
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
|
190 |
public class ClientNetworkConfig {
/**
* List of the initial set of addresses.
* The client uses this list to find a running member and connect to it.
*/
private final List<String> addressList = new ArrayList<String>(10);
/**
* If true, the client routes key-based operations to the owner of the key on a best-effort basis.
* Note that it uses a cached version of {@link com.hazelcast.core.PartitionService#getPartitions()} and doesn't
* guarantee that the operation will always be executed on the owner. The cached table is updated every 10 seconds.
*/
private boolean smartRouting = true;
/**
* If true, the client will retry operations that were in flight when the connection to a member was lost.
* The loss can be caused by the network or simply by the member dying; in either case it is not clear whether
* the operation actually completed. Retrying is harmless for idempotent operations, but for non-idempotent ones
* it can have undesirable effects. Note that the retry may execute on any member.
* <p/>
* If false, the operation will throw a {@link RuntimeException} wrapping a {@link java.io.IOException}.
* TODO clarify exactly which exception is thrown here
*/
private boolean redoOperation;
/**
* The client sends heartbeat messages to members, and this is the corresponding timeout. If no message passes
* between the client and a member within {@link ClientNetworkConfig#connectionTimeout} milliseconds, the connection
* will be closed.
*/
private int connectionTimeout = 60000;
//TODO heartbeat
/**
* While the client is initially trying to connect to one of the members in the {@link ClientNetworkConfig#addressList},
* all of them might be unavailable. Instead of giving up, throwing an exception and stopping the client, it will
* retry up to {@link ClientNetworkConfig#connectionAttemptLimit} times.
*/
private int connectionAttemptLimit = 2;
/**
* Period between successive attempts to find a member to connect to (see {@link ClientNetworkConfig#connectionAttemptLimit}).
*/
private int connectionAttemptPeriod = 3000;
/**
* Will be called with the Socket each time the client creates a connection to any member.
*/
private SocketInterceptorConfig socketInterceptorConfig;
/**
* Options for creating socket
*/
private SocketOptions socketOptions = new SocketOptions();
/**
* Enabling ssl for client
*/
private SSLConfig sslConfig;
public boolean isSmartRouting() {
return smartRouting;
}
public ClientNetworkConfig setSmartRouting(boolean smartRouting) {
this.smartRouting = smartRouting;
return this;
}
public SocketInterceptorConfig getSocketInterceptorConfig() {
return socketInterceptorConfig;
}
public ClientNetworkConfig setSocketInterceptorConfig(SocketInterceptorConfig socketInterceptorConfig) {
this.socketInterceptorConfig = socketInterceptorConfig;
return this;
}
public int getConnectionAttemptPeriod() {
return connectionAttemptPeriod;
}
public ClientNetworkConfig setConnectionAttemptPeriod(int connectionAttemptPeriod) {
this.connectionAttemptPeriod = connectionAttemptPeriod;
return this;
}
public int getConnectionAttemptLimit() {
return connectionAttemptLimit;
}
public ClientNetworkConfig setConnectionAttemptLimit(int connectionAttemptLimit) {
this.connectionAttemptLimit = connectionAttemptLimit;
return this;
}
public int getConnectionTimeout() {
return connectionTimeout;
}
public ClientNetworkConfig setConnectionTimeout(int connectionTimeout) {
this.connectionTimeout = connectionTimeout;
return this;
}
public ClientNetworkConfig addAddress(String... addresses) {
Collections.addAll(addressList, addresses);
return this;
}
// required for spring module
public ClientNetworkConfig setAddresses(List<String> addresses) {
addressList.clear();
addressList.addAll(addresses);
return this;
}
public List<String> getAddresses() {
if (addressList.size() == 0) {
addAddress("localhost");
}
return addressList;
}
public boolean isRedoOperation() {
return redoOperation;
}
public ClientNetworkConfig setRedoOperation(boolean redoOperation) {
this.redoOperation = redoOperation;
return this;
}
public SocketOptions getSocketOptions() {
return socketOptions;
}
public ClientNetworkConfig setSocketOptions(SocketOptions socketOptions) {
this.socketOptions = socketOptions;
return this;
}
/**
* Returns the current {@link SSLConfig}. It is possible that null is returned if no SSLConfig has been
* set.
*
* @return the SSLConfig.
* @see #setSSLConfig(SSLConfig)
*/
public SSLConfig getSSLConfig() {
return sslConfig;
}
/**
* Sets the {@link SSLConfig}. null value indicates that no SSLConfig should be used.
*
* @param sslConfig the SSLConfig.
* @return the updated ClientNetworkConfig.
* @see #getSSLConfig()
*/
public ClientNetworkConfig setSSLConfig(SSLConfig sslConfig) {
this.sslConfig = sslConfig;
return this;
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_config_ClientNetworkConfig.java
|
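A usage sketch of the fluent setters shown above; the member addresses and timeout values are placeholders.

import com.hazelcast.client.config.ClientNetworkConfig;

public class ClientNetworkConfigExample {
    public static void main(String[] args) {
        ClientNetworkConfig networkConfig = new ClientNetworkConfig()
                .addAddress("10.0.0.1:5701", "10.0.0.2:5701") // placeholder member addresses
                .setSmartRouting(true)            // route key-based operations to the key owner
                .setRedoOperation(false)          // do not retry in-flight operations after a lost connection
                .setConnectionTimeout(30000)      // heartbeat/connection timeout in milliseconds
                .setConnectionAttemptLimit(3)
                .setConnectionAttemptPeriod(5000);
        System.out.println("configured addresses: " + networkConfig.getAddresses());
    }
}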
347 |
public class ConfigurationOnlyState {
private static final ThreadLocal<ConfigurationOnlyState> CONFIGURATIONONLYSTATE = ThreadLocalManager.createThreadLocal(ConfigurationOnlyState.class);
public static ConfigurationOnlyState getState() {
return CONFIGURATIONONLYSTATE.get();
}
public static void setState(ConfigurationOnlyState state) {
CONFIGURATIONONLYSTATE.set(state);
}
protected boolean isConfigurationOnly;
public boolean isConfigurationOnly() {
return isConfigurationOnly;
}
public void setConfigurationOnly(boolean configurationOnly) {
isConfigurationOnly = configurationOnly;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_ConfigurationOnlyState.java
|
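Illustration only: the thread-local holder above is typically driven with a set/try/finally pattern so the flag cannot leak to later work on the same thread; the surrounding unit of work is hypothetical.

import org.broadleafcommerce.common.extensibility.jpa.ConfigurationOnlyState;

public class ConfigurationOnlyStateExample {
    public static void runConfigurationOnly(Runnable work) {
        ConfigurationOnlyState state = new ConfigurationOnlyState();
        state.setConfigurationOnly(true);
        ConfigurationOnlyState.setState(state);
        try {
            work.run(); // hypothetical work that checks ConfigurationOnlyState.getState().isConfigurationOnly()
        } finally {
            ConfigurationOnlyState.setState(null); // clear the thread-local
        }
    }
}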
9 |
static final class AsyncAccept<T> extends Async {
final T arg;
final Action<? super T> fn;
final CompletableFuture<Void> dst;
AsyncAccept(T arg, Action<? super T> fn,
CompletableFuture<Void> dst) {
this.arg = arg; this.fn = fn; this.dst = dst;
}
public final boolean exec() {
CompletableFuture<Void> d; Throwable ex;
if ((d = this.dst) != null && d.result == null) {
try {
fn.accept(arg);
ex = null;
} catch (Throwable rex) {
ex = rex;
}
d.internalComplete(null, ex);
}
return true;
}
private static final long serialVersionUID = 5232453952276885070L;
}
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
397 |
context.getExecutionService().execute(new Runnable() {
public void run() {
try {
TreeSet<CacheRecord<K>> records = new TreeSet<CacheRecord<K>>(comparator);
records.addAll(cache.values());
int evictSize = cache.size() * EVICTION_PERCENTAGE / 100;
int i = 0;
for (CacheRecord<K> record : records) {
cache.remove(record.key);
if (++i > evictSize) {
break;
}
}
} finally {
canEvict.set(true);
}
}
});
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_nearcache_ClientNearCache.java
|
214 |
public class HydrationDescriptor {
private Map<String, HydrationItemDescriptor> hydratedMutators;
private Method[] idMutators;
private String cacheRegion;
public Map<String, HydrationItemDescriptor> getHydratedMutators() {
return hydratedMutators;
}
public Method[] getIdMutators() {
return idMutators;
}
public String getCacheRegion() {
return cacheRegion;
}
public void setHydratedMutators(Map<String, HydrationItemDescriptor> hydratedMutators) {
this.hydratedMutators = hydratedMutators;
}
public void setIdMutators(Method[] idMutators) {
this.idMutators = idMutators;
}
public void setCacheRegion(String cacheRegion) {
this.cacheRegion = cacheRegion;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_cache_engine_HydrationDescriptor.java
|
566 |
trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
firedEvents.add(event);
}
});
| 0true
|
core_src_test_java_com_orientechnologies_orient_core_index_OCompositeIndexDefinitionTest.java
|
117 |
public interface OLAPJob<S> {
public S process(TitanVertex vertex);
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_olap_OLAPJob.java
|
128 |
public interface RelationTypeIndex extends TitanIndex {
/**
* Returns the {@link RelationType} on which this index is installed.
* @return
*/
public RelationType getType();
/**
* Returns the sort order of this index. Index entries are sorted in this order and queries
* which use this sort order will be faster.
*
* @return
*/
public Order getSortOrder();
/**
* Returns the (composite) sort key for this index. The composite sort key is an ordered list of {@link RelationType}s
*
* @return
*/
public RelationType[] getSortKey();
/**
* Returns the direction on which this index is installed. An index may cover only one or both directions.
* @return
*/
public Direction getDirection();
/**
* Returns the status of this index
* @return
*/
public SchemaStatus getIndexStatus();
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_schema_RelationTypeIndex.java
|
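Illustration only: a helper that reports the metadata exposed by the interface above, using nothing beyond its declared accessors; the inspector class itself is hypothetical and the imports assume the packages implied by the file path.

import com.thinkaurelius.titan.core.RelationType;
import com.thinkaurelius.titan.core.schema.RelationTypeIndex;

public final class RelationTypeIndexInspector {
    private RelationTypeIndexInspector() {}

    // Summarizes an index purely through the accessors declared by RelationTypeIndex.
    public static String describe(RelationTypeIndex index) {
        StringBuilder sb = new StringBuilder("index on ").append(index.getType());
        sb.append(", direction=").append(index.getDirection());
        sb.append(", sortOrder=").append(index.getSortOrder());
        sb.append(", status=").append(index.getIndexStatus());
        sb.append(", sortKey=[");
        RelationType[] sortKey = index.getSortKey();
        for (int i = 0; i < sortKey.length; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append(sortKey[i]);
        }
        return sb.append("]").toString();
    }
}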
580 |
public class FulfillmentPriceException extends Exception {
private static final long serialVersionUID = 1L;
protected FulfillmentPriceExceptionResponse fulfillmentPriceExceptionResponse;
public FulfillmentPriceException() {
super();
}
public FulfillmentPriceException(String message, Throwable cause) {
super(message, cause);
}
public FulfillmentPriceException(String message) {
super(message);
}
public FulfillmentPriceException(Throwable cause) {
super(cause);
}
public FulfillmentPriceExceptionResponse getFulfillmentPriceExceptionResponse() {
return fulfillmentPriceExceptionResponse;
}
public void setFulfillmentPriceExceptionResponse(FulfillmentPriceExceptionResponse fulfillmentPriceExceptionResponse) {
this.fulfillmentPriceExceptionResponse = fulfillmentPriceExceptionResponse;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_vendor_service_exception_FulfillmentPriceException.java
|
1,468 |
static class AttributesRoutings {
public final ImmutableList<ShardRouting> withSameAttribute;
public final ImmutableList<ShardRouting> withoutSameAttribute;
public final int totalSize;
AttributesRoutings(ImmutableList<ShardRouting> withSameAttribute, ImmutableList<ShardRouting> withoutSameAttribute) {
this.withSameAttribute = withSameAttribute;
this.withoutSameAttribute = withoutSameAttribute;
this.totalSize = withoutSameAttribute.size() + withSameAttribute.size();
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_IndexShardRoutingTable.java
|
342 |
public class NodesShutdownAction extends ClusterAction<NodesShutdownRequest, NodesShutdownResponse, NodesShutdownRequestBuilder> {
public static final NodesShutdownAction INSTANCE = new NodesShutdownAction();
public static final String NAME = "cluster/nodes/shutdown";
private NodesShutdownAction() {
super(NAME);
}
@Override
public NodesShutdownResponse newResponse() {
return new NodesShutdownResponse();
}
@Override
public NodesShutdownRequestBuilder newRequestBuilder(ClusterAdminClient client) {
return new NodesShutdownRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_NodesShutdownAction.java
|
1,621 |
public class OTxTask extends OAbstractReplicatedTask {
private static final long serialVersionUID = 1L;
private List<OAbstractRecordReplicatedTask> tasks = new ArrayList<OAbstractRecordReplicatedTask>();
public OTxTask() {
}
public void add(final OAbstractRecordReplicatedTask iTask) {
tasks.add(iTask);
}
@Override
public Object execute(final OServer iServer, ODistributedServerManager iManager, final ODatabaseDocumentTx database)
throws Exception {
ODistributedServerLog.debug(this, iManager.getLocalNodeName(), getNodeSource(), DIRECTION.IN,
"committing transaction against db=%s...", database.getName());
ODatabaseRecordThreadLocal.INSTANCE.set(database);
try {
database.begin();
for (OAbstractRecordReplicatedTask task : tasks) {
task.execute(iServer, iManager, database);
}
database.commit();
} catch (ONeedRetryException e) {
return Boolean.FALSE;
} catch (OTransactionException e) {
return Boolean.FALSE;
} catch (Exception e) {
OLogManager.instance().error(this, "Error on distirbuted transaction commit", e);
return Boolean.FALSE;
}
return Boolean.TRUE;
}
@Override
public QUORUM_TYPE getQuorumType() {
return QUORUM_TYPE.WRITE;
}
@Override
public OFixTxTask getFixTask(final ODistributedRequest iRequest, final ODistributedResponse iBadResponse,
final ODistributedResponse iGoodResponse) {
final OFixTxTask fixTask = new OFixTxTask();
for (OAbstractRecordReplicatedTask t : tasks) {
final ORecordId rid = t.getRid();
final ORecordInternal<?> rec = rid.getRecord();
if (rec == null)
fixTask.add(new ODeleteRecordTask(rid, null));
else {
final ORecordVersion v = rec.getRecordVersion();
v.setRollbackMode();
fixTask.add(new OUpdateRecordTask(rid, rec.toStream(), v, rec.getRecordType()));
}
}
return fixTask;
}
@Override
public void writeExternal(final ObjectOutput out) throws IOException {
out.writeInt(tasks.size());
for (OAbstractRecordReplicatedTask task : tasks)
out.writeObject(task);
}
@Override
public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException {
final int size = in.readInt();
for (int i = 0; i < size; ++i)
tasks.add((OAbstractRecordReplicatedTask) in.readObject());
}
@Override
public String getName() {
return "tx";
}
@Override
public String getPayload() {
return null;
}
}
| 1no label
|
server_src_main_java_com_orientechnologies_orient_server_distributed_task_OTxTask.java
|
1,047 |
public class MultiTermVectorsShardRequest extends SingleShardOperationRequest<MultiTermVectorsShardRequest> {
private int shardId;
private String preference;
IntArrayList locations;
List<TermVectorRequest> requests;
MultiTermVectorsShardRequest() {
}
MultiTermVectorsShardRequest(String index, int shardId) {
super(index);
this.shardId = shardId;
locations = new IntArrayList();
requests = new ArrayList<TermVectorRequest>();
}
public int shardId() {
return this.shardId;
}
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
* <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or
* a custom value, which guarantees that the same order will be used across different requests.
*/
public MultiTermVectorsShardRequest preference(String preference) {
this.preference = preference;
return this;
}
public String preference() {
return this.preference;
}
public void add(int location, TermVectorRequest request) {
this.locations.add(location);
this.requests.add(request);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
locations = new IntArrayList(size);
requests = new ArrayList<TermVectorRequest>(size);
for (int i = 0; i < size; i++) {
locations.add(in.readVInt());
requests.add(TermVectorRequest.readTermVectorRequest(in));
}
preference = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(locations.size());
for (int i = 0; i < locations.size(); i++) {
out.writeVInt(locations.get(i));
requests.get(i).writeTo(out);
}
out.writeOptionalString(preference);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_termvector_MultiTermVectorsShardRequest.java
|
1,726 |
@edu.umd.cs.findbugs.annotations.SuppressWarnings("SE_BAD_FIELD")
public class DataAwareEntryEvent extends EntryEvent {
private final static long serialVersionUID = 1;
protected final Data dataKey;
protected final Data dataNewValue;
protected final Data dataOldValue;
private final transient SerializationService serializationService;
public DataAwareEntryEvent(Member from, int eventType,
String source, Data dataKey,
Data dataNewValue, Data dataOldValue,
SerializationService serializationService) {
super(source, from, eventType, null, null);
this.dataKey = dataKey;
this.dataNewValue = dataNewValue;
this.dataOldValue = dataOldValue;
this.serializationService = serializationService;
}
public Data getKeyData() {
return dataKey;
}
public Data getNewValueData() {
return dataNewValue;
}
public Data getOldValueData() {
return dataOldValue;
}
public Object getKey() {
if (key == null && dataKey != null) {
key = serializationService.toObject(dataKey);
}
return key;
}
public Object getOldValue() {
if (oldValue == null && dataOldValue != null) {
oldValue = serializationService.toObject(dataOldValue);
}
return oldValue;
}
public Object getValue() {
if (value == null && dataNewValue != null) {
value = serializationService.toObject(dataNewValue);
}
return value;
}
public String getLongName() {
return name;
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_DataAwareEntryEvent.java
|
18 |
static class ByteEntry implements Comparable<ByteEntry> {
final ByteBuffer key;
final ByteBuffer value;
ByteEntry(ByteBuffer key, ByteBuffer value) {
this.value = value;
this.key = key;
}
@Override
public int compareTo(ByteEntry byteEntry) {
return key.compareTo(byteEntry.key);
}
}
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_TestByteBuffer.java
|
5 |
@Component("blChildCategoriesCustomPersistenceHandler")
public class ChildCategoriesCustomPersistenceHandler extends CustomPersistenceHandlerAdapter {
@Override
public Boolean canHandleAdd(PersistencePackage persistencePackage) {
return (!ArrayUtils.isEmpty(persistencePackage.getCustomCriteria()) && persistencePackage.getCustomCriteria()[0].equals("blcAllParentCategories"));
}
@Override
public Entity add(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
AdornedTargetList adornedTargetList = (AdornedTargetList) persistencePackage.getPersistencePerspective().getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
String targetPath = adornedTargetList.getTargetObjectPath() + "." + adornedTargetList.getTargetIdProperty();
String linkedPath = adornedTargetList.getLinkedObjectPath() + "." + adornedTargetList.getLinkedIdProperty();
Long parentId = Long.parseLong(persistencePackage.getEntity().findProperty(linkedPath).getValue());
Long childId = Long.parseLong(persistencePackage.getEntity().findProperty(targetPath).getValue());
Category parent = (Category) dynamicEntityDao.retrieve(CategoryImpl.class, parentId);
Category child = (Category) dynamicEntityDao.retrieve(CategoryImpl.class, childId);
CategoryXref categoryXref = new CategoryXrefImpl();
categoryXref.setSubCategory(child);
categoryXref.setCategory(parent);
if (parent.getAllChildCategoryXrefs().contains(categoryXref)) {
throw new ServiceException("Add unsuccessful. Cannot add a duplicate child category.");
}
checkParents(child, parent);
return helper.getCompatibleModule(OperationType.ADORNEDTARGETLIST).add(persistencePackage);
}
protected void checkParents(Category child, Category parent) throws ServiceException {
if (child.getId().equals(parent.getId())) {
throw new ServiceException("Add unsuccessful. Cannot add a category to itself.");
}
for (CategoryXref category : parent.getAllParentCategoryXrefs()) {
if (!CollectionUtils.isEmpty(category.getCategory().getAllParentCategoryXrefs())) {
checkParents(child, category.getCategory());
}
}
}
}
| 0true
|
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_ChildCategoriesCustomPersistenceHandler.java
|
228 |
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(1, map.size());
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceExecuteTest.java
|
208 |
public interface HydratedAnnotationManager {
public HydrationDescriptor getHydrationDescriptor(Object entity);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_cache_engine_HydratedAnnotationManager.java
|
240 |
public class OCacheLevelOneLocatorImpl implements OCacheLevelOneLocator {
@Override
public OCache threadLocalCache() {
return new OUnboundedWeakCache();
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_cache_OCacheLevelOneLocatorImpl.java
|
88 |
{
@Override
public void run()
{
assertEquals( 1, count( node.getRelationships() ) );
}
};
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_ReadTransactionLogWritingTest.java
|
470 |
public class ExternalModuleNode implements ModuleNode {
private RepositoryNode repositoryNode;
private List<IPackageFragmentRoot> binaryArchives = new ArrayList<>();
protected String moduleSignature;
public ExternalModuleNode(RepositoryNode repositoryNode, String moduleSignature) {
this.moduleSignature = moduleSignature;
this.repositoryNode = repositoryNode;
}
public List<IPackageFragmentRoot> getBinaryArchives() {
return binaryArchives;
}
public CeylonArchiveFileStore getSourceArchive() {
JDTModule module = getModule();
if (module.isCeylonArchive()) {
String sourcePathString = module.getSourceArchivePath();
if (sourcePathString != null) {
IFolder sourceArchive = getExternalSourceArchiveManager().getSourceArchive(Path.fromOSString(sourcePathString));
if (sourceArchive != null && sourceArchive.exists()) {
return ((CeylonArchiveFileStore) ((Resource)sourceArchive).getStore());
}
}
}
return null;
}
public RepositoryNode getRepositoryNode() {
return repositoryNode;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime
* result
+ ((moduleSignature == null) ? 0 : moduleSignature
.hashCode());
result = prime
* result
+ ((repositoryNode == null) ? 0 : repositoryNode.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ExternalModuleNode other = (ExternalModuleNode) obj;
if (moduleSignature == null) {
if (other.moduleSignature != null)
return false;
} else if (!moduleSignature.equals(other.moduleSignature))
return false;
if (repositoryNode == null) {
if (other.repositoryNode != null)
return false;
} else if (!repositoryNode.equals(other.repositoryNode))
return false;
return true;
}
@Override
public JDTModule getModule() {
for (JDTModule module : CeylonBuilder.getProjectExternalModules(repositoryNode.project)) {
if (module.getSignature().equals(moduleSignature)) {
return module;
}
}
return null;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_navigator_ExternalModuleNode.java
|
1,139 |
FACET {
@Override
SearchRequestBuilder addTermsAgg(SearchRequestBuilder builder, String name, String field, String executionHint) {
return builder.addFacet(termsFacet(name).field(field).executionHint(executionHint));
}
@Override
SearchRequestBuilder addTermsStatsAgg(SearchRequestBuilder builder, String name, String keyField, String valueField) {
return builder.addFacet(termsStatsFacet(name).keyField(keyField).valueField(valueField));
}
},
| 0true
|
src_test_java_org_elasticsearch_benchmark_search_aggregations_TermsAggregationSearchBenchmark.java
|
946 |
public abstract class MasterNodeReadOperationRequest<T extends MasterNodeReadOperationRequest> extends MasterNodeOperationRequest<T> {
protected boolean local = false;
@SuppressWarnings("unchecked")
public final T local(boolean local) {
this.local = local;
return (T) this;
}
public final boolean local() {
return local;
}
/**
* Reads the local flag
*/
protected void readLocal(StreamInput in) throws IOException {
readLocal(in, null);
}
/**
* Reads the local flag if on or after the specified min version or if the version is <code>null</code>.
*/
protected void readLocal(StreamInput in, Version minVersion) throws IOException {
if (minVersion == null || in.getVersion().onOrAfter(minVersion)) {
local = in.readBoolean();
}
}
/**
* writes the local flag
*/
protected void writeLocal(StreamOutput out) throws IOException {
writeLocal(out, null);
}
/**
* writes the local flag if on or after the specified min version or if the version is <code>null</code>.
*/
protected void writeLocal(StreamOutput out, Version minVersion) throws IOException {
if (minVersion == null || out.getVersion().onOrAfter(minVersion)) {
out.writeBoolean(local);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_master_MasterNodeReadOperationRequest.java
|
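A hypothetical subclass, sketching where the readLocal/writeLocal helpers above are meant to be called during serialization; the class name is made up, nothing is validated, and no minimum Version is passed (the version-gated overloads exist for wire-compatibility with older nodes).

import java.io.IOException;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

public class ExampleStatusRequest extends MasterNodeReadOperationRequest<ExampleStatusRequest> {

    @Override
    public ActionRequestValidationException validate() {
        return null; // nothing to validate in this sketch
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        readLocal(in); // pass a minimum Version here to gate the flag on the wire format
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        writeLocal(out);
    }
}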
3,369 |
static class Empty extends PackedArrayAtomicFieldData {
Empty(int numDocs) {
super(numDocs);
}
@Override
public LongValues getLongValues() {
return LongValues.EMPTY;
}
@Override
public DoubleValues getDoubleValues() {
return DoubleValues.EMPTY;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public boolean isValuesOrdered() {
return false;
}
@Override
public long getMemorySizeInBytes() {
return 0;
}
@Override
public long getNumberUniqueValues() {
return 0;
}
@Override
public BytesValues getBytesValues(boolean needsHashes) {
return BytesValues.EMPTY;
}
@Override
public ScriptDocValues getScriptValues() {
return ScriptDocValues.EMPTY;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_fielddata_plain_PackedArrayAtomicFieldData.java
|
1,308 |
public class DiskUsageTests extends ElasticsearchTestCase {
@Test
public void diskUsageCalcTest() {
DiskUsage du = new DiskUsage("node1", 100, 40);
assertThat(du.getFreeDiskAsPercentage(), equalTo(40.0));
assertThat(du.getFreeBytes(), equalTo(40L));
assertThat(du.getUsedBytes(), equalTo(60L));
assertThat(du.getTotalBytes(), equalTo(100L));
}
@Test
public void randomDiskUsageTest() {
int iters = atLeast(1000);
for (int i = 1; i < iters; i++) {
long total = between(Integer.MIN_VALUE, Integer.MAX_VALUE);
long free = between(Integer.MIN_VALUE, Integer.MAX_VALUE);
if (free > total || total <= 0) {
try {
new DiskUsage("random", total, free);
fail("should never reach this");
} catch (IllegalStateException e) {
}
} else {
DiskUsage du = new DiskUsage("random", total, free);
assertThat(du.getFreeBytes(), equalTo(free));
assertThat(du.getTotalBytes(), equalTo(total));
assertThat(du.getUsedBytes(), equalTo(total - free));
assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0 * ((double)free / total)));
}
}
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_DiskUsageTests.java
|
1,308 |
public class JDTModelLoader extends AbstractModelLoader {
private IJavaProject javaProject;
private CompilerOptions compilerOptions;
private ProblemReporter problemReporter;
private LookupEnvironment lookupEnvironment;
private MissingTypeBinding missingTypeBinding;
private final Object lookupEnvironmentMutex = new Object();
private boolean mustResetLookupEnvironment = false;
private Set<Module> modulesInClassPath = new HashSet<Module>();
public JDTModelLoader(final JDTModuleManager moduleManager, final Modules modules){
this.moduleManager = moduleManager;
this.modules = modules;
javaProject = moduleManager.getJavaProject();
if (javaProject != null) {
compilerOptions = new CompilerOptions(javaProject.getOptions(true));
compilerOptions.ignoreMethodBodies = true;
compilerOptions.storeAnnotations = true;
problemReporter = new ProblemReporter(
DefaultErrorHandlingPolicies.proceedWithAllProblems(),
compilerOptions,
new DefaultProblemFactory());
}
this.timer = new Timer(false);
internalCreate();
if (javaProject != null) {
modelLoaders.put(javaProject.getProject(), new WeakReference<JDTModelLoader>(this));
}
}
public JDTModuleManager getModuleManager() {
return (JDTModuleManager) moduleManager;
}
private void internalCreate() {
this.typeFactory = new GlobalTypeFactory();
this.typeParser = new TypeParser(this);
this.timer = new Timer(false);
createLookupEnvironment();
}
public void createLookupEnvironment() {
if (javaProject == null) {
return;
}
try {
lookupEnvironment = new LookupEnvironment(new ITypeRequestor() {
private Parser basicParser;
@Override
public void accept(ISourceType[] sourceTypes, PackageBinding packageBinding,
AccessRestriction accessRestriction) {
// case of SearchableEnvironment of an IJavaProject is used
ISourceType sourceType = sourceTypes[0];
while (sourceType.getEnclosingType() != null)
sourceType = sourceType.getEnclosingType();
if (sourceType instanceof SourceTypeElementInfo) {
// get source
SourceTypeElementInfo elementInfo = (SourceTypeElementInfo) sourceType;
IType type = elementInfo.getHandle();
ICompilationUnit sourceUnit = (ICompilationUnit) type.getCompilationUnit();
accept(sourceUnit, accessRestriction);
} else {
CompilationResult result = new CompilationResult(sourceType.getFileName(), 1, 1, 0);
CompilationUnitDeclaration unit =
SourceTypeConverter.buildCompilationUnit(
sourceTypes,
SourceTypeConverter.FIELD_AND_METHOD // need field and methods
| SourceTypeConverter.MEMBER_TYPE, // need member types
// no need for field initialization
lookupEnvironment.problemReporter,
result);
lookupEnvironment.buildTypeBindings(unit, accessRestriction);
lookupEnvironment.completeTypeBindings(unit, true);
}
}
@Override
public void accept(IBinaryType binaryType, PackageBinding packageBinding,
AccessRestriction accessRestriction) {
BinaryTypeBinding btb = lookupEnvironment.createBinaryTypeFrom(binaryType, packageBinding, accessRestriction);
if (btb.isNestedType() && !btb.isStatic()) {
for (MethodBinding method : btb.methods()) {
if (method.isConstructor() && method.parameters.length > 0) {
char[] signature = method.signature();
for (IBinaryMethod methodInfo : binaryType.getMethods()) {
if (methodInfo.isConstructor()) {
char[] methodInfoSignature = methodInfo.getMethodDescriptor();
if (new String(signature).equals(new String(methodInfoSignature))) {
IBinaryAnnotation[] binaryAnnotation = methodInfo.getParameterAnnotations(0);
if (binaryAnnotation == null) {
if (methodInfo.getAnnotatedParametersCount() == method.parameters.length + 1) {
AnnotationBinding[][] newParameterAnnotations = new AnnotationBinding[method.parameters.length][];
for (int i=0; i<method.parameters.length; i++) {
IBinaryAnnotation[] goodAnnotations = null;
try {
goodAnnotations = methodInfo.getParameterAnnotations(i + 1);
}
catch(IndexOutOfBoundsException e) {
break;
}
if (goodAnnotations != null) {
AnnotationBinding[] parameterAnnotations = BinaryTypeBinding.createAnnotations(goodAnnotations, lookupEnvironment, new char[][][] {});
newParameterAnnotations[i] = parameterAnnotations;
}
}
method.setParameterAnnotations(newParameterAnnotations);
}
}
}
}
}
}
}
}
}
@Override
public void accept(ICompilationUnit sourceUnit,
AccessRestriction accessRestriction) {
// Switch the current policy and compilation result for this unit to the requested one.
CompilationResult unitResult = new CompilationResult(sourceUnit, 1, 1, compilerOptions.maxProblemsPerUnit);
try {
CompilationUnitDeclaration parsedUnit = basicParser().dietParse(sourceUnit, unitResult);
lookupEnvironment.buildTypeBindings(parsedUnit, accessRestriction);
lookupEnvironment.completeTypeBindings(parsedUnit, true);
} catch (AbortCompilationUnit e) {
// at this point, currentCompilationUnitResult may not be sourceUnit, but some other
// one requested further along to resolve sourceUnit.
if (unitResult.compilationUnit == sourceUnit) { // only report once
//requestor.acceptResult(unitResult.tagAsAccepted());
} else {
throw e; // want to abort enclosing request to compile
}
}
// Display unit error in debug mode
if (BasicSearchEngine.VERBOSE) {
if (unitResult.problemCount > 0) {
System.out.println(unitResult);
}
}
}
private Parser basicParser() {
if (this.basicParser == null) {
ProblemReporter problemReporter =
new ProblemReporter(
DefaultErrorHandlingPolicies.proceedWithAllProblems(),
compilerOptions,
new DefaultProblemFactory());
this.basicParser = new Parser(problemReporter, false);
this.basicParser.reportOnlyOneSyntaxError = true;
}
return this.basicParser;
}
}, compilerOptions, problemReporter, createSearchableEnvironment());
lookupEnvironment.mayTolerateMissingType = true;
missingTypeBinding = new MissingTypeBinding(lookupEnvironment.defaultPackage, new char[][] {"unknown".toCharArray()}, lookupEnvironment);
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
// TODO : remove when the bug in the AbstractModelLoader is corrected
@Override
public synchronized LazyPackage findOrCreatePackage(Module module, String pkgName) {
LazyPackage pkg = super.findOrCreatePackage(module, pkgName);
if (pkg.getModule() != null
&& pkg.getModule().isJava()){
pkg.setShared(true);
}
Module currentModule = pkg.getModule();
if (currentModule.equals(modules.getDefaultModule()) && ! currentModule.equals(module)) {
currentModule.getPackages().remove(pkg);
pkg.setModule(null);
if (module != null) {
module.getPackages().add(pkg);
pkg.setModule(module);
}
}
return pkg;
}
@Override
public void loadStandardModules() {
// Now create JDK and Oracle modules (cf. https://github.com/ceylon/ceylon-ide-eclipse/issues/733 )
loadJDKModules();
/*
* We start by loading java.lang because we will need it no matter what.
*/
Module jdkModule = findOrCreateModule(JAVA_BASE_MODULE_NAME, JDKUtils.jdk.version);
Module languageModule = getLanguageModule();
if (getModuleManager().isLoadDependenciesFromModelLoaderFirst() && !isBootstrap) {
findOrCreatePackage(languageModule, CEYLON_LANGUAGE);
}
loadPackage(jdkModule, "java.lang", false);
loadPackage(languageModule, "com.redhat.ceylon.compiler.java.metadata", false);
loadPackage(languageModule, "com.redhat.ceylon.compiler.java.language", false);
}
private String getToplevelQualifiedName(final String pkgName, String name) {
if (! Util.isInitialLowerCase(name)) {
name = Util.quoteIfJavaKeyword(name);
}
String className = pkgName.isEmpty() ? name : Util.quoteJavaKeywords(pkgName) + "." + name;
return className;
}
@Override
public synchronized boolean loadPackage(Module module, String packageName, boolean loadDeclarations) {
packageName = Util.quoteJavaKeywords(packageName);
if(loadDeclarations && !loadedPackages.add(cacheKeyByModule(module, packageName))){
return true;
}
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
List<IPackageFragmentRoot> roots = jdtModule.getPackageFragmentRoots();
IPackageFragment packageFragment = null;
for (IPackageFragmentRoot root : roots) {
// skip packages that are not present
if(! root.exists() || ! javaProject.isOnClasspath(root))
continue;
try {
IClasspathEntry entry = root.getRawClasspathEntry();
//TODO: is the following really necessary?
//Note that getContentKind() returns an undefined
//value for a classpath container or variable
if (entry.getEntryKind()!=IClasspathEntry.CPE_CONTAINER &&
entry.getEntryKind()!=IClasspathEntry.CPE_VARIABLE &&
entry.getContentKind()==IPackageFragmentRoot.K_SOURCE &&
!CeylonBuilder.isCeylonSourceEntry(entry)) {
continue;
}
packageFragment = root.getPackageFragment(packageName);
if(! packageFragment.exists()){
continue;
}
} catch (JavaModelException e) {
if (! e.isDoesNotExist()) {
e.printStackTrace();
}
continue;
}
if(!loadDeclarations) {
// we found the package
return true;
}
// we have a few virtual types in java.lang that we need to load but they are not listed from class files
if(module.getNameAsString().equals(JAVA_BASE_MODULE_NAME)
&& packageName.equals("java.lang")) {
loadJavaBaseArrays();
}
IClassFile[] classFiles = new IClassFile[] {};
org.eclipse.jdt.core.ICompilationUnit[] compilationUnits = new org.eclipse.jdt.core.ICompilationUnit[] {};
try {
classFiles = packageFragment.getClassFiles();
} catch (JavaModelException e) {
e.printStackTrace();
}
try {
compilationUnits = packageFragment.getCompilationUnits();
} catch (JavaModelException e) {
e.printStackTrace();
}
List<IType> typesToLoad = new LinkedList<>();
for (IClassFile classFile : classFiles) {
IType type = classFile.getType();
typesToLoad.add(type);
}
for (org.eclipse.jdt.core.ICompilationUnit compilationUnit : compilationUnits) {
// skip removed CUs
if(!compilationUnit.exists())
continue;
try {
for (IType type : compilationUnit.getTypes()) {
typesToLoad.add(type);
}
} catch (JavaModelException e) {
e.printStackTrace();
}
}
for (IType type : typesToLoad) {
String typeFullyQualifiedName = type.getFullyQualifiedName();
String[] nameParts = typeFullyQualifiedName.split("\\.");
String typeQualifiedName = nameParts[nameParts.length - 1];
// only top-levels are added in source declarations
if (typeQualifiedName.indexOf('$') > 0) {
continue;
}
if (type.exists()
&& !sourceDeclarations.containsKey(getToplevelQualifiedName(type.getPackageFragment().getElementName(), typeFullyQualifiedName))
&& ! isTypeHidden(module, typeFullyQualifiedName)) {
convertToDeclaration(module, typeFullyQualifiedName, DeclarationType.VALUE);
}
}
}
}
return false;
}
synchronized public void refreshNameEnvironment() {
try {
lookupEnvironment.nameEnvironment = createSearchableEnvironment();
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public static class ModelLoaderNameEnvironment extends SearchableEnvironment {
public ModelLoaderNameEnvironment(IJavaProject javaProject) throws JavaModelException {
super((JavaProject)javaProject, (WorkingCopyOwner) null);
}
public IJavaProject getJavaProject() {
return project;
}
public IType findTypeInNameLookup(char[][] compoundTypeName) {
if (compoundTypeName == null) return null;
int length = compoundTypeName.length;
if (length <= 1) {
if (length == 0) return null;
return findTypeInNameLookup(new String(compoundTypeName[0]), IPackageFragment.DEFAULT_PACKAGE_NAME);
}
int lengthM1 = length - 1;
char[][] packageName = new char[lengthM1][];
System.arraycopy(compoundTypeName, 0, packageName, 0, lengthM1);
return findTypeInNameLookup(
new String(compoundTypeName[lengthM1]),
CharOperation.toString(packageName));
}
public IType findTypeInNameLookup(String typeName, String packageName) {
JavaElementRequestor packageRequestor = new JavaElementRequestor();
nameLookup.seekPackageFragments(packageName, false, packageRequestor);
LinkedList<IPackageFragment> packagesToSearchIn = new LinkedList<>();
for (IPackageFragment pf : packageRequestor.getPackageFragments()) {
IPackageFragmentRoot packageRoot = (IPackageFragmentRoot) pf.getAncestor(IJavaElement.PACKAGE_FRAGMENT_ROOT);
try {
if (packageRoot.getKind() == IPackageFragmentRoot.K_SOURCE) {
packagesToSearchIn.addFirst(pf);
continue;
}
if (isInCeylonClassesOutputFolder(packageRoot.getPath())) {
continue;
}
packagesToSearchIn.addLast(pf);
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
IType type = null;
for (IPackageFragment pf : packagesToSearchIn) {
// We use considerSecondTypes = false because we will do it explicitly afterwards, in order to use waitForIndexes=true
// TODO : when migrating to Luna only (removing Kepler support), we will be able to simply call :
// nameLookup.findType(typeName, pf, false, NameLookup.ACCEPT_ALL,
// true /* waitForIndices */,
// true /* considerSecondaryTypes */)
// But unfortunately, Kepler doesn't provide the ability to set the 'waitForIndexes' parameter to true.
type = nameLookup.findType(typeName, pf, false, NameLookup.ACCEPT_ALL);
if (type == null) {
JavaModelManager manager = JavaModelManager.getJavaModelManager();
try {
// This is a Copy / Paste from :
// org.eclipse.jdt.internal.core.NameLookup.findSecondaryType(...), in order to be able to call it with waitForIndexes = true:
// type = nameLookup.findSecondaryType(pf.getElementName(), typeName, pf.getJavaProject(), true, null);
IJavaProject javaProject = pf.getJavaProject();
@SuppressWarnings("rawtypes")
Map secondaryTypePaths = manager.secondaryTypes(javaProject, true, null);
if (secondaryTypePaths.size() > 0) {
@SuppressWarnings("rawtypes")
Map types = (Map) secondaryTypePaths.get(packageName==null?"":packageName); //$NON-NLS-1$
if (types != null && types.size() > 0) {
boolean startsWithDollar = false;
if(typeName.startsWith("$")) {
startsWithDollar = true;
typeName = typeName.substring(1);
}
String[] parts = typeName.split("(\\.|\\$)");
if (startsWithDollar) {
parts[0] = "$" + parts[0];
}
int index = 0;
String topLevelClassName = parts[index++];
IType currentClass = (IType) types.get(topLevelClassName);
IType result = currentClass;
while (index < parts.length) {
result = null;
String nestedClassName = parts[index++];
if (currentClass != null && currentClass.exists()) {
currentClass = currentClass.getType(nestedClassName);
result = currentClass;
} else {
break;
}
}
type = result;
}
}
}
catch (JavaModelException jme) {
// give up
}
}
if (type != null) {
break;
}
}
return type;
}
@Override
protected NameEnvironmentAnswer find(String typeName, String packageName) {
if (packageName == null)
packageName = IPackageFragment.DEFAULT_PACKAGE_NAME;
if (this.owner != null) {
String source = this.owner.findSource(typeName, packageName);
if (source != null) {
ICompilationUnit cu = new BasicCompilationUnit(source.toCharArray(),
CharOperation.splitOn('.', packageName.toCharArray()),
typeName + org.eclipse.jdt.internal.core.util.Util.defaultJavaExtension());
return new NameEnvironmentAnswer(cu, null);
}
}
IType type = findTypeInNameLookup(typeName, packageName);
if (type != null) {
// construct name env answer
if (type instanceof BinaryType) { // BinaryType
try {
return new NameEnvironmentAnswer((IBinaryType) ((BinaryType) type).getElementInfo(), null);
} catch (JavaModelException npe) {
// fall back to using owner
}
} else { //SourceType
try {
// retrieve the requested type
SourceTypeElementInfo sourceType = (SourceTypeElementInfo)((SourceType) type).getElementInfo();
ISourceType topLevelType = sourceType;
while (topLevelType.getEnclosingType() != null) {
topLevelType = topLevelType.getEnclosingType();
}
// find all siblings (other types declared in same unit, since may be used for name resolution)
IType[] types = sourceType.getHandle().getCompilationUnit().getTypes();
ISourceType[] sourceTypes = new ISourceType[types.length];
// in the resulting collection, ensure the requested type is the first one
sourceTypes[0] = sourceType;
int length = types.length;
for (int i = 0, index = 1; i < length; i++) {
ISourceType otherType =
(ISourceType) ((JavaElement) types[i]).getElementInfo();
if (!otherType.equals(topLevelType) && index < length) // check that the index is in bounds (see https://bugs.eclipse.org/bugs/show_bug.cgi?id=62861)
sourceTypes[index++] = otherType;
}
return new NameEnvironmentAnswer(sourceTypes, null);
} catch (JavaModelException jme) {
if (jme.isDoesNotExist() && String.valueOf(TypeConstants.PACKAGE_INFO_NAME).equals(typeName)) {
// in case of package-info.java the type doesn't exist in the model,
// but the CU may still help in order to fetch package level annotations.
return new NameEnvironmentAnswer((ICompilationUnit)type.getParent(), null);
}
// no usable answer
}
}
}
return null;
}
}
private INameEnvironment createSearchableEnvironment() throws JavaModelException {
return new ModelLoaderNameEnvironment(javaProject);
}
synchronized private LookupEnvironment getLookupEnvironment() {
if (mustResetLookupEnvironment) {
synchronized (lookupEnvironment) {
createLookupEnvironment();
}
mustResetLookupEnvironment = false;
}
return lookupEnvironment;
}
@Override
public boolean searchAgain(Module module, String name) {
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
if (jdtModule.isCeylonBinaryArchive() || jdtModule.isJavaBinaryArchive()) {
String classRelativePath = name.replace('.', '/');
return jdtModule.containsClass(classRelativePath + ".class") || jdtModule.containsClass(classRelativePath + "_.class");
} else if (jdtModule.isProjectModule()) {
int nameLength = name.length();
int packageEnd = name.lastIndexOf('.');
int classNameStart = packageEnd + 1;
String packageName = packageEnd > 0 ? name.substring(0, packageEnd) : "";
String className = classNameStart < nameLength ? name.substring(classNameStart) : "";
boolean moduleContainsJava = false;
for (IPackageFragmentRoot root : jdtModule.getPackageFragmentRoots()) {
try {
IPackageFragment pf = root.getPackageFragment(packageName);
if (pf.exists() &&
javaProject.isOnClasspath(pf)) {
if (((IPackageFragment)pf).containsJavaResources()) {
moduleContainsJava = true;
break;
}
}
} catch (JavaModelException e) {
e.printStackTrace();
moduleContainsJava = true; // Just in case ...
}
}
if (moduleContainsJava) {
ModelLoaderNameEnvironment nameEnvironment = getNameEnvironment();
if (nameEnvironment.findTypeInNameLookup(className, packageName) != null ||
nameEnvironment.findTypeInNameLookup(className + "_", packageName) != null) {
return true;
}
}
return false;
}
}
return false;
}
@Override
public boolean searchAgain(LazyPackage lazyPackage, String name) {
return searchAgain(lazyPackage.getModule(), lazyPackage.getQualifiedName(lazyPackage.getQualifiedNameString(), name));
}
@Override
public synchronized ClassMirror lookupNewClassMirror(Module module, String name) {
if (sourceDeclarations.containsKey(name)) {
return new SourceClass(sourceDeclarations.get(name));
}
ClassMirror mirror = buildClassMirror(name);
if (mirror == null && lastPartHasLowerInitial(name)) {
// We have to try the unmunged name first, so that we find the symbol
// from the source in preference to the symbol from any
// pre-existing .class file
mirror = buildClassMirror(name+"_");
}
return mirror;
}
public synchronized MissingTypeBinding getMissingTypeBinding() {
return missingTypeBinding;
}
public static interface ActionOnResolvedType {
void doWithBinding(ReferenceBinding referenceBinding);
}
private static WeakHashMap<IProject, WeakReference<JDTModelLoader>> modelLoaders = new WeakHashMap<>();
public static JDTModelLoader getModelLoader(IProject project) {
WeakReference<JDTModelLoader> modelLoaderRef = modelLoaders.get(project);
if (modelLoaderRef != null) {
return modelLoaderRef.get();
}
return null;
}
public static JDTModelLoader getModelLoader(IJavaProject javaProject) {
return getModelLoader(javaProject.getProject());
}
public static JDTModelLoader getModelLoader(IType type) {
return type == null ? null : getModelLoader(type.getJavaProject());
}
public static interface ActionOnMethodBinding {
void doWithBinding(IType declaringClassModel, ReferenceBinding declaringClassBinding, MethodBinding methodBinding);
}
public static interface ActionOnClassBinding {
void doWithBinding(IType classModel, ReferenceBinding classBinding);
}
public static boolean doWithReferenceBinding(final IType typeModel, final ReferenceBinding binding, final ActionOnClassBinding action) {
if (typeModel == null) {
throw new ModelResolutionException("Resolving action requested on a missing declaration");
}
if (binding == null) {
return false;
}
PackageBinding packageBinding = binding.getPackage();
if (packageBinding == null) {
return false;
}
LookupEnvironment lookupEnvironment = packageBinding.environment;
if (lookupEnvironment == null) {
return false;
}
JDTModelLoader modelLoader = getModelLoader(typeModel);
if (modelLoader == null) {
throw new ModelResolutionException("The Model Loader corresponding the type '" + typeModel.getFullyQualifiedName() + "'");
}
synchronized (modelLoader.lookupEnvironmentMutex) {
if (modelLoader.lookupEnvironment != lookupEnvironment) {
return false;
}
action.doWithBinding(typeModel, binding);
return true;
}
}
public static boolean doWithMethodBinding(final IType declaringClassModel, final MethodBinding binding, final ActionOnMethodBinding action) {
if (declaringClassModel == null) {
throw new ModelResolutionException("Resolving action requested on a missing declaration");
}
if (binding == null) {
return false;
}
ReferenceBinding declaringClassBinding = binding.declaringClass;
if (declaringClassBinding == null) {
return false;
}
PackageBinding packageBinding = declaringClassBinding.getPackage();
if (packageBinding == null) {
return false;
}
LookupEnvironment lookupEnvironment = packageBinding.environment;
if (lookupEnvironment == null) {
return false;
}
JDTModelLoader modelLoader = getModelLoader(declaringClassModel);
if (modelLoader == null) {
throw new ModelResolutionException("The Model Loader corresponding the type '" + declaringClassModel.getFullyQualifiedName() + "' doesn't exist");
}
synchronized (modelLoader.lookupEnvironmentMutex) {
if (modelLoader.lookupEnvironment != lookupEnvironment) {
return false;
}
action.doWithBinding(declaringClassModel, declaringClassBinding, binding);
return true;
}
}
public static void doWithResolvedType(IType typeModel, ActionOnResolvedType action) {
if (typeModel == null || ! typeModel.exists()) {
throw new ModelResolutionException("Resolving action requested on a missing declaration");
}
JDTModelLoader modelLoader = getModelLoader(typeModel);
if (modelLoader == null) {
throw new ModelResolutionException("The Model Loader is not available to resolve type '" + typeModel.getFullyQualifiedName() + "'");
}
char[][] compoundName = CharOperation.splitOn('.', typeModel.getFullyQualifiedName().toCharArray());
LookupEnvironment lookupEnvironment = modelLoader.getLookupEnvironment();
synchronized (modelLoader.lookupEnvironmentMutex) {
ReferenceBinding binding;
try {
binding = toBinding(typeModel, lookupEnvironment, compoundName);
} catch (JavaModelException e) {
throw new ModelResolutionException(e);
}
if (binding == null) {
throw new ModelResolutionException("Binding not found for type : '" + typeModel.getFullyQualifiedName() + "'");
}
action.doWithBinding(binding);
}
}
public static IType toType(ReferenceBinding binding) {
ModelLoaderNameEnvironment nameEnvironment = (ModelLoaderNameEnvironment) binding.getPackage().environment.nameEnvironment;
char[][] compoundName = ((ReferenceBinding) binding).compoundName;
IType typeModel = nameEnvironment.findTypeInNameLookup(compoundName);
if (typeModel == null && ! (binding instanceof MissingTypeBinding)) {
throw new ModelResolutionException("JDT reference binding without a JDT IType element !");
}
return typeModel;
}
private JDTClass buildClassMirror(String name) {
if (javaProject == null) {
return null;
}
try {
LookupEnvironment theLookupEnvironment = getLookupEnvironment();
char[][] uncertainCompoundName = CharOperation.splitOn('.', name.toCharArray());
int numberOfParts = uncertainCompoundName.length;
char[][] compoundName = null;
IType type = null;
for (int i=numberOfParts-1; i>0; i--) {
char[][] triedPackageName = new char[0][];
for (int j=0; j<i; j++) {
triedPackageName = CharOperation.arrayConcat(triedPackageName, uncertainCompoundName[j]);
}
char[] triedClassName = new char[0];
for (int k=i; k<numberOfParts; k++) {
triedClassName = CharOperation.concat(triedClassName, uncertainCompoundName[k], '$');
}
ModelLoaderNameEnvironment nameEnvironment = getNameEnvironment();
type = nameEnvironment.findTypeInNameLookup(CharOperation.charToString(triedClassName),
CharOperation.toString(triedPackageName));
if (type != null) {
compoundName = CharOperation.arrayConcat(triedPackageName, triedClassName);
break;
}
}
if (type == null) {
return null;
}
ReferenceBinding binding = toBinding(type, theLookupEnvironment, compoundName);
if (binding != null) {
return new JDTClass(binding, type);
}
} catch (JavaModelException e) {
e.printStackTrace();
}
return null;
}
private static ReferenceBinding toBinding(IType type, LookupEnvironment theLookupEnvironment, char[][] compoundName) throws JavaModelException {
ITypeRoot typeRoot = type.getTypeRoot();
if (typeRoot instanceof IClassFile) {
ClassFile classFile = (ClassFile) typeRoot;
IFile classFileRsrc = (IFile) classFile.getCorrespondingResource();
if (classFileRsrc!=null && !classFileRsrc.exists()) {
//the .class file has been deleted
return null;
}
BinaryTypeBinding binaryTypeBinding = null;
try {
IBinaryType binaryType = classFile.getBinaryTypeInfo(classFileRsrc, true);
binaryTypeBinding = theLookupEnvironment.cacheBinaryType(binaryType, null);
} catch(JavaModelException e) {
if (! e.isDoesNotExist()) {
throw e;
}
}
if (binaryTypeBinding == null) {
ReferenceBinding existingType = theLookupEnvironment.getCachedType(compoundName);
if (existingType == null || ! (existingType instanceof BinaryTypeBinding)) {
return null;
}
binaryTypeBinding = (BinaryTypeBinding) existingType;
}
return binaryTypeBinding;
} else {
ReferenceBinding referenceBinding = theLookupEnvironment.getType(compoundName);
if (referenceBinding != null && ! (referenceBinding instanceof BinaryTypeBinding)) {
if (referenceBinding instanceof ProblemReferenceBinding) {
ProblemReferenceBinding problemReferenceBinding = (ProblemReferenceBinding) referenceBinding;
if (problemReferenceBinding.problemId() == ProblemReasons.InternalNameProvided) {
referenceBinding = problemReferenceBinding.closestReferenceMatch();
} else {
System.out.println(ProblemReferenceBinding.problemReasonString(problemReferenceBinding.problemId()));
return null;
}
}
return referenceBinding;
}
return null;
}
}
private ModelLoaderNameEnvironment getNameEnvironment() {
ModelLoaderNameEnvironment searchableEnvironment = (ModelLoaderNameEnvironment)getLookupEnvironment().nameEnvironment;
return searchableEnvironment;
}
@Override
public synchronized Declaration convertToDeclaration(Module module, String typeName,
DeclarationType declarationType) {
if (sourceDeclarations.containsKey(typeName)) {
return sourceDeclarations.get(typeName).getModelDeclaration();
}
try {
return super.convertToDeclaration(module, typeName, declarationType);
} catch(RuntimeException e) {
// FIXME: pretty sure this is plain wrong as it ignores problems and especially ModelResolutionException and just plain hides them
return null;
}
}
@Override
public void addModuleToClassPath(Module module, ArtifactResult artifact) {
if(artifact != null && module instanceof LazyModule)
((LazyModule)module).loadPackageList(artifact);
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
if (! jdtModule.equals(getLanguageModule()) && (jdtModule.isCeylonBinaryArchive() || jdtModule.isJavaBinaryArchive())) {
CeylonProjectModulesContainer container = CeylonClasspathUtil.getCeylonProjectModulesClasspathContainer(javaProject);
if (container != null) {
IPath modulePath = new Path(artifact.artifact().getPath());
IClasspathEntry newEntry = container.addNewClasspathEntryIfNecessary(modulePath);
if (newEntry!=null) {
try {
JavaCore.setClasspathContainer(container.getPath(), new IJavaProject[] { javaProject },
new IClasspathContainer[] {new CeylonProjectModulesContainer(container)}, null);
} catch (JavaModelException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
refreshNameEnvironment();
}
}
}
}
modulesInClassPath.add(module);
}
@Override
protected boolean isOverridingMethod(MethodMirror methodSymbol) {
return ((JDTMethod)methodSymbol).isOverridingMethod();
}
@Override
protected boolean isOverloadingMethod(MethodMirror methodSymbol) {
return ((JDTMethod)methodSymbol).isOverloadingMethod();
}
@Override
protected Unit getCompiledUnit(LazyPackage pkg, ClassMirror classMirror) {
Unit unit = null;
if (classMirror != null && classMirror instanceof JDTClass) {
JDTClass jdtClass = (JDTClass) classMirror;
String unitName = jdtClass.getFileName();
if (!jdtClass.isBinary()) {
// This search is for source Java classes since several classes might have the same file name
// and live inside the same Java source file => into the same Unit
for (Unit unitToTest : pkg.getUnits()) {
if (unitToTest.getFilename().equals(unitName)) {
return unitToTest;
}
}
}
unit = newCompiledUnit(pkg, jdtClass);
}
if (unit == null) {
unit = unitsByPackage.get(pkg);
if(unit == null){
unit = new PackageTypeFactory(pkg);
unit.setPackage(pkg);
unitsByPackage.put(pkg, unit);
}
}
return unit;
}
public void setModuleAndPackageUnits() {
Context context = getModuleManager().getContext();
for (Module module : context.getModules().getListOfModules()) {
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
if (jdtModule.isCeylonBinaryArchive()) {
for (Package p : jdtModule.getPackages()) {
if (p.getUnit() == null) {
ClassMirror packageClassMirror = lookupClassMirror(jdtModule, p.getQualifiedNameString() + "." + Naming.PACKAGE_DESCRIPTOR_CLASS_NAME);
if (packageClassMirror == null) {
packageClassMirror = lookupClassMirror(jdtModule, p.getQualifiedNameString() + "." + Naming.PACKAGE_DESCRIPTOR_CLASS_NAME.substring(1));
}
// some modules do not declare their main package, because they don't have any declaration to share
// there, for example, so this can be null
if(packageClassMirror != null)
p.setUnit(newCompiledUnit((LazyPackage) p, packageClassMirror));
}
if (p.getNameAsString().equals(jdtModule.getNameAsString())) {
if (jdtModule.getUnit() == null) {
ClassMirror moduleClassMirror = lookupClassMirror(jdtModule, p.getQualifiedNameString() + "." + Naming.MODULE_DESCRIPTOR_CLASS_NAME);
if (moduleClassMirror == null) {
moduleClassMirror = lookupClassMirror(jdtModule, p.getQualifiedNameString() + "." + Naming.OLD_MODULE_DESCRIPTOR_CLASS_NAME);
}
if (moduleClassMirror != null) {
jdtModule.setUnit(newCompiledUnit((LazyPackage) p, moduleClassMirror));
}
}
}
}
}
}
}
}
private Unit newCompiledUnit(LazyPackage pkg, ClassMirror classMirror) {
Unit unit;
JDTClass jdtClass = (JDTClass) classMirror;
IType type = jdtClass.getType();
if (type == null) {
return null;
}
ITypeRoot typeRoot = type.getTypeRoot();
StringBuilder sb = new StringBuilder();
List<String> parts = pkg.getName();
for (int i = 0; i < parts.size(); i++) {
String part = parts.get(i);
if (! part.isEmpty()) {
sb.append(part);
sb.append('/');
}
}
sb.append(jdtClass.getFileName());
String relativePath = sb.toString();
String fileName = jdtClass.getFileName();
String fullPath = jdtClass.getFullPath();
if (!jdtClass.isBinary()) {
unit = new JavaCompilationUnit((org.eclipse.jdt.core.ICompilationUnit)typeRoot, fileName, relativePath, fullPath, pkg);
}
else {
if (jdtClass.isCeylon()) {
if (pkg.getModule() instanceof JDTModule) {
JDTModule module = (JDTModule) pkg.getModule();
IProject originalProject = module.getOriginalProject();
if (originalProject != null) {
unit = new CrossProjectBinaryUnit((IClassFile)typeRoot, fileName, relativePath, fullPath, pkg);
} else {
unit = new CeylonBinaryUnit((IClassFile)typeRoot, fileName, relativePath, fullPath, pkg);
}
} else {
unit = new CeylonBinaryUnit((IClassFile)typeRoot, fileName, relativePath, fullPath, pkg);
}
}
else {
unit = new JavaClassFile((IClassFile)typeRoot, fileName, relativePath, fullPath, pkg);
}
}
return unit;
}
@Override
protected void logError(String message) {
//System.err.println("ERROR: "+message);
}
@Override
protected void logWarning(String message) {
//System.err.println("WARNING: "+message);
}
@Override
protected void logVerbose(String message) {
//System.err.println("NOTE: "+message);
}
@Override
public synchronized void removeDeclarations(List<Declaration> declarations) {
List<Declaration> allDeclarations = new ArrayList<Declaration>(declarations.size());
Set<Package> changedPackages = new HashSet<Package>();
allDeclarations.addAll(declarations);
for (Declaration declaration : declarations) {
Unit unit = declaration.getUnit();
if (unit != null) {
changedPackages.add(unit.getPackage());
}
retrieveInnerDeclarations(declaration, allDeclarations);
}
for (Declaration decl : allDeclarations) {
String fqn = getToplevelQualifiedName(decl.getContainer().getQualifiedNameString(), decl.getName());
sourceDeclarations.remove(fqn);
}
super.removeDeclarations(allDeclarations);
for (Package changedPackage : changedPackages) {
loadedPackages.remove(cacheKeyByModule(changedPackage.getModule(), changedPackage.getNameAsString()));
}
mustResetLookupEnvironment = true;
}
private void retrieveInnerDeclarations(Declaration declaration,
List<Declaration> allDeclarations) {
List<Declaration> members;
try {
members = declaration.getMembers();
} catch(Exception e) {
members = Collections.emptyList();
}
allDeclarations.addAll(members);
for (Declaration member : members) {
retrieveInnerDeclarations(member, allDeclarations);
}
}
private final Map<String, SourceDeclarationHolder> sourceDeclarations = new TreeMap<String, SourceDeclarationHolder>();
public synchronized Set<String> getSourceDeclarations() {
Set<String> declarations = new HashSet<String>();
declarations.addAll(sourceDeclarations.keySet());
return declarations;
}
public synchronized SourceDeclarationHolder getSourceDeclaration(String declarationName) {
return sourceDeclarations.get(declarationName);
}
public class PackageTypeFactory extends TypeFactory {
public PackageTypeFactory(Package pkg) {
super(moduleManager.getContext());
assert (pkg != null);
setPackage(pkg);
}
}
public class GlobalTypeFactory extends TypeFactory {
public GlobalTypeFactory() {
super(moduleManager.getContext());
}
@Override
public Package getPackage() {
synchronized (JDTModelLoader.this) {
if(super.getPackage() == null){
super.setPackage(modules.getLanguageModule()
.getDirectPackage(Module.LANGUAGE_MODULE_NAME));
}
return super.getPackage();
}
}
}
public static interface SourceFileObjectManager {
void setupSourceFileObjects(List<?> treeHolders);
}
public synchronized void setupSourceFileObjects(List<?> treeHolders) {
addSourcePhasedUnits(treeHolders, true);
}
public synchronized void addSourcePhasedUnits(List<?> treeHolders, final boolean isSourceToCompile) {
for (Object treeHolder : treeHolders) {
if (treeHolder instanceof PhasedUnit) {
final PhasedUnit unit = (PhasedUnit) treeHolder;
final String pkgName = unit.getPackage().getQualifiedNameString();
unit.getCompilationUnit().visit(new SourceDeclarationVisitor(){
@Override
public void loadFromSource(Tree.Declaration decl) {
if (decl.getIdentifier()!=null) {
String fqn = getToplevelQualifiedName(pkgName, decl.getIdentifier().getText());
if (! sourceDeclarations.containsKey(fqn)) {
sourceDeclarations.put(fqn, new SourceDeclarationHolder(unit, decl, isSourceToCompile));
}
}
}
@Override
public void loadFromSource(ModuleDescriptor that) {
}
@Override
public void loadFromSource(PackageDescriptor that) {
}
});
}
}
}
public void addSourceArchivePhasedUnits(List<PhasedUnit> sourceArchivePhasedUnits) {
addSourcePhasedUnits(sourceArchivePhasedUnits, false);
}
public synchronized void clearCachesOnPackage(String packageName) {
List<String> keysToRemove = new ArrayList<String>(classMirrorCache.size());
for (Entry<String, ClassMirror> element : classMirrorCache.entrySet()) {
if (element.getValue() == null) {
String className = element.getKey();
if (className != null) {
String classPackageName =className.replaceAll("\\.[^\\.]+$", "");
if (classPackageName.equals(packageName)) {
keysToRemove.add(className);
}
}
}
}
for (String keyToRemove : keysToRemove) {
classMirrorCache.remove(keyToRemove);
}
Package pkg = findPackage(packageName);
loadedPackages.remove(cacheKeyByModule(pkg.getModule(), packageName));
mustResetLookupEnvironment = true;
}
public synchronized void clearClassMirrorCacheForClass(JDTModule module, String classNameToRemove) {
classMirrorCache.remove(cacheKeyByModule(module, classNameToRemove));
mustResetLookupEnvironment = true;
}
@Override
protected LazyValue makeToplevelAttribute(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return (LazyValue) (((SourceClass) classMirror).getModelDeclaration());
}
return super.makeToplevelAttribute(classMirror);
}
@Override
protected LazyMethod makeToplevelMethod(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return (LazyMethod) (((SourceClass) classMirror).getModelDeclaration());
}
return super.makeToplevelMethod(classMirror);
}
@Override
protected LazyClass makeLazyClass(ClassMirror classMirror, Class superClass,
MethodMirror constructor) {
if (classMirror instanceof SourceClass) {
return (LazyClass) (((SourceClass) classMirror).getModelDeclaration());
}
return super.makeLazyClass(classMirror, superClass, constructor);
}
@Override
protected LazyInterface makeLazyInterface(ClassMirror classMirror) {
if (classMirror instanceof SourceClass) {
return (LazyInterface) ((SourceClass) classMirror).getModelDeclaration();
}
return super.makeLazyInterface(classMirror);
}
public TypeFactory getTypeFactory() {
return (TypeFactory) typeFactory;
}
public synchronized Package findPackage(String quotedPkgName) {
String pkgName = quotedPkgName.replace("$", "");
// in theory we only have one package with the same name per module in eclipse
for(Package pkg : packagesByName.values()){
if(pkg.getNameAsString().equals(pkgName))
return pkg;
}
return null;
}
@Override
protected Module findModuleForClassMirror(ClassMirror classMirror) {
String pkgName = getPackageNameForQualifiedClassName(classMirror);
return lookupModuleByPackageName(pkgName);
}
public void loadJDKModules() {
for(String jdkModule : JDKUtils.getJDKModuleNames())
findOrCreateModule(jdkModule, JDKUtils.jdk.version);
for(String jdkOracleModule : JDKUtils.getOracleJDKModuleNames())
findOrCreateModule(jdkOracleModule, JDKUtils.jdk.version);
}
@Override
public synchronized LazyPackage findOrCreateModulelessPackage(String pkgName) {
return (LazyPackage) findPackage(pkgName);
}
@Override
public boolean isModuleInClassPath(Module module) {
return modulesInClassPath.contains(module) ||
( (module instanceof JDTModule) &&
((JDTModule) module).isProjectModule());
}
@Override
protected boolean needsLocalDeclarations() {
return false;
}
void addJDKModuleToClassPath(Module module) {
modulesInClassPath.add(module);
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_JDTModelLoader.java
|
540 |
Collections.sort(response, new Comparator<Ordered>() {
@Override
public int compare(Ordered o1, Ordered o2) {
return new Integer(o1.getOrder()).compareTo(o2.getOrder());
}
});
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_SortedListFactoryBean.java
|
227 |
public interface ModuleConfigurationDao {
public ModuleConfiguration readById(Long id);
public ModuleConfiguration save(ModuleConfiguration config);
public void delete(ModuleConfiguration config);
public List<ModuleConfiguration> readAllByType(ModuleConfigurationType type);
public List<ModuleConfiguration> readActiveByType(ModuleConfigurationType type);
public List<ModuleConfiguration> readByType(Class<? extends ModuleConfiguration> type);
/**
* Returns the number of milliseconds that the current date/time will be cached for queries before refreshing.
* This aids in query caching, otherwise every query that utilized current date would be different and caching
* would be ineffective.
*
* @return the milliseconds to cache the current date/time
*/
public Long getCurrentDateResolution();
/**
* Sets the number of milliseconds that the current date/time will be cached for queries before refreshing.
* This aids in query caching, otherwise every query that utilized current date would be different and caching
* would be ineffective.
*
* @param currentDateResolution the milliseconds to cache the current date/time
*/
public void setCurrentDateResolution(Long currentDateResolution);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_config_dao_ModuleConfigurationDao.java
|
29 |
{
@Override
public int defaultPort()
{
return 5001;
}
@Override
public int port()
{
return config.getAddress().getPort();
}
}, receiver, logging);
| 1no label
|
enterprise_cluster_src_main_java_org_neo4j_cluster_client_ClusterClient.java
|
3,328 |
static class Empty extends FloatArrayAtomicFieldData {
Empty(int numDocs) {
super(numDocs);
}
@Override
public LongValues getLongValues() {
return LongValues.EMPTY;
}
@Override
public DoubleValues getDoubleValues() {
return DoubleValues.EMPTY;
}
@Override
public boolean isMultiValued() {
return false;
}
@Override
public long getNumberUniqueValues() {
return 0;
}
@Override
public boolean isValuesOrdered() {
return false;
}
@Override
public long getMemorySizeInBytes() {
return 0;
}
@Override
public BytesValues getBytesValues(boolean needsHashes) {
return BytesValues.EMPTY;
}
@Override
public ScriptDocValues getScriptValues() {
return ScriptDocValues.EMPTY;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_fielddata_plain_FloatArrayAtomicFieldData.java
|
331 |
static final class Fields {
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_info_PluginsInfo.java
|
576 |
private static class MembershipListenerImpl implements MembershipListener {
private List<EventObject> events = Collections.synchronizedList(new LinkedList<EventObject>());
public void memberAdded(MembershipEvent e) {
events.add(e);
}
public void memberRemoved(MembershipEvent e) {
events.add(e);
}
public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
}
}
| 0true
|
hazelcast_src_test_java_com_hazelcast_cluster_ClusterMembershipTest.java
|