Unnamed: 0
int64 0
6.45k
| func
stringlengths 37
161k
| target
class label 2
classes | project
stringlengths 33
167
|
---|---|---|---|
729 |
/**
 * Client request that registers an {@link ItemListener} on a collection (list or set)
 * and streams item added/removed events back to the requesting client endpoint.
 * Implements {@link RetryableRequest}: re-sending the registration is safe.
 */
public class CollectionAddListenerRequest extends CallableClientRequest implements RetryableRequest {
// Name of the target collection.
private String name;
// If true, the serialized item value is included in events sent to the client.
private boolean includeValue;
// List or set service name; set via setServiceName() before invocation and
// also used to pick the required permission below.
private String serviceName;
// No-arg constructor required for portable deserialization.
public CollectionAddListenerRequest() {
}
public CollectionAddListenerRequest(String name, boolean includeValue) {
this.name = name;
this.includeValue = includeValue;
}
/**
 * Registers the listener with the event service and records the registration
 * on the endpoint (so it can be cleaned up when the client disconnects).
 *
 * @return the event-service registration id
 */
@Override
public Object call() throws Exception {
final ClientEndpoint endpoint = getEndpoint();
final ClientEngine clientEngine = getClientEngine();
ItemListener listener = new ItemListener() {
@Override
public void itemAdded(ItemEvent item) {
send(item);
}
@Override
public void itemRemoved(ItemEvent item) {
send(item);
}
// Forwards the event to the client, but only while the endpoint is still alive.
private void send(ItemEvent event) {
if (endpoint.live()) {
Data item = clientEngine.toData(event.getItem());
final ItemEventType eventType = event.getEventType();
final String uuid = event.getMember().getUuid();
PortableItemEvent portableItemEvent = new PortableItemEvent(item, eventType, uuid);
endpoint.sendEvent(portableItemEvent, getCallId());
}
}
};
final EventService eventService = clientEngine.getEventService();
final CollectionEventFilter filter = new CollectionEventFilter(includeValue);
final EventRegistration registration = eventService.registerListener(getServiceName(), name, filter, listener);
final String registrationId = registration.getId();
// Track the registration per endpoint so it is removed on client disconnect.
endpoint.setListenerRegistration(getServiceName(), name, registrationId);
return registrationId;
}
@Override
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
@Override
public int getFactoryId() {
return CollectionPortableHook.F_ID;
}
@Override
public int getClassId() {
return CollectionPortableHook.COLLECTION_ADD_LISTENER;
}
// Portable wire format: field order here must stay in sync with read() below.
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
writer.writeBoolean("i", includeValue);
writer.writeUTF("s", serviceName);
}
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
includeValue = reader.readBoolean("i");
serviceName = reader.readUTF("s");
}
// Permission type depends on whether the target is the list or the set service.
@Override
public Permission getRequiredPermission() {
if (ListService.SERVICE_NAME.equals(serviceName)) {
return new ListPermission(name, ActionConstants.ACTION_LISTEN);
} else if (SetService.SERVICE_NAME.equals(serviceName)) {
return new SetPermission(name, ActionConstants.ACTION_LISTEN);
}
throw new IllegalArgumentException("No service matched!!!");
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_collection_client_CollectionAddListenerRequest.java
|
43 |
public interface BiFun<A,B,T> { T apply(A a, B b); }
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
179 |
/**
 * Enumerates the possible handling outcomes for a processed URL:
 * page, product, category, proceed down the chain, redirect, or unknown.
 */
public enum ProcessURLAction {
    PAGE,
    PRODUCT,
    CATEGORY,
    PROCEED,
    REDIRECT,
    UNKNOWN
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_web_ProcessURLAction.java
|
2,947 |
/**
 * Token filter factory that selects a stemmer implementation from the
 * {@code language} setting (falling back to the legacy {@code name} setting,
 * then to the default "porter"). Unrecognized values are passed through to
 * Snowball's name-based stemmer lookup.
 */
public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {

    private final String language;

    @Inject
    public StemmerTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
        super(index, indexSettings, name, settings);
        // "name" is the legacy setting key; "porter" is the historical default.
        this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter")));
    }

    @Override
    public TokenStream create(TokenStream tokenStream) {
        if ("arabic".equalsIgnoreCase(language)) {
            return new ArabicStemFilter(tokenStream);
        } else if ("armenian".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new ArmenianStemmer());
        } else if ("basque".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new BasqueStemmer());
        } else if ("brazilian".equalsIgnoreCase(language)) {
            return new BrazilianStemFilter(tokenStream);
        } else if ("bulgarian".equalsIgnoreCase(language)) {
            return new BulgarianStemFilter(tokenStream);
        } else if ("catalan".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new CatalanStemmer());
        } else if ("czech".equalsIgnoreCase(language)) {
            return new CzechStemFilter(tokenStream);
        } else if ("danish".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new DanishStemmer());
        } else if ("dutch".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new DutchStemmer());
        } else if ("english".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new EnglishStemmer());
        } else if ("finnish".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new FinnishStemmer());
        } else if ("french".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new FrenchStemmer());
        } else if ("german".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new GermanStemmer());
        } else if ("german2".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new German2Stemmer());
        } else if ("hungarian".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new HungarianStemmer());
        } else if ("italian".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new ItalianStemmer());
        } else if ("kp".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new KpStemmer());
        } else if ("kstem".equalsIgnoreCase(language)) {
            return new KStemFilter(tokenStream);
        } else if ("lovins".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new LovinsStemmer());
        } else if ("latvian".equalsIgnoreCase(language)) {
            return new LatvianStemFilter(tokenStream);
        } else if ("norwegian".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new NorwegianStemmer());
        } else if ("minimal_norwegian".equalsIgnoreCase(language) || "minimalNorwegian".equalsIgnoreCase(language)) {
            // equalsIgnoreCase for the camelCase alias too: "language" is run
            // through Strings.capitalize() in the constructor, so a
            // case-sensitive equals("minimalNorwegian") could never match.
            return new NorwegianMinimalStemFilter(tokenStream);
        } else if ("porter".equalsIgnoreCase(language)) {
            return new PorterStemFilter(tokenStream);
        } else if ("porter2".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new PorterStemmer());
        } else if ("portuguese".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new PortugueseStemmer());
        } else if ("romanian".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new RomanianStemmer());
        } else if ("russian".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new RussianStemmer());
        } else if ("spanish".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new SpanishStemmer());
        } else if ("swedish".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new SwedishStemmer());
        } else if ("turkish".equalsIgnoreCase(language)) {
            return new SnowballFilter(tokenStream, new TurkishStemmer());
        } else if ("minimal_english".equalsIgnoreCase(language) || "minimalEnglish".equalsIgnoreCase(language)) {
            return new EnglishMinimalStemFilter(tokenStream);
        } else if ("possessive_english".equalsIgnoreCase(language) || "possessiveEnglish".equalsIgnoreCase(language)) {
            return new EnglishPossessiveFilter(version, tokenStream);
        } else if ("light_finish".equalsIgnoreCase(language) || "lightFinish".equalsIgnoreCase(language)) {
            // leaving this misspelled variant for backward compatibility
            return new FinnishLightStemFilter(tokenStream);
        } else if ("light_finnish".equalsIgnoreCase(language) || "lightFinnish".equalsIgnoreCase(language)) {
            return new FinnishLightStemFilter(tokenStream);
        } else if ("light_french".equalsIgnoreCase(language) || "lightFrench".equalsIgnoreCase(language)) {
            return new FrenchLightStemFilter(tokenStream);
        } else if ("minimal_french".equalsIgnoreCase(language) || "minimalFrench".equalsIgnoreCase(language)) {
            return new FrenchMinimalStemFilter(tokenStream);
        } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) {
            return new GermanLightStemFilter(tokenStream);
        } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) {
            return new GermanMinimalStemFilter(tokenStream);
        } else if ("hindi".equalsIgnoreCase(language)) {
            return new HindiStemFilter(tokenStream);
        } else if ("light_hungarian".equalsIgnoreCase(language) || "lightHungarian".equalsIgnoreCase(language)) {
            return new HungarianLightStemFilter(tokenStream);
        } else if ("indonesian".equalsIgnoreCase(language)) {
            return new IndonesianStemFilter(tokenStream);
        } else if ("light_italian".equalsIgnoreCase(language) || "lightItalian".equalsIgnoreCase(language)) {
            return new ItalianLightStemFilter(tokenStream);
        } else if ("light_portuguese".equalsIgnoreCase(language) || "lightPortuguese".equalsIgnoreCase(language)) {
            return new PortugueseLightStemFilter(tokenStream);
        } else if ("minimal_portuguese".equalsIgnoreCase(language) || "minimalPortuguese".equalsIgnoreCase(language)) {
            return new PortugueseMinimalStemFilter(tokenStream);
        } else if ("light_russian".equalsIgnoreCase(language) || "lightRussian".equalsIgnoreCase(language)) {
            return new RussianLightStemFilter(tokenStream);
        } else if ("light_spanish".equalsIgnoreCase(language) || "lightSpanish".equalsIgnoreCase(language)) {
            return new SpanishLightStemFilter(tokenStream);
        } else if ("light_swedish".equalsIgnoreCase(language) || "lightSwedish".equalsIgnoreCase(language)) {
            return new SwedishLightStemFilter(tokenStream);
        } else if ("greek".equalsIgnoreCase(language)) {
            return new GreekStemFilter(tokenStream);
        }
        // Fall back to Snowball's reflective name-based stemmer lookup.
        // (A second "portuguese" branch used to live here; it was unreachable
        // because the earlier "portuguese" check always matched first.)
        return new SnowballFilter(tokenStream, language);
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_analysis_StemmerTokenFilterFactory.java
|
430 |
/**
 * Tri-state flag describing whether a custom field is searchable:
 * explicitly yes, explicitly no, or left unspecified.
 */
public enum CustomFieldSearchableTypes {
    NOT_SPECIFIED,
    YES,
    NO
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_client_CustomFieldSearchableTypes.java
|
415 |
/**
 * Marks a String field whose value in the admin tool is chosen from a dropdown
 * populated by querying a data-driven enumeration entity
 * (DataDrivenEnumerationValueImpl by default, or any substitute entity
 * configured via {@link #optionListEntity()}).
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD})
public @interface AdminPresentationDataDrivenEnumeration {
/**
 * <p>Optional - only required if the target entity is other than DataDrivenEnumerationValueImpl. The annotated
 * field must be of type String. DataDrivenEnumerationValueImpl is the standard entity for storing data driven enumerations,
 * but a completely different entity (e.g. CountryImpl) could be substituted, if applicable.</p>
 *
 * <p>Specify the target entity that should be queried for the list of options that will be presented to the user in a
 * drop down list. The value selected from the dropdown will become the String value for this field.</p>
 *
 * @return the entity class representing the data to populate a dropdown field in the admin tool
 */
Class<?> optionListEntity() default DataDrivenEnumerationValueImpl.class;
/**
 * <p>Optional - only required if it is desirable to filter the list of items returned from the query for the optionListEntity. This is useful if you
 * only want to present part of a table as options in the data driven enumeration. Note, when configuring for optionListEntity
 * equals DataDrivenEnumerationValueImpl, it is generally appropriate to denote:</p>
 *
 * <p>@OptionFilterParam(param="type.key", value="[the key value of the DataDrivenEnumerationImpl instance]", paramType=[your param type])</p>
 *
 * <p>Additional parameters with which to filter the list of options shown to the user in the admin tool</p>
 *
 * @return list of parameters with which to filter the option list
 */
OptionFilterParam[] optionFilterParams() default {};
/**
 * <p>Optional - only required if the optionListEntity is not DataDrivenEnumerationValueImpl.</p>
 *
 * <p>Specify the field in the target entity that contains the value that will be persisted into this annotated field.</p>
 *
 * @return the value field in the target entity
 */
String optionValueFieldName() default "";
/**
 * <p>Optional - only required if the optionListEntity is not DataDrivenEnumerationValueImpl.</p>
 *
 * <p>Specify the field in the target entity that contains the display value that will be shown to the user in the dropdown field</p>
 *
 * @return the display field in the target entity
 */
String optionDisplayFieldName() default "";
/**
 * <p>Optional - only required if you want to allow users to edit (or enter new values) in the dropdown. If true, users will
 * be able to type their own value or select from one of the data-driven values. This is only required when the optionListEntity
 * is not DataDrivenEnumerationValueImpl, since that class already defines this property (i.e. the modifiable property)</p>
 *
 * <p>Whether or not the user can type in the data-driven field</p>
 *
 * @return whether or not the user can type in the data-driven field
 */
boolean optionCanEditValues() default false;
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_AdminPresentationDataDrivenEnumeration.java
|
144 |
/**
 * Tests for {@code OIntegerSerializer}: reported field size, byte-array
 * round trips (portable and native byte orders), and compatibility between
 * native serialization and direct-memory deserialization.
 */
@Test
public class IntegerSerializerTest {
// An int serializes into exactly 4 bytes.
private static final int FIELD_SIZE = 4;
// Sample value round-tripped by every test.
private static final Integer OBJECT = 1;
private OIntegerSerializer integerSerializer;
// Shared scratch buffer, exactly large enough for one serialized int.
byte[] stream = new byte[FIELD_SIZE];
@BeforeClass
public void beforeClass() {
integerSerializer = new OIntegerSerializer();
}
public void testFieldSize() {
// getObjectSize ignores its argument for fixed-size types, so null is fine here.
Assert.assertEquals(integerSerializer.getObjectSize(null), FIELD_SIZE);
}
public void testSerialize() {
integerSerializer.serialize(OBJECT, stream, 0);
Assert.assertEquals(integerSerializer.deserialize(stream, 0), OBJECT);
}
public void testSerializeNative() {
integerSerializer.serializeNative(OBJECT, stream, 0);
Assert.assertEquals(integerSerializer.deserializeNative(stream, 0), OBJECT);
}
// Bytes written with serializeNative must be readable through a direct-memory pointer.
public void testNativeDirectMemoryCompatibility() {
integerSerializer.serializeNative(OBJECT, stream, 0);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream);
try {
Assert.assertEquals(integerSerializer.deserializeFromDirectMemory(pointer, 0), OBJECT);
} finally {
// Off-heap memory is not garbage collected; always release it.
pointer.free();
}
}
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_serialization_types_IntegerSerializerTest.java
|
1,373 |
/**
 * A single add/remove index-alias action, streamable for transport.
 *
 * <p>Wire format (see {@link #readFrom} and {@link #writeTo}): one action-type
 * byte followed by the optional strings index, alias, filter, indexRouting,
 * searchRouting — the read and write orders must stay in sync.</p>
 */
public class AliasAction implements Streamable {

    /** Kind of alias action, encoded as a single byte on the wire. */
    public static enum Type {
        ADD((byte) 0),
        REMOVE((byte) 1);

        private final byte value;

        Type(byte value) {
            this.value = value;
        }

        /** @return the wire byte for this action type */
        public byte value() {
            return value;
        }

        /**
         * Decodes a wire byte back into a {@link Type}.
         *
         * @throws ElasticsearchIllegalArgumentException if the byte is unknown
         */
        public static Type fromValue(byte value) {
            if (value == 0) {
                return ADD;
            } else if (value == 1) {
                return REMOVE;
            } else {
                throw new ElasticsearchIllegalArgumentException("No type for action [" + value + "]");
            }
        }
    }

    private Type actionType;
    private String index;
    private String alias;
    @Nullable
    private String filter;
    @Nullable
    private String indexRouting;
    @Nullable
    private String searchRouting;

    // Used only by readAliasAction(); fields are then populated from the stream.
    private AliasAction() {
    }

    /** Copy constructor. */
    public AliasAction(AliasAction other) {
        this.actionType = other.actionType;
        this.index = other.index;
        this.alias = other.alias;
        this.filter = other.filter;
        this.indexRouting = other.indexRouting;
        this.searchRouting = other.searchRouting;
    }

    public AliasAction(Type actionType) {
        // Chain to the widest constructor instead of duplicating assignments.
        this(actionType, null, null, null);
    }

    public AliasAction(Type actionType, String index, String alias) {
        this(actionType, index, alias, null);
    }

    public AliasAction(Type actionType, String index, String alias, String filter) {
        this.actionType = actionType;
        this.index = index;
        this.alias = alias;
        this.filter = filter;
    }

    public Type actionType() {
        return actionType;
    }

    public AliasAction index(String index) {
        this.index = index;
        return this;
    }

    public String index() {
        return index;
    }

    public AliasAction alias(String alias) {
        this.alias = alias;
        return this;
    }

    public String alias() {
        return alias;
    }

    public String filter() {
        return filter;
    }

    public AliasAction filter(String filter) {
        this.filter = filter;
        return this;
    }

    /**
     * Sets the filter from a map by serializing it to JSON; a null or empty
     * map clears the filter.
     *
     * @throws ElasticsearchGenerationException if JSON generation fails
     */
    public AliasAction filter(Map<String, Object> filter) {
        if (filter == null || filter.isEmpty()) {
            this.filter = null;
            return this;
        }
        try {
            XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
            builder.map(filter);
            this.filter = builder.string();
            return this;
        } catch (IOException e) {
            throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
        }
    }

    /**
     * Sets the filter from a {@link FilterBuilder} by serializing it to JSON;
     * a null builder clears the filter.
     *
     * @throws ElasticsearchGenerationException if JSON generation fails
     */
    public AliasAction filter(FilterBuilder filterBuilder) {
        if (filterBuilder == null) {
            this.filter = null;
            return this;
        }
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
            filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.close();
            this.filter = builder.string();
            return this;
        } catch (IOException e) {
            throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
        }
    }

    /** Sets both index routing and search routing to the same value. */
    public AliasAction routing(String routing) {
        this.indexRouting = routing;
        this.searchRouting = routing;
        return this;
    }

    public String indexRouting() {
        return indexRouting;
    }

    public AliasAction indexRouting(String indexRouting) {
        this.indexRouting = indexRouting;
        return this;
    }

    public String searchRouting() {
        return searchRouting;
    }

    public AliasAction searchRouting(String searchRouting) {
        this.searchRouting = searchRouting;
        return this;
    }

    /** Reads a new {@link AliasAction} from the stream. */
    public static AliasAction readAliasAction(StreamInput in) throws IOException {
        AliasAction aliasAction = new AliasAction();
        aliasAction.readFrom(in);
        return aliasAction;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // Field order must mirror writeTo() exactly.
        actionType = Type.fromValue(in.readByte());
        index = in.readOptionalString();
        alias = in.readOptionalString();
        filter = in.readOptionalString();
        indexRouting = in.readOptionalString();
        searchRouting = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Field order must mirror readFrom() exactly.
        out.writeByte(actionType.value());
        out.writeOptionalString(index);
        out.writeOptionalString(alias);
        out.writeOptionalString(filter);
        out.writeOptionalString(indexRouting);
        out.writeOptionalString(searchRouting);
    }

    public static AliasAction newAddAliasAction(String index, String alias) {
        return new AliasAction(Type.ADD, index, alias);
    }

    public static AliasAction newRemoveAliasAction(String index, String alias) {
        return new AliasAction(Type.REMOVE, index, alias);
    }
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_AliasAction.java
|
1,461 |
/**
 * {@link OCommandExecutorSQLFactory} supplying the graph-specific SQL commands:
 * CREATE/DELETE VERTEX and CREATE/DELETE EDGE.
 */
public class OGraphCommandExecutorSQLFactory implements OCommandExecutorSQLFactory {

  // Immutable command-name -> executor-class registry, built once at class load.
  private static final Map<String, Class<? extends OCommandExecutorSQLAbstract>> COMMANDS;

  static {
    final Map<String, Class<? extends OCommandExecutorSQLAbstract>> commands = new HashMap<String, Class<? extends OCommandExecutorSQLAbstract>>();
    commands.put(OCommandExecutorSQLCreateEdge.NAME, OCommandExecutorSQLCreateEdge.class);
    commands.put(OCommandExecutorSQLDeleteEdge.NAME, OCommandExecutorSQLDeleteEdge.class);
    commands.put(OCommandExecutorSQLCreateVertex.NAME, OCommandExecutorSQLCreateVertex.class);
    commands.put(OCommandExecutorSQLDeleteVertex.NAME, OCommandExecutorSQLDeleteVertex.class);
    COMMANDS = Collections.unmodifiableMap(commands);
  }

  /**
   * {@inheritDoc}
   */
  public Set<String> getCommandNames() {
    return COMMANDS.keySet();
  }

  /**
   * {@inheritDoc}
   *
   * @throws OCommandExecutionException if the name is unknown or the executor
   *           cannot be instantiated reflectively
   */
  public OCommandExecutorSQLAbstract createCommand(final String name) throws OCommandExecutionException {
    final Class<? extends OCommandExecutorSQLAbstract> clazz = COMMANDS.get(name);
    if (clazz == null) {
      throw new OCommandExecutionException("Unknown command name: " + name);
    }
    try {
      // Executors are created reflectively, so each class needs a no-arg constructor.
      return clazz.newInstance();
    } catch (Exception e) {
      throw new OCommandExecutionException("Error in creation of command " + name
          + "(). Probably there is no empty constructor or the constructor generates errors", e);
    }
  }

  /**
   * Returns an OrientBaseGraph implementation wrapping the database bound to
   * the current thread, wrapping it in a document database first if needed.
   *
   * @return a non-transactional graph view over the thread-local database
   */
  public static OrientBaseGraph getGraph() {
    ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
    if (!(database instanceof ODatabaseDocumentTx))
      database = new ODatabaseDocumentTx((ODatabaseRecordTx) database);
    return new OrientGraphNoTx((ODatabaseDocumentTx) database);
  }
}
| 1no label
|
graphdb_src_main_java_com_orientechnologies_orient_graph_sql_OGraphCommandExecutorSQLFactory.java
|
160 |
/**
 * Binary serializer for String values.
 *
 * <p>Wire layout: a 4-byte int character count followed by one 2-byte pair per
 * character, low byte first (little-endian UTF-16 code units). The byte-array
 * and direct-memory code paths must stay consistent with this layout.
 * Null values are not supported: callers must not pass null objects.</p>
 */
public class OStringSerializer implements OBinarySerializer<String> {
public static final OStringSerializer INSTANCE = new OStringSerializer();
public static final byte ID = 13;
// Size = 2 bytes per char plus the 4-byte length prefix.
public int getObjectSize(final String object, Object... hints) {
return object.length() * 2 + OIntegerSerializer.INT_SIZE;
}
public void serialize(final String object, final byte[] stream, int startPosition, Object... hints) {
int length = object.length();
// Length prefix first, then the character data.
OIntegerSerializer.INSTANCE.serialize(length, stream, startPosition);
startPosition += OIntegerSerializer.INT_SIZE;
char[] stringContent = new char[length];
object.getChars(0, length, stringContent, 0);
for (char character : stringContent) {
// Low byte, then high byte (little-endian code unit).
stream[startPosition] = (byte) character;
startPosition++;
stream[startPosition] = (byte) (character >>> 8);
startPosition++;
}
}
public String deserialize(final byte[] stream, int startPosition) {
int len = OIntegerSerializer.INSTANCE.deserialize(stream, startPosition);
char[] buffer = new char[len];
startPosition += OIntegerSerializer.INT_SIZE;
for (int i = 0; i < len; i++) {
// Reassemble each code unit from its little-endian byte pair; the 0xFF
// masks undo Java's sign extension of bytes.
buffer[i] = (char) ((0xFF & stream[startPosition]) | ((0xFF & stream[startPosition + 1]) << 8));
startPosition += 2;
}
return new String(buffer);
}
// Object size can be computed from the length prefix alone.
public int getObjectSize(byte[] stream, int startPosition) {
return OIntegerSerializer.INSTANCE.deserialize(stream, startPosition) * 2 + OIntegerSerializer.INT_SIZE;
}
public byte getId() {
return ID;
}
public int getObjectSizeNative(byte[] stream, int startPosition) {
return OIntegerSerializer.INSTANCE.deserializeNative(stream, startPosition) * 2 + OIntegerSerializer.INT_SIZE;
}
// Same layout as serialize(), but the length prefix uses native byte order.
public void serializeNative(String object, byte[] stream, int startPosition, Object... hints) {
int length = object.length();
OIntegerSerializer.INSTANCE.serializeNative(length, stream, startPosition);
startPosition += OIntegerSerializer.INT_SIZE;
char[] stringContent = new char[length];
object.getChars(0, length, stringContent, 0);
for (char character : stringContent) {
stream[startPosition] = (byte) character;
startPosition++;
stream[startPosition] = (byte) (character >>> 8);
startPosition++;
}
}
public String deserializeNative(byte[] stream, int startPosition) {
int len = OIntegerSerializer.INSTANCE.deserializeNative(stream, startPosition);
char[] buffer = new char[len];
startPosition += OIntegerSerializer.INT_SIZE;
for (int i = 0; i < len; i++) {
buffer[i] = (char) ((0xFF & stream[startPosition]) | ((0xFF & stream[startPosition + 1]) << 8));
startPosition += 2;
}
return new String(buffer);
}
@Override
public void serializeInDirectMemory(String object, ODirectMemoryPointer pointer, long offset, Object... hints) {
int length = object.length();
pointer.setInt(offset, length);
offset += OIntegerSerializer.INT_SIZE;
// Stage the bytes in a heap array, then copy to off-heap memory in one call.
byte[] binaryData = new byte[length * 2];
char[] stringContent = new char[length];
object.getChars(0, length, stringContent, 0);
int counter = 0;
for (char character : stringContent) {
binaryData[counter] = (byte) character;
counter++;
binaryData[counter] = (byte) (character >>> 8);
counter++;
}
pointer.set(offset, binaryData, 0, binaryData.length);
}
@Override
public String deserializeFromDirectMemory(ODirectMemoryPointer pointer, long offset) {
int len = pointer.getInt(offset);
final char[] buffer = new char[len];
offset += OIntegerSerializer.INT_SIZE;
// Bulk-copy the character bytes out of off-heap memory, then decode.
byte[] binaryData = pointer.get(offset, buffer.length * 2);
for (int i = 0; i < len; i++)
buffer[i] = (char) ((0xFF & binaryData[i << 1]) | ((0xFF & binaryData[(i << 1) + 1]) << 8));
return new String(buffer);
}
@Override
public int getObjectSizeInDirectMemory(ODirectMemoryPointer pointer, long offset) {
return pointer.getInt(offset) * 2 + OIntegerSerializer.INT_SIZE;
}
// Strings are variable-length, so the fixed-length accessors are inert.
public boolean isFixedLength() {
return false;
}
public int getFixedLength() {
return 0;
}
@Override
public String preprocess(String value, Object... hints) {
return value;
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_serialization_types_OStringSerializer.java
|
1,523 |
/**
 * Immutable outcome of an allocation round: whether the {@link RoutingTable}
 * was modified, the resulting table, and an {@link AllocationExplanation}.
 */
public static class Result {

    private final boolean changed;
    private final RoutingTable routingTable;
    private final AllocationExplanation explanation;

    /**
     * Creates a new {@link RoutingAllocation.Result}.
     *
     * @param changed      whether allocation modified the routing table
     * @param routingTable the routing table this result references
     * @param explanation  explanation of the result
     */
    public Result(boolean changed, RoutingTable routingTable, AllocationExplanation explanation) {
        this.changed = changed;
        this.routingTable = routingTable;
        this.explanation = explanation;
    }

    /** @return {@code true} if allocation changed the {@link RoutingTable}, otherwise {@code false} */
    public boolean changed() {
        return changed;
    }

    /** @return the {@link RoutingTable} referenced by this result */
    public RoutingTable routingTable() {
        return routingTable;
    }

    /** @return the explanation of this result */
    public AllocationExplanation explanation() {
        return explanation;
    }
}
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_allocation_RoutingAllocation.java
|
105 |
/**
 * Static helpers shared by the quick-fix / correction proposal machinery.
 */
class CorrectionUtil {

    /**
     * Computes the Levenshtein edit distance between two strings using the
     * classic two-row dynamic-programming formulation (O(min-row) extra space).
     */
    static int getLevenshteinDistance(String x, String y) {
        int n = x.length(); // length of x
        int m = y.length(); // length of y
        if (n == 0) return m;
        if (m == 0) return n;
        int[] p = new int[n + 1]; // 'previous' cost row
        int[] d = new int[n + 1]; // current cost row
        int[] swap; // placeholder to assist in swapping p and d
        // indexes into strings x and y
        int i; // iterates through x
        int j; // iterates through y
        char t_j; // jth character of y
        int cost;
        for (i = 0; i <= n; i++) {
            p[i] = i;
        }
        for (j = 1; j <= m; j++) {
            t_j = y.charAt(j - 1);
            d[0] = j;
            for (i = 1; i <= n; i++) {
                cost = x.charAt(i - 1) == t_j ? 0 : 1;
                // minimum of cell to the left+1, above+1, and diagonal+cost
                d[i] = Math.min(Math.min(d[i - 1] + 1, p[i] + 1), p[i - 1] + cost);
            }
            // current row becomes the previous row for the next iteration
            swap = p;
            p = d;
            d = swap;
        }
        // the last swap left the most recent cost counts in p
        return p[n];
    }

    /**
     * Returns the body of a class, interface, or object definition node, or
     * {@code null} for any other declaration kind.
     */
    static Tree.Body getClassOrInterfaceBody(Tree.Declaration decNode) {
        if (decNode instanceof Tree.ClassDefinition) {
            return ((Tree.ClassDefinition) decNode).getClassBody();
        }
        else if (decNode instanceof Tree.InterfaceDefinition) {
            return ((Tree.InterfaceDefinition) decNode).getInterfaceBody();
        }
        else if (decNode instanceof Tree.ObjectDefinition) {
            return ((Tree.ObjectDefinition) decNode).getClassBody();
        }
        else {
            return null;
        }
    }

    /**
     * Returns the AST for the given unit, preferring the (possibly more
     * up-to-date) tree from the current Ceylon editor when that tree belongs
     * to the same unit; otherwise falls back to the phased unit's own tree.
     */
    static Tree.CompilationUnit getRootNode(PhasedUnit unit) {
        IEditorPart ce = getCurrentEditor();
        if (ce instanceof CeylonEditor) {
            CeylonParseController cpc = ((CeylonEditor) ce).getParseController();
            if (cpc != null) {
                Tree.CompilationUnit rn = cpc.getRootNode();
                if (rn != null) {
                    Unit u = rn.getUnit();
                    if (u.equals(unit.getUnit())) {
                        return rn;
                    }
                }
            }
        }
        return unit.getCompilationUnit();
    }

    /**
     * Renders the given types as an intersection type string ("A & B & C").
     */
    static String asIntersectionTypeString(List<ProducedType> types) {
        // StringBuilder: this buffer is purely local, no synchronization needed.
        StringBuilder missingSatisfiedTypesText = new StringBuilder();
        for (ProducedType missingSatisfiedType: types) {
            if (missingSatisfiedTypesText.length() != 0) {
                missingSatisfiedTypesText.append(" & ");
            }
            missingSatisfiedTypesText.append(missingSatisfiedType.getProducedTypeName());
        }
        return missingSatisfiedTypesText.toString();
    }

    /**
     * Produces a source-text default-value expression for the given type:
     * "null" for optionals, literals for Boolean/Integer/Float/String/Byte,
     * bracketed element defaults for tuples/sequences/iterables, and
     * "nothing" for unknown or unhandled types. Aliases are resolved first.
     */
    static String defaultValue(Unit unit, ProducedType t) {
        if (isTypeUnknown(t)) {
            return "nothing";
        }
        TypeDeclaration tn = t.getDeclaration();
        if (tn.isAlias()) {
            // resolve the alias and produce the default for the aliased type
            return defaultValue(unit, tn.getExtendedType());
        }
        boolean isClass = tn instanceof Class;
        if (unit.isOptionalType(t)) {
            return "null";
        }
        else if (isClass &&
                tn.equals(unit.getBooleanDeclaration())) {
            return "false";
        }
        else if (isClass &&
                tn.equals(unit.getIntegerDeclaration())) {
            return "0";
        }
        else if (isClass &&
                tn.equals(unit.getFloatDeclaration())) {
            return "0.0";
        }
        else if (isClass &&
                tn.equals(unit.getStringDeclaration())) {
            return "\"\"";
        }
        else if (isClass &&
                tn.equals(unit.getByteDeclaration())) {
            return "0.byte";
        }
        else if (isClass &&
                tn.equals(unit.getTupleDeclaration())) {
            // emit a default for each required tuple element
            final int minimumLength = unit.getTupleMinimumLength(t);
            final List<ProducedType> tupleTypes = unit.getTupleElementTypes(t);
            final StringBuilder sb = new StringBuilder();
            for (int i = 0; i < minimumLength; i++) {
                sb.append(sb.length() == 0 ? "[" : ", ");
                ProducedType currentType = tupleTypes.get(i);
                if (unit.isSequentialType(currentType)) {
                    currentType = unit.getSequentialElementType(currentType);
                }
                sb.append(defaultValue(unit, currentType));
            }
            sb.append(']');
            return sb.toString();
        }
        else if (unit.isSequentialType(t)) {
            final StringBuilder sb = new StringBuilder();
            sb.append('[');
            // only non-empty-capable sequences need a representative element
            if (!unit.getEmptyDeclaration().getType().isSubtypeOf(t)) {
                sb.append(defaultValue(unit, unit.getSequentialElementType(t)));
            }
            sb.append(']');
            return sb.toString();
        }
        else if (unit.isIterableType(t)) {
            final StringBuilder sb = new StringBuilder();
            sb.append('{');
            if (!unit.getEmptyDeclaration().getType().isSubtypeOf(t)) {
                sb.append(defaultValue(unit, unit.getIteratedType(t)));
            }
            sb.append('}');
            return sb.toString();
        }
        else {
            return "nothing";
        }
    }

    /**
     * Computes the editor selection to place inside a newly inserted
     * definition: prefers the "nothing" placeholder after "=" or "=>",
     * then the expression after a plain "=" / "=>", then just inside "{".
     */
    static Region computeSelection(int offset, String def) {
        int length;
        int loc = def.indexOf("= nothing");
        if (loc < 0) loc = def.indexOf("=> nothing");
        if (loc < 0) {
            loc = def.indexOf("= ");
            if (loc < 0) loc = def.indexOf("=> ");
            if (loc < 0) {
                // no specifier at all: position the caret just inside the body
                loc = def.indexOf("{") + 1;
                length = 0;
            }
            else {
                // select the expression up to the terminating semicolon
                loc = def.indexOf(" ", loc) + 1;
                int semi = def.indexOf(";", loc);
                length = semi < 0 ? def.length() - loc : semi - loc;
            }
        }
        else {
            // select the 7-character "nothing" placeholder
            loc = def.indexOf(" ", loc) + 1;
            length = 7;
        }
        return new Region(offset + loc, length);
    }
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_CorrectionUtil.java
|
679 |
// Factory for LIST_REPLICATION instances; the Integer argument (the type id)
// is ignored because the operation carries no constructor state.
constructors[LIST_REPLICATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new ListReplicationOperation();
}
};
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_CollectionDataSerializerHook.java
|
34 |
/**
 * Convenience no-op implementation of {@link ClusterListener}: extend this
 * class and override only the callbacks of interest.
 */
public abstract class Adapter implements ClusterListener {

    @Override
    public void enteredCluster(ClusterConfiguration clusterConfiguration) {
        // no-op
    }

    @Override
    public void joinedCluster(InstanceId instanceId, URI member) {
        // no-op
    }

    @Override
    public void leftCluster(InstanceId instanceId) {
        // no-op
    }

    @Override
    public void leftCluster() {
        // no-op
    }

    @Override
    public void elected(String role, InstanceId instanceId, URI electedMember) {
        // no-op
    }

    @Override
    public void unelected(String role, InstanceId instanceId, URI electedMember) {
        // no-op
    }
}
| 1no label
|
enterprise_cluster_src_main_java_org_neo4j_cluster_protocol_cluster_ClusterListener.java
|
863 |
/**
 * Represents a database user. Wraps the underlying "OUser" document and keeps the
 * in-memory set of {@link ORole}s granted to it; all permission checks are
 * delegated to those roles.
 */
public class OUser extends ODocumentWrapper {
  private static final long serialVersionUID = 1L;

  public static final String ADMIN = "admin";
  public static final String CLASS_NAME = "OUser";

  /** Account status: a SUSPENDED user is present in the database but cannot log in. */
  public enum STATUSES {
    SUSPENDED, ACTIVE
  }

  // AVOID THE INVOCATION OF SETTER
  protected Set<ORole> roles = new HashSet<ORole>();

  /**
   * Constructor used in unmarshalling.
   */
  public OUser() {
  }

  public OUser(final String iName) {
    super(CLASS_NAME);
    document.field("name", iName);
    setAccountStatus(STATUSES.ACTIVE);
  }

  public OUser(String iUserName, final String iUserPassword) {
    // Use the shared constant instead of a duplicated "OUser" literal.
    super(CLASS_NAME);
    document.field("name", iUserName);
    setPassword(iUserPassword);
    setAccountStatus(STATUSES.ACTIVE);
  }

  /**
   * Create the user by reading the source document.
   */
  public OUser(final ODocument iSource) {
    fromStream(iSource);
  }

  @Override
  @OAfterDeserialization
  public void fromStream(final ODocument iSource) {
    if (document != null)
      // Already initialized: don't overwrite the current state.
      return;
    document = iSource;

    roles = new HashSet<ORole>();
    final Collection<ODocument> loadedRoles = iSource.field("roles");
    if (loadedRoles != null)
      for (final ODocument d : loadedRoles) {
        final ORole role = document.getDatabase().getMetadata().getSecurity().getRole((String) d.field("name"));
        if (role == null) {
          // Dangling role reference: warn, try to repair the security schema and skip it.
          OLogManager.instance().warn(this,
              "User '%s' declares to have the role '%s' but it does not exist in the database, skipping it", getName(),
              d.field("name"));
          document.getDatabase().getMetadata().getSecurity().repair();
        } else
          roles.add(role);
      }
  }

  /**
   * Checks if the user has the permission to access to the requested resource for the requested operation.
   *
   * @param iResource
   *          Requested resource
   * @param iOperation
   *          Requested operation
   * @return The role that has granted the permission if any, otherwise a OSecurityAccessException exception is raised
   * @exception OSecurityAccessException
   */
  public ORole allow(final String iResource, final int iOperation) {
    if (roles == null || roles.isEmpty())
      throw new OSecurityAccessException(document.getDatabase().getName(), "User '" + document.field("name")
          + "' has no role defined");

    final ORole role = checkIfAllowed(iResource, iOperation);

    if (role == null)
      throw new OSecurityAccessException(document.getDatabase().getName(), "User '" + document.field("name")
          + "' has no permission to execute the operation '" + ORole.permissionToString(iOperation)
          + "' against the resource: " + iResource);

    return role;
  }

  /**
   * Checks if the user has the permission to access to the requested resource for the requested operation.
   *
   * @param iResource
   *          Requested resource
   * @param iOperation
   *          Requested operation
   * @return The role that has granted the permission if any, otherwise null
   */
  public ORole checkIfAllowed(final String iResource, final int iOperation) {
    for (ORole r : roles) {
      if (r == null)
        // Defensive: a corrupted user may contain null roles; skip but warn.
        OLogManager.instance().warn(this,
            "User '%s' has a null role, bypass it. Consider to fix this user roles before to continue", getName());
      else if (r.allow(iResource, iOperation))
        return r;
    }
    return null;
  }

  /**
   * Checks if a rule was defined for the user.
   *
   * @param iResource
   *          Requested resource
   * @return True is a rule is defined, otherwise false
   */
  public boolean isRuleDefined(final String iResource) {
    for (ORole r : roles)
      if (r == null)
        OLogManager.instance().warn(this,
            "User '%s' has a null role, bypass it. Consider to fix this user roles before to continue", getName());
      else if (r.hasRule(iResource))
        return true;
    return false;
  }

  /** Verifies the given clear-text password against the stored (hashed) one. */
  public boolean checkPassword(final String iPassword) {
    return OSecurityManager.instance().check(iPassword, (String) document.field("password"));
  }

  public String getName() {
    return document.field("name");
  }

  public OUser setName(final String iName) {
    document.field("name", iName);
    return this;
  }

  public String getPassword() {
    return document.field("password");
  }

  public OUser setPassword(final String iPassword) {
    document.field("password", iPassword);
    return this;
  }

  /** Hashes the given clear-text password using the configured digest algorithm. */
  public static String encryptPassword(final String iPassword) {
    return OSecurityManager.instance().digest2String(iPassword, true);
  }

  public STATUSES getAccountStatus() {
    final String status = (String) document.field("status");
    if (status == null)
      throw new OSecurityException("User '" + getName() + "' has no status");
    return STATUSES.valueOf(status);
  }

  public void setAccountStatus(STATUSES accountStatus) {
    document.field("status", accountStatus);
  }

  public Set<ORole> getRoles() {
    return roles;
  }

  /** Resolves the role by name through the security manager and adds it, if found. */
  public OUser addRole(final String iRole) {
    if (iRole != null)
      addRole(document.getDatabase().getMetadata().getSecurity().getRole(iRole));
    return this;
  }

  /** Adds the role and re-writes the full persistent "roles" field on the document. */
  public OUser addRole(final ORole iRole) {
    if (iRole != null)
      roles.add(iRole);

    final HashSet<ODocument> persistentRoles = new HashSet<ODocument>();
    for (ORole r : roles) {
      persistentRoles.add(r.toStream());
    }
    document.field("roles", persistentRoles);
    return this;
  }

  @Override
  @SuppressWarnings("unchecked")
  public OUser save() {
    document.save(OUser.class.getSimpleName());
    return this;
  }

  @Override
  public String toString() {
    return getName();
  }
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_metadata_security_OUser.java
|
1,125 |
/**
 * Native-script factory: produces a fresh
 * {@code NativeConstantForLoopScoreScript} for each request.
 */
public static class Factory implements NativeScriptFactory {

    @Override
    public ExecutableScript newScript(@Nullable Map<String, Object> scriptParams) {
        return new NativeConstantForLoopScoreScript(scriptParams);
    }
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_scripts_score_script_NativeConstantForLoopScoreScript.java
|
1,098 |
// Baseline comparison strategy: delegates to BytesRef#bytesEquals, the
// bounds-checked "safe" implementation the other variants are benchmarked against.
SAFE {
    boolean compare(BytesRef b1, BytesRef b2) {
        return b1.bytesEquals(b2);
    }
},
| 0true
|
src_test_java_org_elasticsearch_benchmark_common_util_BytesRefComparisonsBenchmark.java
|
150 |
/**
 * JPA entity mapping a structured-content type (table BLC_SC_TYPE): a name,
 * an optional description and a link to the field template that defines the
 * fields instances of this type carry.
 */
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_SC_TYPE")
@Cache(usage= CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blCMSElements")
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "StructuredContentTypeImpl_baseStructuredContentType")
public class StructuredContentTypeImpl implements StructuredContentType, AdminMainEntity {

    private static final long serialVersionUID = 1L;

    // Primary key generated through the shared table-based id generator.
    @Id
    @GeneratedValue(generator = "StructuredContentTypeId")
    @GenericGenerator(
        name="StructuredContentTypeId",
        strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
        parameters = {
            @Parameter(name="segment_value", value="StructuredContentTypeImpl"),
            @Parameter(name="entity_name", value="org.broadleafcommerce.cms.structure.domain.StructuredContentTypeImpl")
        }
    )
    @Column(name = "SC_TYPE_ID")
    protected Long id;

    // Display name; indexed because lookups by name are common in the admin.
    @Column (name = "NAME")
    @AdminPresentation(friendlyName = "StructuredContentTypeImpl_Name", order = 1, gridOrder = 1, group = "StructuredContentTypeImpl_Details", prominent = true)
    @Index(name="SC_TYPE_NAME_INDEX", columnNames={"NAME"})
    protected String name;

    @Column (name = "DESCRIPTION")
    protected String description;

    // Template describing which fields content of this type exposes.
    @ManyToOne(targetEntity = StructuredContentFieldTemplateImpl.class)
    @JoinColumn(name="SC_FLD_TMPLT_ID")
    protected StructuredContentFieldTemplate structuredContentFieldTemplate;

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public void setName(String name) {
        this.name = name;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public void setDescription(String description) {
        this.description = description;
    }

    @Override
    public StructuredContentFieldTemplate getStructuredContentFieldTemplate() {
        return structuredContentFieldTemplate;
    }

    @Override
    public void setStructuredContentFieldTemplate(StructuredContentFieldTemplate scft) {
        this.structuredContentFieldTemplate = scft;
    }

    @Override
    public String getMainEntityName() {
        // The admin UI labels this entity by its name.
        return getName();
    }
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_StructuredContentTypeImpl.java
|
1,204 |
// Recycler for LongLongOpenHashMap instances: new maps are pre-sized via
// size(sizing); pooled maps are cleared before reuse so no entries leak
// between borrowers.
longLongMap = build(type, limit, smartSize, availableProcessors, new Recycler.C<LongLongOpenHashMap>() {
    @Override
    public LongLongOpenHashMap newInstance(int sizing) {
        return new LongLongOpenHashMap(size(sizing));
    }

    @Override
    public void clear(LongLongOpenHashMap value) {
        // Reset state before the instance goes back to the pool.
        value.clear();
    }
});
| 0true
|
src_main_java_org_elasticsearch_cache_recycler_CacheRecycler.java
|
380 |
static class MyEntryListener implements EntryListener {
public AtomicInteger add = new AtomicInteger(0);
public void entryAdded(EntryEvent event) {
add.incrementAndGet();
}
public void entryRemoved(EntryEvent event) {
}
public void entryUpdated(EntryEvent event) {
}
public void entryEvicted(EntryEvent event) {
}
};
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapListenerStressTest.java
|
4,896 |
/**
 * Cat API endpoint ({@code /_cat/recovery}) reporting, for every recovering
 * replica shard, the bytes recovered so far relative to its primary's size.
 */
public class RestRecoveryAction extends AbstractCatAction {

    @Inject
    protected RestRecoveryAction(Settings settings, Client client, RestController restController) {
        super(settings, client);
        restController.registerHandler(GET, "/_cat/recovery", this);
        restController.registerHandler(GET, "/_cat/recovery/{index}", this);
    }

    @Override
    void documentation(StringBuilder sb) {
        sb.append("/_cat/recovery\n");
        sb.append("/_cat/recovery/{index}\n");
    }

    /**
     * Builds the map key identifying a shard. A delimiter is required: bare
     * concatenation of index name and shard id is ambiguous (index "foo1",
     * shard 2 would collide with index "foo", shard 12).
     */
    private static String shardKey(String index, int shardId) {
        return index + "[" + shardId + "]";
    }

    @Override
    public void doRequest(final RestRequest request, final RestChannel channel) {
        final String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
        // Only node info is needed from cluster state (to resolve host/ip/name).
        final ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
        clusterStateRequest.clear().nodes(true);
        clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local()));
        clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout()));

        client.admin().cluster().state(clusterStateRequest, new ActionListener<ClusterStateResponse>() {
            @Override
            public void onResponse(final ClusterStateResponse clusterStateResponse) {
                IndicesStatusRequest indicesStatusRequest = new IndicesStatusRequest(indices);
                indicesStatusRequest.recovery(true);
                indicesStatusRequest.operationThreading(BroadcastOperationThreading.SINGLE_THREAD);
                client.admin().indices().status(indicesStatusRequest, new ActionListener<IndicesStatusResponse>() {
                    @Override
                    public void onResponse(IndicesStatusResponse indicesStatusResponse) {
                        Map<String, Long> primarySizes = new HashMap<String, Long>();
                        Set<ShardStatus> replicas = new HashSet<ShardStatus>();
                        // Loop through all the shards in the index status, keeping
                        // track of the primary shard size with a Map and the
                        // recovering shards in a Set of ShardStatus objects
                        for (ShardStatus shardStatus : indicesStatusResponse.getShards()) {
                            if (shardStatus.getShardRouting().primary()) {
                                primarySizes.put(shardKey(shardStatus.getShardRouting().getIndex(), shardStatus.getShardRouting().getId()),
                                        shardStatus.getStoreSize().bytes());
                            } else if (shardStatus.getState() == IndexShardState.RECOVERING) {
                                replicas.add(shardStatus);
                            }
                        }
                        try {
                            channel.sendResponse(RestTable.buildResponse(buildRecoveryTable(request, clusterStateResponse, primarySizes, replicas), request, channel));
                        } catch (Throwable e) {
                            try {
                                channel.sendResponse(new XContentThrowableRestResponse(request, e));
                            } catch (IOException e2) {
                                logger.error("Unable to send recovery status response", e2);
                            }
                        }
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        try {
                            channel.sendResponse(new XContentThrowableRestResponse(request, e));
                        } catch (IOException e1) {
                            logger.error("Failed to send failure response", e1);
                        }
                    }
                });
            }

            @Override
            public void onFailure(Throwable e) {
                try {
                    channel.sendResponse(new XContentThrowableRestResponse(request, e));
                } catch (IOException e1) {
                    logger.error("Failed to send failure response", e1);
                }
            }
        });
    }

    @Override
    Table getTableWithHeader(RestRequest request) {
        Table t = new Table();
        t.startHeaders().addCell("index", "alias:i,idx;desc:index name")
                .addCell("shard", "alias:s,sh;desc:shard name")
                .addCell("target", "alias:t;text-align:right;desc:bytes of source shard")
                .addCell("recovered", "alias:r;text-align:right;desc:bytes recovered so far")
                .addCell("percent", "alias:per,ratio;text-align:right;desc:percent recovered so far")
                .addCell("host", "alias:h;desc:node host where source shard lives")
                .addCell("ip", "desc:node ip where source shard lives")
                .addCell("node", "alias:n;desc:node name where source shard lives")
                .endHeaders();
        return t;
    }

    /**
     * buildRecoveryTable will build a table of recovery information suitable
     * for displaying at the command line.
     *
     * @param request
     * @param state Current cluster state.
     * @param primarySizes A Map of {@code shardKey(index, shardId)} strings to store size for all primary shards.
     * @param recoveringReplicas A Set of {@link org.elasticsearch.action.admin.indices.status.ShardStatus} objects for each recovering replica to be displayed.
     * @return A table containing index, shardId, node, target size, recovered size and percentage for each recovering replica
     */
    public Table buildRecoveryTable(RestRequest request, ClusterStateResponse state, Map<String, Long> primarySizes, Set<ShardStatus> recoveringReplicas) {
        Table t = getTableWithHeader(request);
        for (ShardStatus status : recoveringReplicas) {
            DiscoveryNode node = state.getState().nodes().get(status.getShardRouting().currentNodeId());

            String index = status.getShardRouting().getIndex();
            int id = status.getShardId();
            long replicaSize = status.getStoreSize().bytes();
            // May be null if the primary's status was not reported.
            Long primarySize = primarySizes.get(shardKey(index, id));
            t.startRow();
            t.addCell(index);
            t.addCell(id);
            t.addCell(primarySize);
            t.addCell(replicaSize);
            t.addCell(primarySize == null ? null : String.format(Locale.ROOT, "%1.1f%%", 100.0 * (float) replicaSize / primarySize));
            t.addCell(node == null ? null : node.getHostName());
            t.addCell(node == null ? null : node.getHostAddress());
            t.addCell(node == null ? null : node.name());
            t.endRow();
        }
        return t;
    }
}
| 1no label
|
src_main_java_org_elasticsearch_rest_action_cat_RestRecoveryAction.java
|
418 |
/**
 * Test fixture: an ODocument whose Java serialization always fails, used to
 * verify that serialization errors are propagated correctly.
 */
class NotSerializableDocument extends ODocument {
    private static final long serialVersionUID = 1L;

    // Invoked reflectively by ObjectOutputStream; unconditionally rejects serialization.
    private void writeObject(ObjectOutputStream oos) throws IOException {
        throw new NotSerializableException();
    }
}
| 0true
|
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedListTest.java
|
170 |
/**
 * Interface for extending managed parallelism for tasks running in a
 * ForkJoinPool: encapsulates a possibly-blocking operation so the pool can
 * compensate while the caller waits.
 */
public static interface ManagedBlocker {
    /**
     * Possibly blocks the current thread, for example waiting for
     * a lock or condition.
     *
     * @return {@code true} if no additional blocking is necessary
     * (i.e., if isReleasable would return true)
     * @throws InterruptedException if interrupted while waiting
     * (the method is not required to do so, but is allowed to)
     */
    boolean block() throws InterruptedException;

    /**
     * Returns {@code true} if blocking is unnecessary.
     */
    boolean isReleasable();
}
| 0true
|
src_main_java_jsr166y_ForkJoinPool.java
|
222 |
/**
 * Singleton entry point of the OrientDB engine. Keeps the registries of engines
 * and storages, owns the shared timer, thread group, profiler and memory watch
 * dog, and drives engine startup/shutdown. Registry mutations are guarded by
 * the adaptive lock obtained via {@code getLock()}.
 */
public class Orient extends OListenerManger<OOrientListener> {
  public static final String ORIENTDB_HOME = "ORIENTDB_HOME";
  public static final String URL_SYNTAX = "<engine>:<db-type>:<db-name>[?<db-param>=<db-value>[&]]*";

  protected static final Orient instance = new Orient();
  protected static boolean registerDatabaseByPath = false;

  protected final Map<String, OEngine> engines = new HashMap<String, OEngine>();
  protected final Map<String, OStorage> storages = new HashMap<String, OStorage>();
  protected final Set<ODatabaseLifecycleListener> dbLifecycleListeners = new HashSet<ODatabaseLifecycleListener>();
  protected final ODatabaseFactory databaseFactory = new ODatabaseFactory();
  protected final OScriptManager scriptManager = new OScriptManager();
  protected OClusterFactory clusterFactory = new ODefaultClusterFactory();
  protected ORecordFactoryManager recordFactoryManager = new ORecordFactoryManager();
  protected OrientShutdownHook shutdownHook;
  protected final Timer timer = new Timer(true);
  protected final ThreadGroup threadGroup = new ThreadGroup("OrientDB");
  protected final AtomicInteger serialId = new AtomicInteger();
  protected OMemoryWatchDog memoryWatchDog;
  protected OProfilerMBean profiler = new OProfiler(); // stray duplicate ';' removed
  protected ODatabaseThreadLocalFactory databaseThreadFactory;
  protected volatile boolean active = false;

  protected Orient() {
    super(new OAdaptiveLock(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean()));
    startup();
  }

  /** Starts the engine; idempotent: returns immediately if already active. */
  public Orient startup() {
    getLock().lock();
    try {
      if (active)
        // ALREADY ACTIVE
        return this;

      shutdownHook = new OrientShutdownHook();

      // REGISTER THE EMBEDDED ENGINE
      registerEngine(new OEngineLocal());
      registerEngine(new OEngineLocalPaginated());
      registerEngine(new OEngineMemory());
      registerEngine("com.orientechnologies.orient.client.remote.OEngineRemote");

      if (OGlobalConfiguration.PROFILER_ENABLED.getValueAsBoolean())
        // ACTIVATE RECORDING OF THE PROFILER
        profiler.startRecording();

      if (OGlobalConfiguration.ENVIRONMENT_DUMP_CFG_AT_STARTUP.getValueAsBoolean())
        OGlobalConfiguration.dumpConfiguration(System.out);

      memoryWatchDog = new OMemoryWatchDog();

      active = true;
      return this;
    } finally {
      getLock().unlock();
    }
  }

  /**
   * Stops the engine: interrupts the watch dog, notifies listeners, shuts down
   * engines, databases and storages, and stops pending threads. Idempotent.
   */
  public Orient shutdown() {
    getLock().lock();
    try {
      if (!active)
        return this;

      active = false;

      if (memoryWatchDog != null) {
        // SHUTDOWN IT AND WAIT FOR COMPLETITION
        memoryWatchDog.interrupt();
        try {
          memoryWatchDog.join();
        } catch (InterruptedException e) {
          // NOTE(review): interrupt status is not restored here; shutdown proceeds anyway.
        }
      }

      if (shutdownHook != null) {
        shutdownHook.cancel();
        shutdownHook = null;
      }

      OLogManager.instance().debug(this, "Orient Engine is shutting down...");

      // CALL THE SHUTDOWN ON ALL THE LISTENERS
      for (OOrientListener l : browseListeners()) {
        if (l != null)
          l.onShutdown();
      }

      // SHUTDOWN ENGINES
      for (OEngine engine : engines.values())
        engine.shutdown();
      engines.clear();

      if (databaseFactory != null)
        // CLOSE ALL DATABASES
        databaseFactory.shutdown();

      if (storages != null) {
        // CLOSE ALL THE STORAGES (iterate over a copy: close() may unregister)
        final List<OStorage> storagesCopy = new ArrayList<OStorage>(storages.values());
        for (OStorage stg : storagesCopy) {
          OLogManager.instance().info(this, "Shutting down storage: " + stg.getName() + "...");
          stg.close(true);
        }
      }

      if (OMMapManagerLocator.getInstance() != null)
        OMMapManagerLocator.getInstance().shutdown();

      if (threadGroup != null)
        // STOP ALL THE PENDING THREADS
        threadGroup.interrupt();

      resetListeners();

      timer.purge();

      profiler.shutdown();

      OLogManager.instance().info(this, "Orient Engine shutdown complete\n");
    } finally {
      getLock().unlock();
    }
    return this;
  }

  /**
   * Parses a database URL ({@link #URL_SYNTAX}), resolves the engine and
   * returns the (possibly shared) storage for it, creating it if needed.
   */
  public OStorage loadStorage(String iURL) {
    if (iURL == null || iURL.length() == 0)
      throw new IllegalArgumentException("URL missed");

    if (iURL.endsWith("/"))
      iURL = iURL.substring(0, iURL.length() - 1);

    // SEARCH FOR ENGINE
    int pos = iURL.indexOf(':');
    if (pos <= 0)
      throw new OConfigurationException("Error in database URL: the engine was not specified. Syntax is: " + URL_SYNTAX
          + ". URL was: " + iURL);

    final String engineName = iURL.substring(0, pos);

    getLock().lock();
    try {
      final OEngine engine = engines.get(engineName.toLowerCase());

      if (engine == null)
        throw new OConfigurationException("Error on opening database: the engine '" + engineName + "' was not found. URL was: "
            + iURL + ". Registered engines are: " + engines.keySet());

      // SEARCH FOR DB-NAME
      iURL = iURL.substring(pos + 1);
      pos = iURL.indexOf('?');

      Map<String, String> parameters = null;
      String dbPath = null;
      if (pos > 0) {
        dbPath = iURL.substring(0, pos);
        iURL = iURL.substring(pos + 1);

        // PARSE PARAMETERS
        parameters = new HashMap<String, String>();
        String[] pairs = iURL.split("&");
        String[] kv;
        for (String pair : pairs) {
          kv = pair.split("=");
          if (kv.length < 2)
            throw new OConfigurationException("Error on opening database: parameter has no value. Syntax is: " + URL_SYNTAX
                + ". URL was: " + iURL);
          parameters.put(kv[0], kv[1]);
        }
      } else
        dbPath = iURL;

      final String dbName = registerDatabaseByPath ? dbPath : OIOUtils.getRelativePathIfAny(dbPath, null);

      OStorage storage;
      if (engine.isShared()) {
        // SEARCH IF ALREADY USED
        storage = storages.get(dbName);
        if (storage == null) {
          // NOT FOUND: CREATE IT
          storage = engine.createStorage(dbPath, parameters);
          storages.put(dbName, storage);
        }
      } else {
        // REGISTER IT WITH A SERIAL NAME TO AVOID BEING REUSED
        storage = engine.createStorage(dbPath, parameters);
        storages.put(dbName + "__" + serialId.incrementAndGet(), storage);
      }

      for (OOrientListener l : browseListeners())
        l.onStorageRegistered(storage);

      return storage;
    } finally {
      getLock().unlock();
    }
  }

  /** Registers an externally created storage, notifying listeners first. */
  public OStorage registerStorage(final OStorage iStorage) throws IOException {
    getLock().lock();
    try {
      for (OOrientListener l : browseListeners())
        l.onStorageRegistered(iStorage);

      if (!storages.containsKey(iStorage.getName()))
        storages.put(iStorage.getName(), iStorage);
    } finally {
      getLock().unlock();
    }
    return iStorage;
  }

  public OStorage getStorage(final String iDbName) {
    getLock().lock();
    try {
      return storages.get(iDbName);
    } finally {
      getLock().unlock();
    }
  }

  public void registerEngine(final OEngine iEngine) {
    getLock().lock();
    try {
      engines.put(iEngine.getName(), iEngine);
    } finally {
      getLock().unlock();
    }
  }

  private void registerEngine(final String iClassName) {
    try {
      final Class<?> cls = Class.forName(iClassName);
      registerEngine((OEngine) cls.newInstance());
    } catch (Exception e) {
      // Best effort: optional engines (e.g. the remote client engine) may not
      // be on the classpath; silently skip them.
    }
  }

  /**
   * Returns the engine by its name.
   *
   * @param iEngineName
   *          Engine name to retrieve
   * @return OEngine instance of found, otherwise null
   */
  public OEngine getEngine(final String iEngineName) {
    getLock().lock();
    try {
      // NOTE(review): lookup is case-sensitive here while loadStorage() lowercases
      // the engine name — confirm engine names are always registered lowercase.
      return engines.get(iEngineName);
    } finally {
      getLock().unlock();
    }
  }

  public Set<String> getEngines() {
    getLock().lock();
    try {
      return Collections.unmodifiableSet(engines.keySet());
    } finally {
      getLock().unlock();
    }
  }

  public void unregisterStorageByName(final String iName) {
    final String dbName = registerDatabaseByPath ? iName : OIOUtils.getRelativePathIfAny(iName, null);
    final OStorage stg = storages.get(dbName);
    unregisterStorage(stg);
  }

  public void unregisterStorage(final OStorage iStorage) {
    if (!active)
      // SHUTDOWNING OR NOT ACTIVE: RETURN
      return;

    if (iStorage == null)
      return;

    getLock().lock();
    try {
      // UNREGISTER ALL THE LISTENER ONE BY ONE AVOIDING SELF-RECURSION BY REMOVING FROM THE LIST
      final Iterable<OOrientListener> listenerCopy = getListenersCopy();
      for (Iterator<OOrientListener> it = listenerCopy.iterator(); it.hasNext();) {
        final OOrientListener l = it.next();
        unregisterListener(l);
        l.onStorageUnregistered(iStorage);
      }

      for (Entry<String, OStorage> s : storages.entrySet()) {
        if (s.getValue().equals(iStorage)) {
          storages.remove(s.getKey());
          break;
        }
      }
    } finally {
      getLock().unlock();
    }
  }

  public Collection<OStorage> getStorages() {
    getLock().lock();
    try {
      // Defensive copy: callers may iterate without holding the lock.
      return new ArrayList<OStorage>(storages.values());
    } finally {
      getLock().unlock();
    }
  }

  public Timer getTimer() {
    return timer;
  }

  public void removeShutdownHook() {
    if (shutdownHook != null)
      Runtime.getRuntime().removeShutdownHook(shutdownHook);
  }

  public Iterator<ODatabaseLifecycleListener> getDbLifecycleListeners() {
    return dbLifecycleListeners.iterator();
  }

  public void addDbLifecycleListener(final ODatabaseLifecycleListener iListener) {
    dbLifecycleListeners.add(iListener);
  }

  public void removeDbLifecycleListener(final ODatabaseLifecycleListener iListener) {
    dbLifecycleListeners.remove(iListener);
  }

  public static Orient instance() {
    return instance;
  }

  public ThreadGroup getThreadGroup() {
    return threadGroup;
  }

  public ODatabaseThreadLocalFactory getDatabaseThreadFactory() {
    return databaseThreadFactory;
  }

  public OMemoryWatchDog getMemoryWatchDog() {
    return memoryWatchDog;
  }

  public ORecordFactoryManager getRecordFactoryManager() {
    return recordFactoryManager;
  }

  public OClusterFactory getClusterFactory() {
    return clusterFactory;
  }

  public ODatabaseFactory getDatabaseFactory() {
    return databaseFactory;
  }

  public void setRecordFactoryManager(final ORecordFactoryManager iRecordFactoryManager) {
    recordFactoryManager = iRecordFactoryManager;
  }

  /** Resolves the OrientDB home from system property or environment, in that order. */
  public static String getHomePath() {
    String v = System.getProperty("orient.home");
    if (v == null)
      v = System.getProperty(ORIENTDB_HOME);
    if (v == null)
      v = System.getenv(ORIENTDB_HOME);

    return v;
  }

  public void setClusterFactory(final OClusterFactory clusterFactory) {
    this.clusterFactory = clusterFactory;
  }

  public OProfilerMBean getProfiler() {
    return profiler;
  }

  public void registerThreadDatabaseFactory(final ODatabaseThreadLocalFactory iDatabaseFactory) {
    databaseThreadFactory = iDatabaseFactory;
  }

  public OScriptManager getScriptManager() {
    return scriptManager;
  }

  /**
   * Tells if to register database by path. Default is false. Setting to true allows to have multiple databases in different path
   * with the same name.
   *
   * @see #setRegisterDatabaseByPath(boolean)
   * @return
   */
  public static boolean isRegisterDatabaseByPath() {
    return registerDatabaseByPath;
  }

  /**
   * Register database by path. Default is false. Setting to true allows to have multiple databases in different path with the same
   * name.
   *
   * @param iValue
   */
  public static void setRegisterDatabaseByPath(final boolean iValue) {
    registerDatabaseByPath = iValue;
  }

  public void setProfiler(final OProfilerMBean iProfiler) {
    profiler = iProfiler;
  }
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_Orient.java
|
1,485 |
/**
 * Mutable builder for a RoutingTable: accumulates per-index routing tables and
 * produces an immutable RoutingTable via {@link #build()}. Also handles
 * (de)serialization of whole routing tables.
 */
public static class Builder {

    private long version;
    // index name -> routing table for that index
    private final Map<String, IndexRoutingTable> indicesRouting = newHashMap();

    public Builder() {
    }

    /** Seeds the builder from an existing table (version and all index entries). */
    public Builder(RoutingTable routingTable) {
        version = routingTable.version;
        for (IndexRoutingTable indexRoutingTable : routingTable) {
            indicesRouting.put(indexRoutingTable.index(), indexRoutingTable);
        }
    }

    /**
     * Rebuilds the table from the live per-node shard assignments in
     * {@code routingNodes}, including unassigned/ignored shards.
     */
    public Builder updateNodes(RoutingNodes routingNodes) {
        // this is being called without pre initializing the routing table, so we must copy over the version as well
        this.version = routingNodes.routingTable().version();

        Map<String, IndexRoutingTable.Builder> indexRoutingTableBuilders = newHashMap();
        for (RoutingNode routingNode : routingNodes) {
            for (MutableShardRouting shardRoutingEntry : routingNode) {
                // every relocating shard has a double entry, ignore the target one.
                if (shardRoutingEntry.state() == ShardRoutingState.INITIALIZING && shardRoutingEntry.relocatingNodeId() != null)
                    continue;

                String index = shardRoutingEntry.index();
                IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index);
                if (indexBuilder == null) {
                    indexBuilder = new IndexRoutingTable.Builder(index);
                    indexRoutingTableBuilders.put(index, indexBuilder);
                }

                IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index()).shard(shardRoutingEntry.id());
                indexBuilder.addShard(refData, shardRoutingEntry);
            }
        }

        // Unassigned and ignored-unassigned shards must also appear in the table.
        for (MutableShardRouting shardRoutingEntry : Iterables.concat(routingNodes.unassigned(), routingNodes.ignoredUnassigned())) {
            String index = shardRoutingEntry.index();
            IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index);
            if (indexBuilder == null) {
                indexBuilder = new IndexRoutingTable.Builder(index);
                indexRoutingTableBuilders.put(index, indexBuilder);
            }
            IndexShardRoutingTable refData = routingNodes.routingTable().index(shardRoutingEntry.index()).shard(shardRoutingEntry.id());
            indexBuilder.addShard(refData, shardRoutingEntry);
        }

        // Clear post-allocation flags that were marked for clearing during allocation.
        for (ShardId shardId : routingNodes.getShardsToClearPostAllocationFlag()) {
            IndexRoutingTable.Builder indexRoutingBuilder = indexRoutingTableBuilders.get(shardId.index().name());
            if (indexRoutingBuilder != null) {
                indexRoutingBuilder.clearPostAllocationFlag(shardId);
            }
        }

        for (IndexRoutingTable.Builder indexBuilder : indexRoutingTableBuilders.values()) {
            add(indexBuilder);
        }
        return this;
    }

    /**
     * Adjusts the replica count of the given indices (all indices when none are
     * given) by adding empty replicas or removing surplus ones.
     */
    public Builder updateNumberOfReplicas(int numberOfReplicas, String... indices) throws IndexMissingException {
        if (indices == null || indices.length == 0) {
            indices = indicesRouting.keySet().toArray(new String[indicesRouting.keySet().size()]);
        }
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = indicesRouting.get(index);
            if (indexRoutingTable == null) {
                // ignore index missing failure, its closed...
                continue;
            }
            int currentNumberOfReplicas = indexRoutingTable.shards().get(0).size() - 1; // remove the required primary
            IndexRoutingTable.Builder builder = new IndexRoutingTable.Builder(index);
            // re-add all the shards
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                builder.addIndexShard(indexShardRoutingTable);
            }
            if (currentNumberOfReplicas < numberOfReplicas) {
                // now, add "empty" ones
                for (int i = 0; i < (numberOfReplicas - currentNumberOfReplicas); i++) {
                    builder.addReplica();
                }
            } else if (currentNumberOfReplicas > numberOfReplicas) {
                int delta = currentNumberOfReplicas - numberOfReplicas;
                // NOTE(review): delta > 0 always holds here given the enclosing
                // condition, so the <= 0 branch is defensive/dead code.
                if (delta <= 0) {
                    // ignore, can't remove below the current one...
                } else {
                    for (int i = 0; i < delta; i++) {
                        builder.removeReplica();
                    }
                }
            }
            indicesRouting.put(index, builder.build());
        }
        return this;
    }

    /** Adds a freshly created index (only when its metadata state is OPEN). */
    public Builder addAsNew(IndexMetaData indexMetaData) {
        if (indexMetaData.state() == IndexMetaData.State.OPEN) {
            IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                    .initializeAsNew(indexMetaData);
            add(indexRoutingBuilder);
        }
        return this;
    }

    /** Adds an index initialized for recovery (only when its state is OPEN). */
    public Builder addAsRecovery(IndexMetaData indexMetaData) {
        if (indexMetaData.state() == IndexMetaData.State.OPEN) {
            IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                    .initializeAsRecovery(indexMetaData);
            add(indexRoutingBuilder);
        }
        return this;
    }

    /** Adds an existing index initialized for restore from a snapshot. */
    public Builder addAsRestore(IndexMetaData indexMetaData, RestoreSource restoreSource) {
        IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                .initializeAsRestore(indexMetaData, restoreSource);
        add(indexRoutingBuilder);
        return this;
    }

    /** Adds a new index initialized for restore from a snapshot. */
    public Builder addAsNewRestore(IndexMetaData indexMetaData, RestoreSource restoreSource) {
        IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                .initializeAsNewRestore(indexMetaData, restoreSource);
        add(indexRoutingBuilder);
        return this;
    }

    /** Validates and registers an index routing table, replacing any previous entry. */
    public Builder add(IndexRoutingTable indexRoutingTable) {
        indexRoutingTable.validate();
        indicesRouting.put(indexRoutingTable.index(), indexRoutingTable);
        return this;
    }

    public Builder add(IndexRoutingTable.Builder indexRoutingTableBuilder) {
        add(indexRoutingTableBuilder.build());
        return this;
    }

    public Builder remove(String index) {
        indicesRouting.remove(index);
        return this;
    }

    public Builder version(long version) {
        this.version = version;
        return this;
    }

    /** Normalizes per-index versions, then builds the immutable RoutingTable. */
    public RoutingTable build() {
        // normalize the versions right before we build it...
        for (IndexRoutingTable indexRoutingTable : indicesRouting.values()) {
            indicesRouting.put(indexRoutingTable.index(), indexRoutingTable.normalizeVersions());
        }
        return new RoutingTable(version, indicesRouting);
    }

    /** Deserializes a RoutingTable (version, then a count-prefixed list of index tables). */
    public static RoutingTable readFrom(StreamInput in) throws IOException {
        Builder builder = new Builder();
        builder.version = in.readLong();
        int size = in.readVInt();
        for (int i = 0; i < size; i++) {
            IndexRoutingTable index = IndexRoutingTable.Builder.readFrom(in);
            builder.add(index);
        }
        return builder.build();
    }

    /** Serializes a RoutingTable in the same format {@link #readFrom} expects. */
    public static void writeTo(RoutingTable table, StreamOutput out) throws IOException {
        out.writeLong(table.version);
        out.writeVInt(table.indicesRouting.size());
        for (IndexRoutingTable index : table.indicesRouting.values()) {
            IndexRoutingTable.Builder.writeTo(index, out);
        }
    }
}
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_RoutingTable.java
|
1,115 |
/** Benchmark helper: a mutable (string key, int counter) pair. */
static class StringEntry {
    String key;
    int counter;

    StringEntry(String key, int counter) {
        this.key = key;
        this.counter = counter;
    }
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_hppc_StringMapAdjustOrPutBenchmark.java
|
5,128 |
// Multi-bucket facade around per-bucket delegate aggregators: one delegate per
// owning bucket ordinal (slot 0 reuses 'first'); collect() grows the array on
// demand and always collects into the delegate's ordinal 0.
aggregators[i] = new Aggregator(first.name(), BucketAggregationMode.MULTI_BUCKETS, AggregatorFactories.EMPTY, 1, first.context(), first.parent()) {

    ObjectArray<Aggregator> aggregators;

    {
        // Pre-create delegates for the estimated bucket count.
        aggregators = BigArrays.newObjectArray(estimatedBucketsCount, context.pageCacheRecycler());
        aggregators.set(0, first);
        for (long i = 1; i < estimatedBucketsCount; ++i) {
            aggregators.set(i, createAndRegisterContextAware(parent.context(), factory, parent, estimatedBucketsCount));
        }
    }

    @Override
    public boolean shouldCollect() {
        return first.shouldCollect();
    }

    @Override
    protected void doPostCollection() {
        // Forward post-collection to every delegate that was actually created.
        for (long i = 0; i < aggregators.size(); ++i) {
            final Aggregator aggregator = aggregators.get(i);
            if (aggregator != null) {
                aggregator.postCollection();
            }
        }
    }

    @Override
    public void collect(int doc, long owningBucketOrdinal) throws IOException {
        // Lazily grow and create a delegate when an ordinal beyond the estimate appears.
        aggregators = BigArrays.grow(aggregators, owningBucketOrdinal + 1);
        Aggregator aggregator = aggregators.get(owningBucketOrdinal);
        if (aggregator == null) {
            aggregator = createAndRegisterContextAware(parent.context(), factory, parent, estimatedBucketsCount);
            aggregators.set(owningBucketOrdinal, aggregator);
        }
        aggregator.collect(doc, 0);
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        // Delegates receive readers through their own registration; nothing to do.
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
        return aggregators.get(owningBucketOrdinal).buildAggregation(0);
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        return first.buildEmptyAggregation();
    }

    @Override
    public void doRelease() {
        Releasables.release(aggregators);
    }
};
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_AggregatorFactories.java
|
2,897 |
/**
 * Predicate matching map entries whose value is an instance of (or assignable
 * to) the configured class. Serializes the class by name and reloads it with
 * the receiving side's class loader.
 */
private static class InstanceOfPredicate implements Predicate, DataSerializable {
    private Class<?> klass;

    public InstanceOfPredicate(Class<?> klass) {
        this.klass = klass;
    }

    @Override
    public boolean apply(Map.Entry mapEntry) {
        Object value = mapEntry.getValue();
        if (value == null) {
            // null is never an instance of anything
            return false;
        }
        return klass.isAssignableFrom(value.getClass());
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeUTF(klass.getName());
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        String klassName = in.readUTF();
        try {
            klass = in.getClassLoader().loadClass(klassName);
        } catch (ClassNotFoundException e) {
            // Report the name read from the stream; the 'klass' field is not yet
            // set on this (deserializing) instance and would print null/stale data.
            throw new HazelcastSerializationException("Failed to load class: " + klassName, e);
        }
    }

    @Override
    public String toString() {
        return " instanceOf (" + klass.getName() + ")";
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_query_Predicates.java
|
420 |
// On successful completion of the async operation, populate the near cache so
// subsequent reads of the same key can be served locally; failures leave the
// cache untouched (best effort).
delegatingFuture.andThen(new ExecutionCallback<V>() {
    @Override
    public void onResponse(V response) {
        if (nearCache != null) {
            nearCache.put(keyData, response);
        }
    }

    @Override
    public void onFailure(Throwable t) {
        // Intentionally ignored: a failed operation must not poison the near cache.
    }
});
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientMapProxy.java
|
31 |
/**
 * Client for joining and participating in a cluster. Wires together networking
 * (sender/receiver), the multi-Paxos protocol server, timeout handling and the
 * cluster-join lifecycle, and exposes the cluster facilities (Cluster,
 * AtomicBroadcast, Heartbeat, Snapshot, Election) as client proxies created from
 * the protocol server.
 */
public class ClusterClient extends LifecycleAdapter
        implements ClusterMonitor, Cluster, AtomicBroadcast, Snapshot, Election, BindingNotifier
{
    // "0s" disables the overall cluster-join timeout by default.
    public static final Setting<Long> clusterJoinTimeout = Settings.setting("ha.cluster_join_timeout", Settings.DURATION, "0s");

    /** Configuration surface required by the cluster client. */
    public interface Configuration
    {
        int getServerId();
        HostnamePort getAddress();
        List<HostnamePort> getInitialHosts();
        String getClusterName();
        boolean isAllowedToCreateCluster();
        // Cluster timeout settings
        long defaultTimeout(); // default is 5s
        long heartbeatInterval(); // inherits defaultTimeout
        long heartbeatTimeout(); // heartbeatInterval * 2 by default
        long broadcastTimeout(); // default is 30s
        long learnTimeout(); // inherits defaultTimeout
        long paxosTimeout(); // inherits defaultTimeout
        long phase1Timeout(); // inherits paxosTimeout
        long phase2Timeout(); // inherits paxosTimeout
        long joinTimeout(); // inherits defaultTimeout
        long configurationTimeout(); // inherits defaultTimeout
        long leaveTimeout(); // inherits paxosTimeout
        long electionTimeout(); // inherits paxosTimeout
        long clusterJoinTimeout(); // Whether to timeout the whole process or not
        String name(); // Cluster client name, if any
    }

    /**
     * Adapts a {@link Config} instance to the {@link Configuration} interface by
     * delegating every accessor to the corresponding {@code ClusterSettings} entry.
     */
    public static Configuration adapt( final Config config )
    {
        return new Configuration()
        {
            @Override
            public int getServerId()
            {
                return config.get( ClusterSettings.server_id );
            }

            @Override
            public List<HostnamePort> getInitialHosts()
            {
                return config.get( ClusterSettings.initial_hosts );
            }

            @Override
            public String getClusterName()
            {
                return config.get( ClusterSettings.cluster_name );
            }

            @Override
            public HostnamePort getAddress()
            {
                return config.get( ClusterSettings.cluster_server );
            }

            @Override
            public boolean isAllowedToCreateCluster()
            {
                return config.get( ClusterSettings.allow_init_cluster );
            }

            // Timeouts
            @Override
            public long defaultTimeout()
            {
                return config.get( ClusterSettings.default_timeout );
            }

            @Override
            public long heartbeatTimeout()
            {
                return config.get( ClusterSettings.heartbeat_timeout );
            }

            @Override
            public long heartbeatInterval()
            {
                return config.get( ClusterSettings.heartbeat_interval );
            }

            @Override
            public long joinTimeout()
            {
                return config.get( ClusterSettings.join_timeout );
            }

            @Override
            public long configurationTimeout()
            {
                return config.get( ClusterSettings.configuration_timeout );
            }

            @Override
            public long leaveTimeout()
            {
                return config.get( ClusterSettings.leave_timeout );
            }

            @Override
            public long electionTimeout()
            {
                return config.get( ClusterSettings.election_timeout );
            }

            @Override
            public long broadcastTimeout()
            {
                return config.get( ClusterSettings.broadcast_timeout );
            }

            @Override
            public long paxosTimeout()
            {
                return config.get( ClusterSettings.paxos_timeout );
            }

            @Override
            public long phase1Timeout()
            {
                return config.get( ClusterSettings.phase1_timeout );
            }

            @Override
            public long phase2Timeout()
            {
                return config.get( ClusterSettings.phase2_timeout );
            }

            @Override
            public long learnTimeout()
            {
                return config.get( ClusterSettings.learn_timeout );
            }

            @Override
            public long clusterJoinTimeout()
            {
                return config.get(clusterJoinTimeout);
            }

            @Override
            public String name()
            {
                return config.get( ClusterSettings.instance_name );
            }
        };
    }

    // Owns the lifecycle of the networking and join components; driven by init/start/stop below.
    private final LifeSupport life = new LifeSupport();
    // Client proxies created from the protocol server; all public API calls delegate to these.
    private final Cluster cluster;
    private final AtomicBroadcast broadcast;
    private final Heartbeat heartbeat;
    private final Snapshot snapshot;
    private final Election election;
    private final ProtocolServer server;

    public ClusterClient( final Configuration config, final Logging logging,
                          ElectionCredentialsProvider electionCredentialsProvider,
                          ObjectInputStreamFactory objectInputStreamFactory,
                          ObjectOutputStreamFactory objectOutputStreamFactory )
    {
        // Per-message timeouts layered on top of a fixed default timeout.
        MessageTimeoutStrategy timeoutStrategy = new MessageTimeoutStrategy(
                new FixedTimeoutStrategy( config.defaultTimeout() ) )
                .timeout( HeartbeatMessage.sendHeartbeat, config.heartbeatInterval() )
                .timeout( HeartbeatMessage.timed_out, config.heartbeatTimeout() )
                .timeout( AtomicBroadcastMessage.broadcastTimeout, config.broadcastTimeout() )
                .timeout( LearnerMessage.learnTimedout, config.learnTimeout() )
                .timeout( ProposerMessage.phase1Timeout, config.phase1Timeout() )
                .timeout( ProposerMessage.phase2Timeout, config.phase2Timeout() )
                .timeout( ClusterMessage.joiningTimeout, config.joinTimeout() )
                .timeout( ClusterMessage.configurationTimeout, config.configurationTimeout() )
                .timeout( ClusterMessage.leaveTimedout, config.leaveTimeout() )
                .timeout( ElectionMessage.electionTimeout, config.electionTimeout() );

        MultiPaxosServerFactory protocolServerFactory = new MultiPaxosServerFactory(
                new ClusterConfiguration( config
                        .getClusterName(), logging.getMessagesLog( ClusterConfiguration.class ) ), logging );

        InMemoryAcceptorInstanceStore acceptorInstanceStore = new InMemoryAcceptorInstanceStore();

        // Route Netty's internal logging through our logging framework.
        InternalLoggerFactory.setDefaultFactory( new NettyLoggerFactory(logging) );

        NetworkReceiver receiver = new NetworkReceiver( new NetworkReceiver.Configuration()
        {
            @Override
            public HostnamePort clusterServer()
            {
                return config.getAddress();
            }

            @Override
            public int defaultPort()
            {
                return 5001;
            }

            @Override
            public String name()
            {
                return config.name();
            }
        }, logging );

        NetworkSender sender = new NetworkSender(new NetworkSender.Configuration()
        {
            @Override
            public int defaultPort()
            {
                return 5001;
            }

            @Override
            public int port()
            {
                return config.getAddress().getPort();
            }
        }, receiver, logging);

        // Single-threaded executor: state machine transitions must be processed serially.
        ExecutorLifecycleAdapter stateMachineExecutor = new ExecutorLifecycleAdapter( new Factory<ExecutorService>()
        {
            @Override
            public ExecutorService newInstance()
            {
                return Executors.newSingleThreadExecutor( new NamedThreadFactory( "State machine" ) );
            }
        } );

        server = protocolServerFactory.newProtocolServer( new InstanceId( config.getServerId() ), timeoutStrategy,
                receiver, sender,
                acceptorInstanceStore, electionCredentialsProvider, stateMachineExecutor, objectInputStreamFactory, objectOutputStreamFactory );

        receiver.addNetworkChannelsListener( new NetworkReceiver.NetworkChannelsListener()
        {
            // Lazily installed once we know our bound address; installed at most once.
            volatile private StateTransitionLogger logger = null;

            @Override
            public void listeningAt( URI me )
            {
                server.listeningAt( me );
                if (logger == null)
                {
                    logger = new StateTransitionLogger( logging );
                    server.addStateTransitionListener( logger );
                }
            }

            @Override
            public void channelOpened( URI to )
            {
            }

            @Override
            public void channelClosed( URI to )
            {
            }
        } );

        // Lifecycle order matters: sender and executor before receiver, then timers, then join.
        life.add( sender );
        life.add( stateMachineExecutor );
        life.add( receiver );

        // Timeout timer - triggers every 10 ms
        life.add( new TimeoutTrigger() );

        life.add( new ClusterJoin( new ClusterJoin.Configuration()
        {
            @Override
            public List<HostnamePort> getInitialHosts()
            {
                return config.getInitialHosts();
            }

            @Override
            public String getClusterName()
            {
                return config.getClusterName();
            }

            @Override
            public boolean isAllowedToCreateCluster()
            {
                return config.isAllowedToCreateCluster();
            }

            @Override
            public long getClusterJoinTimeout()
            {
                return config.clusterJoinTimeout();
            }
        }, server, logging ) );

        cluster = server.newClient( Cluster.class );
        broadcast = server.newClient( AtomicBroadcast.class );
        heartbeat = server.newClient( Heartbeat.class );
        snapshot = server.newClient( Snapshot.class );
        election = server.newClient( Election.class );
    }

    @Override
    public void init() throws Throwable
    {
        life.init();
    }

    @Override
    public void start() throws Throwable
    {
        life.start();
    }

    @Override
    public void stop() throws Throwable
    {
        life.stop();
    }

    @Override
    public void broadcast( Payload payload )
    {
        broadcast.broadcast( payload );
    }

    @Override
    public void addAtomicBroadcastListener( AtomicBroadcastListener listener )
    {
        broadcast.addAtomicBroadcastListener( listener );
    }

    @Override
    public void removeAtomicBroadcastListener( AtomicBroadcastListener listener )
    {
        broadcast.removeAtomicBroadcastListener( listener );
    }

    @Override
    public void create( String clusterName )
    {
        cluster.create( clusterName );
    }

    @Override
    public Future<ClusterConfiguration> join( String clusterName, URI... otherServerUrls )
    {
        return cluster.join( clusterName, otherServerUrls );
    }

    @Override
    public void leave()
    {
        cluster.leave();
    }

    @Override
    public void addClusterListener( ClusterListener listener )
    {
        cluster.addClusterListener( listener );
    }

    @Override
    public void removeClusterListener( ClusterListener listener )
    {
        cluster.removeClusterListener( listener );
    }

    @Override
    public void addHeartbeatListener( HeartbeatListener listener )
    {
        heartbeat.addHeartbeatListener( listener );
    }

    @Override
    public void removeHeartbeatListener( HeartbeatListener listener )
    {
        heartbeat.removeHeartbeatListener( listener );
    }

    @Override
    public void demote( InstanceId node )
    {
        election.demote( node );
    }

    @Override
    public void performRoleElections()
    {
        election.performRoleElections();
    }

    @Override
    public void promote( InstanceId node, String role )
    {
        election.promote( node, role );
    }

    @Override
    public void setSnapshotProvider( SnapshotProvider snapshotProvider )
    {
        snapshot.setSnapshotProvider( snapshotProvider );
    }

    @Override
    public void refreshSnapshot()
    {
        snapshot.refreshSnapshot();
    }

    public void addBindingListener( BindingListener bindingListener )
    {
        server.addBindingListener( bindingListener );
    }

    @Override
    public void removeBindingListener( BindingListener listener )
    {
        server.removeBindingListener( listener );
    }

    /** Appends current state-machine states and pending timeouts to the given builder. */
    public void dumpDiagnostics( StringBuilder appendTo )
    {
        StateMachines stateMachines = server.getStateMachines();
        for ( StateMachine stateMachine : stateMachines.getStateMachines() )
        {
            appendTo.append( "   " ).append( stateMachine.getMessageType().getSimpleName() ).append( ":" )
                    .append( stateMachine.getState().toString() ).append( "\n" );
        }
        appendTo.append( "Current timeouts:\n" );
        for ( Map.Entry<Object, Timeouts.Timeout> objectTimeoutEntry : stateMachines.getTimeouts().getTimeouts()
                .entrySet() )
        {
            appendTo.append( objectTimeoutEntry.getKey().toString() ).append( ":" )
                    .append( objectTimeoutEntry.getValue().getTimeoutMessage().toString() );
        }
    }

    public InstanceId getServerId()
    {
        return server.getServerId();
    }

    public URI getClusterServer()
    {
        return server.boundAt();
    }

    /**
     * Periodically ticks the protocol server's timeout tracker (every 10 ms)
     * on a dedicated daemon scheduler thread.
     */
    public class TimeoutTrigger implements Lifecycle
    {
        private ScheduledExecutorService scheduler;
        private ScheduledFuture<?> tickFuture;

        @Override
        public void init() throws Throwable
        {
            // Prime the timeout tracker with the current time before ticking starts.
            server.getTimeouts().tick( System.currentTimeMillis() );
        }

        @Override
        public void start() throws Throwable
        {
            scheduler = Executors.newSingleThreadScheduledExecutor(
                    new DaemonThreadFactory( "timeout-clusterClient" ) );
            tickFuture = scheduler.scheduleWithFixedDelay( new Runnable()
            {
                @Override
                public void run()
                {
                    long now = System.currentTimeMillis();
                    server.getTimeouts().tick( now );
                }
            }, 0, 10, TimeUnit.MILLISECONDS );
        }

        @Override
        public void stop() throws Throwable
        {
            tickFuture.cancel( true );
            scheduler.shutdownNow();
        }

        @Override
        public void shutdown() throws Throwable
        {
        }
    }
}
| 1no label
|
enterprise_cluster_src_main_java_org_neo4j_cluster_client_ClusterClient.java
|
52 |
/**
 * Admin controller handling asset uploads and the "choose asset" modal.
 * Mapped under every admin section via the {sectionKey} path variable.
 */
@Controller("blAdminAssetUploadController")
@RequestMapping("/{sectionKey}")
public class AdminAssetUploadController extends AdminAbstractController {

    @Resource(name = "blEntityConfiguration")
    protected EntityConfiguration entityConfiguration;

    @Resource(name = "blStaticAssetStorageService")
    protected StaticAssetStorageService staticAssetStorageService;

    @Resource(name = "blStaticAssetService")
    protected StaticAssetService staticAssetService;

    @Resource(name = "blAdminAssetController")
    protected AdminAssetController assetController;

    /**
     * Renders the asset-selection modal for the entity identified by {@code id},
     * reusing the asset list grid from {@link AdminAssetController}.
     */
    @RequestMapping(value = "/{id}/chooseAsset", method = RequestMethod.GET)
    public String chooseMediaForMapKey(HttpServletRequest request, HttpServletResponse response, Model model,
            @PathVariable(value = "sectionKey") String sectionKey,
            @PathVariable(value = "id") String id,
            @RequestParam MultiValueMap<String, String> requestParams) throws Exception {
        Map<String, String> pathVars = new HashMap<String, String>();
        pathVars.put("sectionKey", AdminAssetController.SECTION_KEY);
        assetController.viewEntityList(request, response, model, pathVars, requestParams);

        ListGrid listGrid = (ListGrid) model.asMap().get("listGrid");
        // Point grid actions back at this modal rather than the standard asset section.
        listGrid.setPathOverride("/" + sectionKey + "/" + id + "/chooseAsset");
        listGrid.setListGridType(Type.ASSET);

        // The User-Agent header is optional; guard against a NullPointerException.
        String userAgent = request.getHeader("User-Agent");
        model.addAttribute("isIE", userAgent != null && userAgent.contains("MSIE"));

        model.addAttribute("viewType", "modal/selectAsset");
        model.addAttribute("currentUrl", request.getRequestURL().toString());
        model.addAttribute("modalHeaderType", "selectAsset");
        model.addAttribute("currentParams", new ObjectMapper().writeValueAsString(requestParams));
        // We need these attributes to be set appropriately here
        model.addAttribute("entityId", id);
        model.addAttribute("sectionKey", sectionKey);
        return "modules/modalContainer";
    }

    /**
     * Uploads an asset associated with the given entity and returns a JSON map
     * describing the stored asset (URLs, thumbnail info, image flag).
     *
     * @throws IOException if reading the multipart file fails
     */
    @RequestMapping(value = "/{id}/uploadAsset", method = RequestMethod.POST)
    public ResponseEntity<Map<String, Object>> upload(HttpServletRequest request,
            @RequestParam("file") MultipartFile file,
            @PathVariable(value = "sectionKey") String sectionKey, @PathVariable(value = "id") String id) throws IOException {
        Map<String, Object> responseMap = new HashMap<String, Object>();

        Map<String, String> properties = new HashMap<String, String>();
        properties.put("entityType", sectionKey);
        properties.put("entityId", id);

        StaticAsset staticAsset = staticAssetService.createStaticAssetFromFile(file, properties);
        staticAssetStorageService.createStaticAssetStorageFromFile(file, staticAsset);

        // Normalize the configured prefix so the generated URL always starts with "/".
        String staticAssetUrlPrefix = staticAssetService.getStaticAssetUrlPrefix();
        if (staticAssetUrlPrefix != null && !staticAssetUrlPrefix.startsWith("/")) {
            staticAssetUrlPrefix = "/" + staticAssetUrlPrefix;
        }

        String assetUrl = staticAssetUrlPrefix + staticAsset.getFullUrl();
        responseMap.put("adminDisplayAssetUrl", request.getContextPath() + assetUrl);
        responseMap.put("assetUrl", assetUrl);

        if (staticAsset instanceof ImageStaticAssetImpl) {
            responseMap.put("image", Boolean.TRUE);
            responseMap.put("assetThumbnail", assetUrl + "?smallAdminThumbnail");
            responseMap.put("assetLarge", assetUrl + "?largeAdminThumbnail");
        } else {
            responseMap.put("image", Boolean.FALSE);
        }

        HttpHeaders responseHeaders = new HttpHeaders();
        // text/html keeps legacy browser upload iframes from prompting a download.
        responseHeaders.add("Content-Type", "text/html; charset=utf-8");
        return new ResponseEntity<Map<String, Object>>(responseMap, responseHeaders, HttpStatus.OK);
    }

    /**
     * Used by the Asset list view to upload an asset and then immediately show the
     * edit form for that record.
     *
     * @param request the current request
     * @param file the uploaded multipart file
     * @param sectionKey the admin section the upload originated from
     * @return a redirect to the edit form of the newly created asset
     * @throws IOException if reading the multipart file fails
     */
    @RequestMapping(value = "/uploadAsset", method = RequestMethod.POST)
    public String upload(HttpServletRequest request,
            @RequestParam("file") MultipartFile file,
            @PathVariable(value = "sectionKey") String sectionKey) throws IOException {
        StaticAsset staticAsset = staticAssetService.createStaticAssetFromFile(file, null);
        staticAssetStorageService.createStaticAssetStorageFromFile(file, staticAsset);
        // Note: the asset URL prefix is not needed here; the redirect targets the
        // admin edit form by id, not the asset's public URL.
        return "redirect:/assets/" + staticAsset.getId();
    }
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_admin_web_controller_AdminAssetUploadController.java
|
33 |
/**
 * Completion proposal that inserts no text at all; it exists solely to trigger
 * the "show parameters" popup for an invocation.
 */
static final class ParameterInfo
        extends InvocationCompletionProposal {
    private ParameterInfo(int offset, Declaration dec,
            ProducedReference producedReference,
            Scope scope, CeylonParseController cpc,
            boolean namedInvocation) {
        // Empty completion text and prefix: nothing is written to the document.
        super(offset, "", "show parameters", "", dec,
                producedReference, scope, cpc, true,
                true, namedInvocation, false, null);
    }

    @Override
    boolean isParameterInfo() {
        return true;
    }

    @Override
    public Point getSelection(IDocument document) {
        // No selection change, since applying this proposal edits nothing.
        return null;
    }

    @Override
    public void apply(IDocument document) {/* deliberately a no-op */}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_InvocationCompletionProposal.java
|
138 |
/**
 * Fluent builder for graph indexes. Configure the index keys and constraints,
 * then materialize the index with one of the {@code build*Index} methods.
 */
public interface IndexBuilder {
    /**
     * Adds the given key to the composite key of this index.
     *
     * @param key property key to include in this index
     * @return this IndexBuilder
     */
    public IndexBuilder addKey(PropertyKey key);

    /**
     * Adds the given key and associated parameters to the composite key of this index.
     *
     * @param key property key to include in this index
     * @param parameters backend-specific indexing parameters for this key
     * @return this IndexBuilder
     */
    public IndexBuilder addKey(PropertyKey key, Parameter... parameters);

    /**
     * Restricts this index to only those elements that have the provided schemaType. If this graph index indexes
     * vertices, then the argument is expected to be a vertex label and only vertices with that label will be indexed.
     * Likewise, for edges and properties only those with the matching relation type will be indexed.
     *
     * @param schemaType schema type (e.g. vertex label or relation type) to restrict the index to
     * @return this IndexBuilder
     */
    public IndexBuilder indexOnly(TitanSchemaType schemaType);

    /**
     * Makes this a unique index for the configured element type,
     * i.e. an index key can be associated with at most one element in the graph.
     *
     * @return this IndexBuilder
     */
    public IndexBuilder unique();

    /**
     * Builds a composite index according to the specification.
     *
     * @return the created composite {@link TitanGraphIndex}
     */
    public TitanGraphIndex buildCompositeIndex();

    /**
     * Builds a mixed index according to the specification against the backend index with the given name (i.e.
     * the name under which that index is configured in the graph configuration).
     *
     * @param backingIndex the name of the mixed index
     * @return the created mixed {@link TitanGraphIndex}
     */
    public TitanGraphIndex buildMixedIndex(String backingIndex);
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_schema_TitanManagement.java
|
117 |
/**
 * Log hook asserting that no transaction Xid occurs in more than one Start entry.
 * Non-Start entries pass through unchecked; every entry is accepted.
 */
private static class VerificationLogHook extends LogHookAdapter<LogEntry>
{
    private final Set<Xid> startXids = new HashSet<>();

    @Override
    public boolean accept( LogEntry item )
    {
        if ( item instanceof LogEntry.Start )
        {
            Xid xid = ((LogEntry.Start) item).getXid();
            // Set.add returns false for a duplicate, which fails the assertion.
            assertTrue( startXids.add( xid ) );
        }
        return true;
    }
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestInjectMultipleStartEntries.java
|
5,803 |
/**
 * Test highlighter registered under the name "test-custom". It always emits a
 * fixed "standard response" fragment and, when per-field options are configured,
 * one additional "field:key:value" fragment per option entry.
 */
public class CustomHighlighter implements Highlighter {

    @Override
    public String[] names() {
        return new String[] { "test-custom" };
    }

    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
        SearchContextHighlight.Field highlightedField = highlighterContext.field;
        List<Text> fragments = Lists.newArrayList();
        fragments.add(new StringText("standard response"));
        Map<String, Object> options = highlightedField.options();
        if (options != null) {
            for (Map.Entry<String, Object> option : options.entrySet()) {
                fragments.add(new StringText("field:" + option.getKey() + ":" + option.getValue()));
            }
        }
        Text[] fragmentArray = fragments.toArray(new Text[fragments.size()]);
        return new HighlightField(highlighterContext.fieldName, fragmentArray);
    }
}
| 1no label
|
src_test_java_org_elasticsearch_search_highlight_CustomHighlighter.java
|
817 |
/**
 * Request builder for clearing search scroll contexts, releasing the server-side
 * resources held for the given scroll ids.
 */
public class ClearScrollRequestBuilder extends ActionRequestBuilder<ClearScrollRequest, ClearScrollResponse, ClearScrollRequestBuilder> {

    public ClearScrollRequestBuilder(Client client) {
        super((InternalClient) client, new ClearScrollRequest());
    }

    /**
     * Sets the scroll ids to clear, replacing any previously configured ids.
     *
     * @param cursorIds scroll ids to clear
     * @return this builder, for chaining
     */
    public ClearScrollRequestBuilder setScrollIds(List<String> cursorIds) {
        request.setScrollIds(cursorIds);
        return this;
    }

    /**
     * Adds a single scroll id to the set of ids to clear.
     *
     * @param cursorId scroll id to add
     * @return this builder, for chaining
     */
    public ClearScrollRequestBuilder addScrollId(String cursorId) {
        request.addScrollId(cursorId);
        return this;
    }

    @Override
    protected void doExecute(ActionListener<ClearScrollResponse> listener) {
        ((Client) client).clearScroll(request, listener);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_search_ClearScrollRequestBuilder.java
|
1,062 |
/**
 * Unit tests for term vector request/response wire serialization, REST request
 * parsing, and term-vector option string conversion.
 */
public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {

    /** Round-trips a populated and an empty TermVectorResponse through the wire format. */
    @Test
    public void streamResponse() throws Exception {
        TermVectorResponse outResponse = new TermVectorResponse("a", "b", "c");
        outResponse.setExists(true);
        writeStandardTermVector(outResponse);

        // write
        ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
        OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
        outResponse.writeTo(out);

        // read
        ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
        InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer);
        TermVectorResponse inResponse = new TermVectorResponse("a", "b", "c");
        inResponse.readFrom(esBuffer);

        // see if correct
        checkIfStandardTermVector(inResponse);

        outResponse = new TermVectorResponse("a", "b", "c");
        writeEmptyTermVector(outResponse);
        // write
        outBuffer = new ByteArrayOutputStream();
        out = new OutputStreamStreamOutput(outBuffer);
        outResponse.writeTo(out);
        // read
        esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
        esBuffer = new InputStreamStreamInput(esInBuffer);
        inResponse = new TermVectorResponse("a", "b", "c");
        inResponse.readFrom(esBuffer);
        assertTrue(inResponse.isExists());
    }

    /** Writes a document with no term-vector fields and stores its (empty) vectors in the response. */
    private void writeEmptyTermVector(TermVectorResponse outResponse) throws IOException {
        Directory dir = newDirectory();
        IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT));
        conf.setOpenMode(OpenMode.CREATE);
        IndexWriter writer = new IndexWriter(dir, conf);
        FieldType type = new FieldType(TextField.TYPE_STORED);
        type.setStoreTermVectorOffsets(true);
        type.setStoreTermVectorPayloads(false);
        type.setStoreTermVectorPositions(true);
        type.setStoreTermVectors(true);
        type.freeze();
        Document d = new Document();
        d.add(new Field("id", "abc", StringField.TYPE_STORED));
        writer.updateDocument(new Term("id", "abc"), d);
        writer.commit();
        writer.close();
        DirectoryReader dr = DirectoryReader.open(dir);
        IndexSearcher s = new IndexSearcher(dr);
        TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
        ScoreDoc[] scoreDocs = search.scoreDocs;
        int doc = scoreDocs[0].doc;
        Fields fields = dr.getTermVectors(doc);
        EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
        outResponse.setFields(fields, null, flags, fields);
        outResponse.setExists(true);
        dr.close();
        dir.close();
    }

    /** Writes a document with two term-vector fields ("title", "desc") and stores its vectors. */
    private void writeStandardTermVector(TermVectorResponse outResponse) throws IOException {
        Directory dir = newDirectory();
        IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT));
        conf.setOpenMode(OpenMode.CREATE);
        IndexWriter writer = new IndexWriter(dir, conf);
        FieldType type = new FieldType(TextField.TYPE_STORED);
        type.setStoreTermVectorOffsets(true);
        type.setStoreTermVectorPayloads(false);
        type.setStoreTermVectorPositions(true);
        type.setStoreTermVectors(true);
        type.freeze();
        Document d = new Document();
        d.add(new Field("id", "abc", StringField.TYPE_STORED));
        d.add(new Field("title", "the1 quick brown fox jumps over the1 lazy dog", type));
        d.add(new Field("desc", "the1 quick brown fox jumps over the1 lazy dog", type));
        writer.updateDocument(new Term("id", "abc"), d);
        writer.commit();
        writer.close();
        DirectoryReader dr = DirectoryReader.open(dir);
        IndexSearcher s = new IndexSearcher(dr);
        TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
        ScoreDoc[] scoreDocs = search.scoreDocs;
        int doc = scoreDocs[0].doc;
        Fields termVectors = dr.getTermVectors(doc);
        EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
        outResponse.setFields(termVectors, null, flags, termVectors);
        dr.close();
        dir.close();
    }

    private void checkIfStandardTermVector(TermVectorResponse inResponse) throws IOException {
        Fields fields = inResponse.getFields();
        assertThat(fields.terms("title"), Matchers.notNullValue());
        assertThat(fields.terms("desc"), Matchers.notNullValue());
        assertThat(fields.size(), equalTo(2));
    }

    /** Parses REST bodies and the "fields" URL parameter into a TermVectorRequest. */
    @Test
    public void testRestRequestParsing() throws Exception {
        BytesReference inputBytes = new BytesArray(
                " {\"fields\" : [\"a\", \"b\",\"c\"], \"offsets\":false, \"positions\":false, \"payloads\":true}");

        TermVectorRequest tvr = new TermVectorRequest(null, null, null);
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(inputBytes);
        TermVectorRequest.parseRequest(tvr, parser);

        Set<String> fields = tvr.selectedFields();
        assertThat(fields.contains("a"), equalTo(true));
        assertThat(fields.contains("b"), equalTo(true));
        assertThat(fields.contains("c"), equalTo(true));
        assertThat(tvr.offsets(), equalTo(false));
        assertThat(tvr.positions(), equalTo(false));
        assertThat(tvr.payloads(), equalTo(true));

        String additionalFields = "b,c ,d, e  ";
        RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
        assertThat(tvr.selectedFields().size(), equalTo(5));
        assertThat(fields.contains("d"), equalTo(true));
        assertThat(fields.contains("e"), equalTo(true));

        additionalFields = "";
        RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);

        inputBytes = new BytesArray(" {\"offsets\":false, \"positions\":false, \"payloads\":true}");
        tvr = new TermVectorRequest(null, null, null);
        parser = XContentFactory.xContent(XContentType.JSON).createParser(inputBytes);
        TermVectorRequest.parseRequest(tvr, parser);
        additionalFields = "";
        RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
        assertThat(tvr.selectedFields(), equalTo(null));
        additionalFields = "b,c ,d, e  ";
        RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
        assertThat(tvr.selectedFields().size(), equalTo(4));
    }

    /** An unknown top-level key ("meaningless_term") must make request parsing fail. */
    @Test
    public void testRequestParsingThrowsException() throws Exception {
        BytesReference inputBytes = new BytesArray(
                " {\"fields\" : \"a,  b,c   \", \"offsets\":false, \"positions\":false, \"payloads\":true, \"meaningless_term\":2}");
        TermVectorRequest tvr = new TermVectorRequest(null, null, null);
        boolean threwException = false;
        try {
            XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(inputBytes);
            TermVectorRequest.parseRequest(tvr, parser);
        } catch (Exception e) {
            threwException = true;
        }
        assertThat(threwException, equalTo(true));
    }

    /** Round-trips randomized TermVectorRequests and verifies every field survives. */
    @Test
    public void streamRequest() throws IOException {
        for (int i = 0; i < 10; i++) {
            TermVectorRequest request = new TermVectorRequest("index", "type", "id");
            request.offsets(random().nextBoolean());
            request.fieldStatistics(random().nextBoolean());
            request.payloads(random().nextBoolean());
            request.positions(random().nextBoolean());
            request.termStatistics(random().nextBoolean());
            String parent = random().nextBoolean() ? "someParent" : null;
            request.parent(parent);
            String pref = random().nextBoolean() ? "somePreference" : null;
            request.preference(pref);

            // write
            ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
            OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
            request.writeTo(out);

            // read
            ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
            InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer);
            TermVectorRequest req2 = new TermVectorRequest(null, null, null);
            req2.readFrom(esBuffer);

            assertThat(request.offsets(), equalTo(req2.offsets()));
            assertThat(request.fieldStatistics(), equalTo(req2.fieldStatistics()));
            assertThat(request.payloads(), equalTo(req2.payloads()));
            assertThat(request.positions(), equalTo(req2.positions()));
            assertThat(request.termStatistics(), equalTo(req2.termStatistics()));
            // Assert on the DESERIALIZED request: the original trivially equals itself.
            assertThat(req2.preference(), equalTo(pref));
            assertThat(req2.routing(), equalTo(parent));
        }
    }

    @Test
    public void testFieldTypeToTermVectorString() throws Exception {
        FieldType ft = new FieldType();
        ft.setStoreTermVectorOffsets(false);
        ft.setStoreTermVectorPayloads(true);
        ft.setStoreTermVectors(true);
        ft.setStoreTermVectorPositions(true);
        String ftOpts = AbstractFieldMapper.termVectorOptionsToString(ft);
        // assertThat takes (actual, matcher): the computed string is the actual value.
        assertThat(ftOpts, equalTo("with_positions_payloads"));
        AllFieldMapper.Builder builder = new AllFieldMapper.Builder();
        boolean exceptionThrown = false;
        try {
            TypeParsers.parseTermVector("", ftOpts, builder);
        } catch (MapperParsingException e) {
            exceptionThrown = true;
        }
        assertThat("TypeParsers.parseTermVector should accept string with_positions_payloads but does not.", exceptionThrown, equalTo(false));
    }

    @Test
    public void testTermVectorStringGenerationWithoutPositions() throws Exception {
        FieldType ft = new FieldType();
        ft.setStoreTermVectorOffsets(true);
        ft.setStoreTermVectorPayloads(true);
        ft.setStoreTermVectors(true);
        ft.setStoreTermVectorPositions(false);
        String ftOpts = AbstractFieldMapper.termVectorOptionsToString(ft);
        assertThat(ftOpts, equalTo("with_offsets"));
    }

    /** Parses both multi-term-vector fixture files and checks the resulting requests. */
    @Test
    public void testMultiParser() throws Exception {
        byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvector/multiRequest1.json");
        BytesReference bytes = new BytesArray(data);
        MultiTermVectorsRequest request = new MultiTermVectorsRequest();
        request.add(new TermVectorRequest(), bytes);
        checkParsedParameters(request);

        data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvector/multiRequest2.json");
        bytes = new BytesArray(data);
        request = new MultiTermVectorsRequest();
        request.add(new TermVectorRequest(), bytes);
        checkParsedParameters(request);
    }

    void checkParsedParameters(MultiTermVectorsRequest request) {
        Set<String> fields = new HashSet<String>();
        fields.add("a");
        fields.add("b");
        fields.add("c");
        for (TermVectorRequest singleRequest : request.requests) {
            assertThat(singleRequest.index(), equalTo("testidx"));
            assertThat(singleRequest.type(), equalTo("test"));
            assertThat(singleRequest.payloads(), equalTo(false));
            assertThat(singleRequest.positions(), equalTo(false));
            assertThat(singleRequest.offsets(), equalTo(false));
            assertThat(singleRequest.termStatistics(), equalTo(true));
            assertThat(singleRequest.fieldStatistics(), equalTo(false));
            assertThat(singleRequest.id(), Matchers.anyOf(Matchers.equalTo("1"), Matchers.equalTo("2")));
            assertThat(singleRequest.selectedFields(), equalTo(fields));
        }
    }
}
| 0true
|
src_test_java_org_elasticsearch_action_termvector_TermVectorUnitTests.java
|
477 |
/**
 * Request builder for retrieving index aliases by name via the indices admin client.
 */
public class GetAliasesRequestBuilder extends BaseAliasesRequestBuilder<GetAliasesResponse, GetAliasesRequestBuilder> {

    public GetAliasesRequestBuilder(IndicesAdminClient client, String... aliases) {
        super(client, aliases);
    }

    @Override
    protected void doExecute(ActionListener<GetAliasesResponse> listener) {
        ((IndicesAdminClient) client).getAliases(request, listener);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_alias_get_GetAliasesRequestBuilder.java
|
58 |
/**
 * Quick-fix proposal that appends a default initializer (" = <default>" for values,
 * " => <default>" for methods) to an uninitialized, non-formal declaration, and
 * selects the inserted default value for immediate editing.
 */
class AddInitializerProposal extends InitializerProposal {

    private AddInitializerProposal(TypedDeclaration dec, int offset, int length,
            TextChange change) {
        super("Add initializer to '" + dec.getName() + "'",
                change, dec, dec.getType(), new Region(offset, length),
                MINOR_CHANGE, -1, null);
    }

    private static void addInitializerProposal(Tree.CompilationUnit cu,
            Collection<ICompletionProposal> proposals, IFile file,
            Tree.TypedDeclaration decNode, Tree.SpecifierOrInitializerExpression sie) {
        MethodOrValue dec = (MethodOrValue) decNode.getDeclarationModel();
        if (dec==null) return;
        // Only offer the fix when the declaration has no initializer parameter
        // and is not formal (formal members must stay uninitialized).
        if (dec.getInitializerParameter()==null && !dec.isFormal()) {
            TextChange change = new TextFileChange("Add Initializer", file);
            int offset = decNode.getStopIndex();
            String defaultValue = defaultValue(cu.getUnit(), dec.getType());
            String def;
            int selectionOffset;
            if (decNode instanceof Tree.MethodDeclaration) {
                // " => " is 4 chars, so the default value starts at offset + 4.
                def = " => " + defaultValue;
                selectionOffset = offset + 4;
            }
            else {
                // " = " is 3 chars, so the default value starts at offset + 3.
                def = " = " + defaultValue;
                selectionOffset = offset + 3;
            }
            change.setEdit(new InsertEdit(offset, def));
            proposals.add(new AddInitializerProposal(dec,
                    selectionOffset, defaultValue.length(),
                    change));
        }
    }

    static void addInitializerProposals(Collection<ICompletionProposal> proposals,
            IFile file, Tree.CompilationUnit cu, Node node) {
        if (node instanceof Tree.AttributeDeclaration) {
            Tree.AttributeDeclaration attDecNode = (Tree.AttributeDeclaration) node;
            Tree.SpecifierOrInitializerExpression sie =
                    attDecNode.getSpecifierOrInitializerExpression();
            // Lazy specifiers ("=>") already have a body; don't offer an initializer.
            if (!(sie instanceof Tree.LazySpecifierExpression)) {
                addInitializerProposal(cu, proposals, file, attDecNode, sie);
            }
        }
        if (node instanceof Tree.MethodDeclaration) {
            Tree.MethodDeclaration methDecNode = (Tree.MethodDeclaration) node;
            Tree.SpecifierExpression sie = methDecNode.getSpecifierExpression();
            addInitializerProposal(cu, proposals, file, methDecNode, sie);
        }
    }
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AddInitializerProposal.java
|
918 |
/**
 * An {@link AdapterActionFuture} whose listener response type and exposed
 * result type are identical, so no conversion is needed.
 *
 * @param <T> the response type, returned to callers as-is
 */
public class PlainActionFuture<T> extends AdapterActionFuture<T, T> {

    // Static factory so call sites need not repeat the generic type argument.
    public static <T> PlainActionFuture<T> newFuture() {
        return new PlainActionFuture<T>();
    }

    @Override
    protected T convert(T listenerResponse) {
        // Identity conversion: source and target types are the same.
        return listenerResponse;
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_support_PlainActionFuture.java
|
127 |
/**
 * Wire-level envelope for a message sent from a Hazelcast member to a client.
 * Carries the serialized payload together with the call id that correlates it
 * with the originating request, plus flags marking the payload as an event
 * notification or as an error.
 */
public class ClientResponse implements IdentifiedDataSerializable {

    // Serialized payload: a call result, an error, or an event object.
    private Data response;
    // Correlates this response with the client-side invocation.
    private int callId;
    // True when this is an event notification rather than a call result.
    private boolean isEvent;
    // True when the payload represents an error.
    private boolean isError;

    // No-arg constructor required for deserialization via readData().
    public ClientResponse() {
    }

    // Constructor for ordinary results and errors (isEvent remains false).
    public ClientResponse(Data response, boolean isError, int callId) {
        this.response = response;
        this.isError = isError;
        this.callId = callId;
    }

    // Constructor for event notifications (isError remains false).
    // NOTE(review): distinguished from the constructor above only by
    // parameter order -- easy to misuse at call sites; verify callers.
    public ClientResponse(Data response, int callId, boolean isEvent) {
        this.response = response;
        this.callId = callId;
        this.isEvent = isEvent;
    }

    public Data getResponse() {
        return response;
    }

    public int getCallId() {
        return callId;
    }

    public boolean isEvent() {
        return isEvent;
    }

    public boolean isError() {
        return isError;
    }

    @Override
    public int getFactoryId() {
        return ClientDataSerializerHook.ID;
    }

    @Override
    public int getId() {
        return ClientDataSerializerHook.CLIENT_RESPONSE;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        // Field order must stay in sync with readData().
        out.writeInt(callId);
        out.writeBoolean(isEvent);
        out.writeBoolean(isError);
        response.writeData(out);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        // Mirror of writeData(): same field order.
        callId = in.readInt();
        isEvent = in.readBoolean();
        isError = in.readBoolean();
        response = new Data();
        response.readData(in);
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_client_ClientResponse.java
|
473 |
/**
 * Client-side view of the cluster partition table: resolves keys to
 * partition ids and partition ids to owning members.
 */
public interface ClientPartitionService {

    /** Returns the address of the member that currently owns the given partition. */
    Address getPartitionOwner(int partitionId);

    /** Returns the partition id for an already-serialized key. */
    int getPartitionId(Data key);

    /** Returns the partition id for an object key. */
    int getPartitionId(Object key);

    /** Returns the total number of partitions in the cluster. */
    int getPartitionCount();

    /** Returns a partition proxy for the given partition id. */
    Partition getPartition(int partitionId);
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_ClientPartitionService.java
|
135 |
/**
 * TestNG tests for {@link OBinaryTypeSerializer}: object size calculation,
 * round-trip (de)serialization, native byte order, and direct-memory
 * compatibility of the native format.
 */
@Test
public class BinarySerializerTest {
    // Expected serialized size: payload length plus the int length prefix.
    private int FIELD_SIZE;
    // Sample payload used by every test.
    private byte[] OBJECT;
    private OBinaryTypeSerializer binarySerializer;
    // Shared target buffer, sized to FIELD_SIZE in beforeClass().
    byte[] stream;

    @BeforeClass
    public void beforeClass() {
        binarySerializer = new OBinaryTypeSerializer();
        OBJECT = new byte[] { 1, 2, 3, 4, 5, 6 };
        FIELD_SIZE = OBJECT.length + OIntegerSerializer.INT_SIZE;
        stream = new byte[FIELD_SIZE];
    }

    public void testFieldSize() {
        Assert.assertEquals(binarySerializer.getObjectSize(OBJECT), FIELD_SIZE);
    }

    public void testSerialize() {
        // Round-trip through the default (big-endian) format.
        binarySerializer.serialize(OBJECT, stream, 0);
        Assert.assertEquals(binarySerializer.deserialize(stream, 0), OBJECT);
    }

    public void testSerializeNative() {
        // Round-trip through the native-byte-order format.
        binarySerializer.serializeNative(OBJECT, stream, 0);
        Assert.assertEquals(binarySerializer.deserializeNative(stream, 0), OBJECT);
    }

    public void testNativeDirectMemoryCompatibility() {
        // Data written with serializeNative must be readable from direct memory.
        binarySerializer.serializeNative(OBJECT, stream, 0);

        ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream);
        try {
            Assert.assertEquals(binarySerializer.deserializeFromDirectMemory(pointer, 0), OBJECT);
        } finally {
            // Direct memory is not garbage collected; must free explicitly.
            pointer.free();
        }
    }
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_serialization_types_BinarySerializerTest.java
|
1,346 |
public class OWriteAheadLog {
private static final long ONE_KB = 1024L;
public static final String MASTER_RECORD_EXTENSION = ".wmr";
public static final String WAL_SEGMENT_EXTENSION = ".wal";
private OLogSequenceNumber lastCheckpoint;
private final Object syncObject = new Object();
private final List<LogSegment> logSegments = new ArrayList<LogSegment>();
private boolean useFirstMasterRecord = true;
private final int maxPagesCacheSize;
private final int commitDelay;
private final long maxSegmentSize;
private final long maxLogSize;
private long logSize;
private final File walLocation;
private File masterRecordFile;
private final RandomAccessFile masterRecordLSNHolder;
private OLogSequenceNumber firstMasterRecord;
private OLogSequenceNumber secondMasterRecord;
private volatile OLogSequenceNumber flushedLsn;
private final OStorageLocalAbstract storage;
private boolean closed;
private static String calculateWalPath(OStorageLocalAbstract storage) {
String walPath = OGlobalConfiguration.WAL_LOCATION.getValueAsString();
if (walPath == null)
walPath = storage.getStoragePath();
return walPath;
}
public OWriteAheadLog(OStorageLocalAbstract storage) throws IOException {
this(OGlobalConfiguration.WAL_CACHE_SIZE.getValueAsInteger(), OGlobalConfiguration.WAL_COMMIT_TIMEOUT.getValueAsInteger(),
OGlobalConfiguration.WAL_MAX_SEGMENT_SIZE.getValueAsInteger() * ONE_KB * ONE_KB, OGlobalConfiguration.WAL_MAX_SIZE
.getValueAsInteger() * ONE_KB * ONE_KB, storage);
}
public OWriteAheadLog(int maxPagesCacheSize, int commitDelay, long maxSegmentSize, long maxLogSize, OStorageLocalAbstract storage)
throws IOException {
this.maxPagesCacheSize = maxPagesCacheSize;
this.commitDelay = commitDelay;
this.maxSegmentSize = maxSegmentSize;
this.maxLogSize = maxLogSize;
this.storage = storage;
try {
this.walLocation = new File(calculateWalPath(this.storage));
File[] walFiles = this.walLocation.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return validateName(name);
}
});
if (walFiles == null)
throw new IllegalStateException(
"Location passed in WAL does not exist, or IO error was happened. DB can not work in durable mode in such case.");
if (walFiles.length == 0) {
LogSegment logSegment = new LogSegment(new File(this.walLocation, getSegmentName(0)), maxPagesCacheSize);
logSegment.init();
logSegment.startFlush();
logSegments.add(logSegment);
logSize = 0;
flushedLsn = null;
} else {
for (File walFile : walFiles) {
LogSegment logSegment = new LogSegment(walFile, maxPagesCacheSize);
logSegment.init();
logSegments.add(logSegment);
logSize += logSegment.filledUpTo();
}
Collections.sort(logSegments);
logSegments.get(logSegments.size() - 1).startFlush();
flushedLsn = readFlushedLSN();
}
masterRecordFile = new File(walLocation, this.storage.getName() + MASTER_RECORD_EXTENSION);
masterRecordLSNHolder = new RandomAccessFile(masterRecordFile, "rws");
if (masterRecordLSNHolder.length() > 0) {
firstMasterRecord = readMasterRecord(this.storage.getName(), 0);
secondMasterRecord = readMasterRecord(this.storage.getName(), 1);
if (firstMasterRecord == null) {
useFirstMasterRecord = true;
lastCheckpoint = secondMasterRecord;
} else if (secondMasterRecord == null) {
useFirstMasterRecord = false;
lastCheckpoint = firstMasterRecord;
} else {
if (firstMasterRecord.compareTo(secondMasterRecord) >= 0) {
lastCheckpoint = firstMasterRecord;
useFirstMasterRecord = false;
} else {
lastCheckpoint = secondMasterRecord;
useFirstMasterRecord = true;
}
}
}
fixMasterRecords();
} catch (FileNotFoundException e) {
// never happened
OLogManager.instance().error(this, "Error during file initialization for storage %s", e, this.storage.getName());
throw new IllegalStateException("Error during file initialization for storage " + this.storage.getName(), e);
}
}
public File getWalLocation() {
return walLocation;
}
public OLogSequenceNumber begin() throws IOException {
synchronized (syncObject) {
checkForClose();
LogSegment first = logSegments.get(0);
if (first.filledUpTo() == 0)
return null;
return first.begin();
}
}
public OLogSequenceNumber end() throws IOException {
synchronized (syncObject) {
checkForClose();
int lastIndex = logSegments.size() - 1;
LogSegment last = logSegments.get(lastIndex);
while (last.filledUpTo == 0) {
lastIndex--;
if (lastIndex >= 0)
last = logSegments.get(lastIndex);
else
return null;
}
return last.end();
}
}
public void flush() {
synchronized (syncObject) {
checkForClose();
LogSegment last = logSegments.get(logSegments.size() - 1);
last.flush();
}
}
private void fixMasterRecords() throws IOException {
if (firstMasterRecord != null) {
int index = (int) (firstMasterRecord.getSegment() - logSegments.get(0).getOrder());
if (logSegments.size() <= index || index < 0) {
firstMasterRecord = null;
} else {
LogSegment firstMasterRecordSegment = logSegments.get(index);
if (firstMasterRecordSegment.filledUpTo() <= firstMasterRecord.getPosition())
firstMasterRecord = null;
}
}
if (secondMasterRecord != null) {
int index = (int) (secondMasterRecord.getSegment() - logSegments.get(0).getOrder());
if (logSegments.size() <= index || index < 0) {
secondMasterRecord = null;
} else {
LogSegment secondMasterRecordSegment = logSegments.get(index);
if (secondMasterRecordSegment.filledUpTo() <= secondMasterRecord.getPosition())
secondMasterRecord = null;
}
}
if (firstMasterRecord != null && secondMasterRecord != null)
return;
if (firstMasterRecord == null && secondMasterRecord == null) {
masterRecordLSNHolder.setLength(0);
masterRecordLSNHolder.getFD().sync();
lastCheckpoint = null;
} else {
if (secondMasterRecord == null)
secondMasterRecord = firstMasterRecord;
else
firstMasterRecord = secondMasterRecord;
lastCheckpoint = firstMasterRecord;
writeMasterRecord(0, firstMasterRecord);
writeMasterRecord(1, secondMasterRecord);
}
}
private OLogSequenceNumber readMasterRecord(String storageName, int index) throws IOException {
CRC32 crc32 = new CRC32();
try {
masterRecordLSNHolder.seek(index * (OIntegerSerializer.INT_SIZE + 2 * OLongSerializer.LONG_SIZE));
int firstCRC = masterRecordLSNHolder.readInt();
long segment = masterRecordLSNHolder.readLong();
long position = masterRecordLSNHolder.readLong();
byte[] serializedLSN = new byte[2 * OLongSerializer.LONG_SIZE];
OLongSerializer.INSTANCE.serialize(segment, serializedLSN, 0);
OLongSerializer.INSTANCE.serialize(position, serializedLSN, OLongSerializer.LONG_SIZE);
crc32.update(serializedLSN);
if (firstCRC != ((int) crc32.getValue())) {
OLogManager.instance().error(this, "Can not restore %d WAL master record for storage %s crc check is failed", index,
storageName);
return null;
}
return new OLogSequenceNumber(segment, position);
} catch (EOFException eofException) {
OLogManager.instance().warn(this, "Can not restore %d WAL master record for storage %s", index, storageName);
return null;
}
}
private void writeMasterRecord(int index, OLogSequenceNumber masterRecord) throws IOException {
masterRecordLSNHolder.seek(index * (OIntegerSerializer.INT_SIZE + 2 * OLongSerializer.LONG_SIZE));
CRC32 crc32 = new CRC32();
byte[] serializedLSN = new byte[2 * OLongSerializer.LONG_SIZE];
OLongSerializer.INSTANCE.serialize(masterRecord.getSegment(), serializedLSN, 0);
OLongSerializer.INSTANCE.serialize(masterRecord.getPosition(), serializedLSN, OLongSerializer.LONG_SIZE);
crc32.update(serializedLSN);
masterRecordLSNHolder.writeInt((int) crc32.getValue());
masterRecordLSNHolder.writeLong(masterRecord.getSegment());
masterRecordLSNHolder.writeLong(masterRecord.getPosition());
}
private String getSegmentName(long order) {
return storage.getName() + "." + order + WAL_SEGMENT_EXTENSION;
}
public OLogSequenceNumber logFuzzyCheckPointStart() throws IOException {
synchronized (syncObject) {
checkForClose();
OFuzzyCheckpointStartRecord record = new OFuzzyCheckpointStartRecord(lastCheckpoint);
log(record);
return record.getLsn();
}
}
public OLogSequenceNumber logFuzzyCheckPointEnd() throws IOException {
synchronized (syncObject) {
checkForClose();
OFuzzyCheckpointEndRecord record = new OFuzzyCheckpointEndRecord();
log(record);
return record.getLsn();
}
}
public OLogSequenceNumber log(OWALRecord record) throws IOException {
synchronized (syncObject) {
checkForClose();
final byte[] serializedForm = OWALRecordsFactory.INSTANCE.toStream(record);
LogSegment last = logSegments.get(logSegments.size() - 1);
long lastSize = last.filledUpTo();
final OLogSequenceNumber lsn = last.logRecord(serializedForm);
record.setLsn(lsn);
if (record.isUpdateMasterRecord()) {
lastCheckpoint = lsn;
if (useFirstMasterRecord) {
firstMasterRecord = lsn;
writeMasterRecord(0, firstMasterRecord);
useFirstMasterRecord = false;
} else {
secondMasterRecord = lsn;
writeMasterRecord(1, secondMasterRecord);
useFirstMasterRecord = true;
}
}
final long sizeDiff = last.filledUpTo() - lastSize;
logSize += sizeDiff;
if (logSize >= maxLogSize) {
LogSegment first = logSegments.get(0);
first.stopFlush(false);
logSize -= first.filledUpTo();
first.delete(false);
logSegments.remove(0);
fixMasterRecords();
}
if (last.filledUpTo() >= maxSegmentSize) {
last.stopFlush(true);
last = new LogSegment(new File(walLocation, getSegmentName(last.getOrder() + 1)), maxPagesCacheSize);
last.init();
last.startFlush();
logSegments.add(last);
}
return lsn;
}
}
public long size() {
synchronized (syncObject) {
return logSize;
}
}
public void shrinkTill(OLogSequenceNumber lsn) throws IOException {
if (lsn == null)
return;
synchronized (syncObject) {
ListIterator<LogSegment> iterator = logSegments.listIterator(logSegments.size());
while (iterator.hasPrevious()) {
final LogSegment logSegment = iterator.previous();
if (logSegment.end() == null || logSegment.end().compareTo(lsn) >= 0)
continue;
logSegment.delete(false);
iterator.remove();
}
}
}
public void close() throws IOException {
close(true);
}
public void close(boolean flush) throws IOException {
synchronized (syncObject) {
if (closed)
return;
closed = true;
for (LogSegment logSegment : logSegments)
logSegment.close(flush);
masterRecordLSNHolder.close();
}
}
private void checkForClose() {
if (closed)
throw new OStorageException("WAL log " + walLocation + " has been closed");
}
public void delete() throws IOException {
delete(false);
}
public void delete(boolean flush) throws IOException {
synchronized (syncObject) {
close(flush);
for (LogSegment logSegment : logSegments)
logSegment.delete(false);
boolean deleted = masterRecordFile.delete();
while (!deleted) {
OMemoryWatchDog.freeMemoryForResourceCleanup(100);
deleted = !masterRecordFile.exists() || masterRecordFile.delete();
}
}
}
public void logDirtyPages(Set<ODirtyPage> dirtyPages) throws IOException {
synchronized (syncObject) {
checkForClose();
log(new ODirtyPagesRecord(dirtyPages));
}
}
public OLogSequenceNumber getLastCheckpoint() {
synchronized (syncObject) {
checkForClose();
return lastCheckpoint;
}
}
public OWALRecord read(OLogSequenceNumber lsn) throws IOException {
synchronized (syncObject) {
checkForClose();
long segment = lsn.getSegment();
int index = (int) (segment - logSegments.get(0).getOrder());
if (index < 0 || index >= logSegments.size())
return null;
LogSegment logSegment = logSegments.get(index);
byte[] recordEntry = logSegment.readRecord(lsn);
if (recordEntry == null)
return null;
final OWALRecord record = OWALRecordsFactory.INSTANCE.fromStream(recordEntry);
record.setLsn(lsn);
return record;
}
}
public OLogSequenceNumber next(OLogSequenceNumber lsn) throws IOException {
synchronized (syncObject) {
checkForClose();
long order = lsn.getSegment();
int index = (int) (order - logSegments.get(0).getOrder());
if (index < 0 || index >= logSegments.size())
return null;
LogSegment logSegment = logSegments.get(index);
OLogSequenceNumber nextLSN = logSegment.getNextLSN(lsn);
if (nextLSN == null) {
index++;
if (index >= logSegments.size())
return null;
LogSegment nextSegment = logSegments.get(index);
if (nextSegment.filledUpTo() == 0)
return null;
nextLSN = nextSegment.begin();
}
return nextLSN;
}
}
public OLogSequenceNumber getFlushedLSN() {
synchronized (syncObject) {
checkForClose();
return flushedLsn;
}
}
private OLogSequenceNumber readFlushedLSN() throws IOException {
int segment = logSegments.size() - 1;
while (segment >= 0) {
LogSegment logSegment = logSegments.get(segment);
OLogSequenceNumber flushedLSN = logSegment.readFlushedLSN();
if (flushedLSN == null)
segment--;
else
return flushedLSN;
}
return null;
}
public static boolean validateName(String name) {
if (!name.toLowerCase().endsWith(".wal"))
return false;
int walOrderStartIndex = name.indexOf('.');
if (walOrderStartIndex == name.length() - 4)
return false;
int walOrderEndIndex = name.indexOf('.', walOrderStartIndex + 1);
String walOrder = name.substring(walOrderStartIndex + 1, walOrderEndIndex);
try {
Integer.parseInt(walOrder);
} catch (NumberFormatException e) {
return false;
}
return true;
}
public OLogSequenceNumber logFullCheckpointStart() throws IOException {
return log(new OFullCheckpointStartRecord(lastCheckpoint));
}
public void logFullCheckpointEnd() throws IOException {
synchronized (syncObject) {
checkForClose();
log(new OCheckpointEndRecord());
}
}
private final class LogSegment implements Comparable<LogSegment> {
private final RandomAccessFile rndFile;
private final File file;
private long filledUpTo;
private final long order;
private final int maxPagesCacheSize;
private boolean closed;
private OWALPage currentPage;
private final ConcurrentLinkedQueue<OWALPage> pagesCache = new ConcurrentLinkedQueue<OWALPage>();
private long nextPositionToFlush;
private long flushId;
private final ScheduledExecutorService commitExecutor = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread thread = new Thread(r);
thread.setDaemon(true);
thread.setName("WAL Flush Task");
return thread;
}
});
private OLogSequenceNumber last = null;
private volatile boolean flushNewData = true;
private LogSegment(File file, int maxPagesCacheSize) throws IOException {
this.file = file;
this.maxPagesCacheSize = maxPagesCacheSize;
order = extractOrder(file.getName());
closed = false;
rndFile = new RandomAccessFile(file, "rw");
}
public void startFlush() {
if (commitDelay > 0)
commitExecutor.scheduleAtFixedRate(new FlushTask(), commitDelay, commitDelay, TimeUnit.MILLISECONDS);
}
public void stopFlush(boolean flush) {
if (flush)
flush();
if (!commitExecutor.isShutdown()) {
commitExecutor.shutdown();
try {
if (!commitExecutor
.awaitTermination(OGlobalConfiguration.WAL_SHUTDOWN_TIMEOUT.getValueAsInteger(), TimeUnit.MILLISECONDS))
throw new OStorageException("WAL flush task for " + getPath() + " segment can not be stopped.");
} catch (InterruptedException e) {
OLogManager.instance().error(this, "Can not shutdown background WAL commit thread.");
}
}
}
public long getOrder() {
return order;
}
public void init() throws IOException {
selfCheck();
initPageCache();
initLastPage();
}
private void initLastPage() throws IOException {
synchronized (rndFile) {
long pagesCount = rndFile.length() / OWALPage.PAGE_SIZE;
long currentPage = pagesCount - 1;
if (currentPage < 0)
return;
do {
rndFile.seek(currentPage * OWALPage.PAGE_SIZE);
byte[] content = new byte[OWALPage.PAGE_SIZE];
rndFile.readFully(content);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(content);
try {
final OWALPage page = new OWALPage(pointer, false);
int lastPosition = findLastRecord(page, true);
if (lastPosition > -1) {
last = new OLogSequenceNumber(order, currentPage * OWALPage.PAGE_SIZE + lastPosition);
return;
}
currentPage--;
} finally {
pointer.free();
}
} while (currentPage >= 0);
}
}
private void initPageCache() throws IOException {
synchronized (rndFile) {
long pagesCount = rndFile.length() / OWALPage.PAGE_SIZE;
if (pagesCount == 0)
return;
rndFile.seek((pagesCount - 1) * OWALPage.PAGE_SIZE);
byte[] content = new byte[OWALPage.PAGE_SIZE];
rndFile.readFully(content);
flushId = OLongSerializer.INSTANCE.deserializeNative(content, OWALPage.FLUSH_ID_OFFSET);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(content);
currentPage = new OWALPage(pointer, false);
filledUpTo = (pagesCount - 1) * OWALPage.PAGE_SIZE + currentPage.getFilledUpTo();
nextPositionToFlush = (pagesCount - 1) * OWALPage.PAGE_SIZE;
pagesCache.add(currentPage);
}
}
private long extractOrder(String name) {
int walOrderStartIndex = name.indexOf('.') + 1;
int walOrderEndIndex = name.indexOf('.', walOrderStartIndex);
String walOrder = name.substring(walOrderStartIndex, walOrderEndIndex);
try {
return Long.parseLong(walOrder);
} catch (NumberFormatException e) {
// never happen
throw new IllegalStateException(e);
}
}
@Override
public int compareTo(LogSegment other) {
final long otherOrder = other.order;
if (order > otherOrder)
return 1;
else if (order < otherOrder)
return -1;
return 0;
}
public long filledUpTo() throws IOException {
return filledUpTo;
}
public OLogSequenceNumber begin() throws IOException {
if (!pagesCache.isEmpty())
return new OLogSequenceNumber(order, OWALPage.RECORDS_OFFSET);
if (rndFile.length() > 0)
return new OLogSequenceNumber(order, OWALPage.RECORDS_OFFSET);
return null;
}
public OLogSequenceNumber end() {
return last;
}
private int findLastRecord(OWALPage page, boolean skipTailRecords) {
int prevOffset = OWALPage.RECORDS_OFFSET;
int pageOffset = OWALPage.RECORDS_OFFSET;
int maxOffset = page.getFilledUpTo();
while (pageOffset < maxOffset) {
prevOffset = pageOffset;
pageOffset += page.getSerializedRecordSize(pageOffset);
}
if (skipTailRecords && page.recordTail(prevOffset))
return -1;
return prevOffset;
}
public void delete(boolean flush) throws IOException {
close(flush);
boolean deleted = file.delete();
while (!deleted) {
OMemoryWatchDog.freeMemoryForResourceCleanup(100);
deleted = !file.exists() || file.delete();
}
}
public String getPath() {
return file.getAbsolutePath();
}
public OLogSequenceNumber logRecord(byte[] record) throws IOException {
flushNewData = true;
int pageOffset = (int) (filledUpTo % OWALPage.PAGE_SIZE);
long pageIndex = filledUpTo / OWALPage.PAGE_SIZE;
if (pageOffset == 0 && pageIndex > 0)
pageIndex--;
int pos = 0;
boolean firstChunk = true;
OLogSequenceNumber lsn = null;
while (pos < record.length) {
if (currentPage == null) {
ODirectMemoryPointer pointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE);
currentPage = new OWALPage(pointer, true);
pagesCache.add(currentPage);
filledUpTo += OWALPage.RECORDS_OFFSET;
}
int freeSpace = currentPage.getFreeSpace();
if (freeSpace < OWALPage.MIN_RECORD_SIZE) {
filledUpTo += freeSpace + OWALPage.RECORDS_OFFSET;
ODirectMemoryPointer pointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE);
currentPage = new OWALPage(pointer, true);
pagesCache.add(currentPage);
pageIndex++;
freeSpace = currentPage.getFreeSpace();
}
final OWALPage walPage = currentPage;
synchronized (walPage) {
final int entrySize = OWALPage.calculateSerializedSize(record.length - pos);
int addedChunkOffset;
if (entrySize <= freeSpace) {
if (pos == 0)
addedChunkOffset = walPage.appendRecord(record, false, !firstChunk);
else
addedChunkOffset = walPage.appendRecord(Arrays.copyOfRange(record, pos, record.length), false, !firstChunk);
pos = record.length;
} else {
int chunkSize = OWALPage.calculateRecordSize(freeSpace);
if (chunkSize > record.length - pos)
chunkSize = record.length - pos;
addedChunkOffset = walPage.appendRecord(Arrays.copyOfRange(record, pos, pos + chunkSize), true, !firstChunk);
pos += chunkSize;
}
if (firstChunk) {
lsn = new OLogSequenceNumber(order, pageIndex * OWALPage.PAGE_SIZE + addedChunkOffset);
}
int spaceDiff = freeSpace - walPage.getFreeSpace();
filledUpTo += spaceDiff;
firstChunk = false;
}
}
if (pagesCache.size() > maxPagesCacheSize) {
OLogManager.instance().info(this, "Max cache limit is reached (%d vs. %d), sync flush is performed.", maxPagesCacheSize,
pagesCache.size());
flush();
}
last = lsn;
return last;
}
public byte[] readRecord(OLogSequenceNumber lsn) throws IOException {
assert lsn.getSegment() == order;
if (lsn.getPosition() >= filledUpTo)
return null;
if (flushedLsn == null || flushedLsn.compareTo(lsn) < 0)
flush();
byte[] record = null;
long pageIndex = lsn.getPosition() / OWALPage.PAGE_SIZE;
int pageOffset = (int) (lsn.getPosition() % OWALPage.PAGE_SIZE);
long pageCount = (filledUpTo + OWALPage.PAGE_SIZE - 1) / OWALPage.PAGE_SIZE;
while (pageIndex < pageCount) {
synchronized (rndFile) {
byte[] pageContent = new byte[OWALPage.PAGE_SIZE];
rndFile.seek(pageIndex * OWALPage.PAGE_SIZE);
rndFile.readFully(pageContent);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(pageContent);
try {
OWALPage page = new OWALPage(pointer, false);
byte[] content = page.getRecord(pageOffset);
if (record == null)
record = content;
else {
byte[] oldRecord = record;
record = new byte[record.length + content.length];
System.arraycopy(oldRecord, 0, record, 0, oldRecord.length);
System.arraycopy(content, 0, record, oldRecord.length, record.length - oldRecord.length);
}
if (page.mergeWithNextPage(pageOffset)) {
pageOffset = OWALPage.RECORDS_OFFSET;
pageIndex++;
} else
break;
} finally {
pointer.free();
}
}
}
return record;
}
public OLogSequenceNumber getNextLSN(OLogSequenceNumber lsn) throws IOException {
final byte[] record = readRecord(lsn);
if (record == null)
return null;
long pos = lsn.getPosition();
long pageIndex = pos / OWALPage.PAGE_SIZE;
int pageOffset = (int) (pos - pageIndex * OWALPage.PAGE_SIZE);
int restOfRecord = record.length;
while (restOfRecord > 0) {
int entrySize = OWALPage.calculateSerializedSize(restOfRecord);
if (entrySize + pageOffset < OWALPage.PAGE_SIZE) {
if (entrySize + pageOffset <= OWALPage.PAGE_SIZE - OWALPage.MIN_RECORD_SIZE)
pos += entrySize;
else
pos += OWALPage.PAGE_SIZE - pageOffset + OWALPage.RECORDS_OFFSET;
break;
} else if (entrySize + pageOffset == OWALPage.PAGE_SIZE) {
pos += entrySize + OWALPage.RECORDS_OFFSET;
break;
} else {
int chunkSize = OWALPage.calculateRecordSize(OWALPage.PAGE_SIZE - pageOffset);
restOfRecord -= chunkSize;
pos += OWALPage.PAGE_SIZE - pageOffset + OWALPage.RECORDS_OFFSET;
pageOffset = OWALPage.RECORDS_OFFSET;
}
}
if (pos >= filledUpTo)
return null;
return new OLogSequenceNumber(order, pos);
}
public void close(boolean flush) throws IOException {
if (!closed) {
stopFlush(flush);
rndFile.close();
closed = true;
if (!pagesCache.isEmpty()) {
for (OWALPage page : pagesCache)
page.getPagePointer().free();
}
currentPage = null;
}
}
private void selfCheck() throws IOException {
if (!pagesCache.isEmpty())
throw new IllegalStateException("WAL cache is not empty, we can not verify WAL after it was started to be used");
synchronized (rndFile) {
long pagesCount = rndFile.length() / OWALPage.PAGE_SIZE;
if (rndFile.length() % OWALPage.PAGE_SIZE > 0) {
OLogManager.instance().error(this, "Last WAL page was written partially, auto fix.");
rndFile.setLength(OWALPage.PAGE_SIZE * pagesCount);
}
long currentPage = pagesCount - 1;
CRC32 crc32 = new CRC32();
while (currentPage >= 0) {
crc32.reset();
byte[] content = new byte[OWALPage.PAGE_SIZE];
rndFile.seek(currentPage * OWALPage.PAGE_SIZE);
rndFile.readFully(content);
int pageCRC = OIntegerSerializer.INSTANCE.deserializeNative(content, 0);
crc32.update(content, OIntegerSerializer.INT_SIZE, OWALPage.PAGE_SIZE - OIntegerSerializer.INT_SIZE);
int calculatedCRC = (int) crc32.getValue();
if (pageCRC != calculatedCRC) {
OLogManager.instance().error(this, "%d WAL page has been broken and will be truncated.", currentPage);
currentPage--;
pagesCount = currentPage + 1;
rndFile.setLength(pagesCount * OWALPage.PAGE_SIZE);
} else
break;
}
if (currentPage < 0)
return;
byte[] content = new byte[OWALPage.PAGE_SIZE];
rndFile.seek(currentPage * OWALPage.PAGE_SIZE);
rndFile.readFully(content);
currentPage--;
long intialFlushId = OLongSerializer.INSTANCE.deserializeNative(content, OWALPage.FLUSH_ID_OFFSET);
long loadedFlushId = intialFlushId;
int flushedPagesCount = 1;
while (currentPage >= 0) {
content = new byte[OWALPage.PAGE_SIZE];
rndFile.seek(currentPage * OWALPage.PAGE_SIZE);
rndFile.readFully(content);
crc32.reset();
crc32.update(content, OIntegerSerializer.INT_SIZE, OWALPage.PAGE_SIZE - OIntegerSerializer.INT_SIZE);
int calculatedCRC = (int) crc32.getValue();
int pageCRC = OIntegerSerializer.INSTANCE.deserializeNative(content, 0);
if (pageCRC != calculatedCRC) {
OLogManager.instance().error(this, "%d WAL page has been broken and will be truncated.", currentPage);
currentPage--;
pagesCount = currentPage + 1;
rndFile.setLength(pagesCount * OWALPage.PAGE_SIZE);
flushedPagesCount = 0;
} else {
loadedFlushId = OLongSerializer.INSTANCE.deserializeNative(content, OWALPage.FLUSH_ID_OFFSET);
if (loadedFlushId == intialFlushId) {
flushedPagesCount++;
currentPage--;
} else
break;
}
}
if (flushedPagesCount != 0) {
content = new byte[OWALPage.PAGE_SIZE];
rndFile.seek((currentPage + 1) * OWALPage.PAGE_SIZE);
rndFile.readFully(content);
final int firstFlushIndex = OIntegerSerializer.INSTANCE.deserializeNative(content, OWALPage.FLUSH_INDEX_OFFSET);
if (firstFlushIndex != 0) {
OLogManager.instance().error(this, "%d WAL page has been broken and will be truncated.", currentPage + 1);
pagesCount = currentPage + 1;
rndFile.setLength(pagesCount * OWALPage.PAGE_SIZE);
flushedPagesCount = 0;
}
}
currentPage += flushedPagesCount;
while (currentPage >= 0) {
rndFile.seek(currentPage * OWALPage.PAGE_SIZE);
rndFile.readFully(content);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(content);
try {
OWALPage page = new OWALPage(pointer, false);
int pageOffset = findLastRecord(page, false);
if (pageOffset >= 0) {
if (page.mergeWithNextPage(pageOffset)) {
page.truncateTill(pageOffset);
rndFile.seek(currentPage * OWALPage.PAGE_SIZE);
content = pointer.get(0, OWALPage.PAGE_SIZE);
rndFile.write(content);
if (page.isEmpty()) {
OLogManager.instance().error(this, "%d WAL page has been broken and will be truncated.", currentPage);
currentPage--;
pagesCount = currentPage + 1;
rndFile.setLength(pagesCount * OWALPage.PAGE_SIZE);
} else
break;
} else
break;
} else
break;
} finally {
pointer.free();
}
}
rndFile.getFD().sync();
}
}
public OLogSequenceNumber readFlushedLSN() throws IOException {
long pages = rndFile.length() / OWALPage.PAGE_SIZE;
if (pages == 0)
return null;
long pageIndex = pages - 1;
while (true) {
rndFile.seek(pageIndex * OWALPage.PAGE_SIZE);
byte[] pageContent = new byte[OWALPage.PAGE_SIZE];
rndFile.readFully(pageContent);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(pageContent);
try {
OWALPage page = new OWALPage(pointer, false);
int pageOffset = findLastRecord(page, true);
if (pageOffset < 0) {
pageIndex--;
if (pageIndex < 0)
return null;
continue;
}
return new OLogSequenceNumber(order, pageIndex * OWALPage.PAGE_SIZE + pageOffset);
} finally {
pointer.free();
}
}
}
public void flush() {
if (!commitExecutor.isShutdown()) {
try {
commitExecutor.submit(new FlushTask()).get();
} catch (InterruptedException e) {
Thread.interrupted();
throw new OStorageException("Thread was interrupted during flush", e);
} catch (ExecutionException e) {
throw new OStorageException("Error during WAL segment " + getPath() + " flush.");
}
} else {
new FlushTask().run();
}
}
private final class FlushTask implements Runnable {
private FlushTask() {
}
@Override
public void run() {
try {
commit();
} catch (Throwable e) {
OLogManager.instance().error(this, "Error during WAL background flush", e);
}
}
private void commit() throws IOException {
if (pagesCache.isEmpty())
return;
if (!flushNewData)
return;
flushNewData = false;
final int maxSize = pagesCache.size();
ODirectMemoryPointer[] pagesToFlush = new ODirectMemoryPointer[maxSize];
long filePointer = nextPositionToFlush;
int lastRecordOffset = -1;
long lastPageIndex = -1;
int flushedPages = 0;
Iterator<OWALPage> pageIterator = pagesCache.iterator();
while (flushedPages < maxSize) {
final OWALPage page = pageIterator.next();
synchronized (page) {
ODirectMemoryPointer dataPointer;
if (flushedPages == maxSize - 1) {
dataPointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE);
page.getPagePointer().moveData(0, dataPointer, 0, OWALPage.PAGE_SIZE);
} else {
dataPointer = page.getPagePointer();
}
pagesToFlush[flushedPages] = dataPointer;
int recordOffset = findLastRecord(page, true);
if (recordOffset >= 0) {
lastRecordOffset = recordOffset;
lastPageIndex = flushedPages;
}
}
flushedPages++;
}
flushId++;
synchronized (rndFile) {
rndFile.seek(filePointer);
for (int i = 0; i < pagesToFlush.length; i++) {
ODirectMemoryPointer dataPointer = pagesToFlush[i];
byte[] pageContent = dataPointer.get(0, OWALPage.PAGE_SIZE);
if (i == pagesToFlush.length - 1)
dataPointer.free();
OLongSerializer.INSTANCE.serializeNative(flushId, pageContent, OWALPage.FLUSH_ID_OFFSET);
OIntegerSerializer.INSTANCE.serializeNative(i, pageContent, OWALPage.FLUSH_INDEX_OFFSET);
flushPage(pageContent);
filePointer += OWALPage.PAGE_SIZE;
}
rndFile.getFD().sync();
}
long oldPositionToFlush = nextPositionToFlush;
nextPositionToFlush = filePointer - OWALPage.PAGE_SIZE;
if (lastRecordOffset >= 0)
flushedLsn = new OLogSequenceNumber(order, oldPositionToFlush + lastPageIndex * OWALPage.PAGE_SIZE + lastRecordOffset);
for (int i = 0; i < flushedPages - 1; i++) {
OWALPage page = pagesCache.poll();
page.getPagePointer().free();
}
assert !pagesCache.isEmpty();
}
/**
 * Stamps a CRC32 checksum into the leading int slot of the page buffer and
 * appends the page to the WAL file at the current file position.
 *
 * @param content a full page buffer of OWALPage.PAGE_SIZE bytes
 * @throws IOException if the underlying file write fails
 */
private void flushPage(byte[] content) throws IOException {
    final int crcSlotSize = OIntegerSerializer.INT_SIZE;
    final CRC32 checksum = new CRC32();
    // The checksum covers everything after the CRC slot itself.
    checksum.update(content, crcSlotSize, OWALPage.PAGE_SIZE - crcSlotSize);
    OIntegerSerializer.INSTANCE.serializeNative((int) checksum.getValue(), content, 0);
    rndFile.write(content);
}
}
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_storage_impl_local_paginated_wal_OWriteAheadLog.java
|
1,265 |
@ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numNodes = 0, transportClientRatio = 1.0)
public class TransportClientTests extends ElasticsearchIntegrationTest {

    /**
     * A transport client connected to a freshly started non-data node must
     * report that node as a non-data node in its connected-nodes view.
     */
    @Test
    public void testPickingUpChangesInDiscoveryNode() {
        // Start a dedicated node with data disabled and grab a client bound to it.
        String nonDataNodeName = cluster().startNode(ImmutableSettings.builder().put("node.data", false));
        TransportClient transportClient = (TransportClient) cluster().client(nonDataNodeName);
        boolean isDataNode = transportClient.connectedNodes().get(0).dataNode();
        assertThat(isDataNode, Matchers.equalTo(false));
    }
}
| 0true
|
src_test_java_org_elasticsearch_client_transport_TransportClientTests.java
|
518 |
/**
 * Master-node action answering "do these indices (or aliases/wildcards) exist?".
 * Resolution is a pure in-memory check against cluster state metadata.
 */
public class TransportIndicesExistsAction extends TransportMasterNodeReadOperationAction<IndicesExistsRequest, IndicesExistsResponse> {

    @Inject
    public TransportIndicesExistsAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                        ThreadPool threadPool) {
        super(settings, transportService, clusterService, threadPool);
    }

    @Override
    protected String executor() {
        // Pure in-memory metadata lookup; no need to fork to another thread pool.
        return ThreadPool.Names.SAME;
    }

    @Override
    protected String transportAction() {
        return IndicesExistsAction.NAME;
    }

    @Override
    protected IndicesExistsRequest newRequest() {
        return new IndicesExistsRequest();
    }

    @Override
    protected IndicesExistsResponse newResponse() {
        return new IndicesExistsResponse();
    }

    @Override
    protected void doExecute(IndicesExistsRequest request, ActionListener<IndicesExistsResponse> listener) {
        // Deliberately skip eager index-name resolution here: it would throw
        // IndexMissingException instead of letting us answer "false" below.
        //request.indices(clusterService.state().metaData().concreteIndices(request.indices()));
        super.doExecute(request, listener);
    }

    @Override
    protected ClusterBlockException checkBlock(IndicesExistsRequest request, ClusterState state) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, request.indices());
    }

    @Override
    protected void masterOperation(final IndicesExistsRequest request, final ClusterState state, final ActionListener<IndicesExistsResponse> listener) throws ElasticsearchException {
        // Resolving concrete indices supports aliases and wildcard expressions;
        // an IndexMissingException means at least one requested index is absent.
        boolean allExist = true;
        try {
            clusterService.state().metaData().concreteIndices(request.indices(), request.indicesOptions());
        } catch (IndexMissingException e) {
            allExist = false;
        }
        listener.onResponse(new IndicesExistsResponse(allExist));
    }
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_indices_exists_indices_TransportIndicesExistsAction.java
|
617 |
/**
 * Strategy for determining the {@link TimeZone} to use for the current web request.
 */
public interface BroadleafTimeZoneResolver {

    /**
     * Resolves the time zone applicable to the given request.
     *
     * @param request the current web request
     * @return the resolved time zone
     */
    public TimeZone resolveTimeZone(WebRequest request);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_web_BroadleafTimeZoneResolver.java
|
1,218 |
/**
 * Eclipse project nature for Ceylon projects. Attaching this nature installs
 * the Ceylon builder (after the Java builder) and the Ceylon language/module
 * classpath containers, and records per-project build settings as builder
 * arguments.
 */
public class CeylonNature extends ProjectNatureBase {
    
    public static final String NATURE_ID = PLUGIN_ID + ".ceylonNature";
    
    /** Returns true when the given project currently has the Ceylon nature. */
    public static boolean isEnabled(IProject project) {
        try {
            return project.hasNature(NATURE_ID);
        } catch (CoreException e) {
            e.printStackTrace();
            return false;
        }
    }
    
    // System module repository path; "${ceylon.repo}" means "use the default".
    private String systemRepo;
    // Whether compiled modules are also exploded as plain class files for JDT.
    boolean enableJdtClasses;
    boolean astAwareIncrementalBuilds;
    boolean hideWarnings;
    // When true (no-arg constructor), getBuilderArguments leaves existing
    // builder arguments untouched instead of rewriting them.
    boolean keepSettings;
    boolean compileJs;
    boolean compileJava;
    
    public CeylonNature() {
        keepSettings=true;
    }
    
    public CeylonNature(String systemRepo,
            boolean enableJdtClasses,
            boolean hideWarnings,
            boolean java, 
            boolean js, 
            boolean astAwareIncrementalBuilds) {
        this.systemRepo = systemRepo;
        this.enableJdtClasses = enableJdtClasses;
        this.hideWarnings = hideWarnings;
        compileJs = js;
        compileJava = java;
        this.astAwareIncrementalBuilds = astAwareIncrementalBuilds;
    }
    
    public String getNatureID() {
        return NATURE_ID;
    }
    
    public String getBuilderID() {
        return BUILDER_ID;
    }
    
    public void addToProject(final IProject project) {
        super.addToProject(project);
        try {
            new CeylonLanguageModuleContainer(project).install();
        } catch (JavaModelException e) {
            e.printStackTrace();
        }
        new CeylonProjectModulesContainer(project).runReconfigure();
    }
    
    protected void refreshPrefs() {
        // TODO implement preferences and hook in here
    }
    
    /**
     * Run the Java builder before the Ceylon builder, since 
     * it's more common for Ceylon to call Java than the
     * other way around, and because the Java builder erases
     * the output directory during a full build.
     */
    protected String getUpstreamBuilderID() {
        return JavaCore.BUILDER_ID;
    }
    
    @Override
    protected Map<String, String> getBuilderArguments() {
        Map<String, String> args = super.getBuilderArguments();
        // Only persist non-default values; default-valued settings are removed
        // so the arguments map stays minimal. Note the polarity differs per
        // flag: astAwareIncrementalBuilds and compileJava default to true
        // (store "false" when disabled), the others default to false (store
        // "true" when enabled).
        if (!keepSettings) {
            if (!"${ceylon.repo}".equals(systemRepo)) {
                args.put("systemRepo", systemRepo);
            } else {
                args.remove("systemRepo");
            }
            if (hideWarnings) {
                args.put("hideWarnings", "true");
            }
            else {
                args.remove("hideWarnings");
            }
            if (enableJdtClasses) {
                args.put("explodeModules", "true");
            }
            else {
                args.remove("explodeModules");
            }
            if (astAwareIncrementalBuilds) {
                args.remove("astAwareIncrementalBuilds");
            } else {
                args.put("astAwareIncrementalBuilds", "false");
            }
            if (compileJava) {
                args.remove("compileJava");
            } else {
                args.put("compileJava", "false");
            }
            if (compileJs) {
                args.put("compileJs", "true");
            } else {
                args.remove("compileJs");
            }
        }
        return args;
    }
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_builder_CeylonNature.java
|
3,689 |
/**
 * Internal root mapper for the {@code _ttl} field. When enabled, documents are
 * indexed with an expiration timestamp (ingest timestamp + ttl) stored in a
 * long field, and already-expired documents are rejected at indexing time on
 * the primary.
 */
public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, RootMapper {

    public static final String NAME = "_ttl";
    public static final String CONTENT_TYPE = "_ttl";

    public static class Defaults extends LongFieldMapper.Defaults {
        public static final String NAME = TTLFieldMapper.CONTENT_TYPE;
        public static final FieldType TTL_FIELD_TYPE = new FieldType(LongFieldMapper.Defaults.FIELD_TYPE);

        static {
            // Stored + indexed, not tokenized: the expiration time must be
            // retrievable and range-searchable.
            TTL_FIELD_TYPE.setStored(true);
            TTL_FIELD_TYPE.setIndexed(true);
            TTL_FIELD_TYPE.setTokenized(false);
            TTL_FIELD_TYPE.freeze();
        }

        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.DISABLED;
        // -1 means "no default ttl configured".
        public static final long DEFAULT = -1;
    }

    public static class Builder extends NumberFieldMapper.Builder<Builder, TTLFieldMapper> {

        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
        private long defaultTTL = Defaults.DEFAULT;

        public Builder() {
            super(Defaults.NAME, new FieldType(Defaults.TTL_FIELD_TYPE));
        }

        public Builder enabled(EnabledAttributeMapper enabled) {
            this.enabledState = enabled;
            return builder;
        }

        public Builder defaultTTL(long defaultTTL) {
            this.defaultTTL = defaultTTL;
            return builder;
        }

        @Override
        public TTLFieldMapper build(BuilderContext context) {
            return new TTLFieldMapper(fieldType, enabledState, defaultTTL, ignoreMalformed(context),coerce(context), postingsProvider, docValuesProvider, fieldDataSettings, context.indexSettings());
        }
    }

    /** Parses the {@code _ttl} mapping definition ("enabled", "default"). */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            TTLFieldMapper.Builder builder = ttl();
            parseField(builder, builder.name, node, parserContext);
            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
                    builder.enabled(enabledState);
                } else if (fieldName.equals("default")) {
                    TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null);
                    if (ttlTimeValue != null) {
                        builder.defaultTTL(ttlTimeValue.millis());
                    }
                }
            }
            return builder;
        }
    }

    private EnabledAttributeMapper enabledState;
    private long defaultTTL;

    public TTLFieldMapper() {
        this(new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, null, ImmutableSettings.EMPTY);
    }

    protected TTLFieldMapper(FieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit<Boolean> ignoreMalformed,
                             Explicit<Boolean> coerce, PostingsFormatProvider postingsProvider, DocValuesFormatProvider docValuesProvider,
                             @Nullable Settings fieldDataSettings, Settings indexSettings) {
        super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), Defaults.PRECISION_STEP,
                Defaults.BOOST, fieldType, null, Defaults.NULL_VALUE, ignoreMalformed, coerce,
                postingsProvider, docValuesProvider, null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null);
        this.enabledState = enabled;
        this.defaultTTL = defaultTTL;
    }

    public boolean enabled() {
        return this.enabledState.enabled;
    }

    public long defaultTTL() {
        return this.defaultTTL;
    }

    @Override
    public boolean hasDocValues() {
        return false;
    }

    // Overrides valueForSearch to display live value of remaining ttl
    @Override
    public Object valueForSearch(Object value) {
        long now;
        SearchContext searchContext = SearchContext.current();
        if (searchContext != null) {
            // Use the search context's notion of "now" for consistent results
            // across shards within one request.
            now = searchContext.nowInMillis();
        } else {
            now = System.currentTimeMillis();
        }
        long val = value(value);
        // Remaining ttl = stored expiration time minus current time (may be negative).
        return val - now;
    }

    // Other implementation for realtime get display
    public Object valueForSearch(long expirationTime) {
        return expirationTime - System.currentTimeMillis();
    }

    @Override
    public void validate(ParseContext context) throws MapperParsingException {
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
        super.parse(context);
    }

    @Override
    public void parse(ParseContext context) throws IOException, MapperParsingException {
        if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally
            long ttl;
            // Accept either a time-value string ("5m") or a raw millis number.
            if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
                ttl = TimeValue.parseTimeValue(context.parser().text(), null).millis();
            } else {
                ttl = context.parser().longValue(coerce.value());
            }
            if (ttl <= 0) {
                throw new MapperParsingException("TTL value must be > 0. Illegal value provided [" + ttl + "]");
            }
            context.sourceToParse().ttl(ttl);
        }
    }

    @Override
    public boolean includeInObject() {
        return true;
    }

    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
        if (enabledState.enabled && !context.sourceToParse().flyweight()) {
            long ttl = context.sourceToParse().ttl();
            if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value
                ttl = defaultTTL;
                context.sourceToParse().ttl(ttl);
            }
            if (ttl > 0) { // a ttl has been provided either externally or in the _source
                long timestamp = context.sourceToParse().timestamp();
                long expire = new Date(timestamp + ttl).getTime();
                long now = System.currentTimeMillis();
                // there is not point indexing already expired doc
                if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) {
                    throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now);
                }
                // the expiration timestamp (timestamp + ttl) is set as field
                fields.add(new CustomLongNumericField(this, expire, fieldType));
            }
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

        // if all are defaults, no sense to write it at all
        if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && defaultTTL == Defaults.DEFAULT) {
            return builder;
        }
        builder.startObject(CONTENT_TYPE);
        if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
            builder.field("enabled", enabledState.enabled);
        }
        // NOTE(review): '&&' binds tighter than '||' here, so a non-default ttl
        // is omitted when the mapper is disabled — confirm that is intentional.
        if (includeDefaults || defaultTTL != Defaults.DEFAULT && enabledState.enabled) {
            builder.field("default", defaultTTL);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith;
        if (!mergeContext.mergeFlags().simulate()) {
            // -1 is the "unset" sentinel (same value as Defaults.DEFAULT).
            if (ttlMergeWith.defaultTTL != -1) {
                this.defaultTTL = ttlMergeWith.defaultTTL;
            }
            if (ttlMergeWith.enabledState != enabledState && !ttlMergeWith.enabledState.unset()) {
                this.enabledState = ttlMergeWith.enabledState;
            }
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_mapper_internal_TTLFieldMapper.java
|
345 |
/**
 * Worker that acquires two map locks in a fixed order (upKey, then downKey)
 * using short timed tryLock attempts, doing its work only when both locks
 * were obtained, and always releasing what it acquired.
 */
static class MapTryLockTimeOutThread extends TestHelper {

    public MapTryLockTimeOutThread(IMap map, String upKey, String downKey){
        super(map, upKey, downKey);
    }

    public void doRun() throws Exception {
        // Give up immediately if the first lock cannot be obtained in time.
        if (!map.tryLock(upKey, 1, TimeUnit.MILLISECONDS)) {
            return;
        }
        try {
            boolean downLocked = map.tryLock(downKey, 1, TimeUnit.MILLISECONDS);
            if (downLocked) {
                try {
                    work();
                } finally {
                    map.unlock(downKey);
                }
            }
        } finally {
            map.unlock(upKey);
        }
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTryLockConcurrentTests.java
|
216 |
/**
 * ASM-based scanner that inspects an entity class (and its top-level entity
 * class) for {@code @Cache}, {@code @Id} and {@code @Hydrated} annotations,
 * collecting the cache region name plus getter/setter pairs for id fields and
 * hydrated fields.
 *
 * Fix: the class-file {@link InputStream}s opened in {@link #init()} were
 * never closed (resource leak), and a missing class resource produced a bare
 * NPE inside {@code ClassReader}; both are handled explicitly now.
 */
public class HydrationScanner implements ClassVisitor, FieldVisitor, AnnotationVisitor {
    
    private static final int CLASSSTAGE = 0;
    private static final int FIELDSTAGE = 1;
    
    @SuppressWarnings("unchecked")
    public HydrationScanner(Class topEntityClass, Class entityClass) {
        this.topEntityClass = topEntityClass;
        this.entityClass = entityClass;
    }
    
    private String cacheRegion;
    private Map<String, Method[]> idMutators = new HashMap<String, Method[]>();
    private Map<String, HydrationItemDescriptor> cacheMutators = new HashMap<String, HydrationItemDescriptor>();
    
    @SuppressWarnings("unchecked")
    private final Class entityClass;
    @SuppressWarnings("unchecked")
    private final Class topEntityClass;
    
    // Current visitor stage: class-level vs field-level annotation handling.
    private int stage = CLASSSTAGE;
    @SuppressWarnings("unchecked")
    private Class clazz;
    // Fully-qualified name of the annotation currently being visited.
    private String annotation;
    private String fieldName;
    @SuppressWarnings("unchecked")
    private Class fieldClass;
    
    /**
     * Scans the bytecode of the top entity class and then the entity class,
     * populating {@link #getCacheRegion()}, {@link #getIdMutators()} and
     * {@link #getCacheMutators()}.
     *
     * @throws RuntimeException wrapping any IOException raised while reading
     *         the class files
     */
    public void init() {
        try {
            scanClassBytes(topEntityClass);
            scanClassBytes(entityClass);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    
    /** Reads one class file from the classpath and feeds it to this visitor, closing the stream. */
    @SuppressWarnings("unchecked")
    private void scanClassBytes(Class target) throws IOException {
        InputStream in = HydrationScanner.class.getClassLoader().getResourceAsStream(target.getName().replace('.', '/') + ".class");
        if (in == null) {
            throw new IOException("Unable to locate class resource for " + target.getName());
        }
        try {
            new ClassReader(in).accept(this, ClassReader.SKIP_DEBUG);
        } finally {
            in.close();
        }
    }
    
    public String getCacheRegion() {
        return cacheRegion;
    }

    public Map<String, Method[]> getIdMutators() {
        return idMutators;
    }

    public Map<String, HydrationItemDescriptor> getCacheMutators() {
        return cacheMutators;
    }

    //Common
    public AnnotationVisitor visitAnnotation(String arg0, boolean arg1) {
        Type annotationType = Type.getType(arg0);
        switch(stage) {
        case CLASSSTAGE: {
            if (annotationType.getClassName().equals(Cache.class.getName())){
                annotation = Cache.class.getName();
            }
            break;
        }
        case FIELDSTAGE: {
            if (annotationType.getClassName().equals(Id.class.getName())){
                idMutators.put(fieldName, retrieveMutators());
            }
            if (annotationType.getClassName().equals(Hydrated.class.getName())){
                annotation = Hydrated.class.getName();
            }
            break;
        }
        default : {
            annotation = null;
            fieldName = null;
            break;
        }
        }
        return this;
    }
    
    /**
     * Resolves the getter/setter pair for the current field, trying the
     * JavaBean conventions get/is/plain-name for the accessor.
     *
     * @throws RuntimeException when either accessor or mutator cannot be found
     */
    private Method[] retrieveMutators() {
        String mutatorName = fieldName.substring(0,1).toUpperCase() + fieldName.substring(1, fieldName.length());
        Method getMethod = null;
        try {
            getMethod = clazz.getMethod("get"+mutatorName, new Class[]{});
        } catch (Exception e) {
            //do nothing
        }
        if (getMethod == null) {
            try {
                getMethod = clazz.getMethod("is"+mutatorName, new Class[]{});
            } catch (Exception e) {
                //do nothing
            }
        }
        if (getMethod == null) {
            try {
                getMethod = clazz.getMethod(fieldName, new Class[]{});
            } catch (Exception e) {
                //do nothing
            }
        }
        Method setMethod = null;
        try {
            setMethod = clazz.getMethod("set"+mutatorName, new Class[]{fieldClass});
        } catch (Exception e) {
            //do nothing
        }
        if (getMethod == null || setMethod == null) {
            throw new RuntimeException("Unable to find a getter and setter method for the AdminPresentation field: " + fieldName + ". Make sure you have a getter method entitled: get" + mutatorName + "(), or is" + mutatorName + "(), or " + fieldName + "(). Make sure you have a setter method entitled: set" + mutatorName + "(..).");
        }
        return new Method[]{getMethod, setMethod};
    }
    
    //FieldVisitor
    public void visitAttribute(Attribute arg0) {
        //do nothing
    }

    public void visitEnd() {
        //do nothing
    }

    //ClassVisitor
    public void visit(int arg0, int arg1, String arg2, String arg3, String arg4, String[] arg5) {
        try {
            // arg2 is the internal name (slash-separated); convert to a binary name.
            clazz = Class.forName(arg2.replace('/', '.'));
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        stage = CLASSSTAGE;
    }

    public FieldVisitor visitField(int arg0, String arg1, String arg2, String arg3, Object arg4) {
        stage = FIELDSTAGE;
        fieldName = arg1;
        Type fieldType = Type.getType(arg2);
        // Map the ASM type sort to the corresponding Java Class, so the setter
        // lookup in retrieveMutators() can match primitive parameters.
        // NOTE(review): array-typed fields (Type.ARRAY) fall through without
        // updating fieldClass, leaving the previous field's class in place —
        // confirm hydrated/id fields are never arrays.
        switch(fieldType.getSort()){
        case Type.BOOLEAN:
            fieldClass = boolean.class;
            break;
        case Type.BYTE:
            fieldClass = byte.class;
            break;
        case Type.CHAR:
            fieldClass = char.class;
            break;
        case Type.DOUBLE:
            fieldClass = double.class;
            break;
        case Type.FLOAT:
            fieldClass = float.class;
            break;
        case Type.INT:
            fieldClass = int.class;
            break;
        case Type.LONG:
            fieldClass = long.class;
            break;
        case Type.SHORT:
            fieldClass = short.class;
            break;
        case Type.OBJECT:
            try {
                fieldClass = Class.forName(Type.getType(arg2).getClassName());
            } catch (ClassNotFoundException e) {
                throw new RuntimeException(e);
            }
            break;
        }
        return this;
    }

    public void visitInnerClass(String arg0, String arg1, String arg2, int arg3) {
        //do nothing
    }

    public MethodVisitor visitMethod(int arg0, String arg1, String arg2, String arg3, String[] arg4) {
        return new EmptyVisitor();
    }

    public void visitOuterClass(String arg0, String arg1, String arg2) {
        //do nothing
    }

    public void visitSource(String arg0, String arg1) {
        //do nothing
    }

    //AnnotationVisitor
    public void visit(String arg0, Object arg1) {
        if (Cache.class.getName().equals(annotation) && "region".equals(arg0)) {
            cacheRegion = (String) arg1;
        }
        if (Hydrated.class.getName().equals(annotation) && "factoryMethod".equals(arg0)) {
            HydrationItemDescriptor itemDescriptor = new HydrationItemDescriptor();
            itemDescriptor.setFactoryMethod((String) arg1);
            itemDescriptor.setMutators(retrieveMutators());
            cacheMutators.put(fieldName, itemDescriptor);
        }
    }

    public AnnotationVisitor visitAnnotation(String arg0, String arg1) {
        return this;
    }

    public AnnotationVisitor visitArray(String arg0) {
        return this;
    }

    public void visitEnum(String arg0, String arg1, String arg2) {
        //do nothing
    }
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_cache_engine_HydrationScanner.java
|
228 |
// Highlighter variant that disables the default fallback snippet: when a field
// produced no highlight hits, return no passages at all instead of the usual
// first-sentence excerpt.
XPostingsHighlighter highlighter = new XPostingsHighlighter() {
    @Override
    public Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
        return new Passage[0];
    }
};
| 0true
|
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
|
591 |
/**
 * Callback invoked by the service monitor when the status of a watched
 * service is (re)evaluated.
 */
public interface StatusHandler {

    /**
     * Handles a status report for the named service.
     *
     * @param serviceName identifier of the monitored service
     * @param status the reported status
     */
    public void handleStatus(String serviceName, ServiceStatusType status);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_vendor_service_monitor_StatusHandler.java
|
541 |
/**
 * Fluent builder for delete-mapping requests executed through the indices
 * admin client.
 */
public class DeleteMappingRequestBuilder extends AcknowledgedRequestBuilder<DeleteMappingRequest, DeleteMappingResponse, DeleteMappingRequestBuilder> {

    public DeleteMappingRequestBuilder(IndicesAdminClient indicesClient) {
        super((InternalIndicesAdminClient) indicesClient, new DeleteMappingRequest());
    }

    /**
     * Sets the indices the delete mapping will execute on.
     */
    public DeleteMappingRequestBuilder setIndices(String... indices) {
        request.indices(indices);
        return this;
    }

    /**
     * Sets the mapping type(s) to remove.
     */
    public DeleteMappingRequestBuilder setType(String... types) {
        request.types(types);
        return this;
    }

    /**
     * Controls how wildcard expressions and missing indices are handled
     * when resolving the request's index names.
     */
    public DeleteMappingRequestBuilder setIndicesOptions(IndicesOptions options) {
        request.indicesOptions(options);
        return this;
    }

    @Override
    protected void doExecute(ActionListener<DeleteMappingResponse> listener) {
        IndicesAdminClient indicesClient = (IndicesAdminClient) client;
        indicesClient.deleteMapping(request, listener);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_mapping_delete_DeleteMappingRequestBuilder.java
|
311 |
/**
 * Toggles JDT stratum line breakpoints for Ceylon editors. Resolves the
 * editor input to a workspace {@link IFile} (including files inside external
 * source archives) and creates or removes the breakpoint marker on the
 * selected line.
 *
 * Fixes: previously a null editor part or an editor input that could not be
 * resolved to a file caused an NPE inside the workspace runnable, and
 * {@code findBreakpointMarker} cast a possibly-absent line-number attribute
 * to {@code Integer} without a null check.
 */
public class ToggleBreakpointAdapter implements IToggleBreakpointsTarget {

    private static final String JDT_DEBUG_PLUGIN_ID= "org.eclipse.jdt.debug";

    public ToggleBreakpointAdapter() {
    }

    public void toggleLineBreakpoints(IWorkbenchPart part, ISelection selection) throws CoreException {
        if (selection instanceof ITextSelection) {
            ITextSelection textSel= (ITextSelection) selection;
            IEditorPart editorPart= (IEditorPart) part.getAdapter(IEditorPart.class);
            if (editorPart == null) {
                // The part cannot supply an editor; nothing to toggle.
                return;
            }
            //TODO: handle org.eclipse.ui.ide.FileStoreEditorInput
            //      to set breakpoints in code from archives
            IEditorInput editorInput = editorPart.getEditorInput();
            final IFile origSrcFile;
            if (editorInput instanceof IFileEditorInput) {
                origSrcFile= ((IFileEditorInput)editorInput).getFile();
            } else if (editorInput instanceof FileStoreEditorInput) {
                // Files inside external source archives are mapped back to a
                // synthetic workspace resource.
                URI uri = ((FileStoreEditorInput) editorInput).getURI();
                IResource resource = ExternalSourceArchiveManager.toResource(uri);
                if (resource instanceof IFile) {
                    origSrcFile = (IFile) resource;
                } else {
                    origSrcFile = null;
                }
            } else {
                origSrcFile = null;
            }
            if (origSrcFile == null) {
                // No workspace file to attach a breakpoint marker to.
                return;
            }
            final int lineNumber = textSel.getStartLine()+1;

            IWorkspaceRunnable wr= new IWorkspaceRunnable() {
                public void run(IProgressMonitor monitor) throws CoreException {
                    IMarker marker = findBreakpointMarker(origSrcFile, lineNumber);
                    if (marker != null) {
                        // The following will delete the associated marker
                        clearLineBreakpoint(origSrcFile, lineNumber);
                    } else {
                        // The following will create a marker as a side-effect
                        setLineBreakpoint(origSrcFile, lineNumber);
                    }
                }
            };
            try {
                getWorkspace().run(wr, null);
            }
            catch (CoreException e) {
                throw new DebugException(e.getStatus());
            }
        }
    }

    /** Returns the breakpoint marker on the given line of the file, or null. */
    private IMarker findBreakpointMarker(IFile srcFile, int lineNumber) throws CoreException {
        IMarker[] markers = srcFile.findMarkers(IBreakpoint.LINE_BREAKPOINT_MARKER, true, IResource.DEPTH_INFINITE);
        for (int k = 0; k < markers.length; k++ ){
            // The attribute may be absent on a stale marker; guard the cast.
            Integer markerLine = (Integer) markers[k].getAttribute(IMarker.LINE_NUMBER);
            if (markerLine != null && markerLine.intValue() == lineNumber){
                return markers[k];
            }
        }
        return null;
    }

    /** Creates a JDT stratum breakpoint on the given line of the file. */
    public void setLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        String srcFileName= file.getName();
        String typeName= srcFileName.substring(0, srcFileName.lastIndexOf('.'));
        Map<String,Object> bkptAttributes= new HashMap<String, Object>();
        bkptAttributes.put("org.eclipse.jdt.debug.core.sourceName", srcFileName);
        bkptAttributes.put("org.eclipse.jdt.debug.core.typeName", typeName);
        try {
            JDIDebugModel.createStratumBreakpoint(file, null, srcFileName, null, null, lineNumber, -1, -1, 0, true, bkptAttributes);
        }
        catch (CoreException e) {
            e.printStackTrace();
        }
    }

    /** Deletes the stratum breakpoint on the given line, if one exists. */
    public void clearLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        try {
            IBreakpoint lineBkpt= findStratumBreakpoint(file, lineNumber);
            if (lineBkpt != null) {
                lineBkpt.delete();
            }
        }
        catch (CoreException e) {
            e.printStackTrace();
        }
    }

    /** Disables (without removing) the stratum breakpoint on the given line, if any. */
    public void disableLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        try {
            IBreakpoint lineBkpt= findStratumBreakpoint(file, lineNumber);
            if (lineBkpt != null) {
                lineBkpt.setEnabled(false);
            }
        }
        catch (CoreException e) {
            e.printStackTrace();
        }
    }

    /** Re-enables the stratum breakpoint on the given line, if any. */
    public void enableLineBreakpoint(IFile file, int lineNumber) throws CoreException {
        try {
            IBreakpoint lineBkpt= findStratumBreakpoint(file, lineNumber);
            if (lineBkpt != null) {
                lineBkpt.setEnabled(true);
            }
        }
        catch (CoreException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns a Java line breakpoint that is already registered with the breakpoint
     * manager for a type with the given name at the given line number.
     * 
     * @param resource the resource the breakpoint marker is attached to
     * @param lineNumber line number
     * @return a Java line breakpoint that is already registered with the breakpoint
     *  manager for a type with the given name at the given line number or <code>null</code>
     * if no such breakpoint is registered
     * @exception CoreException if unable to retrieve the associated marker
     *  attributes (line number).
     */
    public static IJavaLineBreakpoint findStratumBreakpoint(IResource resource, int lineNumber) throws CoreException {
        String modelId= JDT_DEBUG_PLUGIN_ID;
        String markerType= "org.eclipse.jdt.debug.javaStratumLineBreakpointMarker";
        IBreakpointManager manager= DebugPlugin.getDefault().getBreakpointManager();
        IBreakpoint[] breakpoints= manager.getBreakpoints(modelId);
        for (int i = 0; i < breakpoints.length; i++) {
            if (!(breakpoints[i] instanceof IJavaLineBreakpoint)) {
                continue;
            }
            IJavaLineBreakpoint breakpoint = (IJavaLineBreakpoint) breakpoints[i];
            IMarker marker = breakpoint.getMarker();
            if (marker != null && marker.exists() && marker.getType().equals(markerType)) {
                if (breakpoint.getLineNumber() == lineNumber &&
                        resource.equals(marker.getResource())) {
                    return breakpoint;
                }
            }
        }
        return null;
    }

    public boolean canToggleLineBreakpoints(IWorkbenchPart part, ISelection selection) {
        return true;
    }

    public void toggleMethodBreakpoints(IWorkbenchPart part, ISelection selection) throws CoreException {
    }

    public boolean canToggleMethodBreakpoints(IWorkbenchPart part, ISelection selection) {
        return false;
    }

    public void toggleWatchpoints(IWorkbenchPart part, ISelection selection) throws CoreException {
    }

    public boolean canToggleWatchpoints(IWorkbenchPart part, ISelection selection) {
        return false;
    }
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_ToggleBreakpointAdapter.java
|
1,173 |
// Benchmark variant using the in-JVM local transport (no network I/O).
LOCAL {
    @Override
    public Transport newTransport(Settings settings, ThreadPool threadPool) {
        return new LocalTransport(settings, threadPool, Version.CURRENT);
    }
},
| 0true
|
src_test_java_org_elasticsearch_benchmark_transport_TransportBenchmark.java
|
1,761 |
@Component("blPersistenceManager")
@Scope("prototype")
public class PersistenceManagerImpl implements InspectHelper, PersistenceManager, ApplicationContextAware {
private static final Log LOG = LogFactory.getLog(PersistenceManagerImpl.class);
@Resource(name="blDynamicEntityDao")
protected DynamicEntityDao dynamicEntityDao;
@Resource(name="blCustomPersistenceHandlers")
protected List<CustomPersistenceHandler> customPersistenceHandlers = new ArrayList<CustomPersistenceHandler>();
@Resource(name="blCustomPersistenceHandlerFilters")
protected List<CustomPersistenceHandlerFilter> customPersistenceHandlerFilters = new ArrayList<CustomPersistenceHandlerFilter>();
@Resource(name="blTargetEntityManagers")
protected Map<String, String> targetEntityManagers = new HashMap<String, String>();
@Resource(name="blAdminSecurityRemoteService")
protected SecurityVerifier adminRemoteSecurityService;
@Resource(name="blPersistenceModules")
protected PersistenceModule[] modules;
protected TargetModeType targetMode;
protected ApplicationContext applicationContext;
@PostConstruct
public void postConstruct() {
    // Wire each injected persistence module back to this manager so modules
    // can collaborate through it during inspect/fetch operations.
    for (PersistenceModule module : modules) {
        module.setPersistenceManager(this);
    }
}
// public void close() throws Exception {
// //do nothing
// }
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
    // Spring callback: retain the context for later bean lookups.
    this.applicationContext = applicationContext;
}
@Override
public Class<?>[] getAllPolymorphicEntitiesFromCeiling(Class<?> ceilingClass) {
    // Delegates entirely to the DAO's polymorphic-entity resolution.
    return dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(ceilingClass);
}
@Override
public Class<?>[] getUpDownInheritance(String testClassname) throws ClassNotFoundException {
    // Convenience overload: resolve the class name, then delegate.
    return getUpDownInheritance(Class.forName(testClassname));
}
@Override
public Class<?>[] getUpDownInheritance(Class<?> testClass) {
    // Collect the polymorphic entities (most-derived last), reverse them so the
    // most-derived type comes first, then prepend mapped superclasses walking
    // up from the top concrete class until a non-persistent ancestor is hit.
    Class<?>[] polymorphicEntities = dynamicEntityDao.getAllPolymorphicEntitiesFromCeiling(testClass);
    Class<?> current = polymorphicEntities[polymorphicEntities.length - 1];
    List<Class<?>> hierarchy = new ArrayList<Class<?>>(polymorphicEntities.length);
    hierarchy.addAll(Arrays.asList(polymorphicEntities));
    Collections.reverse(hierarchy);
    while (true) {
        Class<?> superClass = current.getSuperclass();
        PersistentClass mapping = dynamicEntityDao.getPersistentClass(superClass.getName());
        if (mapping == null) {
            break;
        }
        hierarchy.add(0, superClass);
        current = superClass;
    }
    return hierarchy.toArray(new Class<?>[hierarchy.size()]);
}
@Override
public Class<?>[] getPolymorphicEntities(String ceilingEntityFullyQualifiedClassname) throws ClassNotFoundException {
    // Resolve the ceiling class name and delegate to the class-based lookup.
    return getAllPolymorphicEntitiesFromCeiling(Class.forName(ceilingEntityFullyQualifiedClassname));
}
@Override
public Map<String, FieldMetadata> getSimpleMergedProperties(String entityName, PersistencePerspective persistencePerspective) {
    // Straight pass-through to the DAO's property-merging facility.
    return dynamicEntityDao.getSimpleMergedProperties(entityName, persistencePerspective);
}
/**
 * Builds the merged {@code ClassMetadata} for an entity hierarchy: extracts
 * properties from every persistence module, fixes up ordering of inherited
 * fields whose order was explicitly specified, and sorts the final property
 * list by tab order, group order, field order, friendly name and name
 * (missing values sort last via sentinel defaults).
 */
@Override
public ClassMetadata getMergedClassMetadata(final Class<?>[] entities, Map<MergedPropertyType, Map<String, FieldMetadata>> mergedProperties) {
    ClassMetadata classMetadata = new ClassMetadata();
    classMetadata.setPolymorphicEntities(dynamicEntityDao.getClassTree(entities));

    List<Property> propertiesList = new ArrayList<Property>();
    for (PersistenceModule module : modules) {
        module.extractProperties(entities, mergedProperties, propertiesList);
    }
    /*
     * Insert inherited fields whose order has been specified: shift the order
     * of non-inherited properties up by one so the explicitly-ordered
     * inherited property lands in its requested slot.
     */
    for (int i = 0; i < entities.length - 1; i++) {
        for (Property myProperty : propertiesList) {
            if (myProperty.getMetadata().getInheritedFromType().equals(entities[i].getName()) && myProperty.getMetadata().getOrder() != null) {
                for (Property property : propertiesList) {
                    if (!property.getMetadata().getInheritedFromType().equals(entities[i].getName()) && property.getMetadata().getOrder() != null && property.getMetadata().getOrder() >= myProperty.getMetadata().getOrder()) {
                        property.getMetadata().setOrder(property.getMetadata().getOrder() + 1);
                    }
                }
            }
        }
    }
    Property[] properties = new Property[propertiesList.size()];
    properties = propertiesList.toArray(properties);
    // Sort with "missing value sorts last" semantics: 99999 for absent orders,
    // "zzzz"/"zzzzz" for absent names.
    Arrays.sort(properties, new Comparator<Property>() {
        @Override
        public int compare(Property o1, Property o2) {
            Integer tabOrder1 = o1.getMetadata().getTabOrder() == null ? 99999 : o1.getMetadata().getTabOrder();
            Integer tabOrder2 = o2.getMetadata().getTabOrder() == null ? 99999 : o2.getMetadata().getTabOrder();

            Integer groupOrder1 = null;
            Integer groupOrder2 = null;
            // Group order only exists on basic field metadata.
            if (o1.getMetadata() instanceof BasicFieldMetadata) {
                BasicFieldMetadata b1 = (BasicFieldMetadata) o1.getMetadata();
                groupOrder1 = b1.getGroupOrder();
            }
            groupOrder1 = groupOrder1 == null ? 99999 : groupOrder1;

            if (o2.getMetadata() instanceof BasicFieldMetadata) {
                BasicFieldMetadata b2 = (BasicFieldMetadata) o2.getMetadata();
                groupOrder2 = b2.getGroupOrder();
            }
            groupOrder2 = groupOrder2 == null ? 99999 : groupOrder2;

            Integer fieldOrder1 = o1.getMetadata().getOrder() == null ? 99999 : o1.getMetadata().getOrder();
            Integer fieldOrder2 = o2.getMetadata().getOrder() == null ? 99999 : o2.getMetadata().getOrder();

            String friendlyName1 = o1.getMetadata().getFriendlyName() == null ? "zzzz" : o1.getMetadata().getFriendlyName();
            String friendlyName2 = o2.getMetadata().getFriendlyName() == null ? "zzzz" : o2.getMetadata().getFriendlyName();

            String name1 = o1.getName() == null ? "zzzzz" : o1.getName();
            String name2 = o2.getName() == null ? "zzzzz" : o2.getName();

            return new CompareToBuilder()
                .append(tabOrder1, tabOrder2)
                .append(groupOrder1, groupOrder2)
                .append(fieldOrder1, fieldOrder2)
                .append(friendlyName1, friendlyName2)
                .append(name1, name2)
                .toComparison();
        }
    });
    classMetadata.setProperties(properties);
    classMetadata.setCurrencyCode(Money.defaultCurrency().getCurrencyCode());

    return classMetadata;
}
/**
 * Inspects the persistence package's ceiling entity, first giving any
 * registered {@code CustomPersistenceHandler} a chance to answer; otherwise
 * performs the standard security check, gathers merged properties from every
 * persistence module, and returns the merged class metadata.
 *
 * @throws ServiceException propagated from handlers or security checks
 * @throws ClassNotFoundException if the ceiling entity class cannot be loaded
 */
@Override
public DynamicResultSet inspect(PersistencePackage persistencePackage) throws ServiceException, ClassNotFoundException {
    // check to see if there is a custom handler registered
    for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
        if (handler.canHandleInspect(persistencePackage)) {
            if (!handler.willHandleSecurity(persistencePackage)) {
                adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.INSPECT);
            }
            DynamicResultSet results = handler.inspect(persistencePackage, dynamicEntityDao, this);

            return results;
        }
    }

    adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.INSPECT);
    Class<?>[] entities = getPolymorphicEntities(persistencePackage.getCeilingEntityFullyQualifiedClassname());
    Map<MergedPropertyType, Map<String, FieldMetadata>> allMergedProperties = new HashMap<MergedPropertyType, Map<String, FieldMetadata>>();
    for (PersistenceModule module : modules) {
        module.updateMergedProperties(persistencePackage, allMergedProperties);
    }
    ClassMetadata mergedMetadata = getMergedClassMetadata(entities, allMergedProperties);

    DynamicResultSet results = new DynamicResultSet(mergedMetadata);

    return results;
}
/**
 * Fetches records for the ceiling entity described by the package, constrained by the
 * supplied criteria. Custom handlers are consulted first; otherwise the module
 * compatible with the package's fetch operation type performs the query. Every result
 * set passes through {@link #postFetch} before being returned.
 *
 * @param persistencePackage describes the entity to fetch
 * @param cto paging/filter criteria
 * @return the (possibly empty) fetched results
 * @throws ServiceException on any fetch failure other than "no possible results"
 */
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
    String ceilingEntity = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    // Give any registered custom handler the first opportunity to service the fetch
    for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
        if (!handler.canHandleFetch(persistencePackage)) {
            continue;
        }
        if (!handler.willHandleSecurity(persistencePackage)) {
            adminRemoteSecurityService.securityCheck(ceilingEntity, EntityOperationType.FETCH);
        }
        DynamicResultSet handlerResults =
                handler.fetch(persistencePackage, cto, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
        return postFetch(handlerResults, persistencePackage, cto);
    }
    adminRemoteSecurityService.securityCheck(ceilingEntity, EntityOperationType.FETCH);
    PersistenceModule fetchModule =
            getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getFetchType());
    try {
        return postFetch(fetchModule.fetch(persistencePackage, cto), persistencePackage, cto);
    } catch (ServiceException e) {
        // "No possible results" is not an error for a fetch — surface an empty result set instead
        if (e.getCause() instanceof NoPossibleResultsException) {
            DynamicResultSet emptyResults = new DynamicResultSet(null, new Entity[] {}, 0);
            return postFetch(emptyResults, persistencePackage, cto);
        }
        throw e;
    }
}
/**
 * Extension hook invoked after every successful fetch. The default implementation
 * copies the paging window from the request criteria onto the result set.
 *
 * @param resultSet the freshly fetched results
 * @param persistencePackage the originating request (unused here; available to overriders)
 * @param cto the criteria that produced the results
 * @return the same result set, with start index and page size populated
 * @throws ServiceException never thrown here; declared for subclass implementations
 */
protected DynamicResultSet postFetch(DynamicResultSet resultSet, PersistencePackage persistencePackage,
                                     CriteriaTransferObject cto)
        throws ServiceException {
    // Expose the start index so that we can utilize when building the UI
    resultSet.setStartIndex(cto.getFirstResult());
    resultSet.setPageSize(cto.getMaxResults());
    return resultSet;
}
/**
 * Persists a new entity described by the given {@link PersistencePackage}. The root
 * package is persisted first (via a custom handler when one claims it, otherwise via
 * the module compatible with the package's add operation type). Once the root entity
 * exists, its id is propagated into each sub package's custom criteria and the sub
 * packages are persisted in turn; validation errors from every sub package are merged
 * back onto the root response.
 *
 * @param persistencePackage the root package, possibly carrying dynamic-form sub packages
 * @return the persisted entity, after {@link #postAdd} processing
 * @throws ServiceException on persistence failure or if the entity type cannot be resolved
 * @throws ValidationException (a ServiceException) if the root or any sub package fails validation
 */
@Override
public Entity add(PersistencePackage persistencePackage) throws ServiceException {
    //check to see if there is a custom handler registered
    //execute the root PersistencePackage
    Entity response;
    checkRoot: {
        //if there is a validation exception in the root check, let it bubble, as we need a valid, persisted
        //entity to execute the subPackage code later
        for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
            if (handler.canHandleAdd(persistencePackage)) {
                if (!handler.willHandleSecurity(persistencePackage)) {
                    adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
                }
                response = handler.add(persistencePackage, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
                break checkRoot;
            }
        }
        adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
        PersistenceModule myModule = getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getAddType());
        response = myModule.add(persistencePackage);
    }
    if (!MapUtils.isEmpty(persistencePackage.getSubPackages())) {
        // Once the entity has been saved, we can utilize its id for the subsequent dynamic forms
        Class<?> entityClass;
        try {
            entityClass = Class.forName(response.getType()[0]);
        } catch (ClassNotFoundException e) {
            throw new ServiceException(e);
        }
        Map<String, Object> idMetadata = getDynamicEntityDao().getIdMetadata(entityClass);
        String idProperty = (String) idMetadata.get("name");
        String idVal = response.findProperty(idProperty).getValue();
        Map<String, List<String>> subPackageValidationErrors = new HashMap<String, List<String>>();
        for (Map.Entry<String, PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
            Entity subResponse;
            try {
                // Hand the parent's id to the sub package so the dynamic form can link to it
                subPackage.getValue().setCustomCriteria(new String[]{subPackage.getValue().getCustomCriteria()[0], idVal});
                //Run through any subPackages -- add up any validation errors
                checkHandler: {
                    for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
                        if (handler.canHandleAdd(subPackage.getValue())) {
                            if (!handler.willHandleSecurity(subPackage.getValue())) {
                                adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
                            }
                            subResponse = handler.add(subPackage.getValue(), dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
                            subPackage.getValue().setEntity(subResponse);
                            break checkHandler;
                        }
                    }
                    adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.ADD);
                    PersistenceModule subModule = getCompatibleModule(subPackage.getValue().getPersistencePerspective().getOperationTypes().getAddType());
                    // BUGFIX: persist the sub package itself — previously the already-persisted
                    // root persistencePackage was passed here, so the sub package was never saved
                    // and the root entity was added a second time
                    subResponse = subModule.add(subPackage.getValue());
                    subPackage.getValue().setEntity(subResponse);
                }
            } catch (ValidationException e) {
                // Record the failed entity so its errors are collected below
                subPackage.getValue().setEntity(e.getEntity());
            }
        }
        //Build up validation errors in all of the subpackages, even those that might not have thrown ValidationExceptions
        for (Map.Entry<String, PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
            for (Map.Entry<String, List<String>> error : subPackage.getValue().getEntity().getValidationErrors().entrySet()) {
                subPackageValidationErrors.put(subPackage.getKey() + DynamicEntityFormInfo.FIELD_SEPARATOR + error.getKey(), error.getValue());
            }
        }
        response.getValidationErrors().putAll(subPackageValidationErrors);
    }
    if (response.isValidationFailure()) {
        throw new ValidationException(response, "The entity has failed validation");
    }
    return postAdd(response, persistencePackage);
}
/**
 * Extension hook invoked after a successful {@link #add(PersistencePackage)}.
 * The default implementation returns the entity unchanged; subclasses may
 * override to post-process the newly persisted entity.
 */
protected Entity postAdd(Entity entity, PersistencePackage persistencePackage) throws ServiceException {
    //do nothing
    return entity;
}
/**
 * Updates the entity described by the given {@link PersistencePackage}. The root
 * package is updated first (custom handler or compatible module); a root
 * {@link ValidationException} is captured rather than rethrown so that sub packages
 * are still processed and all validation errors can be aggregated. Each sub package
 * is then updated in turn and its errors merged onto the root response.
 *
 * @param persistencePackage the root package, possibly carrying dynamic-form sub packages
 * @return the updated entity, after {@link #postUpdate} processing
 * @throws ServiceException on persistence failure
 * @throws ValidationException (a ServiceException) if the root or any sub package fails validation
 */
@Override
public Entity update(PersistencePackage persistencePackage) throws ServiceException {
    //check to see if there is a custom handler registered
    //execute the root PersistencePackage
    Entity response;
    try {
        checkRoot: {
            for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
                if (handler.canHandleUpdate(persistencePackage)) {
                    if (!handler.willHandleSecurity(persistencePackage)) {
                        adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
                    }
                    response = handler.update(persistencePackage, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
                    break checkRoot;
                }
            }
            adminRemoteSecurityService.securityCheck(persistencePackage.getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
            PersistenceModule myModule = getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getUpdateType());
            response = myModule.update(persistencePackage);
        }
    } catch (ValidationException e) {
        // Keep going — sub package errors are aggregated before the exception is rethrown below
        response = e.getEntity();
    }
    Map<String, List<String>> subPackageValidationErrors = new HashMap<String, List<String>>();
    for (Map.Entry<String, PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
        try {
            //Run through any subPackages -- add up any validation errors
            checkHandler: {
                for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
                    if (handler.canHandleUpdate(subPackage.getValue())) {
                        if (!handler.willHandleSecurity(subPackage.getValue())) {
                            adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
                        }
                        Entity subResponse = handler.update(subPackage.getValue(), dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
                        subPackage.getValue().setEntity(subResponse);
                        break checkHandler;
                    }
                }
                adminRemoteSecurityService.securityCheck(subPackage.getValue().getCeilingEntityFullyQualifiedClassname(), EntityOperationType.UPDATE);
                PersistenceModule subModule = getCompatibleModule(subPackage.getValue().getPersistencePerspective().getOperationTypes().getUpdateType());
                // BUGFIX: update the sub package itself — previously the root
                // persistencePackage was passed here, so sub package changes were
                // silently dropped and the root entity was updated twice
                Entity subResponse = subModule.update(subPackage.getValue());
                subPackage.getValue().setEntity(subResponse);
            }
        } catch (ValidationException e) {
            // Record the failed entity so its errors are collected below
            subPackage.getValue().setEntity(e.getEntity());
        }
    }
    //Build up validation errors in all of the subpackages, even those that might not have thrown ValidationExceptions
    for (Map.Entry<String, PersistencePackage> subPackage : persistencePackage.getSubPackages().entrySet()) {
        for (Map.Entry<String, List<String>> error : subPackage.getValue().getEntity().getValidationErrors().entrySet()) {
            subPackageValidationErrors.put(subPackage.getKey() + DynamicEntityFormInfo.FIELD_SEPARATOR + error.getKey(), error.getValue());
        }
    }
    response.getValidationErrors().putAll(subPackageValidationErrors);
    if (response.isValidationFailure()) {
        throw new ValidationException(response, "The entity has failed validation");
    }
    return postUpdate(response, persistencePackage);
}
/**
 * Extension hook invoked after a successful {@link #update(PersistencePackage)}.
 * The default implementation returns the entity unchanged; subclasses may
 * override to post-process the updated entity.
 */
protected Entity postUpdate(Entity entity, PersistencePackage persistencePackage) throws ServiceException {
    //do nothing
    return entity;
}
/**
 * Removes the entity described by the given {@link PersistencePackage}. A custom
 * handler that claims the package performs the removal; otherwise the module
 * compatible with the package's remove operation type does.
 *
 * @param persistencePackage describes the entity to remove
 * @throws ServiceException on security or removal failure
 */
@Override
public void remove(PersistencePackage persistencePackage) throws ServiceException {
    String ceilingEntity = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    // Custom handlers get the first opportunity to service the removal
    for (CustomPersistenceHandler handler : getCustomPersistenceHandlers()) {
        if (!handler.canHandleRemove(persistencePackage)) {
            continue;
        }
        if (!handler.willHandleSecurity(persistencePackage)) {
            adminRemoteSecurityService.securityCheck(ceilingEntity, EntityOperationType.REMOVE);
        }
        handler.remove(persistencePackage, dynamicEntityDao, (RecordHelper) getCompatibleModule(OperationType.BASIC));
        return;
    }
    adminRemoteSecurityService.securityCheck(ceilingEntity, EntityOperationType.REMOVE);
    PersistenceModule removeModule =
            getCompatibleModule(persistencePackage.getPersistencePerspective().getOperationTypes().getRemoveType());
    removeModule.remove(persistencePackage);
}
/**
 * Finds the first configured {@link PersistenceModule} compatible with the given
 * operation type.
 *
 * @param operationType the operation the module must support
 * @return the first compatible module (never null)
 * @throws RuntimeException if no configured module is compatible
 */
@Override
public PersistenceModule getCompatibleModule(OperationType operationType) {
    for (PersistenceModule module : modules) {
        if (module.isCompatible(operationType)) {
            return module;
        }
    }
    // No module claimed the operation type — this is a configuration error
    String message = "Unable to find a compatible remote service module for the operation type: " + operationType;
    LOG.error(message);
    throw new RuntimeException(message);
}
/** @return the DAO used for all dynamic entity persistence operations */
@Override
public DynamicEntityDao getDynamicEntityDao() {
    return dynamicEntityDao;
}

/** @param dynamicEntityDao the DAO used for all dynamic entity persistence operations */
@Override
public void setDynamicEntityDao(DynamicEntityDao dynamicEntityDao) {
    this.dynamicEntityDao = dynamicEntityDao;
}

/** @return map of target-mode key to Spring bean name of the corresponding EntityManager */
@Override
public Map<String, String> getTargetEntityManagers() {
    return targetEntityManagers;
}

/** @param targetEntityManagers map of target-mode key to EntityManager bean name */
@Override
public void setTargetEntityManagers(Map<String, String> targetEntityManagers) {
    this.targetEntityManagers = targetEntityManagers;
}

/** @return the currently active target mode */
@Override
public TargetModeType getTargetMode() {
    return targetMode;
}

/**
 * Switches the active target mode. Looks up the EntityManager bean registered for
 * the mode and wires it into the DAO as the standard entity manager.
 *
 * @param targetMode the mode to activate; must have an entry in targetEntityManagers
 * @throws RuntimeException if no entity manager is registered for the mode
 */
@Override
public void setTargetMode(TargetModeType targetMode) {
    String targetManagerRef = targetEntityManagers.get(targetMode.getType());
    // NOTE(review): getBean normally throws rather than returning null when the ref is
    // unknown — this null check likely guards a null targetManagerRef lookup; confirm
    EntityManager targetManager = (EntityManager) applicationContext.getBean(targetManagerRef);
    if (targetManager == null) {
        throw new RuntimeException("Unable to find a target entity manager registered with the key: " + targetMode + ". Did you add an entity manager with this key to the targetEntityManagers property?");
    }
    dynamicEntityDao.setStandardEntityManager(targetManager);
    this.targetMode = targetMode;
}
/**
 * Returns the configured custom persistence handlers, filtered by any registered
 * {@link CustomPersistenceHandlerFilter}s and sorted ascending by handler order.
 * A defensive copy is returned so filtering and sorting never mutate the
 * configured list.
 *
 * @return a new, filtered, order-sorted list of handlers (possibly empty)
 */
@Override
public List<CustomPersistenceHandler> getCustomPersistenceHandlers() {
    List<CustomPersistenceHandler> cloned = new ArrayList<CustomPersistenceHandler>();
    cloned.addAll(customPersistenceHandlers);
    if (getCustomPersistenceHandlerFilters() != null) {
        for (CustomPersistenceHandlerFilter filter : getCustomPersistenceHandlerFilters()) {
            // Iterator.remove is the only safe way to drop entries while iterating
            Iterator<CustomPersistenceHandler> itr = cloned.iterator();
            while (itr.hasNext()) {
                CustomPersistenceHandler handler = itr.next();
                if (!filter.shouldUseHandler(handler.getClass().getName())) {
                    itr.remove();
                }
            }
        }
    }
    Collections.sort(cloned, new Comparator<CustomPersistenceHandler>() {
        @Override
        public int compare(CustomPersistenceHandler o1, CustomPersistenceHandler o2) {
            // Integer.compare avoids boxing through the deprecated Integer(int) constructor
            return Integer.compare(o1.getOrder(), o2.getOrder());
        }
    });
    return cloned;
}
/** @param customPersistenceHandlers the handlers consulted before default persistence flows */
@Override
public void setCustomPersistenceHandlers(List<CustomPersistenceHandler> customPersistenceHandlers) {
    this.customPersistenceHandlers = customPersistenceHandlers;
}

/** @return the security verifier used for admin operation checks */
public SecurityVerifier getAdminRemoteSecurityService() {
    return adminRemoteSecurityService;
}

/** @param adminRemoteSecurityService the security service used for admin operation checks */
public void setAdminRemoteSecurityService(AdminSecurityServiceRemote adminRemoteSecurityService) {
    this.adminRemoteSecurityService = adminRemoteSecurityService;
}

/** @return filters that can exclude handlers returned by getCustomPersistenceHandlers() */
public List<CustomPersistenceHandlerFilter> getCustomPersistenceHandlerFilters() {
    return customPersistenceHandlerFilters;
}

/** @param customPersistenceHandlerFilters filters applied when resolving handlers */
public void setCustomPersistenceHandlerFilters(List<CustomPersistenceHandlerFilter> customPersistenceHandlerFilters) {
    this.customPersistenceHandlerFilters = customPersistenceHandlerFilters;
}

/** @return the persistence modules servicing inspect/fetch/add/update/remove */
public PersistenceModule[] getModules() {
    return modules;
}

/** @param modules the persistence modules servicing inspect/fetch/add/update/remove */
public void setModules(PersistenceModule[] modules) {
    this.modules = modules;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_PersistenceManagerImpl.java
|
291 |
// Test worker thread: acquires the distributed lock, bumps the counter while
// verifying the current thread holds the lock, signals readiness via awaitLatch,
// then blocks on the condition until signalled from elsewhere in the test.
// After waking it verifies lock ownership again (condition.await() reacquires the
// lock on return) before releasing and counting down the final latch.
new Thread(new Runnable() {
    public void run() {
        lock.lock();
        try {
            if (lock.isLockedByCurrentThread()) {
                count.incrementAndGet();
            }
            awaitLatch.countDown();
            condition.await();
            if (lock.isLockedByCurrentThread()) {
                count.incrementAndGet();
            }
        } catch (InterruptedException ignored) {
            // Intentionally swallowed: an interrupt simply ends this test worker early
        } finally {
            lock.unlock();
            finalLatch.countDown();
        }
    }
}).start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_lock_ClientConditionTest.java
|
591 |
/**
 * Cluster heartbeat message. The operation carries no payload and its
 * {@link #run()} is deliberately a no-op — presumably the act of receiving it is
 * the liveness signal itself (TODO confirm against the sender in ClusterService).
 * Identified by {@link ClusterDataSerializerHook#HEARTBEAT} for serialization.
 */
public final class HeartbeatOperation extends AbstractClusterOperation
        implements JoinOperation, IdentifiedDataSerializable {

    @Override
    public void run() {
        // do nothing ...
    }

    @Override
    public int getFactoryId() {
        return ClusterDataSerializerHook.F_ID;
    }

    @Override
    public int getId() {
        return ClusterDataSerializerHook.HEARTBEAT;
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_cluster_HeartbeatOperation.java
|
375 |
/**
 * Test fixture: a {@link CombinerFactory} that hands out a fresh
 * {@code TestCombiner} (defined elsewhere in this test class) for every key.
 * The no-arg constructor is required for serialization across cluster members.
 */
public static class TestCombinerFactory
        implements CombinerFactory<String, Integer, Integer> {

    public TestCombinerFactory() {
    }

    @Override
    public Combiner<String, Integer, Integer> newCombiner(String key) {
        // Key is ignored: every key gets an identical, independent combiner
        return new TestCombiner();
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java
|
2,319 |
/**
 * Static utilities shared by the map-reduce implementation: job process-information
 * construction, partition state transitions, reducer key assignment, and helpers for
 * executing operations locally or remotely across cluster members.
 * <p>
 * Non-instantiable utility class.
 */
public final class MapReduceUtil {

    private static final String EXECUTOR_NAME_PREFIX = "mapreduce::hz::";
    private static final String SERVICE_NAME = MapReduceService.SERVICE_NAME;
    // Growth headroom applied by mapSize() when pre-sizing result maps
    private static final float DEFAULT_MAP_GROWTH_FACTOR = 0.75f;

    private MapReduceUtil() {
    }

    /**
     * Creates the process-information holder for a job. Partition-aware sources track
     * one slot per data partition; otherwise one slot per cluster member is used and
     * member assignment is handled by the specialized implementation.
     */
    public static JobProcessInformationImpl createJobProcessInformation(JobTaskConfiguration configuration,
                                                                       JobSupervisor supervisor) {
        NodeEngine nodeEngine = configuration.getNodeEngine();
        if (configuration.getKeyValueSource() instanceof PartitionIdAware) {
            int partitionCount = nodeEngine.getPartitionService().getPartitionCount();
            return new JobProcessInformationImpl(partitionCount, supervisor);
        } else {
            int partitionCount = nodeEngine.getClusterService().getMemberList().size();
            return new MemberAssigningJobProcessInformationImpl(partitionCount, supervisor);
        }
    }

    /**
     * Propagates an exception to the job owner: delivered directly when this node owns
     * the job, otherwise sent as a {@link NotifyRemoteExceptionOperation}. Failures to
     * notify are logged, not rethrown.
     */
    public static void notifyRemoteException(JobSupervisor supervisor, Throwable throwable) {
        MapReduceService mapReduceService = supervisor.getMapReduceService();
        NodeEngine nodeEngine = mapReduceService.getNodeEngine();
        try {
            Address jobOwner = supervisor.getJobOwner();
            if (supervisor.isOwnerNode()) {
                supervisor.notifyRemoteException(jobOwner, throwable);
            } else {
                String name = supervisor.getConfiguration().getName();
                String jobId = supervisor.getConfiguration().getJobId();
                NotifyRemoteExceptionOperation operation = new NotifyRemoteExceptionOperation(name, jobId, throwable);
                OperationService os = nodeEngine.getOperationService();
                os.send(operation, jobOwner);
            }
        } catch (Exception e) {
            // Best-effort notification: never let the notify path itself take the node down
            ILogger logger = nodeEngine.getLogger(MapReduceUtil.class);
            logger.warning("Could not notify remote map-reduce owner", e);
        }
    }

    /**
     * Attempts a partition state transition requested by {@code owner}. Returns the new
     * state on success, or null if the request is stale (wrong owner or the partition is
     * no longer in {@code currentState}) or the compare-and-swap lost a race.
     */
    public static JobPartitionState.State stateChange(Address owner, int partitionId, JobPartitionState.State currentState,
                                                      JobProcessInformationImpl processInformation,
                                                      JobTaskConfiguration configuration) {
        JobPartitionState[] partitionStates = processInformation.getPartitionStates();
        JobPartitionState partitionState = partitionStates[partitionId];
        // If not yet assigned we don't need to check owner and state
        JobPartitionState.State finalState = null;
        if (partitionState != null) {
            if (!owner.equals(partitionState.getOwner())) {
                return null;
            }
            if (partitionState.getState() != currentState) {
                return null;
            }
            if (currentState == MAPPING) {
                finalState = stateChangeMapping(partitionId, partitionState, processInformation, owner, configuration);
            } else if (currentState == REDUCING) {
                finalState = stateChangeReducing(partitionId, partitionState, processInformation, owner);
            }
        }
        if (currentState == WAITING) {
            if (compareAndSwapPartitionState(partitionId, partitionState, processInformation, owner, MAPPING)) {
                finalState = MAPPING;
            }
        }
        return finalState;
    }

    /** REDUCING -> PROCESSED transition; null if the CAS lost. */
    private static JobPartitionState.State stateChangeReducing(int partitionId, JobPartitionState oldPartitionState,
                                                               JobProcessInformationImpl processInformation, Address owner) {
        if (compareAndSwapPartitionState(partitionId, oldPartitionState, processInformation, owner, PROCESSED)) {
            return PROCESSED;
        }
        return null;
    }

    /**
     * MAPPING -> REDUCING (when a reducer factory is configured) or MAPPING -> PROCESSED
     * transition; null if the CAS lost.
     */
    private static JobPartitionState.State stateChangeMapping(int partitionId, JobPartitionState oldPartitionState,
                                                              JobProcessInformationImpl processInformation, Address owner,
                                                              JobTaskConfiguration configuration) {
        JobPartitionState.State newState = PROCESSED;
        if (configuration.getReducerFactory() != null) {
            newState = REDUCING;
        }
        if (compareAndSwapPartitionState(partitionId, oldPartitionState, processInformation, owner, newState)) {
            return newState;
        }
        return null;
    }

    /** Atomically swaps the partition state; true on success. */
    private static boolean compareAndSwapPartitionState(int partitionId, JobPartitionState oldPartitionState,
                                                        JobProcessInformationImpl processInformation, Address owner,
                                                        JobPartitionState.State newState) {
        JobPartitionState newPartitionState = new JobPartitionStateImpl(owner, newState);
        // Return the CAS result directly instead of the previous if/return-true/return-false form
        return processInformation.updatePartitionState(partitionId, oldPartitionState, newPartitionState);
    }

    /**
     * Groups mapper results by the reducer member responsible for each key. Keys without
     * a cached reducer address are first assigned by asking the job owner.
     */
    public static <K, V> Map<Address, Map<K, V>> mapResultToMember(JobSupervisor supervisor, Map<K, V> result) {
        Set<Object> unassignedKeys = new HashSet<Object>();
        for (Map.Entry<K, V> entry : result.entrySet()) {
            Address address = supervisor.getReducerAddressByKey(entry.getKey());
            if (address == null) {
                unassignedKeys.add(entry.getKey());
            }
        }
        if (!unassignedKeys.isEmpty()) {
            requestAssignment(unassignedKeys, supervisor);
        }
        // Now assign all keys
        Map<Address, Map<K, V>> mapping = new HashMap<Address, Map<K, V>>();
        for (Map.Entry<K, V> entry : result.entrySet()) {
            Address address = supervisor.getReducerAddressByKey(entry.getKey());
            if (address != null) {
                Map<K, V> data = mapping.get(address);
                if (data == null) {
                    data = new HashMap<K, V>();
                    mapping.put(address, data);
                }
                data.put(entry.getKey(), entry.getValue());
            }
        }
        return mapping;
    }

    /**
     * Asks the job owner to assign reducer addresses for the given keys and caches the
     * result in the supervisor. Any failure is rethrown as a RuntimeException.
     */
    private static void requestAssignment(Set<Object> keys, JobSupervisor supervisor) {
        try {
            MapReduceService mapReduceService = supervisor.getMapReduceService();
            String name = supervisor.getConfiguration().getName();
            String jobId = supervisor.getConfiguration().getJobId();
            KeysAssignmentResult assignmentResult = mapReduceService
                    .processRequest(supervisor.getJobOwner(), new KeysAssignmentOperation(name, jobId, keys), name);
            if (assignmentResult.getResultState() == SUCCESSFUL) {
                Map<Object, Address> assignment = assignmentResult.getAssignment();
                for (Map.Entry<Object, Address> entry : assignment.entrySet()) {
                    // Cache the keys for later mappings
                    if (!supervisor.assignKeyReducerAddress(entry.getKey(), entry.getValue())) {
                        throw new IllegalStateException("Key reducer assignment in illegal state");
                    }
                }
            }
        } catch (Exception e) {
            // Just announce it to higher levels
            throw new RuntimeException(e);
        }
    }

    /** Renders partition states as "[index=>STATE], ..." for diagnostics. */
    public static String printPartitionStates(JobPartitionState[] partitionStates) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < partitionStates.length; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append("[").append(i).append("=>");
            sb.append(partitionStates[i] == null ? "null" : partitionStates[i].getState()).append("]");
        }
        return sb.toString();
    }

    /**
     * Runs one operation per cluster member: executed inline for the local member,
     * invoked (or fire-and-forgot) for remote members. Non-null responses are collected
     * when {@code returnsResponse} is true; otherwise null is returned.
     */
    public static <V> List<V> executeOperation(OperationFactory operationFactory, MapReduceService mapReduceService,
                                               NodeEngine nodeEngine, boolean returnsResponse) {
        ClusterService cs = nodeEngine.getClusterService();
        OperationService os = nodeEngine.getOperationService();
        Collection<MemberImpl> members = cs.getMemberList();
        List<V> results = returnsResponse ? new ArrayList<V>() : null;
        for (MemberImpl member : members) {
            try {
                Operation operation = operationFactory.createOperation();
                if (cs.getThisAddress().equals(member.getAddress())) {
                    // Locally we can call the operation directly
                    operation.setNodeEngine(nodeEngine);
                    operation.setCallerUuid(nodeEngine.getLocalMember().getUuid());
                    operation.setService(mapReduceService);
                    operation.run();
                    if (returnsResponse) {
                        V response = (V) operation.getResponse();
                        if (response != null) {
                            results.add(response);
                        }
                    }
                } else {
                    if (returnsResponse) {
                        InvocationBuilder ib = os.createInvocationBuilder(SERVICE_NAME, operation, member.getAddress());
                        V response = (V) ib.invoke().get();
                        if (response != null) {
                            results.add(response);
                        }
                    } else {
                        os.send(operation, member.getAddress());
                    }
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return results;
    }

    /**
     * Runs a single operation against one address: executed inline when the address is
     * local, invoked (or fire-and-forgot) when remote. Returns the response if the
     * operation produces one, otherwise null.
     */
    public static <V> V executeOperation(Operation operation, Address address, MapReduceService mapReduceService,
                                         NodeEngine nodeEngine) {
        ClusterService cs = nodeEngine.getClusterService();
        OperationService os = nodeEngine.getOperationService();
        boolean returnsResponse = operation.returnsResponse();
        try {
            if (cs.getThisAddress().equals(address)) {
                // Locally we can call the operation directly
                operation.setNodeEngine(nodeEngine);
                operation.setCallerUuid(nodeEngine.getLocalMember().getUuid());
                operation.setService(mapReduceService);
                operation.run();
                if (returnsResponse) {
                    return (V) operation.getResponse();
                }
            } else {
                if (returnsResponse) {
                    InvocationBuilder ib = os.createInvocationBuilder(SERVICE_NAME, operation, address);
                    return (V) ib.invoke().get();
                } else {
                    os.send(operation, address);
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return null;
    }

    /** Namespaces a job name into the map-reduce executor name. */
    public static String buildExecutorName(String name) {
        return EXECUTOR_NAME_PREFIX + name;
    }

    /** Pre-size for a HashMap expected to hold sourceSize entries, allowing growth headroom. */
    public static int mapSize(final int sourceSize) {
        return sourceSize == 0 ? 0 : (int) (sourceSize / DEFAULT_MAP_GROWTH_FACTOR) + 1;
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_MapReduceUtil.java
|
3,845 |
public class GeoPolygonFilterParser implements FilterParser {
public static final String NAME = "geo_polygon";
public static final String POINTS = "points";
@Inject
public GeoPolygonFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME, "geoPolygon"};
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
boolean cache = false;
CacheKeyFilter.Key cacheKey = null;
String fieldName = null;
List<GeoPoint> shell = Lists.newArrayList();
boolean normalizeLon = true;
boolean normalizeLat = true;
String filterName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if(POINTS.equals(currentFieldName)) {
while((token = parser.nextToken()) != Token.END_ARRAY) {
shell.add(GeoPoint.parse(parser));
}
} else {
throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else {
throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]");
}
}
}
if (shell.isEmpty()) {
throw new QueryParsingException(parseContext.index(), "no points defined for geo_polygon filter");
} else {
if(shell.size() < 3) {
throw new QueryParsingException(parseContext.index(), "to few points defined for geo_polygon filter");
}
GeoPoint start = shell.get(0);
if(!start.equals(shell.get(shell.size()-1))) {
shell.add(start);
}
if(shell.size() < 4) {
throw new QueryParsingException(parseContext.index(), "to few points defined for geo_polygon filter");
}
}
if (normalizeLat || normalizeLon) {
for (GeoPoint point : shell) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
}
MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
}
FieldMapper<?> mapper = smartMappers.mapper();
if (!(mapper instanceof GeoPointFieldMapper)) {
throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
}
IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
Filter filter = new GeoPolygonFilter(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
filter = wrapSmartNameFilter(filter, smartMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_GeoPolygonFilterParser.java
|
796 |
/**
 * XContent field-name constants used when rendering the response to JSON
 * (per the enclosing file, the percolate response — confirm against usage).
 * Holding them as {@link XContentBuilderString}s avoids re-encoding on each render.
 */
static final class Fields {
    static final XContentBuilderString TOOK = new XContentBuilderString("took");
    static final XContentBuilderString TOTAL = new XContentBuilderString("total");
    static final XContentBuilderString MATCHES = new XContentBuilderString("matches");
    static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
    static final XContentBuilderString _ID = new XContentBuilderString("_id");
    static final XContentBuilderString _SCORE = new XContentBuilderString("_score");
    static final XContentBuilderString HIGHLIGHT = new XContentBuilderString("highlight");
}
| 0true
|
src_main_java_org_elasticsearch_action_percolate_PercolateResponse.java
|
1,423 |
// Complete any queued mapping-update tasks off the cluster-state thread:
// each UpdateTask in allTasks receives an acknowledged (true) response on a
// generic-pool thread. Non-UpdateTask entries are skipped.
threadPool.generic().execute(new Runnable() {
    @Override
    public void run() {
        for (Object task : allTasks) {
            if (task instanceof UpdateTask) {
                UpdateTask uTask = (UpdateTask) task;
                ClusterStateUpdateResponse response = new ClusterStateUpdateResponse(true);
                uTask.listener.onResponse(response);
            }
        }
    }
});
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MetaDataMappingService.java
|
852 |
/**
 * Transport entry point for search-scroll requests. Decodes the scroll id and routes
 * the request to the action matching its search type (query-then-fetch,
 * query-and-fetch, or scan); unknown types fail the listener with an
 * {@link ElasticsearchIllegalArgumentException}.
 */
public class TransportSearchScrollAction extends TransportAction<SearchScrollRequest, SearchResponse> {

    private final TransportSearchScrollQueryThenFetchAction queryThenFetchAction;
    private final TransportSearchScrollQueryAndFetchAction queryAndFetchAction;
    private final TransportSearchScrollScanAction scanAction;

    @Inject
    public TransportSearchScrollAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                       TransportSearchScrollQueryThenFetchAction queryThenFetchAction,
                                       TransportSearchScrollQueryAndFetchAction queryAndFetchAction,
                                       TransportSearchScrollScanAction scanAction) {
        super(settings, threadPool);
        this.queryThenFetchAction = queryThenFetchAction;
        this.queryAndFetchAction = queryAndFetchAction;
        this.scanAction = scanAction;
        transportService.registerHandler(SearchScrollAction.NAME, new TransportHandler());
    }

    @Override
    protected void doExecute(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
        try {
            ParsedScrollId scrollId = parseScrollId(request.scrollId());
            String scrollType = scrollId.getType();
            // Dispatch on the search type baked into the scroll id
            if (scrollType.equals(QUERY_THEN_FETCH_TYPE)) {
                queryThenFetchAction.execute(request, scrollId, listener);
            } else if (scrollType.equals(QUERY_AND_FETCH_TYPE)) {
                queryAndFetchAction.execute(request, scrollId, listener);
            } else if (scrollType.equals(SCAN)) {
                scanAction.execute(request, scrollId, listener);
            } else {
                throw new ElasticsearchIllegalArgumentException("Scroll id type [" + scrollType + "] unrecognized");
            }
        } catch (Throwable t) {
            // Parsing and dispatch failures are reported through the listener, never thrown
            listener.onFailure(t);
        }
    }

    /** Receives scroll requests off the wire and relays the outcome back over the channel. */
    private class TransportHandler extends BaseTransportRequestHandler<SearchScrollRequest> {

        @Override
        public SearchScrollRequest newInstance() {
            return new SearchScrollRequest();
        }

        @Override
        public void messageReceived(SearchScrollRequest request, final TransportChannel channel) throws Exception {
            // no need for a threaded listener
            request.listenerThreaded(false);
            execute(request, new ActionListener<SearchResponse>() {
                @Override
                public void onResponse(SearchResponse searchResponse) {
                    try {
                        channel.sendResponse(searchResponse);
                    } catch (Throwable t) {
                        onFailure(t);
                    }
                }

                @Override
                public void onFailure(Throwable t) {
                    try {
                        channel.sendResponse(t);
                    } catch (Exception inner) {
                        logger.warn("Failed to send response for search", inner);
                    }
                }
            });
        }

        @Override
        public String executor() {
            // Dispatch is cheap; run on the calling thread
            return ThreadPool.Names.SAME;
        }
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_search_TransportSearchScrollAction.java
|
107 |
/**
 * Minimal Portable test fixture: a single int field serialized under key "a".
 * The no-arg constructor is required by Portable deserialization.
 */
static class SamplePortable implements Portable {

    // Intentionally public for direct access in test assertions
    public int a;

    public SamplePortable(int a) {
        this.a = a;
    }

    public SamplePortable() {
    }

    public int getFactoryId() {
        // Must match the PortableFactory id registered by the test's serialization config
        return 5;
    }

    public int getClassId() {
        return 6;
    }

    public void writePortable(PortableWriter writer) throws IOException {
        writer.writeInt("a", a);
    }

    public void readPortable(PortableReader reader) throws IOException {
        a = reader.readInt("a");
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java
|
1,399 |
/**
 * Master-side service that deletes an index: removes its metadata, routing table and
 * blocks from the cluster state, then waits until every node has acknowledged the
 * removal (or a timeout elapses) before answering the caller.
 */
public class MetaDataDeleteIndexService extends AbstractComponent {
    private final ThreadPool threadPool;
    private final ClusterService clusterService;
    private final AllocationService allocationService;
    private final NodeIndexDeletedAction nodeIndexDeletedAction;
    private final MetaDataService metaDataService;

    @Inject
    public MetaDataDeleteIndexService(Settings settings, ThreadPool threadPool, ClusterService clusterService, AllocationService allocationService,
                                      NodeIndexDeletedAction nodeIndexDeletedAction, MetaDataService metaDataService) {
        super(settings);
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.allocationService = allocationService;
        this.nodeIndexDeletedAction = nodeIndexDeletedAction;
        this.metaDataService = metaDataService;
    }

    /**
     * Entry point. Acquires the per-index metadata lock before submitting the cluster
     * state update; if the lock is contended, the blocking wait is moved off the
     * calling thread onto the MANAGEMENT pool.
     */
    public void deleteIndex(final Request request, final Listener userListener) {
        // we lock here, and not within the cluster service callback since we don't want to
        // block the whole cluster state handling
        final Semaphore mdLock = metaDataService.indexMetaDataLock(request.index);
        // quick check to see if we can acquire a lock, otherwise spawn to a thread pool
        if (mdLock.tryAcquire()) {
            deleteIndex(request, userListener, mdLock);
            return;
        }
        threadPool.executor(ThreadPool.Names.MANAGEMENT).execute(new Runnable() {
            @Override
            public void run() {
                try {
                    // Bounded wait for the lock; give up with a timeout error rather than block forever.
                    if (!mdLock.tryAcquire(request.masterTimeout.nanos(), TimeUnit.NANOSECONDS)) {
                        userListener.onFailure(new ProcessClusterEventTimeoutException(request.masterTimeout, "acquire index lock"));
                        return;
                    }
                } catch (InterruptedException e) {
                    userListener.onFailure(e);
                    return;
                }
                deleteIndex(request, userListener, mdLock);
            }
        });
    }

    /**
     * Performs the actual deletion. The caller must already hold {@code mdLock};
     * the lock is released by the DeleteIndexListener on first response/failure.
     */
    private void deleteIndex(final Request request, final Listener userListener, Semaphore mdLock) {
        final DeleteIndexListener listener = new DeleteIndexListener(mdLock, userListener);
        clusterService.submitStateUpdateTask("delete-index [" + request.index + "]", Priority.URGENT, new TimeoutClusterStateUpdateTask() {
            @Override
            public TimeValue timeout() {
                return request.masterTimeout;
            }

            @Override
            public void onFailure(String source, Throwable t) {
                listener.onFailure(t);
            }

            @Override
            public ClusterState execute(final ClusterState currentState) {
                if (!currentState.metaData().hasConcreteIndex(request.index)) {
                    throw new IndexMissingException(new Index(request.index));
                }
                logger.info("[{}] deleting index", request.index);
                // Strip the index from routing, metadata and blocks, then reroute shards.
                RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable());
                routingTableBuilder.remove(request.index);
                MetaData newMetaData = MetaData.builder(currentState.metaData())
                        .remove(request.index)
                        .build();
                RoutingAllocation.Result routingResult = allocationService.reroute(
                        ClusterState.builder(currentState).routingTable(routingTableBuilder).metaData(newMetaData).build());
                ClusterBlocks blocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeIndexBlocks(request.index).build();
                // wait for events from all nodes that it has been removed from their respective metadata...
                int count = currentState.nodes().size();
                // add the notifications that the store was deleted from *data* nodes
                count += currentState.nodes().dataNodes().size();
                final AtomicInteger counter = new AtomicInteger(count);
                // this listener will be notified once we get back a notification based on the cluster state change below.
                final NodeIndexDeletedAction.Listener nodeIndexDeleteListener = new NodeIndexDeletedAction.Listener() {
                    @Override
                    public void onNodeIndexDeleted(String index, String nodeId) {
                        if (index.equals(request.index)) {
                            // Respond (acknowledged) only once ALL expected node acks arrived.
                            if (counter.decrementAndGet() == 0) {
                                listener.onResponse(new Response(true));
                                nodeIndexDeletedAction.remove(this);
                            }
                        }
                    }

                    @Override
                    public void onNodeIndexStoreDeleted(String index, String nodeId) {
                        if (index.equals(request.index)) {
                            if (counter.decrementAndGet() == 0) {
                                listener.onResponse(new Response(true));
                                nodeIndexDeletedAction.remove(this);
                            }
                        }
                    }
                };
                nodeIndexDeletedAction.add(nodeIndexDeleteListener);
                // Fallback: if not all acks arrive within request.timeout, answer with
                // acknowledged=false. DeleteIndexListener guarantees only one of the two
                // responses (ack vs. timeout) wins.
                listener.future = threadPool.schedule(request.timeout, ThreadPool.Names.SAME, new Runnable() {
                    @Override
                    public void run() {
                        listener.onResponse(new Response(false));
                        nodeIndexDeletedAction.remove(nodeIndexDeleteListener);
                    }
                });
                return ClusterState.builder(currentState).routingResult(routingResult).metaData(newMetaData).blocks(blocks).build();
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                // Intentionally empty: completion is signalled via node-deleted acks above,
                // not via cluster state publication.
            }
        });
    }

    /**
     * Wraps the user listener, releasing the metadata lock and cancelling the timeout
     * future exactly once, on whichever of response/failure fires first.
     */
    class DeleteIndexListener implements Listener {
        private final AtomicBoolean notified = new AtomicBoolean();
        private final Semaphore mdLock;
        private final Listener listener;
        volatile ScheduledFuture<?> future;

        private DeleteIndexListener(Semaphore mdLock, Listener listener) {
            this.mdLock = mdLock;
            this.listener = listener;
        }

        @Override
        public void onResponse(final Response response) {
            if (notified.compareAndSet(false, true)) {
                mdLock.release();
                if (future != null) {
                    future.cancel(false);
                }
                listener.onResponse(response);
            }
        }

        @Override
        public void onFailure(Throwable t) {
            if (notified.compareAndSet(false, true)) {
                mdLock.release();
                if (future != null) {
                    future.cancel(false);
                }
                listener.onFailure(t);
            }
        }
    }

    /** Callback contract for index deletion. */
    public static interface Listener {
        void onResponse(Response response);
        void onFailure(Throwable t);
    }

    /** Deletion request: index name plus ack timeout and master-operation timeout. */
    public static class Request {
        final String index;
        // How long to wait for all node acks before answering acknowledged=false.
        TimeValue timeout = TimeValue.timeValueSeconds(10);
        TimeValue masterTimeout = MasterNodeOperationRequest.DEFAULT_MASTER_NODE_TIMEOUT;

        public Request(String index) {
            this.index = index;
        }

        public Request timeout(TimeValue timeout) {
            this.timeout = timeout;
            return this;
        }

        public Request masterTimeout(TimeValue masterTimeout) {
            this.masterTimeout = masterTimeout;
            return this;
        }
    }

    /** Deletion outcome; acknowledged=false means the ack wait timed out. */
    public static class Response {
        private final boolean acknowledged;

        public Response(boolean acknowledged) {
            this.acknowledged = acknowledged;
        }

        public boolean acknowledged() {
            return acknowledged;
        }
    }
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MetaDataDeleteIndexService.java
|
25 |
/**
 * Processor for memcache-protocol error commands: simply echoes the error command
 * back to the client as the response.
 */
public class ErrorCommandProcessor extends AbstractTextCommandProcessor<ErrorCommand> {
    public ErrorCommandProcessor(TextCommandService textCommandService) {
        super(textCommandService);
    }

    /** Sends the error command itself back to the client; nothing else to compute. */
    public void handle(ErrorCommand command) {
        textCommandService.sendResponse(command);
    }

    /** A rejected error command is handled identically to a regular one. */
    public void handleRejection(ErrorCommand command) {
        handle(command);
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_memcache_ErrorCommandProcessor.java
|
4,470 |
/**
 * Transport request announcing the phase-1 recovery file manifest: which files will
 * be copied, which already exist on the target, and their sizes.
 */
class RecoveryFilesInfoRequest extends TransportRequest {

    private long recoveryId;
    private ShardId shardId;

    List<String> phase1FileNames;
    List<Long> phase1FileSizes;
    List<String> phase1ExistingFileNames;
    List<Long> phase1ExistingFileSizes;

    long phase1TotalSize;
    long phase1ExistingTotalSize;

    RecoveryFilesInfoRequest() {
    }

    RecoveryFilesInfoRequest(long recoveryId, ShardId shardId, List<String> phase1FileNames, List<Long> phase1FileSizes, List<String> phase1ExistingFileNames, List<Long> phase1ExistingFileSizes, long phase1TotalSize, long phase1ExistingTotalSize) {
        this.recoveryId = recoveryId;
        this.shardId = shardId;
        this.phase1FileNames = phase1FileNames;
        this.phase1FileSizes = phase1FileSizes;
        this.phase1ExistingFileNames = phase1ExistingFileNames;
        this.phase1ExistingFileSizes = phase1ExistingFileSizes;
        this.phase1TotalSize = phase1TotalSize;
        this.phase1ExistingTotalSize = phase1ExistingTotalSize;
    }

    public long recoveryId() {
        return this.recoveryId;
    }

    public ShardId shardId() {
        return shardId;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        recoveryId = in.readLong();
        shardId = ShardId.readShardId(in);
        // The four lists are serialized in this exact order; do not reorder.
        phase1FileNames = readStringList(in);
        phase1FileSizes = readVLongList(in);
        phase1ExistingFileNames = readStringList(in);
        phase1ExistingFileSizes = readVLongList(in);
        phase1TotalSize = in.readVLong();
        phase1ExistingTotalSize = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeLong(recoveryId);
        shardId.writeTo(out);
        writeStringList(out, phase1FileNames);
        writeVLongList(out, phase1FileSizes);
        writeStringList(out, phase1ExistingFileNames);
        writeVLongList(out, phase1ExistingFileSizes);
        out.writeVLong(phase1TotalSize);
        out.writeVLong(phase1ExistingTotalSize);
    }

    /** Reads a vInt-length-prefixed list of strings. */
    private static List<String> readStringList(StreamInput in) throws IOException {
        int count = in.readVInt();
        List<String> result = new ArrayList<String>(count);
        for (int i = 0; i < count; i++) {
            result.add(in.readString());
        }
        return result;
    }

    /** Reads a vInt-length-prefixed list of vLong values. */
    private static List<Long> readVLongList(StreamInput in) throws IOException {
        int count = in.readVInt();
        List<Long> result = new ArrayList<Long>(count);
        for (int i = 0; i < count; i++) {
            result.add(in.readVLong());
        }
        return result;
    }

    /** Writes a vInt-length-prefixed list of strings. */
    private static void writeStringList(StreamOutput out, List<String> values) throws IOException {
        out.writeVInt(values.size());
        for (String value : values) {
            out.writeString(value);
        }
    }

    /** Writes a vInt-length-prefixed list of vLong values. */
    private static void writeVLongList(StreamOutput out, List<Long> values) throws IOException {
        out.writeVInt(values.size());
        for (Long value : values) {
            out.writeVLong(value);
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_indices_recovery_RecoveryFilesInfoRequest.java
|
3,228 |
/**
 * Per-document views over field data exposed to scripts. A caller positions the view
 * on a document via {@link #setNextDocId(int)}, then reads single values or lists.
 * Lists are lazily materialized once per document ({@code listLoaded} flag).
 * <p>
 * NOTE(review): instances are stateful and not thread-safe; the underlying *Values
 * iterators require setDocument(...) before nextValue() — preserved exactly below.
 */
public abstract class ScriptDocValues {
    public static final ScriptDocValues EMPTY = new Empty();
    public static final Strings EMPTY_STRINGS = new Strings(BytesValues.EMPTY);
    protected int docId;
    // True once getValues() has filled the cached list for the current docId.
    protected boolean listLoaded = false;

    /** Positions this view on a document and invalidates the cached list. */
    public void setNextDocId(int docId) {
        this.docId = docId;
        this.listLoaded = false;
    }

    public abstract boolean isEmpty();

    public abstract List<?> getValues();

    /** Null-object implementation: always empty. */
    public static class Empty extends ScriptDocValues {
        @Override
        public void setNextDocId(int docId) {
        }

        @Override
        public boolean isEmpty() {
            return true;
        }

        @Override
        public List<?> getValues() {
            return Collections.emptyList();
        }
    }

    /** String view over byte-valued field data; decodes UTF-8 lazily. */
    public final static class Strings extends ScriptDocValues {
        private final BytesValues values;
        // Scratch buffer reused for UTF-8 -> UTF-16 decoding.
        private final CharsRef spare = new CharsRef();
        private SlicedObjectList<String> list;

        public Strings(BytesValues values) {
            this.values = values;
            list = new SlicedObjectList<String>(values.isMultiValued() ? new String[10] : new String[1]) {
                @Override
                public void grow(int newLength) {
                    assert offset == 0; // NOTE: senseless if offset != 0
                    // 'values' here is the list's own backing String[] array, shadowing
                    // the enclosing BytesValues field.
                    if (values.length >= newLength) {
                        return;
                    }
                    final String[] current = values;
                    values = new String[ArrayUtil.oversize(newLength, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
                    System.arraycopy(current, 0, values, 0, current.length);
                }
            };
        }

        @Override
        public boolean isEmpty() {
            return values.setDocument(docId) == 0;
        }

        public BytesValues getInternalValues() {
            return this.values;
        }

        /** First raw bytes value for the current doc, or null if the doc has none. */
        public BytesRef getBytesValue() {
            int numValues = values.setDocument(docId);
            if (numValues == 0) {
                return null;
            }
            return values.nextValue();
        }

        /** First value decoded to a String, or null if the doc has none. */
        public String getValue() {
            String value = null;
            if (values.setDocument(docId) > 0) {
                UnicodeUtil.UTF8toUTF16(values.nextValue(), spare);
                value = spare.toString();
            }
            return value;
        }

        public List<String> getValues() {
            if (!listLoaded) {
                final int numValues = values.setDocument(docId);
                list.offset = 0;
                list.grow(numValues);
                list.length = numValues;
                for (int i = 0; i < numValues; i++) {
                    BytesRef next = values.nextValue();
                    UnicodeUtil.UTF8toUTF16(next, spare);
                    list.values[i] = spare.toString();
                }
                listLoaded = true;
            }
            return list;
        }
    }

    /** Long view; also exposes the value as a (reused, mutable) date. */
    public static class Longs extends ScriptDocValues {
        private final LongValues values;
        // Reused across calls — callers must not hold on to the returned date.
        private final MutableDateTime date = new MutableDateTime(0, DateTimeZone.UTC);
        private final SlicedLongList list;

        public Longs(LongValues values) {
            this.values = values;
            this.list = new SlicedLongList(values.isMultiValued() ? 10 : 1);
        }

        public LongValues getInternalValues() {
            return this.values;
        }

        @Override
        public boolean isEmpty() {
            return values.setDocument(docId) == 0;
        }

        /** First value for the current doc; 0 if the doc has none. */
        public long getValue() {
            int numValues = values.setDocument(docId);
            if (numValues == 0) {
                return 0l;
            }
            return values.nextValue();
        }

        public List<Long> getValues() {
            if (!listLoaded) {
                final int numValues = values.setDocument(docId);
                list.offset = 0;
                list.grow(numValues);
                list.length = numValues;
                for (int i = 0; i < numValues; i++) {
                    list.values[i] = values.nextValue();
                }
                listLoaded = true;
            }
            return list;
        }

        /** Current value interpreted as epoch millis in UTC (shared mutable instance). */
        public MutableDateTime getDate() {
            date.setMillis(getValue());
            return date;
        }
    }

    /** Double view over numeric field data. */
    public static class Doubles extends ScriptDocValues {
        private final DoubleValues values;
        private final SlicedDoubleList list;

        public Doubles(DoubleValues values) {
            this.values = values;
            this.list = new SlicedDoubleList(values.isMultiValued() ? 10 : 1);
        }

        public DoubleValues getInternalValues() {
            return this.values;
        }

        @Override
        public boolean isEmpty() {
            return values.setDocument(docId) == 0;
        }

        /** First value for the current doc; 0 if the doc has none. */
        public double getValue() {
            int numValues = values.setDocument(docId);
            if (numValues == 0) {
                return 0d;
            }
            return values.nextValue();
        }

        public List<Double> getValues() {
            if (!listLoaded) {
                int numValues = values.setDocument(docId);
                list.offset = 0;
                list.grow(numValues);
                list.length = numValues;
                for (int i = 0; i < numValues; i++) {
                    list.values[i] = values.nextValue();
                }
                listLoaded = true;
            }
            return list;
        }
    }

    /**
     * Geo-point view plus the family of distance helpers scripts call
     * (factor/arc/plane metrics in default units, km and miles, each with a
     * *WithDefault variant returning the fallback for docs with no point).
     */
    public static class GeoPoints extends ScriptDocValues {
        private final GeoPointValues values;
        private final SlicedObjectList<GeoPoint> list;

        public GeoPoints(GeoPointValues values) {
            this.values = values;
            list = new SlicedObjectList<GeoPoint>(values.isMultiValued() ? new GeoPoint[10] : new GeoPoint[1]) {
                @Override
                public void grow(int newLength) {
                    assert offset == 0; // NOTE: senseless if offset != 0
                    // 'values' here is the list's own GeoPoint[] array (shadows the field).
                    if (values.length >= newLength) {
                        return;
                    }
                    final GeoPoint[] current = values;
                    values = new GeoPoint[ArrayUtil.oversize(newLength, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
                    System.arraycopy(current, 0, values, 0, current.length);
                }
            };
        }

        @Override
        public boolean isEmpty() {
            return values.setDocument(docId) == 0;
        }

        /** First point for the current doc, or null if the doc has none. */
        public GeoPoint getValue() {
            int numValues = values.setDocument(docId);
            if (numValues == 0) {
                return null;
            }
            return values.nextValue();
        }

        // NOTE(review): getLat()/getLon() NPE on docs without a point (getValue() == null);
        // callers are expected to check isEmpty() or use the *WithDefault variants.
        public double getLat() {
            return getValue().lat();
        }

        public double[] getLats() {
            List<GeoPoint> points = getValues();
            double[] lats = new double[points.size()];
            for (int i = 0; i < points.size(); i++) {
                lats[i] = points.get(i).lat();
            }
            return lats;
        }

        public double[] getLons() {
            List<GeoPoint> points = getValues();
            double[] lons = new double[points.size()];
            for (int i = 0; i < points.size(); i++) {
                lons[i] = points.get(i).lon();
            }
            return lons;
        }

        public double getLon() {
            return getValue().lon();
        }

        public List<GeoPoint> getValues() {
            if (!listLoaded) {
                int numValues = values.setDocument(docId);
                list.offset = 0;
                list.grow(numValues);
                list.length = numValues;
                for (int i = 0; i < numValues; i++) {
                    GeoPoint next = values.nextValue();
                    // Reuse previously allocated GeoPoint slots to avoid garbage.
                    GeoPoint point = list.values[i];
                    if (point == null) {
                        point = list.values[i] = new GeoPoint();
                    }
                    point.reset(next.lat(), next.lon());
                    list.values[i] = point;
                }
                listLoaded = true;
            }
            return list;
        }

        public double factorDistance(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
        }

        public double factorDistanceWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
        }

        // Shifted variants: factor distance rescaled into [0,2] and [1,3] respectively.
        public double factorDistance02(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT) + 1;
        }

        public double factorDistance13(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.FACTOR.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT) + 2;
        }

        public double arcDistance(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
        }

        public double arcDistanceWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
        }

        public double arcDistanceInKm(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
        }

        public double arcDistanceInKmWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
        }

        public double arcDistanceInMiles(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
        }

        public double arcDistanceInMilesWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.ARC.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
        }

        // "distance" (no prefix) uses the PLANE approximation.
        public double distance(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
        }

        public double distanceWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.DEFAULT);
        }

        public double distanceInKm(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
        }

        public double distanceInKmWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.KILOMETERS);
        }

        public double distanceInMiles(double lat, double lon) {
            GeoPoint point = getValue();
            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
        }

        public double distanceInMilesWithDefault(double lat, double lon, double defaultValue) {
            if (isEmpty()) {
                return defaultValue;
            }
            GeoPoint point = getValue();
            return GeoDistance.PLANE.calculate(point.lat(), point.lon(), lat, lon, DistanceUnit.MILES);
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_fielddata_ScriptDocValues.java
|
104 |
// Polls until the assertion passes; each attempt itself waits up to 10s on the latch.
// NOTE(review): nesting a bounded await inside assertTrueEventually compounds timeouts —
// confirm the combined wait is intended.
assertTrueEventually(new AssertTask() {
    public void run() {
        try {
            assertTrue(latch.await(10, TimeUnit.SECONDS));
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_ClientIssueTest.java
|
274 |
/**
 * Contract for command requests that can deliver results asynchronously through a
 * registered {@link OCommandResultListener}.
 */
public interface OCommandRequestAsynch {

  /** Returns the listener that receives results, or whatever was last registered. */
  OCommandResultListener getResultListener();

  /** Registers the listener that will receive results as they are produced. */
  void setResultListener(OCommandResultListener iListener);

  /** Tells whether this request is executed asynchronously. */
  boolean isAsynchronous();
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandRequestAsynch.java
|
5,831 |
/**
 * Per-field highlighting settings. Plain mutable holder populated by the parser;
 * sentinel values (-1 / null) mean "not set" and are resolved to real defaults
 * elsewhere.
 */
public static class Field {
    // Fields that default to null or -1 are often set to their real default in HighlighterParseElement#parse
    private final String field;
    private int fragmentCharSize = -1;
    private int numberOfFragments = -1;
    private int fragmentOffset = -1;
    private String encoder;
    private String[] preTags;
    private String[] postTags;
    // Boolean (not boolean) so "unset" is distinguishable from false.
    private Boolean scoreOrdered;
    private Boolean highlightFilter;
    private Boolean requireFieldMatch;
    private String highlighterType;
    private Boolean forceSource;
    private String fragmenter;
    private int boundaryMaxScan = -1;
    private Character[] boundaryChars = null;
    private Query highlightQuery;
    private int noMatchSize = -1;
    private Set<String> matchedFields;
    private Map<String, Object> options;
    private int phraseLimit = -1;

    public Field(String field) {
        this.field = field;
    }

    /** Name of the field to highlight (immutable). */
    public String field() {
        return field;
    }

    public int fragmentCharSize() {
        return fragmentCharSize;
    }

    public void fragmentCharSize(int fragmentCharSize) {
        this.fragmentCharSize = fragmentCharSize;
    }

    public int numberOfFragments() {
        return numberOfFragments;
    }

    public void numberOfFragments(int numberOfFragments) {
        this.numberOfFragments = numberOfFragments;
    }

    public int fragmentOffset() {
        return fragmentOffset;
    }

    public void fragmentOffset(int fragmentOffset) {
        this.fragmentOffset = fragmentOffset;
    }

    public String encoder() {
        return encoder;
    }

    public void encoder(String encoder) {
        this.encoder = encoder;
    }

    public String[] preTags() {
        return preTags;
    }

    public void preTags(String[] preTags) {
        this.preTags = preTags;
    }

    public String[] postTags() {
        return postTags;
    }

    public void postTags(String[] postTags) {
        this.postTags = postTags;
    }

    public Boolean scoreOrdered() {
        return scoreOrdered;
    }

    public void scoreOrdered(boolean scoreOrdered) {
        this.scoreOrdered = scoreOrdered;
    }

    public Boolean highlightFilter() {
        return highlightFilter;
    }

    public void highlightFilter(boolean highlightFilter) {
        this.highlightFilter = highlightFilter;
    }

    public Boolean requireFieldMatch() {
        return requireFieldMatch;
    }

    public void requireFieldMatch(boolean requireFieldMatch) {
        this.requireFieldMatch = requireFieldMatch;
    }

    public String highlighterType() {
        return highlighterType;
    }

    public void highlighterType(String type) {
        this.highlighterType = type;
    }

    public Boolean forceSource() {
        return forceSource;
    }

    public void forceSource(boolean forceSource) {
        this.forceSource = forceSource;
    }

    public String fragmenter() {
        return fragmenter;
    }

    public void fragmenter(String fragmenter) {
        this.fragmenter = fragmenter;
    }

    public int boundaryMaxScan() {
        return boundaryMaxScan;
    }

    public void boundaryMaxScan(int boundaryMaxScan) {
        this.boundaryMaxScan = boundaryMaxScan;
    }

    public Character[] boundaryChars() {
        return boundaryChars;
    }

    public void boundaryChars(Character[] boundaryChars) {
        this.boundaryChars = boundaryChars;
    }

    /** Optional query to highlight instead of the main search query. */
    public Query highlightQuery() {
        return highlightQuery;
    }

    public void highlightQuery(Query highlightQuery) {
        this.highlightQuery = highlightQuery;
    }

    public int noMatchSize() {
        return noMatchSize;
    }

    public void noMatchSize(int noMatchSize) {
        this.noMatchSize = noMatchSize;
    }

    public int phraseLimit() {
        return phraseLimit;
    }

    public void phraseLimit(int phraseLimit) {
        this.phraseLimit = phraseLimit;
    }

    public Set<String> matchedFields() {
        return matchedFields;
    }

    public void matchedFields(Set<String> matchedFields) {
        this.matchedFields = matchedFields;
    }

    /** Highlighter-specific options passed through opaquely. */
    public Map<String, Object> options() {
        return options;
    }

    public void options(Map<String, Object> options) {
        this.options = options;
    }
}
| 1no label
|
src_main_java_org_elasticsearch_search_highlight_SearchContextHighlight.java
|
1,088 |
/**
 * Micro-benchmark comparing false-positive rates of Guava-style {@code BloomFilter}
 * ("g") against Lucene's {@code FuzzySet} ("l") when both get the same memory budget.
 */
public class BloomBench {

    public static void main(String[] args) throws Exception {
        final SecureRandom random = new SecureRandom();
        final int elementCount = (int) SizeValue.parseSizeValue("1m").singles();
        final double falsePositiveProbability = 0.01;

        // Size the Lucene set to the same number of bytes the bloom filter uses.
        final BloomFilter gFilter = BloomFilter.create(elementCount, falsePositiveProbability);
        System.out.println("G SIZE: " + new ByteSizeValue(gFilter.getSizeInBytes()));
        final FuzzySet lFilter = FuzzySet.createSetBasedOnMaxMemory((int) gFilter.getSizeInBytes());
        //FuzzySet lFilter = FuzzySet.createSetBasedOnQuality(elementCount, 0.97f);

        // Load both filters with identical random keys.
        for (int i = 0; i < elementCount; i++) {
            final BytesRef key = new BytesRef(Strings.randomBase64UUID(random));
            gFilter.put(key);
            lFilter.addValue(key);
        }

        // Probe with fresh (never-inserted) keys: every "maybe" is a false positive.
        int lFalse = 0;
        int gFalse = 0;
        for (int i = 0; i < elementCount; i++) {
            final BytesRef probe = new BytesRef(Strings.randomBase64UUID(random));
            if (gFilter.mightContain(probe)) {
                gFalse++;
            }
            if (lFilter.contains(probe) == FuzzySet.ContainsResult.MAYBE) {
                lFalse++;
            }
        }
        System.out.println("Failed positives, g[" + gFalse + "], l[" + lFalse + "]");
    }
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_bloom_BloomBench.java
|
544 |
public class StringUtil {

    /**
     * Returns the Adler-32 checksum of the string's bytes.
     * <p>
     * The previous implementation pushed the bytes through a CheckedInputStream and
     * ignored the return value of {@code read()}, wrapping an IOException that could
     * never occur; this version feeds Adler32 directly with identical results.
     * <p>
     * NOTE(review): {@code getBytes()} uses the platform default charset, so values
     * can differ across JVMs for non-ASCII input — kept for backward compatibility.
     *
     * @param test the string to checksum; must not be null
     * @return the Adler-32 checksum of the string's bytes
     */
    public static long getChecksum(String test) {
        byte[] buffer = test.getBytes();
        Adler32 checksum = new Adler32();
        checksum.update(buffer, 0, buffer.length);
        return checksum.getValue();
    }

    /**
     * Scores the "similarity" of two strings by stripping whitespace and a few
     * separator characters, checksumming each, and returning the standard deviation
     * of the two checksum values (0.0 when the stripped forms checksum identically).
     * <p>
     * The redundant {@code new String(...)} defensive copies of the (immutable)
     * inputs were removed; behavior is unchanged.
     */
    public static double determineSimilarity(String test1, String test2) {
        String first = test1.replaceAll("[ \\t\\n\\r\\f\\v\\/'-]", "");
        Long originalChecksum = StringUtil.getChecksum(first);
        String second = test2.replaceAll("[ \\t\\n\\r\\f\\v\\/'-]", "");
        Long myChecksum = StringUtil.getChecksum(second);
        StatCalc calc = new StatCalc();
        calc.enter(originalChecksum);
        calc.enter(myChecksum);
        return calc.getStandardDeviation();
    }

    /**
     * Protect against HTTP Response Splitting: URL-decodes the input and strips
     * spaces and CR/LF characters.
     *
     * @return the decoded input with response-splitting characters removed
     */
    public static String cleanseUrlString(String input) {
        return removeSpecialCharacters(decodeUrl(input));
    }

    /**
     * URL-decodes the input as UTF-8; returns null for null input.
     * On the (impossible for UTF-8) UnsupportedEncodingException, the original
     * string is returned unchanged.
     */
    public static String decodeUrl(String encodedUrl) {
        try {
            return encodedUrl == null ? null : URLDecoder.decode(encodedUrl, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // this should not happen: UTF-8 is a required charset on every JVM
            e.printStackTrace();
            return encodedUrl;
        }
    }

    /** Removes spaces, carriage returns and line feeds; null passes through. */
    public static String removeSpecialCharacters(String input) {
        if (input != null) {
            input = input.replaceAll("[ \\r\\n]", "");
        }
        return input;
    }
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_StringUtil.java
|
1,111 |
/**
 * Benchmark: sequentially appends ~10GB to a file in 1KB chunks, either through a
 * positioned FileChannel or through RandomAccessFile writes, then forces to disk and
 * reports wall-clock time per phase.
 */
public class FsAppendBenchmark {

    public static void main(String[] args) throws Exception {
        // Start from an empty file each run.
        new File("work/test.log").delete();
        RandomAccessFile raf = new RandomAccessFile("work/test.log", "rw");
        raf.setLength(0);

        final boolean useChannel = true;
        final int chunkBytes = (int) ByteSizeValue.parseBytesSizeValue("1k").bytes();
        final long totalBytes = ByteSizeValue.parseBytesSizeValue("10gb").bytes();

        final byte[] payload = new byte[chunkBytes];
        new Random().nextBytes(payload);

        StopWatch watch = new StopWatch().start("write");
        if (useChannel) {
            FileChannel channel = raf.getChannel();
            for (long position = 0; position < totalBytes; position += payload.length) {
                channel.write(ByteBuffer.wrap(payload), position);
            }
            watch.stop().start("flush");
            channel.force(true);
        } else {
            for (long written = 0; written < totalBytes; written += payload.length) {
                raf.write(payload);
            }
            watch.stop().start("flush");
            raf.getFD().sync();
        }
        raf.close();
        watch.stop();
        System.out.println("Wrote [" + (new ByteSizeValue(totalBytes)) + "], chunk [" + (new ByteSizeValue(chunkBytes)) + "], in " + watch);
    }

    private static final ByteBuffer fill = ByteBuffer.allocateDirect(1);

    // public static long padLogFile(long position, long currentSize, long preAllocSize) throws IOException {
    //     if (position + 4096 >= currentSize) {
    //         currentSize = currentSize + preAllocSize;
    //         fill.position(0);
    //         f.getChannel().write(fill, currentSize - fill.remaining());
    //     }
    //     return currentSize;
    // }
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_fs_FsAppendBenchmark.java
|
1,036 |
/**
 * Configuration for a map entry listener: whether it fires only for local events and
 * whether delivered events carry the entry value.
 */
public class EntryListenerConfig extends ListenerConfig {

    // Fire only for events originating on the local member.
    private boolean local = false;
    // Include the entry value in delivered events.
    private boolean includeValue = true;
    // Lazily created immutable view of this config.
    private EntryListenerConfigReadOnly readOnly;

    public EntryListenerConfig() {
        super();
    }

    public EntryListenerConfig(String className, boolean local, boolean includeValue) {
        super(className);
        this.local = local;
        this.includeValue = includeValue;
    }

    public EntryListenerConfig(EntryListener implementation, boolean local, boolean includeValue) {
        super(implementation);
        this.local = local;
        this.includeValue = includeValue;
    }

    /** Copy constructor; the cached read-only view is deliberately not copied. */
    public EntryListenerConfig(EntryListenerConfig config) {
        className = config.getClassName();
        implementation = config.getImplementation();
        local = config.isLocal();
        includeValue = config.isIncludeValue();
    }

    /** Returns (and caches) an immutable view of this configuration. */
    public EntryListenerConfigReadOnly getAsReadOnly() {
        if (readOnly == null) {
            readOnly = new EntryListenerConfigReadOnly(this);
        }
        return readOnly;
    }

    public EntryListener getImplementation() {
        return (EntryListener) implementation;
    }

    public EntryListenerConfig setImplementation(final EntryListener implementation) {
        super.setImplementation(implementation);
        return this;
    }

    public boolean isLocal() {
        return local;
    }

    public EntryListenerConfig setLocal(boolean local) {
        this.local = local;
        return this;
    }

    public boolean isIncludeValue() {
        return includeValue;
    }

    public EntryListenerConfig setIncludeValue(boolean includeValue) {
        this.includeValue = includeValue;
        return this;
    }

    @Override
    public String toString() {
        return "EntryListenerConfig" + "{local=" + local + ", includeValue=" + includeValue + '}';
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_config_EntryListenerConfig.java
|
378 |
/**
 * JPA-backed DAO for {@link Locale} entities. All read queries are marked cacheable
 * so repeated lookups hit the Hibernate query cache.
 */
@Repository("blLocaleDao")
public class LocaleDaoImpl implements LocaleDao {
    private static final Log LOG = LogFactory.getLog(LocaleDaoImpl.class);

    @PersistenceContext(unitName = "blPU")
    protected EntityManager em;

    @Resource(name="blEntityConfiguration")
    protected EntityConfiguration entityConfiguration;

    /**
     * @return The locale for the passed in code, or null if none exists.
     * If multiple locales share the code, the first is returned and a warning logged.
     */
    @Override
    public Locale findLocaleByCode(String localeCode) {
        Query query = em.createNamedQuery("BC_READ_LOCALE_BY_CODE");
        query.setParameter("localeCode", localeCode);
        query.setHint(org.hibernate.ejb.QueryHints.HINT_CACHEABLE, true);
        List<Locale> localeList = (List<Locale>) query.getResultList();
        if (localeList.size() >= 1) {
            if (localeList.size() > 1) {
                LOG.warn("Locale code " + localeCode + " exists for more than one locale");
            }
            return localeList.get(0);
        }
        return null;
    }

    /**
     * Returns the locale flagged as default, or null if none is configured.
     * If multiple defaults exist, the first is returned and a warning logged.
     *
     * @return The default locale
     */
    @Override
    public Locale findDefaultLocale() {
        Query query = em.createNamedQuery("BC_READ_DEFAULT_LOCALE");
        query.setHint(org.hibernate.ejb.QueryHints.HINT_CACHEABLE, true);
        List<Locale> localeList = (List<Locale>) query.getResultList();
        if (localeList.size() >= 1) {
            if (localeList.size() > 1) {
                LOG.warn("There is more than one default locale configured");
            }
            return localeList.get(0);
        }
        return null;
    }

    /**
     * Returns all supported BLC locales.
     * @return every persisted locale
     */
    public List<Locale> findAllLocales() {
        Query query = em.createNamedQuery("BC_READ_ALL_LOCALES");
        query.setHint(org.hibernate.ejb.QueryHints.HINT_CACHEABLE, true);
        return (List<Locale>) query.getResultList();
    }

    /** Persists or updates the locale and returns the managed instance. */
    @Override
    public Locale save(Locale locale){
        return em.merge(locale);
    }
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_locale_dao_LocaleDaoImpl.java
|
487 |
/**
 * Retries a failed client invocation after a short back-off. Runs on behalf of the
 * enclosing ClientCallFuture.
 */
class ReSendTask implements Runnable {
    public void run() {
        try {
            // Back off briefly before retrying so the cluster has a chance to recover.
            sleep();
            invocationService.reSend(ClientCallFuture.this);
        } catch (Exception e) {
            // Re-send failed for good: if this call carries an event handler, queue it
            // for listener re-registration; otherwise complete the future with the error.
            if (handler != null) {
                invocationService.registerFailedListener(ClientCallFuture.this);
            } else {
                setResponse(e);
            }
        }
    }

    /** Sleeps ~250ms between attempts; interruption just shortens the back-off. */
    private void sleep(){
        try {
            Thread.sleep(250);
        } catch (InterruptedException ignored) {
        }
    }
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientCallFuture.java
|
162 |
/**
 * Doubly-linked node for the lock-free deque. Links and item are updated via Unsafe
 * CAS / ordered writes against cached field offsets; the exact memory-ordering
 * choices below are deliberate — do not "simplify" them.
 */
static final class Node<E> {
    volatile Node<E> prev;
    volatile E item;
    volatile Node<E> next;

    Node() { // default constructor for NEXT_TERMINATOR, PREV_TERMINATOR
    }

    /**
     * Constructs a new node. Uses relaxed write because item can
     * only be seen after publication via casNext or casPrev.
     */
    Node(E item) {
        UNSAFE.putObject(this, itemOffset, item);
    }

    boolean casItem(E cmp, E val) {
        return UNSAFE.compareAndSwapObject(this, itemOffset, cmp, val);
    }

    // Ordered (store-store fenced) write: cheaper than volatile, sufficient because
    // a subsequent CAS publishes the node.
    void lazySetNext(Node<E> val) {
        UNSAFE.putOrderedObject(this, nextOffset, val);
    }

    boolean casNext(Node<E> cmp, Node<E> val) {
        return UNSAFE.compareAndSwapObject(this, nextOffset, cmp, val);
    }

    void lazySetPrev(Node<E> val) {
        UNSAFE.putOrderedObject(this, prevOffset, val);
    }

    boolean casPrev(Node<E> cmp, Node<E> val) {
        return UNSAFE.compareAndSwapObject(this, prevOffset, cmp, val);
    }

    // Unsafe mechanics
    private static final sun.misc.Unsafe UNSAFE;
    private static final long prevOffset;
    private static final long itemOffset;
    private static final long nextOffset;
    static {
        try {
            // Resolve field offsets once; any reflection failure is fatal for the class.
            UNSAFE = getUnsafe();
            Class<?> k = Node.class;
            prevOffset = UNSAFE.objectFieldOffset
                (k.getDeclaredField("prev"));
            itemOffset = UNSAFE.objectFieldOffset
                (k.getDeclaredField("item"));
            nextOffset = UNSAFE.objectFieldOffset
                (k.getDeclaredField("next"));
        } catch (Exception e) {
            throw new Error(e);
        }
    }
}
| 0true
|
src_main_java_jsr166y_ConcurrentLinkedDeque.java
|
164 |
// Runs the key scan through the read-retry wrapper; toString() labels the
// operation for logging/diagnostics inside executeRead.
return executeRead(new Callable<KeyIterator>() {
    @Override
    public KeyIterator call() throws Exception {
        return edgeStore.getKeys(range, storeTx);
    }
    @Override
    public String toString() {
        return "EdgeStoreKeys";
    }
});
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_BackendTransaction.java
|
208 |
/**
 * Implements the {@code _missing_:field} query: matches documents that have NO value
 * for the given field, built as the negation of a cached "exists" range filter.
 */
public class MissingFieldQueryExtension implements FieldQueryExtension {

    public static final String NAME = "_missing_";

    @Override
    public Query query(QueryParseContext parseContext, String queryText) {
        final String fieldName = queryText;
        MapperService.SmartNameFieldMappers smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);

        // Prefer a mapper-aware open-ended range filter; fall back to a raw term range.
        Filter filter = null;
        if (smartNameFieldMappers != null && smartNameFieldMappers.hasMapper()) {
            filter = smartNameFieldMappers.mapper().rangeFilter(null, null, true, true, parseContext);
        }
        if (filter == null) {
            filter = new TermRangeFilter(fieldName, null, null, true, true);
        }

        // we always cache this one, really does not change... (exists)
        filter = parseContext.cacheFilter(filter, null);
        // missing == NOT exists; cache the negation too so it is fast on reuse
        filter = parseContext.cacheFilter(new NotFilter(filter), null);

        filter = wrapSmartNameFilter(filter, smartNameFieldMappers, parseContext);
        return new XConstantScoreQuery(filter);
    }
}
| 1no label
|
src_main_java_org_apache_lucene_queryparser_classic_MissingFieldQueryExtension.java
|
226 |
XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
};
| 0true
|
src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java
|
2,519 |
public class SmileXContent implements XContent {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(smileXContent);
}
final static SmileFactory smileFactory;
public final static SmileXContent smileXContent;
static {
smileFactory = new SmileFactory();
smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); // for now, this is an overhead, might make sense for web sockets
smileXContent = new SmileXContent();
}
private SmileXContent() {
}
@Override
public XContentType type() {
return XContentType.SMILE;
}
@Override
public byte streamSeparator() {
return (byte) 0xFF;
}
@Override
public XContentGenerator createGenerator(OutputStream os) throws IOException {
return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8));
}
@Override
public XContentGenerator createGenerator(Writer writer) throws IOException {
return new SmileXContentGenerator(smileFactory.createGenerator(writer));
}
@Override
public XContentParser createParser(String content) throws IOException {
return new SmileXContentParser(smileFactory.createParser(new FastStringReader(content)));
}
@Override
public XContentParser createParser(InputStream is) throws IOException {
return new SmileXContentParser(smileFactory.createParser(is));
}
@Override
public XContentParser createParser(byte[] data) throws IOException {
return new SmileXContentParser(smileFactory.createParser(data));
}
@Override
public XContentParser createParser(byte[] data, int offset, int length) throws IOException {
return new SmileXContentParser(smileFactory.createParser(data, offset, length));
}
@Override
public XContentParser createParser(BytesReference bytes) throws IOException {
if (bytes.hasArray()) {
return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
}
return createParser(bytes.streamInput());
}
@Override
public XContentParser createParser(Reader reader) throws IOException {
return new JsonXContentParser(smileFactory.createParser(reader));
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_xcontent_smile_SmileXContent.java
|
121 |
class FillInArgumentNameProposal extends CorrectionProposal {
public FillInArgumentNameProposal(String name, Change change) {
super("Fill in argument name '" + name + "'", change, null);
}
static void addFillInArgumentNameProposal(Collection<ICompletionProposal> proposals,
IDocument doc, IFile file, Tree.SpecifiedArgument sa) {
Tree.Identifier id = sa.getIdentifier();
if (id.getToken()==null) {
TextChange change = new TextFileChange("Convert to Block", file);
change.setEdit(new MultiTextEdit());
Tree.Expression e = sa.getSpecifierExpression().getExpression();
if (e!=null) {
final String name = id.getText();
if (e.getTerm() instanceof Tree.FunctionArgument) {
//convert anon functions to typed named argument
//i.e. (Param param) => result;
//becomes function fun(Param param) => result;
//and (Param param) { return result; };
//becomes function fun(Param param) { return result; }
//and void (Param param) {};
//becomes void fun(Param param) {}
Tree.FunctionArgument fa = (Tree.FunctionArgument) e.getTerm();
if (!fa.getParameterLists().isEmpty()) {
int startIndex = fa.getParameterLists().get(0).getStartIndex();
if (fa.getType().getToken()==null) {
//only really necessary if the anon
//function has a block instead of =>
change.addEdit(new InsertEdit(startIndex, "function "));
}
change.addEdit(new InsertEdit(startIndex, name));
try {
//if it is an anon function with a body,
//we must remove the trailing ; which is
//required by the named arg list syntax
if (fa.getBlock()!=null &&
doc.getChar(sa.getStopIndex())==';') {
change.addEdit(new DeleteEdit(sa.getStopIndex(), 1));
}
}
catch (Exception ex) {}
}
}
else {
//convert other args to specified named args
//i.e. arg;
//becomes name = arg;
change.addEdit(new InsertEdit(sa.getStartIndex(), name + " = "));
}
if (change.getEdit().hasChildren()) {
proposals.add(new FillInArgumentNameProposal(name, change));
}
}
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_FillInArgumentNameProposal.java
|
24 |
private SortedSet<Edge> outEdges = new ConcurrentSkipListSet<Edge>(new Comparator<Edge>() {
@Override
public int compare(Edge e1, Edge e2) {
return e1.getEnd().compareTo(e2.getEnd());
}
});
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_TestByteBuffer.java
|
94 |
final class ClientExceptionConverters {
private static final JavaClientExceptionConverter JAVA = new JavaClientExceptionConverter();
private static final GenericClientExceptionConverter GENERIC = new GenericClientExceptionConverter();
private ClientExceptionConverters() {
}
static ClientExceptionConverter get(ClientType type) {
if (type == ClientType.JAVA) {
return JAVA;
}
return GENERIC;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_ClientExceptionConverters.java
|
1,540 |
final Comparator<MutableShardRouting> comparator = new Comparator<MutableShardRouting>() {
@Override
public int compare(MutableShardRouting o1,
MutableShardRouting o2) {
if (o1.primary() ^ o2.primary()) {
return o1.primary() ? -1 : o2.primary() ? 1 : 0;
}
final int indexCmp;
if ((indexCmp = o1.index().compareTo(o2.index())) == 0) {
return o1.getId() - o2.getId();
}
return indexCmp;
}
};
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_allocation_allocator_BalancedShardsAllocator.java
|
1,025 |
public abstract class TransportShardSingleOperationAction<Request extends SingleShardOperationRequest, Response extends ActionResponse> extends TransportAction<Request, Response> {
protected final ClusterService clusterService;
protected final TransportService transportService;
final String transportAction;
final String transportShardAction;
final String executor;
protected TransportShardSingleOperationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService) {
super(settings, threadPool);
this.clusterService = clusterService;
this.transportService = transportService;
this.transportAction = transportAction();
this.transportShardAction = transportAction() + "/s";
this.executor = executor();
transportService.registerHandler(transportAction, new TransportHandler());
transportService.registerHandler(transportShardAction, new ShardTransportHandler());
}
@Override
protected void doExecute(Request request, ActionListener<Response> listener) {
new AsyncSingleAction(request, listener).start();
}
protected abstract String transportAction();
protected abstract String executor();
protected abstract Response shardOperation(Request request, int shardId) throws ElasticsearchException;
protected abstract Request newRequest();
protected abstract Response newResponse();
protected abstract ClusterBlockException checkGlobalBlock(ClusterState state, Request request);
protected abstract ClusterBlockException checkRequestBlock(ClusterState state, Request request);
protected void resolveRequest(ClusterState state, Request request) {
request.index(state.metaData().concreteIndex(request.index()));
}
protected abstract ShardIterator shards(ClusterState state, Request request) throws ElasticsearchException;
class AsyncSingleAction {
private final ActionListener<Response> listener;
private final ShardIterator shardIt;
private final Request request;
private final DiscoveryNodes nodes;
private volatile Throwable lastFailure;
private AsyncSingleAction(Request request, ActionListener<Response> listener) {
this.request = request;
this.listener = listener;
ClusterState clusterState = clusterService.state();
nodes = clusterState.nodes();
ClusterBlockException blockException = checkGlobalBlock(clusterState, request);
if (blockException != null) {
throw blockException;
}
resolveRequest(clusterState, request);
blockException = checkRequestBlock(clusterState, request);
if (blockException != null) {
throw blockException;
}
this.shardIt = shards(clusterState, request);
}
public void start() {
perform(null);
}
private void onFailure(ShardRouting shardRouting, Throwable e) {
if (logger.isTraceEnabled() && e != null) {
logger.trace("{}: failed to execute [{}]", e, shardRouting, request);
}
perform(e);
}
private void perform(@Nullable final Throwable currentFailure) {
Throwable lastFailure = this.lastFailure;
if (lastFailure == null || TransportActions.isReadOverrideException(currentFailure)) {
lastFailure = currentFailure;
this.lastFailure = currentFailure;
}
final ShardRouting shardRouting = shardIt.nextOrNull();
if (shardRouting == null) {
Throwable failure = lastFailure;
if (failure == null || isShardNotAvailableException(failure)) {
failure = new NoShardAvailableActionException(shardIt.shardId());
} else {
if (logger.isDebugEnabled()) {
logger.debug("{}: failed to execute [{}]", failure, shardIt.shardId(), request);
}
}
listener.onFailure(failure);
return;
}
if (shardRouting.currentNodeId().equals(nodes.localNodeId())) {
try {
if (request.operationThreaded()) {
request.beforeLocalFork();
threadPool.executor(executor).execute(new Runnable() {
@Override
public void run() {
try {
Response response = shardOperation(request, shardRouting.id());
listener.onResponse(response);
} catch (Throwable e) {
onFailure(shardRouting, e);
}
}
});
} else {
final Response response = shardOperation(request, shardRouting.id());
listener.onResponse(response);
}
} catch (Throwable e) {
onFailure(shardRouting, e);
}
} else {
DiscoveryNode node = nodes.get(shardRouting.currentNodeId());
if (node == null) {
onFailure(shardRouting, new NoShardAvailableActionException(shardIt.shardId()));
} else {
transportService.sendRequest(node, transportShardAction, new ShardSingleOperationRequest(request, shardRouting.id()), new BaseTransportResponseHandler<Response>() {
@Override
public Response newInstance() {
return newResponse();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void handleResponse(final Response response) {
listener.onResponse(response);
}
@Override
public void handleException(TransportException exp) {
onFailure(shardRouting, exp);
}
});
}
}
}
}
private class TransportHandler extends BaseTransportRequestHandler<Request> {
@Override
public Request newInstance() {
return newRequest();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
@Override
public void messageReceived(Request request, final TransportChannel channel) throws Exception {
// no need to have a threaded listener since we just send back a response
request.listenerThreaded(false);
// if we have a local operation, execute it on a thread since we don't spawn
request.operationThreaded(true);
execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("failed to send response for get", e1);
}
}
});
}
}
private class ShardTransportHandler extends BaseTransportRequestHandler<ShardSingleOperationRequest> {
@Override
public ShardSingleOperationRequest newInstance() {
return new ShardSingleOperationRequest();
}
@Override
public String executor() {
return executor;
}
@Override
public void messageReceived(final ShardSingleOperationRequest request, final TransportChannel channel) throws Exception {
Response response = shardOperation(request.request(), request.shardId());
channel.sendResponse(response);
}
}
class ShardSingleOperationRequest extends TransportRequest {
private Request request;
private int shardId;
ShardSingleOperationRequest() {
}
public ShardSingleOperationRequest(Request request, int shardId) {
super(request);
this.request = request;
this.shardId = shardId;
}
public Request request() {
return request;
}
public int shardId() {
return shardId;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
request = newRequest();
request.readFrom(in);
shardId = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
request.writeTo(out);
out.writeVInt(shardId);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_single_shard_TransportShardSingleOperationAction.java
|
812 |
public static class Item {
private final int slot;
private final PercolateShardRequest request;
public Item(int slot, PercolateShardRequest request) {
this.slot = slot;
this.request = request;
}
public int slot() {
return slot;
}
public PercolateShardRequest request() {
return request;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_percolate_TransportShardMultiPercolateAction.java
|
356 |
future.andThen(new ExecutionCallback<Map<String, List<Integer>>>() {
@Override
public void onResponse(Map<String, List<Integer>> response) {
listenerResults.putAll(response);
semaphore.release();
}
@Override
public void onFailure(Throwable t) {
semaphore.release();
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
|
330 |
Comparator<Object> nameCompare = new Comparator<Object>() {
public int compare(Object arg0, Object arg1) {
return ((Node) arg0).getNodeName().compareTo(((Node) arg1).getNodeName());
}
};
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_handlers_AttributePreserveInsert.java
|
630 |
public interface BroadleafVariableExpression {
public String getName();
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_web_expression_BroadleafVariableExpression.java
|
65 |
public interface TitanIndexQuery {
/**
* Specifies the maxium number of elements to return
*
* @param limit
* @return
*/
public TitanIndexQuery limit(int limit);
/**
* Specifies the offset of the query. Query results will be retrieved starting at the given offset.
* @param offset
* @return
*/
public TitanIndexQuery offset(int offset);
/**
* Adds the given parameter to the list of parameters of this query.
* Parameters are passed right through to the indexing backend to modify the query behavior.
* @param para
* @return
*/
public TitanIndexQuery addParameter(Parameter para);
/**
* Adds the given parameters to the list of parameters of this query.
* Parameters are passed right through to the indexing backend to modify the query behavior.
* @param paras
* @return
*/
public TitanIndexQuery addParameters(Iterable<Parameter> paras);
/**
* Adds the given parameters to the list of parameters of this query.
* Parameters are passed right through to the indexing backend to modify the query behavior.
* @param paras
* @return
*/
public TitanIndexQuery addParameters(Parameter... paras);
/**
* Sets the element identifier string that is used by this query builder as the token to identifier key references
* in the query string.
* <p/>
* For example, in the query 'v.name: Tom' the element identifier is 'v.'
*
*
* @param identifier The element identifier which must not be blank
* @return This query builder
*/
public TitanIndexQuery setElementIdentifier(String identifier);
/**
* Returns all vertices that match the query in the indexing backend.
*
* @return
*/
public Iterable<Result<Vertex>> vertices();
/**
* Returns all edges that match the query in the indexing backend.
*
* @return
*/
public Iterable<Result<Edge>> edges();
/**
* Returns all properties that match the query in the indexing backend.
*
* @return
*/
public Iterable<Result<TitanProperty>> properties();
/**
* Container of a query result with its score.
* @param <V>
*/
public interface Result<V extends Element> {
/**
* Returns the element that matches the query
*
* @return
*/
public V getElement();
/**
* Returns the score of the result with respect to the query (if available)
* @return
*/
public double getScore();
}
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_TitanIndexQuery.java
|
443 |
public static class JvmVersion implements Streamable {
String version;
String vmName;
String vmVersion;
String vmVendor;
JvmVersion(JvmInfo jvmInfo) {
version = jvmInfo.version();
vmName = jvmInfo.vmName();
vmVersion = jvmInfo.vmVersion();
vmVendor = jvmInfo.vmVendor();
}
JvmVersion() {
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JvmVersion jvm = (JvmVersion) o;
return vmVersion.equals(jvm.vmVersion) && vmVendor.equals(jvm.vmVendor);
}
@Override
public int hashCode() {
return vmVersion.hashCode();
}
public static JvmVersion readJvmVersion(StreamInput in) throws IOException {
JvmVersion jvm = new JvmVersion();
jvm.readFrom(in);
return jvm;
}
@Override
public void readFrom(StreamInput in) throws IOException {
version = in.readString();
vmName = in.readString();
vmVersion = in.readString();
vmVendor = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(version);
out.writeString(vmName);
out.writeString(vmVersion);
out.writeString(vmVendor);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java
|
260 |
public interface OCommand {
/**
* Executes command.
*
* @return The result of command if any, otherwise null
*/
public Object execute();
public OCommandContext getContext();
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommand.java
|
936 |
public class OfferRuleType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, OfferRuleType> TYPES = new LinkedHashMap<String, OfferRuleType>();
public static final OfferRuleType ORDER = new OfferRuleType("ORDER", "Order");
public static final OfferRuleType FULFILLMENT_GROUP = new OfferRuleType("FULFILLMENT_GROUP", "Fulfillment Group");
public static final OfferRuleType CUSTOMER = new OfferRuleType("CUSTOMER", "Customer");
public static final OfferRuleType TIME = new OfferRuleType("TIME", "Time");
public static final OfferRuleType REQUEST = new OfferRuleType("REQUEST", "Request");
public static OfferRuleType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public OfferRuleType() {
//do nothing
}
public OfferRuleType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OfferRuleType other = (OfferRuleType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_type_OfferRuleType.java
|
974 |
public class IndexReplicationOperationRequest<T extends IndexReplicationOperationRequest> extends ActionRequest<T> {
protected TimeValue timeout = ShardReplicationOperationRequest.DEFAULT_TIMEOUT;
protected String index;
protected ReplicationType replicationType = ReplicationType.DEFAULT;
protected WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;
public TimeValue timeout() {
return timeout;
}
public String index() {
return this.index;
}
@SuppressWarnings("unchecked")
public T index(String index) {
this.index = index;
return (T) this;
}
/**
* Sets the replication type.
*/
@SuppressWarnings("unchecked")
public T replicationType(ReplicationType replicationType) {
this.replicationType = replicationType;
return (T) this;
}
/**
* Sets the replication type.
*/
public T replicationType(String replicationType) {
return replicationType(ReplicationType.fromString(replicationType));
}
public ReplicationType replicationType() {
return this.replicationType;
}
public WriteConsistencyLevel consistencyLevel() {
return this.consistencyLevel;
}
/**
* Sets the consistency level of write. Defaults to {@link org.elasticsearch.action.WriteConsistencyLevel#DEFAULT}
*/
@SuppressWarnings("unchecked")
public T consistencyLevel(WriteConsistencyLevel consistencyLevel) {
this.consistencyLevel = consistencyLevel;
return (T) this;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (index == null) {
validationException = addValidationError("index name missing", validationException);
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
replicationType = ReplicationType.fromId(in.readByte());
consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
timeout = TimeValue.readTimeValue(in);
index = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeByte(replicationType.id());
out.writeByte(consistencyLevel.id());
timeout.writeTo(out);
out.writeString(index);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_replication_IndexReplicationOperationRequest.java
|
1,284 |
@Test
public class LocalPaginatedStorageCreateCrashRestore {
private ODatabaseDocumentTx baseDocumentTx;
private ODatabaseDocumentTx testDocumentTx;
private File buildDir;
private final AtomicLong idGen = new AtomicLong();
private ExecutorService executorService = Executors.newCachedThreadPool();
private Process process;
@BeforeClass
public void beforeClass() throws Exception {
OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0);
OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0);
String buildDirectory = System.getProperty("buildDirectory", ".");
buildDirectory += "/localPaginatedStorageCreateCrashRestore";
buildDir = new File(buildDirectory);
if (buildDir.exists())
buildDir.delete();
buildDir.mkdir();
String javaExec = System.getProperty("java.home") + "/bin/java";
System.setProperty("ORIENTDB_HOME", buildDirectory);
ProcessBuilder processBuilder = new ProcessBuilder(javaExec, "-Xmx2048m", "-classpath", System.getProperty("java.class.path"),
"-DORIENTDB_HOME=" + buildDirectory, RemoteDBRunner.class.getName());
processBuilder.inheritIO();
process = processBuilder.start();
Thread.sleep(5000);
}
@AfterClass
public void afterClass() {
testDocumentTx.drop();
baseDocumentTx.drop();
Assert.assertTrue(buildDir.delete());
}
@BeforeMethod
public void beforeMethod() {
baseDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/baseLocalPaginatedStorageCrashRestore");
if (baseDocumentTx.exists()) {
baseDocumentTx.open("admin", "admin");
baseDocumentTx.drop();
}
baseDocumentTx.create();
testDocumentTx = new ODatabaseDocumentTx("remote:localhost:3500/testLocalPaginatedStorageCrashRestore");
testDocumentTx.open("admin", "admin");
}
public void testDocumentCreation() throws Exception {
createSchema(baseDocumentTx);
createSchema(testDocumentTx);
List<Future> futures = new ArrayList<Future>();
for (int i = 0; i < 5; i++) {
futures.add(executorService.submit(new DataPropagationTask(baseDocumentTx, testDocumentTx)));
}
Thread.sleep(150000);
long lastTs = System.currentTimeMillis();
process.destroy();
for (Future future : futures) {
try {
future.get();
} catch (Exception e) {
e.printStackTrace();
}
}
testDocumentTx = new ODatabaseDocumentTx("plocal:" + buildDir.getAbsolutePath() + "/testLocalPaginatedStorageCrashRestore");
testDocumentTx.open("admin", "admin");
testDocumentTx.close();
testDocumentTx.open("admin", "admin");
compareDocuments(lastTs);
}
private void createSchema(ODatabaseDocumentTx dbDocumentTx) {
ODatabaseRecordThreadLocal.INSTANCE.set(dbDocumentTx);
OSchema schema = dbDocumentTx.getMetadata().getSchema();
if (!schema.existsClass("TestClass")) {
OClass testClass = schema.createClass("TestClass");
testClass.createProperty("id", OType.LONG);
testClass.createProperty("timestamp", OType.LONG);
testClass.createProperty("stringValue", OType.STRING);
testClass.createIndex("idIndex", OClass.INDEX_TYPE.UNIQUE, "id");
schema.save();
}
}
private void compareDocuments(long lastTs) {
long minTs = Long.MAX_VALUE;
int clusterId = baseDocumentTx.getClusterIdByName("TestClass");
OStorage baseStorage = baseDocumentTx.getStorage();
OPhysicalPosition[] physicalPositions = baseStorage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition(
OClusterPositionFactory.INSTANCE.valueOf(0)));
int recordsRestored = 0;
int recordsTested = 0;
while (physicalPositions.length > 0) {
final ORecordId rid = new ORecordId(clusterId);
for (OPhysicalPosition physicalPosition : physicalPositions) {
rid.clusterPosition = physicalPosition.clusterPosition;
ODatabaseRecordThreadLocal.INSTANCE.set(baseDocumentTx);
ODocument baseDocument = baseDocumentTx.load(rid);
ODatabaseRecordThreadLocal.INSTANCE.set(testDocumentTx);
List<ODocument> testDocuments = testDocumentTx.query(new OSQLSynchQuery<ODocument>("select from TestClass where id = "
+ baseDocument.field("id")));
if (testDocuments.size() == 0) {
if (((Long) baseDocument.field("timestamp")) < minTs)
minTs = baseDocument.field("timestamp");
} else {
ODocument testDocument = testDocuments.get(0);
Assert.assertEquals(testDocument.field("id"), baseDocument.field("id"));
Assert.assertEquals(testDocument.field("timestamp"), baseDocument.field("timestamp"));
Assert.assertEquals(testDocument.field("stringValue"), baseDocument.field("stringValue"));
recordsRestored++;
}
recordsTested++;
if (recordsTested % 10000 == 0)
System.out.println(recordsTested + " were tested, " + recordsRestored + " were restored ...");
}
physicalPositions = baseStorage.higherPhysicalPositions(clusterId, physicalPositions[physicalPositions.length - 1]);
}
System.out.println(recordsRestored + " records were restored. Total records " + recordsTested
+ ". Max interval for lost records " + (lastTs - minTs));
}
public static final class RemoteDBRunner {
public static void main(String[] args) throws Exception {
OGlobalConfiguration.CACHE_LEVEL1_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL1_SIZE.setValue(0);
OGlobalConfiguration.CACHE_LEVEL2_ENABLED.setValue(false);
OGlobalConfiguration.CACHE_LEVEL2_SIZE.setValue(0);
OServer server = OServerMain.create();
server.startup(RemoteDBRunner.class
.getResourceAsStream("/com/orientechnologies/orient/core/storage/impl/local/paginated/db-create-config.xml"));
server.activate();
while (true)
;
}
}
public class DataPropagationTask implements Callable<Void> {
private ODatabaseDocumentTx baseDB;
private ODatabaseDocumentTx testDB;
public DataPropagationTask(ODatabaseDocumentTx baseDB, ODatabaseDocumentTx testDocumentTx) {
this.baseDB = new ODatabaseDocumentTx(baseDB.getURL());
this.testDB = new ODatabaseDocumentTx(testDocumentTx.getURL());
}
@Override
public Void call() throws Exception {
Random random = new Random();
baseDB.open("admin", "admin");
testDB.open("admin", "admin");
try {
while (true) {
final ODocument document = new ODocument("TestClass");
document.field("id", idGen.incrementAndGet());
document.field("timestamp", System.currentTimeMillis());
document.field("stringValue", "sfe" + random.nextLong());
saveDoc(document);
}
} finally {
baseDB.close();
testDB.close();
}
}
private void saveDoc(ODocument document) {
ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
ODocument testDoc = new ODocument();
document.copyTo(testDoc);
document.save();
ODatabaseRecordThreadLocal.INSTANCE.set(testDB);
testDoc.save();
ODatabaseRecordThreadLocal.INSTANCE.set(baseDB);
}
}
}
| 1no label
|
server_src_test_java_com_orientechnologies_orient_core_storage_impl_local_paginated_LocalPaginatedStorageCreateCrashRestore.java
|
587 |
getEntries(iKeys, new IndexEntriesResultListener() {
@Override
public boolean addResult(ODocument entry) {
result.add(entry);
return true;
}
});
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_index_OIndexAbstract.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.