| Unnamed: 0 (int64, 0-6.45k) | func (string, lengths 29-253k) | target (class label, 2 classes) | project (string, lengths 36-167) |
|---|---|---|---|
17 |
final class EntryIterator extends AbstractEntryIterator<K, V, Map.Entry<K, V>> {
EntryIterator(final OMVRBTreeEntry<K, V> first) {
super(first);
}
public Map.Entry<K, V> next() {
return nextEntry();
}
}
| 0 (true) | commons_src_main_java_com_orientechnologies_common_collection_OMVRBTree.java |
1,252 |
new OProfilerHookValue() {
public Object getValue() {
lock.readLock().lock();
try {
return bufferPoolLRU.size();
} finally {
lock.readLock().unlock();
}
}
});
| 0 (true) | core_src_main_java_com_orientechnologies_orient_core_storage_fs_OMMapManagerOld.java |
1,513 |
@SuppressWarnings("unchecked")
public class OObjectEnumLazySet<TYPE extends Enum> extends HashSet<TYPE> implements OObjectLazyEnumSerializer<Set<TYPE>>,
Serializable {
private static final long serialVersionUID = -7698875159671927472L;
private final ORecord<?> sourceRecord;
private final Set<Object> underlying;
private boolean converted = false;
private final Class<Enum> enumClass;
public OObjectEnumLazySet(final Class<Enum> iEnumClass, final ORecord<?> iSourceRecord, final Set<Object> iRecordSource) {
this.sourceRecord = iSourceRecord;
this.underlying = iRecordSource;
this.enumClass = iEnumClass;
}
public OObjectEnumLazySet(final Class<Enum> iEnumClass, final ORecord<?> iSourceRecord, final Set<Object> iRecordSource,
final Set<? extends TYPE> iSourceCollection) {
this.sourceRecord = iSourceRecord;
this.underlying = iRecordSource;
this.enumClass = iEnumClass;
convertAll();
addAll(iSourceCollection);
}
public Iterator<TYPE> iterator() {
return (Iterator<TYPE>) new OObjectEnumLazyIterator<TYPE>(enumClass, sourceRecord, underlying.iterator());
}
public int size() {
return underlying.size();
}
public boolean isEmpty() {
return underlying.isEmpty();
}
public boolean contains(final Object o) {
boolean underlyingContains = underlying.contains(o.toString());
return underlyingContains || super.contains(o);
}
public Object[] toArray() {
return toArray(new Object[size()]);
}
public <T> T[] toArray(final T[] a) {
convertAll();
return super.toArray(a);
}
public boolean add(final TYPE e) {
underlying.add(e.name());
return super.add(e);
}
public boolean remove(final Object e) {
underlying.remove(e.toString());
return super.remove(e);
}
public boolean containsAll(final Collection<?> c) {
for (Object o : c)
if (!super.contains(o) && !underlying.contains(o.toString()))
return false;
return true;
}
public boolean addAll(final Collection<? extends TYPE> c) {
boolean modified = false;
setDirty();
for (Object o : c)
modified = add((TYPE) o) || modified;
return modified;
}
public boolean retainAll(final Collection<?> c) {
boolean modified = false;
Iterator<TYPE> e = iterator();
while (e.hasNext()) {
if (!c.contains(e.next())) {
remove(e);
modified = true;
}
}
return modified;
}
public void clear() {
setDirty();
underlying.clear();
}
public boolean removeAll(final Collection<?> c) {
setDirty();
boolean modified = super.removeAll(c);
for (Object o : c) {
modified = modified || underlying.remove(o.toString());
}
return modified;
}
public boolean isConverted() {
return converted;
}
@Override
public String toString() {
return underlying.toString();
}
public void setDirty() {
if (sourceRecord != null)
sourceRecord.setDirty();
}
public void detach() {
convertAll();
}
public void detach(boolean nonProxiedInstance) {
convertAll();
}
public void detachAll(boolean nonProxiedInstance) {
convertAll();
}
@Override
public Set<TYPE> getNonOrientInstance() {
Set<TYPE> set = new HashSet<TYPE>();
set.addAll(this);
return set;
}
@Override
public Object getUnderlying() {
return underlying;
}
protected void convertAll() {
if (converted)
return;
super.clear();
for (Object o : underlying) {
if (o instanceof Number)
o = enumClass.getEnumConstants()[((Number) o).intValue()];
else
o = Enum.valueOf(enumClass, o.toString());
super.add((TYPE) o);
}
converted = true;
}
}
| 0 (true) | object_src_main_java_com_orientechnologies_orient_object_enumerations_OObjectEnumLazySet.java |
909 |
public interface BaseProcessor {
public List<Offer> filterOffers(List<Offer> offers, Customer customer);
}
| 0 (true) | core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_processor_BaseProcessor.java |
2,528 |
public class XContentMapValues {
/**
* Extracts raw values (string, int, and so on) based on the path provided, returning all of them
* as a single list.
*/
public static List<Object> extractRawValues(String path, Map<String, Object> map) {
List<Object> values = Lists.newArrayList();
String[] pathElements = Strings.splitStringToArray(path, '.');
if (pathElements.length == 0) {
return values;
}
extractRawValues(values, map, pathElements, 0);
return values;
}
@SuppressWarnings({"unchecked"})
private static void extractRawValues(List values, Map<String, Object> part, String[] pathElements, int index) {
if (index == pathElements.length) {
return;
}
String key = pathElements[index];
Object currentValue = part.get(key);
int nextIndex = index + 1;
while (currentValue == null && nextIndex != pathElements.length) {
key += "." + pathElements[nextIndex];
currentValue = part.get(key);
nextIndex++;
}
if (currentValue == null) {
return;
}
if (currentValue instanceof Map) {
extractRawValues(values, (Map<String, Object>) currentValue, pathElements, nextIndex);
} else if (currentValue instanceof List) {
extractRawValues(values, (List) currentValue, pathElements, nextIndex);
} else {
values.add(currentValue);
}
}
@SuppressWarnings({"unchecked"})
private static void extractRawValues(List values, List<Object> part, String[] pathElements, int index) {
for (Object value : part) {
if (value == null) {
continue;
}
if (value instanceof Map) {
extractRawValues(values, (Map<String, Object>) value, pathElements, index);
} else if (value instanceof List) {
extractRawValues(values, (List) value, pathElements, index);
} else {
values.add(value);
}
}
}
public static Object extractValue(String path, Map<String, Object> map) {
String[] pathElements = Strings.splitStringToArray(path, '.');
if (pathElements.length == 0) {
return null;
}
return extractValue(pathElements, 0, map);
}
@SuppressWarnings({"unchecked"})
private static Object extractValue(String[] pathElements, int index, Object currentValue) {
if (index == pathElements.length) {
return currentValue;
}
if (currentValue == null) {
return null;
}
if (currentValue instanceof Map) {
Map map = (Map) currentValue;
String key = pathElements[index];
Object mapValue = map.get(key);
int nextIndex = index + 1;
while (mapValue == null && nextIndex != pathElements.length) {
key += "." + pathElements[nextIndex];
mapValue = map.get(key);
nextIndex++;
}
return extractValue(pathElements, nextIndex, mapValue);
}
if (currentValue instanceof List) {
List valueList = (List) currentValue;
List newList = new ArrayList(valueList.size());
for (Object o : valueList) {
Object listValue = extractValue(pathElements, index, o);
if (listValue != null) {
newList.add(listValue);
}
}
return newList;
}
return null;
}
public static Map<String, Object> filter(Map<String, Object> map, String[] includes, String[] excludes) {
Map<String, Object> result = Maps.newHashMap();
filter(map, result, includes == null ? Strings.EMPTY_ARRAY : includes, excludes == null ? Strings.EMPTY_ARRAY : excludes, new StringBuilder());
return result;
}
private static void filter(Map<String, Object> map, Map<String, Object> into, String[] includes, String[] excludes, StringBuilder sb) {
if (includes.length == 0 && excludes.length == 0) {
into.putAll(map);
return;
}
for (Map.Entry<String, Object> entry : map.entrySet()) {
String key = entry.getKey();
int mark = sb.length();
if (sb.length() > 0) {
sb.append('.');
}
sb.append(key);
String path = sb.toString();
if (Regex.simpleMatch(excludes, path)) {
sb.setLength(mark);
continue;
}
boolean exactIncludeMatch = false; // true if the current position was specifically mentioned
boolean pathIsPrefixOfAnInclude = false; // true if potentially a sub scope can be included
if (includes.length == 0) {
// implied match anything
exactIncludeMatch = true;
} else {
for (String include : includes) {
// check for prefix matches as well to see if we need to zero in, something like: obj1.arr1.* or *.field
// note, this does not work well with middle matches, like obj1.*.obj3
if (include.charAt(0) == '*') {
if (Regex.simpleMatch(include, path)) {
exactIncludeMatch = true;
break;
}
pathIsPrefixOfAnInclude = true;
break;
}
if (include.startsWith(path)) {
if (include.length() == path.length()) {
exactIncludeMatch = true;
break;
} else if (include.length() > path.length() && include.charAt(path.length()) == '.') {
// include may match deeper paths. Dive deeper.
pathIsPrefixOfAnInclude = true;
break;
}
}
if (Regex.simpleMatch(include, path)) {
exactIncludeMatch = true;
break;
}
}
}
if (!(pathIsPrefixOfAnInclude || exactIncludeMatch)) {
// skip subkeys, not interesting.
sb.setLength(mark);
continue;
}
if (entry.getValue() instanceof Map) {
Map<String, Object> innerInto = Maps.newHashMap();
// if we had an exact match, we want to give deeper excludes their chance
filter((Map<String, Object>) entry.getValue(), innerInto, exactIncludeMatch ? Strings.EMPTY_ARRAY : includes, excludes, sb);
if (exactIncludeMatch || !innerInto.isEmpty()) {
into.put(entry.getKey(), innerInto);
}
} else if (entry.getValue() instanceof List) {
List<Object> list = (List<Object>) entry.getValue();
List<Object> innerInto = new ArrayList<Object>(list.size());
// if we had an exact match, we want to give deeper excludes their chance
filter(list, innerInto, exactIncludeMatch ? Strings.EMPTY_ARRAY : includes, excludes, sb);
into.put(entry.getKey(), innerInto);
} else if (exactIncludeMatch) {
into.put(entry.getKey(), entry.getValue());
}
sb.setLength(mark);
}
}
private static void filter(List<Object> from, List<Object> to, String[] includes, String[] excludes, StringBuilder sb) {
if (includes.length == 0 && excludes.length == 0) {
to.addAll(from);
return;
}
for (Object o : from) {
if (o instanceof Map) {
Map<String, Object> innerInto = Maps.newHashMap();
filter((Map<String, Object>) o, innerInto, includes, excludes, sb);
if (!innerInto.isEmpty()) {
to.add(innerInto);
}
} else if (o instanceof List) {
List<Object> innerInto = new ArrayList<Object>();
filter((List<Object>) o, innerInto, includes, excludes, sb);
if (!innerInto.isEmpty()) {
to.add(innerInto);
}
} else {
to.add(o);
}
}
}
public static boolean isObject(Object node) {
return node instanceof Map;
}
public static boolean isArray(Object node) {
return node instanceof List;
}
public static String nodeStringValue(Object node, String defaultValue) {
if (node == null) {
return defaultValue;
}
return node.toString();
}
public static float nodeFloatValue(Object node, float defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeFloatValue(node);
}
public static float nodeFloatValue(Object node) {
if (node instanceof Number) {
return ((Number) node).floatValue();
}
return Float.parseFloat(node.toString());
}
public static double nodeDoubleValue(Object node, double defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeDoubleValue(node);
}
public static double nodeDoubleValue(Object node) {
if (node instanceof Number) {
return ((Number) node).doubleValue();
}
return Double.parseDouble(node.toString());
}
public static int nodeIntegerValue(Object node) {
if (node instanceof Number) {
return ((Number) node).intValue();
}
return Integer.parseInt(node.toString());
}
public static int nodeIntegerValue(Object node, int defaultValue) {
if (node == null) {
return defaultValue;
}
if (node instanceof Number) {
return ((Number) node).intValue();
}
return Integer.parseInt(node.toString());
}
public static short nodeShortValue(Object node, short defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeShortValue(node);
}
public static short nodeShortValue(Object node) {
if (node instanceof Number) {
return ((Number) node).shortValue();
}
return Short.parseShort(node.toString());
}
public static byte nodeByteValue(Object node, byte defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeByteValue(node);
}
public static byte nodeByteValue(Object node) {
if (node instanceof Number) {
return ((Number) node).byteValue();
}
return Byte.parseByte(node.toString());
}
public static long nodeLongValue(Object node, long defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeLongValue(node);
}
public static long nodeLongValue(Object node) {
if (node instanceof Number) {
return ((Number) node).longValue();
}
return Long.parseLong(node.toString());
}
public static boolean nodeBooleanValue(Object node, boolean defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeBooleanValue(node);
}
public static boolean nodeBooleanValue(Object node) {
if (node instanceof Boolean) {
return (Boolean) node;
}
if (node instanceof Number) {
return ((Number) node).intValue() != 0;
}
String value = node.toString();
return !(value.equals("false") || value.equals("0") || value.equals("off"));
}
public static TimeValue nodeTimeValue(Object node, TimeValue defaultValue) {
if (node == null) {
return defaultValue;
}
return nodeTimeValue(node);
}
public static TimeValue nodeTimeValue(Object node) {
if (node instanceof Number) {
return TimeValue.timeValueMillis(((Number) node).longValue());
}
return TimeValue.parseTimeValue(node.toString(), null);
}
public static Map<String, Object> nodeMapValue(Object node, String desc) {
if (node instanceof Map) {
return (Map<String, Object>) node;
} else {
throw new ElasticsearchParseException(desc + " should be a hash but was of type: " + node.getClass());
}
}
}
| 1 (no label) | src_main_java_org_elasticsearch_common_xcontent_support_XContentMapValues.java |
2,757 |
public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
private final Environment environment;
private final HttpServerTransport transport;
private final RestController restController;
private final NodeService nodeService;
private final boolean disableSites;
private final PluginSiteFilter pluginSiteFilter = new PluginSiteFilter();
@Inject
public HttpServer(Settings settings, Environment environment, HttpServerTransport transport,
RestController restController,
NodeService nodeService) {
super(settings);
this.environment = environment;
this.transport = transport;
this.restController = restController;
this.nodeService = nodeService;
nodeService.setHttpServer(this);
this.disableSites = componentSettings.getAsBoolean("disable_sites", false);
transport.httpServerAdapter(new Dispatcher(this));
}
static class Dispatcher implements HttpServerAdapter {
private final HttpServer server;
Dispatcher(HttpServer server) {
this.server = server;
}
@Override
public void dispatchRequest(HttpRequest request, HttpChannel channel) {
server.internalDispatchRequest(request, channel);
}
}
@Override
protected void doStart() throws ElasticsearchException {
transport.start();
if (logger.isInfoEnabled()) {
logger.info("{}", transport.boundAddress());
}
nodeService.putAttribute("http_address", transport.boundAddress().publishAddress().toString());
}
@Override
protected void doStop() throws ElasticsearchException {
nodeService.removeAttribute("http_address");
transport.stop();
}
@Override
protected void doClose() throws ElasticsearchException {
transport.close();
}
public HttpInfo info() {
return transport.info();
}
public HttpStats stats() {
return transport.stats();
}
public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) {
if (request.rawPath().startsWith("/_plugin/")) {
RestFilterChain filterChain = restController.filterChain(pluginSiteFilter);
filterChain.continueProcessing(request, channel);
return;
}
restController.dispatchRequest(request, channel);
}
class PluginSiteFilter extends RestFilter {
@Override
public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) {
handlePluginSite((HttpRequest) request, (HttpChannel) channel);
}
}
void handlePluginSite(HttpRequest request, HttpChannel channel) {
if (disableSites) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
}
if (request.method() == RestRequest.Method.OPTIONS) {
// when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added)
StringRestResponse response = new StringRestResponse(OK);
channel.sendResponse(response);
return;
}
if (request.method() != RestRequest.Method.GET) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
}
// TODO for a "/_plugin" endpoint, we should have a page that lists all the plugins?
String path = request.rawPath().substring("/_plugin/".length());
int i1 = path.indexOf('/');
String pluginName;
String sitePath;
if (i1 == -1) {
pluginName = path;
sitePath = null;
// If a trailing / is missing, we redirect to the right page #2654
channel.sendResponse(new HttpRedirectRestResponse(request.rawPath() + "/"));
return;
} else {
pluginName = path.substring(0, i1);
sitePath = path.substring(i1 + 1);
}
if (sitePath.length() == 0) {
sitePath = "/index.html";
}
// Convert file separators.
sitePath = sitePath.replace('/', File.separatorChar);
// this is a plugin provided site, serve it as static files from the plugin location
File siteFile = new File(new File(environment.pluginsFile(), pluginName), "_site");
File file = new File(siteFile, sitePath);
if (!file.exists() || file.isHidden()) {
channel.sendResponse(new StringRestResponse(NOT_FOUND));
return;
}
if (!file.isFile()) {
// If it's not a dir, we send a 403
if (!file.isDirectory()) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
}
// We don't serve directories, but if index.html exists in the directory we should serve it
file = new File(file, "index.html");
if (!file.exists() || file.isHidden() || !file.isFile()) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
}
}
if (!file.getAbsolutePath().startsWith(siteFile.getAbsolutePath())) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
}
try {
byte[] data = Streams.copyToByteArray(file);
channel.sendResponse(new BytesRestResponse(data, guessMimeType(sitePath)));
} catch (IOException e) {
channel.sendResponse(new StringRestResponse(INTERNAL_SERVER_ERROR));
}
}
// TODO: Don't respond with a mime type that violates the request's Accept header
private String guessMimeType(String path) {
int lastDot = path.lastIndexOf('.');
if (lastDot == -1) {
return "";
}
String extension = path.substring(lastDot + 1).toLowerCase(Locale.ROOT);
String mimeType = DEFAULT_MIME_TYPES.get(extension);
if (mimeType == null) {
return "";
}
return mimeType;
}
static {
// This is not an exhaustive list, just the most common types. Call registerMimeType() to add more.
Map<String, String> mimeTypes = new HashMap<String, String>();
mimeTypes.put("txt", "text/plain");
mimeTypes.put("css", "text/css");
mimeTypes.put("csv", "text/csv");
mimeTypes.put("htm", "text/html");
mimeTypes.put("html", "text/html");
mimeTypes.put("xml", "text/xml");
mimeTypes.put("js", "text/javascript"); // Technically it should be application/javascript (RFC 4329), but IE8 struggles with that
mimeTypes.put("xhtml", "application/xhtml+xml");
mimeTypes.put("json", "application/json");
mimeTypes.put("pdf", "application/pdf");
mimeTypes.put("zip", "application/zip");
mimeTypes.put("tar", "application/x-tar");
mimeTypes.put("gif", "image/gif");
mimeTypes.put("jpeg", "image/jpeg");
mimeTypes.put("jpg", "image/jpeg");
mimeTypes.put("tiff", "image/tiff");
mimeTypes.put("tif", "image/tiff");
mimeTypes.put("png", "image/png");
mimeTypes.put("svg", "image/svg+xml");
mimeTypes.put("ico", "image/vnd.microsoft.icon");
mimeTypes.put("mp3", "audio/mpeg");
DEFAULT_MIME_TYPES = ImmutableMap.copyOf(mimeTypes);
}
public static final Map<String, String> DEFAULT_MIME_TYPES;
}
| 0 (true) | src_main_java_org_elasticsearch_http_HttpServer.java |
915 |
public class DefaultShardOperationFailedException implements ShardOperationFailedException {
private String index;
private int shardId;
private String reason;
private RestStatus status;
private DefaultShardOperationFailedException() {
}
public DefaultShardOperationFailedException(IndexShardException e) {
this.index = e.shardId().index().name();
this.shardId = e.shardId().id();
this.reason = detailedMessage(e);
this.status = e.status();
}
public DefaultShardOperationFailedException(String index, int shardId, Throwable t) {
this.index = index;
this.shardId = shardId;
this.reason = detailedMessage(t);
if (t != null && t instanceof ElasticsearchException) {
status = ((ElasticsearchException) t).status();
} else {
status = RestStatus.INTERNAL_SERVER_ERROR;
}
}
@Override
public String index() {
return this.index;
}
@Override
public int shardId() {
return this.shardId;
}
@Override
public String reason() {
return this.reason;
}
@Override
public RestStatus status() {
return status;
}
public static DefaultShardOperationFailedException readShardOperationFailed(StreamInput in) throws IOException {
DefaultShardOperationFailedException exp = new DefaultShardOperationFailedException();
exp.readFrom(in);
return exp;
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
index = in.readString();
}
shardId = in.readVInt();
reason = in.readString();
status = RestStatus.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (index == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(index);
}
out.writeVInt(shardId);
out.writeString(reason);
RestStatus.writeTo(out, status);
}
@Override
public String toString() {
return "[" + index + "][" + shardId + "] failed, reason [" + reason + "]";
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_support_DefaultShardOperationFailedException.java |
726 |
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class SetTransactionTest extends HazelcastTestSupport {
static final String ELEMENT = "item";
@Test
public void testAdd_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
assertTrue(txnSet.add(ELEMENT));
assertEquals(1, txnSet.size());
context.commitTransaction();
assertEquals(1, set.size());
}
@Test
public void testSetSizeAfterAdd_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
txnSet.add(ELEMENT);
context.commitTransaction();
assertEquals(1, set.size());
}
@Test
public void testRemove_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
set.add(ELEMENT);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
assertTrue(txnSet.remove(ELEMENT));
assertFalse(txnSet.remove("NOT_THERE"));
context.commitTransaction();
assertEquals(0, set.size());
}
@Test
public void testSetSizeAfterRemove_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
set.add(ELEMENT);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
txnSet.remove(ELEMENT);
context.commitTransaction();
assertEquals(0, set.size());
}
@Test
public void testAddDuplicateElement_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
assertTrue(txnSet.add(ELEMENT));
assertFalse(txnSet.add(ELEMENT));
context.commitTransaction();
assertEquals(1, instance.getSet(setName).size());
}
@Test
public void testAddExistingElement_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
set.add(ELEMENT);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
assertFalse(txnSet.add(ELEMENT));
context.commitTransaction();
assertEquals(1, set.size());
}
@Test
public void testSetSizeAfterAddingDuplicateElement_withinTxn() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
set.add(ELEMENT);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> txnSet = context.getSet(setName);
txnSet.add(ELEMENT);
context.commitTransaction();
assertEquals(1, set.size());
}
@Test
public void testAddRollBack() throws Exception {
final String setName = randomString();
final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(2);
final HazelcastInstance instance = factory.newInstances()[0];
final ISet<String> set = instance.getSet(setName);
set.add(ELEMENT);
final TransactionContext context = instance.newTransactionContext();
context.beginTransaction();
final TransactionalSet<Object> setTxn = context.getSet(setName);
setTxn.add("itemWillGetRollBacked");
context.rollbackTransaction();
assertEquals(1, set.size());
}
}
| 0 (true) | hazelcast_src_test_java_com_hazelcast_collection_SetTransactionTest.java |
352 |
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class SimpleClientMapInterceptorTest {
static HazelcastInstance server1;
static HazelcastInstance server2;
static HazelcastInstance client;
static SimpleClientInterceptor interceptor;
@BeforeClass
public static void init() {
Config config = new Config();
config.getSerializationConfig().addPortableFactory(PortableHelpersFactory.ID, new PortableHelpersFactory());
server1 = Hazelcast.newHazelcastInstance(config);
server2 = Hazelcast.newHazelcastInstance(config);
ClientConfig clientConfig = new ClientConfig();
clientConfig.getSerializationConfig().addPortableFactory(PortableHelpersFactory.ID, new PortableHelpersFactory());
client = HazelcastClient.newHazelcastClient(clientConfig);
interceptor = new SimpleClientInterceptor();
}
@AfterClass
public static void destroy() {
client.shutdown();
Hazelcast.shutdownAll();
}
@Test
public void clientMapInterceptorTestIssue1238() throws InterruptedException {
final IMap<Object, Object> map = client.getMap("clientMapInterceptorTest");
String id = map.addInterceptor(interceptor);
map.put(1, "New York");
map.put(2, "Istanbul");
map.put(3, "Tokyo");
map.put(4, "London");
map.put(5, "Paris");
map.put(6, "Cairo");
map.put(7, "Hong Kong");
map.remove(1);
try {
map.remove(2);
fail();
} catch (Exception ignore) {
}
assertEquals(map.size(), 6);
assertEquals(map.get(1), null);
assertEquals(map.get(2), "ISTANBUL:");
assertEquals(map.get(3), "TOKYO:");
assertEquals(map.get(4), "LONDON:");
assertEquals(map.get(5), "PARIS:");
assertEquals(map.get(6), "CAIRO:");
assertEquals(map.get(7), "HONG KONG:");
map.removeInterceptor(id);
map.put(8, "Moscow");
assertEquals(map.get(8), "Moscow");
assertEquals(map.get(1), null);
assertEquals(map.get(2), "ISTANBUL");
assertEquals(map.get(3), "TOKYO");
assertEquals(map.get(4), "LONDON");
assertEquals(map.get(5), "PARIS");
assertEquals(map.get(6), "CAIRO");
assertEquals(map.get(7), "HONG KONG");
}
}
| 0 (true) | hazelcast-client_src_test_java_com_hazelcast_client_map_SimpleClientMapInterceptorTest.java |
940 |
@Component("blRecordOfferUsageRollbackHandler")
public class RecordOfferUsageRollbackHandler implements RollbackHandler {
@Resource(name = "blOfferAuditService")
protected OfferAuditService offerAuditService;
@Override
public void rollbackState(Activity<? extends ProcessContext> activity, ProcessContext processContext, Map<String, Object> stateConfiguration) throws RollbackFailureException {
List<OfferAudit> audits = (List<OfferAudit>) stateConfiguration.get(RecordOfferUsageActivity.SAVED_AUDITS);
for (OfferAudit audit : audits) {
offerAuditService.delete(audit);
}
}
}
| 0 (true) | core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_workflow_RecordOfferUsageRollbackHandler.java |
1,093 |
public class RecyclerBenchmark {
private static final long NUM_RECYCLES = 5000000L;
private static final Random RANDOM = new Random(0);
private static long bench(final Recycler<?> recycler, long numRecycles, int numThreads) throws InterruptedException {
final AtomicLong recycles = new AtomicLong(numRecycles);
final CountDownLatch latch = new CountDownLatch(1);
final Thread[] threads = new Thread[numThreads];
for (int i = 0; i < numThreads; ++i){
// Thread ids happen to be generated sequentially, so we also generate random threads so that distribution of IDs
// is not perfect for the concurrent recycler
for (int j = RANDOM.nextInt(5); j >= 0; --j) {
new Thread();
}
threads[i] = new Thread() {
@Override
public void run() {
try {
latch.await();
} catch (InterruptedException e) {
return;
}
while (recycles.getAndDecrement() > 0) {
final Recycler.V<?> v = recycler.obtain();
v.release();
}
}
};
}
for (Thread thread : threads) {
thread.start();
}
final long start = System.nanoTime();
latch.countDown();
for (Thread thread : threads) {
thread.join();
}
return System.nanoTime() - start;
}
public static void main(String[] args) throws InterruptedException {
final int limit = 100;
final Recycler.C<Object> c = new Recycler.C<Object>() {
@Override
public Object newInstance(int sizing) {
return new Object();
}
@Override
public void clear(Object value) {}
};
final ImmutableMap<String, Recycler<Object>> recyclers = ImmutableMap.<String, Recycler<Object>>builder()
.put("none", none(c))
.put("concurrent-queue", concurrentDeque(c, limit))
.put("thread-local", threadLocal(dequeFactory(c, limit)))
.put("soft-thread-local", threadLocal(softFactory(dequeFactory(c, limit))))
.put("locked", locked(deque(c, limit)))
.put("concurrent", concurrent(dequeFactory(c, limit), Runtime.getRuntime().availableProcessors()))
.put("soft-concurrent", concurrent(softFactory(dequeFactory(c, limit)), Runtime.getRuntime().availableProcessors())).build();
// warmup
final long start = System.nanoTime();
while (System.nanoTime() - start < TimeUnit.SECONDS.toNanos(10)) {
for (Recycler<?> recycler : recyclers.values()) {
bench(recycler, NUM_RECYCLES, 2);
}
}
// run
for (int numThreads = 1; numThreads <= 4 * Runtime.getRuntime().availableProcessors(); numThreads *= 2) {
System.out.println("## " + numThreads + " threads\n");
System.gc();
Thread.sleep(1000);
for (Recycler<?> recycler : recyclers.values()) {
bench(recycler, NUM_RECYCLES, numThreads);
}
for (int i = 0; i < 5; ++i) {
for (Map.Entry<String, Recycler<Object>> entry : recyclers.entrySet()) {
System.out.println(entry.getKey() + "\t" + TimeUnit.NANOSECONDS.toMillis(bench(entry.getValue(), NUM_RECYCLES, numThreads)));
}
System.out.println();
}
}
}
}
| 0 (true) | src_test_java_org_elasticsearch_benchmark_common_recycler_RecyclerBenchmark.java |
4,676 |
abstract class QueryCollector extends Collector {
final IndexFieldData<?> idFieldData;
final IndexSearcher searcher;
final ConcurrentMap<HashedBytesRef, Query> queries;
final ESLogger logger;
final Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
final HashedBytesRef spare = new HashedBytesRef(new BytesRef());
BytesValues values;
final List<Collector> facetCollectors = new ArrayList<Collector>();
final Collector facetAndAggregatorCollector;
QueryCollector(ESLogger logger, PercolateContext context) {
this.logger = logger;
this.queries = context.percolateQueries();
this.searcher = context.docSearcher();
final FieldMapper<?> idMapper = context.mapperService().smartNameFieldMapper(IdFieldMapper.NAME);
this.idFieldData = context.fieldData().getForField(idMapper);
if (context.facets() != null) {
for (SearchContextFacets.Entry entry : context.facets().entries()) {
if (entry.isGlobal()) {
continue; // not supported for now
}
Collector collector = entry.getFacetExecutor().collector();
if (entry.getFilter() != null) {
if (collector instanceof NestedFacetExecutor.Collector) {
collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector, entry.getFilter());
} else {
collector = new FilteredCollector(collector, entry.getFilter());
}
}
facetCollectors.add(collector);
}
}
List<Collector> collectors = new ArrayList<Collector>(facetCollectors);
if (context.aggregations() != null) {
AggregationContext aggregationContext = new AggregationContext(context);
context.aggregations().aggregationContext(aggregationContext);
List<Aggregator> aggregatorCollectors = new ArrayList<Aggregator>();
Aggregator[] aggregators = context.aggregations().factories().createTopLevelAggregators(aggregationContext);
for (int i = 0; i < aggregators.length; i++) {
if (!(aggregators[i] instanceof GlobalAggregator)) {
Aggregator aggregator = aggregators[i];
if (aggregator.shouldCollect()) {
aggregatorCollectors.add(aggregator);
}
}
}
context.aggregations().aggregators(aggregators);
if (!aggregatorCollectors.isEmpty()) {
collectors.add(new AggregationPhase.AggregationsCollector(aggregatorCollectors, aggregationContext));
}
}
int size = collectors.size();
if (size == 0) {
facetAndAggregatorCollector = null;
} else if (size == 1) {
facetAndAggregatorCollector = collectors.get(0);
} else {
facetAndAggregatorCollector = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.setScorer(scorer);
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
// we use the UID because id might not be indexed
values = idFieldData.load(context).getBytesValues(true);
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.setNextReader(context);
}
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true;
}
static Match match(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
return new Match(logger, context, highlightPhase);
}
static Count count(ESLogger logger, PercolateContext context) {
return new Count(logger, context);
}
static MatchAndScore matchAndScore(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
return new MatchAndScore(logger, context, highlightPhase);
}
static MatchAndSort matchAndSort(ESLogger logger, PercolateContext context) {
return new MatchAndSort(logger, context);
}
protected final Query getQuery(int doc) {
final int numValues = values.setDocument(doc);
if (numValues == 0) {
return null;
}
assert numValues == 1;
spare.reset(values.nextValue(), values.currentValueHash());
return queries.get(spare);
}
final static class Match extends QueryCollector {
final PercolateContext context;
final HighlightPhase highlightPhase;
final List<BytesRef> matches = new ArrayList<BytesRef>();
final List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
final boolean limit;
final int size;
long counter = 0;
Match(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
super(logger, context);
this.limit = context.limit;
this.size = context.size;
this.context = context;
this.highlightPhase = highlightPhase;
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
searcher.search(query, collector);
if (collector.exists()) {
if (!limit || counter < size) {
matches.add(values.copyShared());
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
long counter() {
return counter;
}
List<BytesRef> matches() {
return matches;
}
List<Map<String, HighlightField>> hls() {
return hls;
}
}
final static class MatchAndSort extends QueryCollector {
private final TopScoreDocCollector topDocsCollector;
MatchAndSort(ESLogger logger, PercolateContext context) {
super(logger, context);
// TODO: Use TopFieldCollector.create(...) for ascending and descending scoring?
topDocsCollector = TopScoreDocCollector.create(context.size, false);
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
searcher.search(query, collector);
if (collector.exists()) {
topDocsCollector.collect(doc);
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
super.setNextReader(context);
topDocsCollector.setNextReader(context);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
topDocsCollector.setScorer(scorer);
}
TopDocs topDocs() {
return topDocsCollector.topDocs();
}
}
final static class MatchAndScore extends QueryCollector {
final PercolateContext context;
final HighlightPhase highlightPhase;
final List<BytesRef> matches = new ArrayList<BytesRef>();
final List<Map<String, HighlightField>> hls = new ArrayList<Map<String, HighlightField>>();
// TODO: Use thread local in order to cache the scores lists?
final FloatArrayList scores = new FloatArrayList();
final boolean limit;
final int size;
long counter = 0;
private Scorer scorer;
MatchAndScore(ESLogger logger, PercolateContext context, HighlightPhase highlightPhase) {
super(logger, context);
this.limit = context.limit;
this.size = context.size;
this.context = context;
this.highlightPhase = highlightPhase;
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
if (context.highlight() != null) {
context.parsedQuery(new ParsedQuery(query, ImmutableMap.<String, Filter>of()));
context.hitContext().cache().clear();
}
searcher.search(query, collector);
if (collector.exists()) {
if (!limit || counter < size) {
matches.add(values.copyShared());
scores.add(scorer.score());
if (context.highlight() != null) {
highlightPhase.hitExecute(context, context.hitContext());
hls.add(context.hitContext().hit().getHighlightFields());
}
}
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
long counter() {
return counter;
}
List<BytesRef> matches() {
return matches;
}
FloatArrayList scores() {
return scores;
}
List<Map<String, HighlightField>> hls() {
return hls;
}
}
final static class Count extends QueryCollector {
private long counter = 0;
Count(ESLogger logger, PercolateContext context) {
super(logger, context);
}
@Override
public void collect(int doc) throws IOException {
final Query query = getQuery(doc);
if (query == null) {
// log???
return;
}
// run the query
try {
collector.reset();
searcher.search(query, collector);
if (collector.exists()) {
counter++;
if (facetAndAggregatorCollector != null) {
facetAndAggregatorCollector.collect(doc);
}
}
} catch (IOException e) {
logger.warn("[" + spare.bytes.utf8ToString() + "] failed to execute query", e);
}
}
long counter() {
return counter;
}
}
}
| 1 (no label) | src_main_java_org_elasticsearch_percolator_QueryCollector.java |
670 |
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OTreeInternal.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
return addToEntriesResult(transformer, entriesResultListener, key, value);
}
});
| 1 (no label) | core_src_main_java_com_orientechnologies_orient_core_index_engine_OSBTreeIndexEngine.java |
831 |
public interface OrderItemPriceDetailAdjustment extends Adjustment {
/**
* Stores the offer name at the time the adjustment was made. Primarily to simplify display
* within the admin.
*
* @return
*/
public String getOfferName();
/**
* Returns the name of the offer at the time the adjustment was made.
* @param offerName
*/
public void setOfferName(String offerName);
public OrderItemPriceDetail getOrderItemPriceDetail();
public void init(OrderItemPriceDetail orderItemPriceDetail, Offer offer, String reason);
public void setOrderItemPriceDetail(OrderItemPriceDetail orderItemPriceDetail);
/**
* Even for items that are on sale, it is possible that an adjustment was made
* to the retail price that gave the customer a better offer.
*
* Since some offers can be applied to the sale price and some only to the
* retail price, this setting provides the required value.
*
* @return true if this adjustment was applied to the sale price
*/
public boolean isAppliedToSalePrice();
public void setAppliedToSalePrice(boolean appliedToSalePrice);
/**
* Value of this adjustment relative to the retail price.
* @return
*/
public Money getRetailPriceValue();
public void setRetailPriceValue(Money retailPriceValue);
/**
* Value of this adjustment relative to the sale price.
*
* @return
*/
public Money getSalesPriceValue();
public void setSalesPriceValue(Money salesPriceValue);
}
| 0 (true) | core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OrderItemPriceDetailAdjustment.java |
392 |
public class ClusterSearchShardsAction extends ClusterAction<ClusterSearchShardsRequest, ClusterSearchShardsResponse, ClusterSearchShardsRequestBuilder> {
public static final ClusterSearchShardsAction INSTANCE = new ClusterSearchShardsAction();
public static final String NAME = "cluster/shards/search_shards";
private ClusterSearchShardsAction() {
super(NAME);
}
@Override
public ClusterSearchShardsResponse newResponse() {
return new ClusterSearchShardsResponse();
}
@Override
public ClusterSearchShardsRequestBuilder newRequestBuilder(ClusterAdminClient client) {
return new ClusterSearchShardsRequestBuilder(client);
}
}
| 0 (true) | src_main_java_org_elasticsearch_action_admin_cluster_shards_ClusterSearchShardsAction.java |
1,387 |
@SuppressWarnings("serial")
public class OMVRBTreeDatabaseLazySave<K, V> extends OMVRBTreeDatabase<K, V> {
protected int maxUpdatesBeforeSave;
protected int updates = 0;
protected boolean transactionRunning = false;
public OMVRBTreeDatabaseLazySave(final ODatabaseRecord iDatabase, final ORID iRID, final int iMaxUpdatesBeforeSave) {
super(iDatabase, iRID);
maxUpdatesBeforeSave = iMaxUpdatesBeforeSave;
}
public OMVRBTreeDatabaseLazySave(final String iClusterName, final OBinarySerializer<K> iKeySerializer,
final OStreamSerializer iValueSerializer, final int keySize, final int iMaxUpdatesBeforeSave) {
super(iClusterName, iKeySerializer, iValueSerializer, keySize);
maxUpdatesBeforeSave = iMaxUpdatesBeforeSave;
}
/**
* Do nothing since all the changes will be committed expressly at lazySave() time or on closing.
*/
@Override
public synchronized int commitChanges() {
return commitChanges(false);
}
public synchronized int commitChanges(boolean force) {
if (transactionRunning || maxUpdatesBeforeSave == 0 || (maxUpdatesBeforeSave > 0 && ++updates >= maxUpdatesBeforeSave) || force) {
updates = 0;
return lazySave();
}
return 0;
}
@Override
public void clear() {
super.clear();
lazySave();
}
public int lazySave() {
return super.commitChanges();
}
@Override
public int optimize(final boolean iForce) {
if (optimization == -1)
// IS ALREADY RUNNING
return 0;
if (!iForce && optimization == 0)
// NO OPTIMIZATION IS NEEDED
return 0;
optimization = iForce ? 2 : 1;
lazySave();
return super.optimize(iForce);
}
/**
* Returns the maximum updates to save the map persistently.
*
* @return 0 means no automatic save, 1 means non-lazy map (save each operation) and > 1 is lazy.
*/
public int getMaxUpdatesBeforeSave() {
return maxUpdatesBeforeSave;
}
/**
* Sets the maximum updates to save the map persistently.
*
* @param iValue
* 0 means no automatic save, 1 means non-lazy map (save each operation) and > 1 is lazy.
*/
public void setMaxUpdatesBeforeSave(final int iValue) {
this.maxUpdatesBeforeSave = iValue;
}
/**
* Change the transaction running mode.
*
* @param iTxRunning
* true if a transaction is running, otherwise false
*/
public void setRunningTransaction(final boolean iTxRunning) {
transactionRunning = iTxRunning;
if (iTxRunning) {
// ENSURE ALL PENDING CHANGES ARE COMMITTED BEFORE STARTING A TX
updates = 0;
lazySave();
}
}
}
| 0 (true) | core_src_main_java_com_orientechnologies_orient_core_type_tree_OMVRBTreeDatabaseLazySave.java |
100 |
public class OUnsafeMemoryJava7 extends OUnsafeMemory {
@Override
public byte[] get(long pointer, final int length) {
final byte[] result = new byte[length];
unsafe.copyMemory(null, pointer, result, unsafe.arrayBaseOffset(byte[].class), length);
return result;
}
@Override
public void get(long pointer, byte[] array, int arrayOffset, int length) {
pointer += arrayOffset;
unsafe.copyMemory(null, pointer, array, arrayOffset + unsafe.arrayBaseOffset(byte[].class), length);
}
@Override
public void set(long pointer, byte[] content, int arrayOffset, int length) {
unsafe.copyMemory(content, unsafe.arrayBaseOffset(byte[].class) + arrayOffset, null, pointer, length);
}
}
| 0 (true) | commons_src_main_java_com_orientechnologies_common_directmemory_OUnsafeMemoryJava7.java |
46 |
public class NetworkedServerFactory
{
private LifeSupport life;
private ProtocolServerFactory protocolServerFactory;
private TimeoutStrategy timeoutStrategy;
private Logging logging;
private ObjectInputStreamFactory objectInputStreamFactory;
private ObjectOutputStreamFactory objectOutputStreamFactory;
public NetworkedServerFactory( LifeSupport life, ProtocolServerFactory protocolServerFactory,
TimeoutStrategy timeoutStrategy, Logging logging,
ObjectInputStreamFactory objectInputStreamFactory,
ObjectOutputStreamFactory objectOutputStreamFactory )
{
this.life = life;
this.protocolServerFactory = protocolServerFactory;
this.timeoutStrategy = timeoutStrategy;
this.logging = logging;
this.objectInputStreamFactory = objectInputStreamFactory;
this.objectOutputStreamFactory = objectOutputStreamFactory;
}
public ProtocolServer newNetworkedServer( final Config config, AcceptorInstanceStore acceptorInstanceStore,
ElectionCredentialsProvider electionCredentialsProvider )
{
final NetworkReceiver receiver = new NetworkReceiver(new NetworkReceiver.Configuration()
{
@Override
public HostnamePort clusterServer()
{
return config.get( ClusterSettings.cluster_server );
}
@Override
public int defaultPort()
{
return 5001;
}
@Override
public String name()
{
return null;
}
}, logging);
final NetworkSender sender = new NetworkSender(new NetworkSender.Configuration()
{
@Override
public int defaultPort()
{
return 5001;
}
@Override
public int port()
{
return config.get( ClusterSettings.cluster_server ).getPort();
}
}, receiver, logging);
ExecutorLifecycleAdapter stateMachineExecutor = new ExecutorLifecycleAdapter( new Factory<ExecutorService>()
{
@Override
public ExecutorService newInstance()
{
return Executors.newSingleThreadExecutor( new NamedThreadFactory( "State machine" ) );
}
} );
final ProtocolServer protocolServer = protocolServerFactory.newProtocolServer(
new InstanceId( config.get( ClusterSettings.server_id ) ), timeoutStrategy, receiver, sender,
acceptorInstanceStore, electionCredentialsProvider, stateMachineExecutor, objectInputStreamFactory,
objectOutputStreamFactory );
receiver.addNetworkChannelsListener( new NetworkReceiver.NetworkChannelsListener()
{
private StateTransitionLogger logger;
@Override
public void listeningAt( URI me )
{
protocolServer.listeningAt( me );
if (logger == null)
{
logger = new StateTransitionLogger( logging );
protocolServer.addStateTransitionListener( logger );
}
}
@Override
public void channelOpened( URI to )
{
}
@Override
public void channelClosed( URI to )
{
}
} );
life.add( stateMachineExecutor );
// Timeout timer - triggers every 10 ms
life.add( new Lifecycle()
{
private ScheduledExecutorService scheduler;
@Override
public void init()
throws Throwable
{
protocolServer.getTimeouts().tick( System.currentTimeMillis() );
}
@Override
public void start()
throws Throwable
{
scheduler = Executors.newSingleThreadScheduledExecutor( new DaemonThreadFactory( "timeout" ) );
scheduler.scheduleWithFixedDelay( new Runnable()
{
@Override
public void run()
{
long now = System.currentTimeMillis();
protocolServer.getTimeouts().tick( now );
}
}, 0, 10, TimeUnit.MILLISECONDS );
}
@Override
public void stop()
throws Throwable
{
scheduler.shutdownNow();
}
@Override
public void shutdown()
throws Throwable
{
}
} );
// Add this last to ensure that timeout service is setup first
life.add( sender );
life.add( receiver );
return protocolServer;
}
}
| 1 (no label) | enterprise_cluster_src_main_java_org_neo4j_cluster_NetworkedServerFactory.java |
3,710 |
public final class SingleExecutorThreadFactory extends AbstractExecutorThreadFactory {
private final String threadName;
public SingleExecutorThreadFactory(ThreadGroup threadGroup, ClassLoader classLoader, String threadName) {
super(threadGroup, classLoader);
this.threadName = threadName;
}
@Override
protected Thread createThread(Runnable r) {
return new ManagedThread(r);
}
private class ManagedThread extends Thread {
public ManagedThread(Runnable target) {
super(threadGroup, target, threadName);
}
@Override
public void run() {
try {
super.run();
} catch (OutOfMemoryError e) {
OutOfMemoryErrorDispatcher.onOutOfMemory(e);
}
}
}
}
| 1 (no label) | hazelcast_src_main_java_com_hazelcast_util_executor_SingleExecutorThreadFactory.java |
1,532 |
public class EdgesVerticesMap {
public static final String DIRECTION = Tokens.makeNamespace(EdgesVerticesMap.class) + ".direction";
public enum Counters {
IN_EDGES_PROCESSED,
OUT_EDGES_PROCESSED
}
public static Configuration createConfiguration(final Direction direction) {
final Configuration configuration = new EmptyConfiguration();
configuration.set(DIRECTION, direction.name());
return configuration;
}
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private Direction direction;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.direction = Direction.valueOf(context.getConfiguration().get(DIRECTION));
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.direction.equals(IN) || this.direction.equals(BOTH)) {
long edgesProcessed = 0;
for (final Edge e : value.getEdges(IN)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
value.getPaths(edge, true);
edgesProcessed++;
edge.clearPaths();
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_PROCESSED, edgesProcessed);
} else {
for (final Edge e : value.getEdges(IN)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
edge.clearPaths();
}
}
}
if (this.direction.equals(OUT) || this.direction.equals(BOTH)) {
long edgesProcessed = 0;
for (final Edge e : value.getEdges(OUT)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
value.getPaths(edge, true);
edgesProcessed++;
edge.clearPaths();
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
} else {
for (final Edge e : value.getEdges(OUT)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
edge.clearPaths();
}
}
}
context.write(NullWritable.get(), value);
}
}
}
| 1 (no label) | titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_transform_EdgesVerticesMap.java |
1,415 |
public abstract class OChannel extends OListenerManger<OChannelListener> {
private static final OProfilerMBean PROFILER = Orient.instance().getProfiler();
public Socket socket;
public InputStream inStream;
public OutputStream outStream;
protected final OAdaptiveLock lockRead = new OAdaptiveLock();
protected final OAdaptiveLock lockWrite = new OAdaptiveLock();
protected long timeout;
public int socketBufferSize;
private long metricTransmittedBytes = 0;
private long metricReceivedBytes = 0;
private long metricFlushes = 0;
private static final AtomicLong metricGlobalTransmittedBytes = new AtomicLong();
private static final AtomicLong metricGlobalReceivedBytes = new AtomicLong();
private static final AtomicLong metricGlobalFlushes = new AtomicLong();
private String profilerMetric;
static {
final String profilerMetric = PROFILER.getProcessMetric("network.channel.binary");
PROFILER.registerHookValue(profilerMetric + ".transmittedBytes", "Bytes transmitted to all the network channels",
METRIC_TYPE.SIZE, new OProfilerHookValue() {
public Object getValue() {
return metricGlobalTransmittedBytes.get();
}
});
PROFILER.registerHookValue(profilerMetric + ".receivedBytes", "Bytes received from all the network channels", METRIC_TYPE.SIZE,
new OProfilerHookValue() {
public Object getValue() {
return metricGlobalReceivedBytes.get();
}
});
PROFILER.registerHookValue(profilerMetric + ".flushes", "Number of times the network channels have been flushed",
METRIC_TYPE.COUNTER, new OProfilerHookValue() {
public Object getValue() {
return metricGlobalFlushes.get();
}
});
}
public OChannel(final Socket iSocket, final OContextConfiguration iConfig) throws IOException {
socket = iSocket;
socketBufferSize = iConfig.getValueAsInteger(OGlobalConfiguration.NETWORK_SOCKET_BUFFER_SIZE);
socket.setTcpNoDelay(true);
}
public void acquireWriteLock() {
lockWrite.lock();
}
public void releaseWriteLock() {
lockWrite.unlock();
}
public void acquireReadLock() {
lockRead.lock();
}
public void releaseReadLock() {
lockRead.unlock();
}
public void flush() throws IOException {
outStream.flush();
}
public void close() {
PROFILER.unregisterHookValue(profilerMetric + ".transmittedBytes");
PROFILER.unregisterHookValue(profilerMetric + ".receivedBytes");
PROFILER.unregisterHookValue(profilerMetric + ".flushes");
try {
if (socket != null)
socket.close();
} catch (IOException e) {
}
try {
if (inStream != null)
inStream.close();
} catch (IOException e) {
}
try {
if (outStream != null)
outStream.close();
} catch (IOException e) {
}
for (OChannelListener l : browseListeners())
try {
l.onChannelClose(this);
} catch (Exception e) {
// IGNORE ANY EXCEPTION
}
}
public void connected() {
final String dictProfilerMetric = PROFILER.getProcessMetric("network.channel.binary.*");
profilerMetric = PROFILER.getProcessMetric("network.channel.binary." + socket.getRemoteSocketAddress().toString()
+ socket.getLocalPort() + "".replace('.', '_'));
PROFILER.registerHookValue(profilerMetric + ".transmittedBytes", "Bytes transmitted to a network channel", METRIC_TYPE.SIZE,
new OProfilerHookValue() {
public Object getValue() {
return metricTransmittedBytes;
}
}, dictProfilerMetric + ".transmittedBytes");
PROFILER.registerHookValue(profilerMetric + ".receivedBytes", "Bytes received from a network channel", METRIC_TYPE.SIZE,
new OProfilerHookValue() {
public Object getValue() {
return metricReceivedBytes;
}
}, dictProfilerMetric + ".receivedBytes");
PROFILER.registerHookValue(profilerMetric + ".flushes", "Number of times the network channel has been flushed",
METRIC_TYPE.COUNTER, new OProfilerHookValue() {
public Object getValue() {
return metricFlushes;
}
}, dictProfilerMetric + ".flushes");
}
@Override
public String toString() {
return socket != null ? socket.getRemoteSocketAddress().toString() : "Not connected";
}
protected void updateMetricTransmittedBytes(final int iDelta) {
metricGlobalTransmittedBytes.addAndGet(iDelta);
metricTransmittedBytes += iDelta;
}
protected void updateMetricReceivedBytes(final int iDelta) {
metricGlobalReceivedBytes.addAndGet(iDelta);
metricReceivedBytes += iDelta;
}
protected void updateMetricFlushes() {
metricGlobalFlushes.incrementAndGet();
metricFlushes++;
}
}
| 1no label
|
enterprise_src_main_java_com_orientechnologies_orient_enterprise_channel_OChannel.java
|
488 |
public class ClearIndicesCacheRequest extends BroadcastOperationRequest<ClearIndicesCacheRequest> {
private boolean filterCache = false;
private boolean fieldDataCache = false;
private boolean idCache = false;
private boolean recycler = false;
private String[] fields = null;
private String[] filterKeys = null;
ClearIndicesCacheRequest() {
}
public ClearIndicesCacheRequest(String... indices) {
super(indices);
}
public boolean filterCache() {
return filterCache;
}
public ClearIndicesCacheRequest filterCache(boolean filterCache) {
this.filterCache = filterCache;
return this;
}
public boolean fieldDataCache() {
return this.fieldDataCache;
}
public ClearIndicesCacheRequest fieldDataCache(boolean fieldDataCache) {
this.fieldDataCache = fieldDataCache;
return this;
}
public ClearIndicesCacheRequest fields(String... fields) {
this.fields = fields;
return this;
}
public String[] fields() {
return this.fields;
}
public ClearIndicesCacheRequest filterKeys(String... filterKeys) {
this.filterKeys = filterKeys;
return this;
}
public String[] filterKeys() {
return this.filterKeys;
}
public boolean idCache() {
return this.idCache;
}
public ClearIndicesCacheRequest recycler(boolean recycler) {
this.recycler = recycler;
return this;
}
public boolean recycler() {
return this.recycler;
}
public ClearIndicesCacheRequest idCache(boolean idCache) {
this.idCache = idCache;
return this;
}
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
filterCache = in.readBoolean();
fieldDataCache = in.readBoolean();
idCache = in.readBoolean();
recycler = in.readBoolean();
fields = in.readStringArray();
filterKeys = in.readStringArray();
}
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(filterCache);
out.writeBoolean(fieldDataCache);
out.writeBoolean(idCache);
out.writeBoolean(recycler);
out.writeStringArrayNullable(fields);
out.writeStringArrayNullable(filterKeys);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_cache_clear_ClearIndicesCacheRequest.java
|
1,064 |
public interface FulfillmentBand extends Serializable {
public Long getId();
public void setId(Long id);
/**
* Gets the amount that should be applied to the fulfillment
* cost for the {@link FulfillmentGroup}. This could be applied as
* a percentage or as a flat rate, depending on the result of calling
* {@link #getResultType()}. This is required and should never be null
*
* @return the amount to apply for this band
*/
public BigDecimal getResultAmount();
/**
* Sets the amount that should be applied to the fulfillment cost
* for this band. This can be either a flat rate or a percentage depending
* on {@link #getResultType()}.
*
* @param resultAmount - the percentage or flat rate that should be applied
* as a fulfillment cost for this band
*/
public void setResultAmount(BigDecimal resultAmount);
/**
* Gets how {@link #getResultAmount} should be applied to the fulfillment cost
*
* @return the type of {@link #getResultAmount()} which determines how that value
* should be calculated into the cost
*/
public FulfillmentBandResultAmountType getResultAmountType();
/**
* Sets how {@link #getResultAmount()} should be applied to the fulfillment cost
*
* @param resultAmountType - how the value from {@link #getResultAmount()} should be
* applied to the cost of the {@link FulfillmentGroup}
*/
public void setResultAmountType(FulfillmentBandResultAmountType resultAmountType);
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_fulfillment_domain_FulfillmentBand.java
|
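The javadoc above describes two ways the result amount can be applied; the following is a minimal sketch of that branch, assuming the result type exposes PERCENTAGE and RATE style constants (the constant names are not shown in the snippet and are assumptions):
// Hypothetical helper showing how a calculator could apply a band; the
// PERCENTAGE constant name on FulfillmentBandResultAmountType is an assumption.
public BigDecimal applyBand(FulfillmentBand band, BigDecimal retailFulfillmentPrice) {
    if (FulfillmentBandResultAmountType.PERCENTAGE.equals(band.getResultAmountType())) {
        // Interpret the result amount as a fraction of the retail fulfillment price
        return retailFulfillmentPrice.multiply(band.getResultAmount());
    }
    // Otherwise charge the result amount as a flat rate for the whole FulfillmentGroup
    return band.getResultAmount();
}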
1,896 |
nodeEngine.getExecutionService().execute("hz:near-cache", new Runnable() {
public void run() {
try {
TreeSet<CacheRecord> records = new TreeSet<CacheRecord>(cache.values());
int evictSize = cache.size() * EVICTION_PERCENTAGE / HUNDRED_PERCENT;
int i = 0;
for (CacheRecord record : records) {
cache.remove(record.key);
updateSizeEstimator(-calculateCost(record));
if (++i > evictSize) {
break;
}
}
} finally {
canEvict.set(true);
}
}
});
| 0true
|
hazelcast_src_main_java_com_hazelcast_map_NearCache.java
|
936 |
public static interface RIDMapper {
ORID map(ORID rid);
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_record_impl_ODocumentHelper.java
|
811 |
public static class Request extends SingleShardOperationRequest {
private int shardId;
private String preference;
private List<Item> items;
public Request() {
}
public Request(String concreteIndex, int shardId, String preference) {
this.index = concreteIndex;
this.shardId = shardId;
this.preference = preference;
this.items = new ArrayList<Item>();
}
public int shardId() {
return shardId;
}
public void add(Item item) {
items.add(item);
}
public List<Item> items() {
return items;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = in.readVInt();
preference = in.readOptionalString();
int size = in.readVInt();
items = new ArrayList<Item>(size);
for (int i = 0; i < size; i++) {
int slot = in.readVInt();
PercolateShardRequest shardRequest = new PercolateShardRequest(index(), shardId);
shardRequest.documentType(in.readString());
shardRequest.source(in.readBytesReference());
shardRequest.docSource(in.readBytesReference());
shardRequest.onlyCount(in.readBoolean());
Item item = new Item(slot, shardRequest);
items.add(item);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(shardId);
out.writeOptionalString(preference);
out.writeVInt(items.size());
for (Item item : items) {
out.writeVInt(item.slot);
out.writeString(item.request.documentType());
out.writeBytesReference(item.request.source());
out.writeBytesReference(item.request.docSource());
out.writeBoolean(item.request.onlyCount());
}
}
public static class Item {
private final int slot;
private final PercolateShardRequest request;
public Item(int slot, PercolateShardRequest request) {
this.slot = slot;
this.request = request;
}
public int slot() {
return slot;
}
public PercolateShardRequest request() {
return request;
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_percolate_TransportShardMultiPercolateAction.java
|
850 |
FLOAT("Float", 4, new Class<?>[] { Float.class, Float.TYPE }, new Class<?>[] { Float.class, Number.class }) {
},
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_metadata_schema_OType.java
|
454 |
public static class AdminPresentationDataDrivenEnumeration {
public static final String OPTIONLISTENTITY = "optionListEntity";
public static final String OPTIONVALUEFIELDNAME = "optionValueFieldName";
public static final String OPTIONDISPLAYFIELDNAME = "optionDisplayFieldName";
public static final String OPTIONCANEDITVALUES = "optionCanEditValues";
public static final String OPTIONFILTERPARAMS = "optionFilterParams";
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_presentation_override_PropertyType.java
|
1,922 |
EntryListener<Object, Object> listener = new EntryListener<Object, Object>() {
private void handleEvent(EntryEvent<Object, Object> event) {
if (endpoint.live()) {
Data key = clientEngine.toData(event.getKey());
Data value = clientEngine.toData(event.getValue());
Data oldValue = clientEngine.toData(event.getOldValue());
PortableEntryEvent portableEntryEvent = new PortableEntryEvent(key, value, oldValue, event.getEventType(), event.getMember().getUuid());
endpoint.sendEvent(portableEntryEvent, getCallId());
}
}
public void entryAdded(EntryEvent<Object, Object> event) {
handleEvent(event);
}
public void entryRemoved(EntryEvent<Object, Object> event) {
handleEvent(event);
}
public void entryUpdated(EntryEvent<Object, Object> event) {
handleEvent(event);
}
public void entryEvicted(EntryEvent<Object, Object> event) {
handleEvent(event);
}
};
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_client_AbstractMapAddEntryListenerRequest.java
|
1,479 |
public final static class UnassignedShards implements Iterable<MutableShardRouting> {
private final List<MutableShardRouting> unassigned;
private int primaries = 0;
private long transactionId = 0;
private final UnassignedShards source;
private final long sourceTransactionId;
public UnassignedShards(UnassignedShards other) {
source = other;
sourceTransactionId = other.transactionId;
unassigned = new ArrayList<MutableShardRouting>(other.unassigned);
primaries = other.primaries;
}
public UnassignedShards() {
unassigned = new ArrayList<MutableShardRouting>();
source = null;
sourceTransactionId = -1;
}
public void add(MutableShardRouting mutableShardRouting) {
if(mutableShardRouting.primary()) {
primaries++;
}
unassigned.add(mutableShardRouting);
transactionId++;
}
public void addAll(Collection<MutableShardRouting> mutableShardRoutings) {
for (MutableShardRouting r : mutableShardRoutings) {
add(r);
}
}
public int size() {
return unassigned.size();
}
public int numPrimaries() {
return primaries;
}
@Override
public Iterator<MutableShardRouting> iterator() {
final Iterator<MutableShardRouting> iterator = unassigned.iterator();
return new Iterator<MutableShardRouting>() {
private MutableShardRouting current;
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public MutableShardRouting next() {
return current = iterator.next();
}
@Override
public void remove() {
iterator.remove();
if (current.primary()) {
primaries--;
}
transactionId++;
}
};
}
public boolean isEmpty() {
return unassigned.isEmpty();
}
public void shuffle() {
Collections.shuffle(unassigned);
}
public void clear() {
transactionId++;
unassigned.clear();
primaries = 0;
}
public void transactionEnd(UnassignedShards shards) {
assert shards.source == this && shards.sourceTransactionId == transactionId :
"Expected ID: " + shards.sourceTransactionId + " actual: " + transactionId + " Expected Source: " + shards.source + " actual: " + this;
transactionId++;
this.unassigned.clear();
this.unassigned.addAll(shards.unassigned);
this.primaries = shards.primaries;
}
public UnassignedShards transactionBegin() {
return new UnassignedShards(this);
}
public MutableShardRouting[] drain() {
MutableShardRouting[] mutableShardRoutings = unassigned.toArray(new MutableShardRouting[unassigned.size()]);
unassigned.clear();
primaries = 0;
transactionId++;
return mutableShardRoutings;
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_routing_RoutingNodes.java
|
599 |
ex.execute(new Runnable() {
public void run() {
try {
Thread.sleep(random.nextInt(10) * 1000);
final Config config = new Config();
config.setProperty("hazelcast.wait.seconds.before.join", "5");
String name = "group" + random.nextInt(groupCount);
groups.get(name).incrementAndGet();
config.getGroupConfig().setName(name);
final NetworkConfig networkConfig = config.getNetworkConfig();
networkConfig.getJoin().getMulticastConfig().setEnabled(false);
TcpIpConfig tcpIpConfig = networkConfig.getJoin().getTcpIpConfig();
tcpIpConfig.setEnabled(true);
int port = 12301;
networkConfig.setPortAutoIncrement(false);
networkConfig.setPort(port + seed);
for (int i = 0; i < count; i++) {
tcpIpConfig.addMember("127.0.0.1:" + (port + i));
}
HazelcastInstance h = Hazelcast.newHazelcastInstance(config);
mapOfInstances.put(seed, h);
latch.countDown();
} catch (Exception e) {
e.printStackTrace();
}
}
});
| 0true
|
hazelcast_src_test_java_com_hazelcast_cluster_JoinStressTest.java
|
452 |
executor.execute(new Runnable() {
@Override
public void run() {
int half = testValues.length / 2;
for (int i = 0; i < testValues.length; i++) {
final ReplicatedMap map = i < half ? map1 : map2;
final AbstractMap.SimpleEntry<Integer, Integer> entry = testValues[i];
map.put(entry.getKey(), entry.getValue());
keySetTestValues.add(entry.getKey());
}
}
}, 2, EntryEventType.ADDED, 100, 0.75, map1, map2);
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_replicatedmap_ClientReplicatedMapTest.java
|
969 |
threadPool.executor(executor()).execute(new Runnable() {
@Override
public void run() {
try {
onOperation(idx, nodeOperation(newNodeRequest(clusterState.nodes().masterNodeId(), request)));
} catch (Throwable e) {
onFailure(idx, clusterState.nodes().masterNodeId(), e);
}
}
});
| 0true
|
src_main_java_org_elasticsearch_action_support_nodes_TransportNodesOperationAction.java
|
5 |
public class NetworkReceiver
implements MessageSource, Lifecycle
{
public interface Configuration
{
HostnamePort clusterServer();
int defaultPort();
String name(); // Name of this cluster instance. Null in most cases, but tools may use e.g. "Backup"
}
public interface NetworkChannelsListener
{
void listeningAt( URI me );
void channelOpened( URI to );
void channelClosed( URI to );
}
public static final String CLUSTER_SCHEME = "cluster";
public static final String INADDR_ANY = "0.0.0.0";
private ChannelGroup channels;
// Receiving
private NioServerSocketChannelFactory nioChannelFactory;
private ServerBootstrap serverBootstrap;
private Iterable<MessageProcessor> processors = Listeners.newListeners();
private Configuration config;
private StringLogger msgLog;
private Map<URI, Channel> connections = new ConcurrentHashMap<URI, Channel>();
private Iterable<NetworkChannelsListener> listeners = Listeners.newListeners();
volatile boolean bindingDetected = false;
public NetworkReceiver( Configuration config, Logging logging )
{
this.config = config;
this.msgLog = logging.getMessagesLog( getClass() );
}
@Override
public void init()
throws Throwable
{
ThreadRenamingRunnable.setThreadNameDeterminer( ThreadNameDeterminer.CURRENT );
}
@Override
public void start()
throws Throwable
{
channels = new DefaultChannelGroup();
// Listen for incoming connections
nioChannelFactory = new NioServerSocketChannelFactory(
Executors.newCachedThreadPool( new NamedThreadFactory( "Cluster boss" ) ),
Executors.newFixedThreadPool( 2, new NamedThreadFactory( "Cluster worker" ) ), 2 );
serverBootstrap = new ServerBootstrap( nioChannelFactory );
serverBootstrap.setOption("child.tcpNoDelay", true);
serverBootstrap.setPipelineFactory( new NetworkNodePipelineFactory() );
int[] ports = config.clusterServer().getPorts();
int minPort = ports[0];
int maxPort = ports.length == 2 ? ports[1] : minPort;
// Try all ports in the given range
listen( minPort, maxPort );
}
@Override
public void stop()
throws Throwable
{
msgLog.debug( "Shutting down NetworkReceiver" );
channels.close().awaitUninterruptibly();
serverBootstrap.releaseExternalResources();
msgLog.debug( "Shutting down NetworkReceiver complete" );
}
@Override
public void shutdown()
throws Throwable
{
}
private void listen( int minPort, int maxPort )
throws URISyntaxException, ChannelException, UnknownHostException
{
ChannelException ex = null;
for ( int checkPort = minPort; checkPort <= maxPort; checkPort++ )
{
try
{
InetAddress host;
String address = config.clusterServer().getHost();
InetSocketAddress localAddress;
if ( address == null || address.equals( INADDR_ANY ))
{
localAddress = new InetSocketAddress( checkPort );
}
else
{
host = InetAddress.getByName( address );
localAddress = new InetSocketAddress( host, checkPort );
}
Channel listenChannel = serverBootstrap.bind( localAddress );
listeningAt( getURI( localAddress ) );
channels.add( listenChannel );
return;
}
catch ( ChannelException e )
{
ex = e;
}
}
nioChannelFactory.releaseExternalResources();
throw ex;
}
// MessageSource implementation
public void addMessageProcessor( MessageProcessor processor )
{
processors = Listeners.addListener( processor, processors );
}
public void receive( Message message )
{
for ( MessageProcessor processor : processors )
{
try
{
if ( !processor.process( message ) )
{
break;
}
}
catch ( Exception e )
{
// Ignore
}
}
}
private URI getURI( InetSocketAddress address ) throws URISyntaxException
{
String uri;
if (address.getAddress().getHostAddress().startsWith( "0" ))
uri = CLUSTER_SCHEME + "://0.0.0.0:"+address.getPort(); // Socket.toString() already prepends a /
else
uri = CLUSTER_SCHEME + "://" + address.getAddress().getHostAddress()+":"+address.getPort(); // Socket.toString() already prepends a /
// Add name if given
if (config.name() != null)
uri += "/?name="+config.name();
return URI.create( uri );
}
public void listeningAt( final URI me )
{
Listeners.notifyListeners( listeners, new Listeners.Notification<NetworkChannelsListener>()
{
@Override
public void notify( NetworkChannelsListener listener )
{
listener.listeningAt( me );
}
} );
}
protected void openedChannel( final URI uri, Channel ctxChannel )
{
connections.put( uri, ctxChannel );
Listeners.notifyListeners( listeners, new Listeners.Notification<NetworkChannelsListener>()
{
@Override
public void notify( NetworkChannelsListener listener )
{
listener.channelOpened( uri );
}
} );
}
protected void closedChannel( final URI uri )
{
Channel channel = connections.remove( uri );
if ( channel != null )
{
channel.close();
}
Listeners.notifyListeners( listeners, new Listeners.Notification<NetworkChannelsListener>()
{
@Override
public void notify( NetworkChannelsListener listener )
{
listener.channelClosed( uri );
}
} );
}
public void addNetworkChannelsListener( NetworkChannelsListener listener )
{
listeners = Listeners.addListener( listener, listeners );
}
private class NetworkNodePipelineFactory
implements ChannelPipelineFactory
{
@Override
public ChannelPipeline getPipeline() throws Exception
{
ChannelPipeline pipeline = Channels.pipeline();
pipeline.addLast( "frameDecoder",new ObjectDecoder( 1024 * 1000, NetworkNodePipelineFactory.this.getClass().getClassLoader() ) );
pipeline.addLast( "serverHandler", new MessageReceiver() );
return pipeline;
}
}
private class MessageReceiver
extends SimpleChannelHandler
{
@Override
public void channelOpen( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
Channel ctxChannel = ctx.getChannel();
openedChannel( getURI( (InetSocketAddress) ctxChannel.getRemoteAddress() ), ctxChannel );
channels.add( ctxChannel );
}
@Override
public void messageReceived( ChannelHandlerContext ctx, MessageEvent event ) throws Exception
{
if (!bindingDetected)
{
InetSocketAddress local = ((InetSocketAddress)event.getChannel().getLocalAddress());
bindingDetected = true;
listeningAt( getURI( local ) );
}
final Message message = (Message) event.getMessage();
// Fix FROM header since sender cannot know it's correct IP/hostname
InetSocketAddress remote = (InetSocketAddress) ctx.getChannel().getRemoteAddress();
String remoteAddress = remote.getAddress().getHostAddress();
URI fromHeader = URI.create( message.getHeader( Message.FROM ) );
fromHeader = URI.create(fromHeader.getScheme()+"://"+remoteAddress + ":" + fromHeader.getPort());
message.setHeader( Message.FROM, fromHeader.toASCIIString() );
msgLog.debug( "Received:" + message );
receive( message );
}
@Override
public void channelDisconnected( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
closedChannel( getURI( (InetSocketAddress) ctx.getChannel().getRemoteAddress() ) );
}
@Override
public void channelClosed( ChannelHandlerContext ctx, ChannelStateEvent e ) throws Exception
{
closedChannel( getURI( (InetSocketAddress) ctx.getChannel().getRemoteAddress() ) );
channels.remove( ctx.getChannel() );
}
@Override
public void exceptionCaught( ChannelHandlerContext ctx, ExceptionEvent e ) throws Exception
{
if ( !(e.getCause() instanceof ConnectException) )
{
msgLog.error( "Receive exception:", e.getCause() );
}
}
}
}
| 1no label
|
enterprise_cluster_src_main_java_org_neo4j_cluster_com_NetworkReceiver.java
|
161 |
public class TestTxTimestamps
{
private final EphemeralFileSystemAbstraction fileSystem = new EphemeralFileSystemAbstraction();
private GraphDatabaseAPI db;
@Before
public void doBefore() throws Exception
{
db = (GraphDatabaseAPI) new TestGraphDatabaseFactory().setFileSystem( fileSystem ).newImpermanentDatabaseBuilder().
setConfig( GraphDatabaseSettings.keep_logical_logs, Settings.TRUE ).newGraphDatabase();
}
@After
public void doAfter() throws Exception
{
db.shutdown();
}
@Test
public void doIt() throws Exception
{
long[] expectedStartTimestamps = new long[10];
long[] expectedCommitTimestamps = new long[expectedStartTimestamps.length];
for ( int i = 0; i < expectedStartTimestamps.length; i++ )
{
Transaction tx = db.beginTx();
expectedStartTimestamps[i] = System.currentTimeMillis();
Node node = db.createNode();
node.setProperty( "name", "Mattias " + i );
tx.success();
tx.finish();
expectedCommitTimestamps[i] = System.currentTimeMillis();
}
db.getDependencyResolver().resolveDependency( XaDataSourceManager.class )
.getNeoStoreDataSource().rotateLogicalLog();
ByteBuffer buffer = ByteBuffer.allocate( 1024*500 );
StoreChannel channel = fileSystem.open( new File( db.getStoreDir(),
NeoStoreXaDataSource.LOGICAL_LOG_DEFAULT_NAME + ".v0" ), "r" );
try
{
XaCommandFactory commandFactory = new CommandFactory();
LogIoUtils.readLogHeader( buffer, channel, true );
int foundTxCount = 0;
skipFirstTransaction( buffer, channel, commandFactory ); // Since it's the property index transaction
for (LogEntry entry; (entry = LogIoUtils.readEntry( buffer, channel, commandFactory )) != null; )
{
if ( entry instanceof LogEntry.Start )
{
long diff = ((LogEntry.Start) entry).getTimeWritten()-expectedStartTimestamps[foundTxCount];
long exp = expectedCommitTimestamps[foundTxCount] - expectedStartTimestamps[foundTxCount];
assertTrue( diff + " <= " + exp, diff <= exp );
}
else if ( entry instanceof LogEntry.Commit )
{
long diff = ((LogEntry.Commit) entry).getTimeWritten()-expectedCommitTimestamps[foundTxCount];
long exp = expectedCommitTimestamps[foundTxCount] - expectedStartTimestamps[foundTxCount];
assertTrue( diff + " <= " + exp, diff <= exp );
foundTxCount++;
}
}
assertEquals( expectedCommitTimestamps.length, foundTxCount );
}
finally
{
channel.close();
}
}
private void skipFirstTransaction( ByteBuffer buffer, StoreChannel channel, XaCommandFactory commandFactory ) throws IOException
{
for (LogEntry entry; (entry = LogIoUtils.readEntry( buffer, channel, commandFactory )) != null; )
if ( entry instanceof Commit )
break;
}
private static class CommandFactory extends XaCommandFactory
{
@Override
public XaCommand readCommand( ReadableByteChannel byteChannel,
ByteBuffer buffer ) throws IOException
{
return Command.readCommand( null, null, byteChannel, buffer );
}
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestTxTimestamps.java
|
1,543 |
@ManagedDescription("IAtomicReference")
public class AtomicReferenceMBean extends HazelcastMBean<IAtomicReference> {
public AtomicReferenceMBean(IAtomicReference managedObject, ManagementService service) {
super(managedObject, service);
objectName = service.createObjectName("IAtomicReference",managedObject.getName());
}
@ManagedAnnotation("name")
@ManagedDescription("Name of the DistributedObject")
public String getName() {
return managedObject.getName();
}
@ManagedAnnotation("partitionKey")
@ManagedDescription("the partitionKey")
public String getPartitionKey() {
return managedObject.getPartitionKey();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_jmx_AtomicReferenceMBean.java
|
1,601 |
public class SystemLogRecord implements Comparable<SystemLogRecord>, DataSerializable {
private long date;
private String node;
private String message;
private String type;
public SystemLogRecord() {
}
public SystemLogRecord(long date, String message, String type) {
this.date = date;
this.message = message;
this.type = type;
}
public String getNode() {
return node;
}
public void setNode(String node) {
this.node = node;
}
public long getDate() {
return date;
}
public void setDate(long date) {
this.date = date;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
@Override
public int compareTo(SystemLogRecord o) {
long thisVal = this.date;
long anotherVal = o.getDate();
if (thisVal < anotherVal) {
return -1;
} else {
return thisVal == anotherVal ? 0 : 1;
}
}
@Override
public int hashCode() {
return (int) (date ^ (date >>> 32));
}
@Override
public boolean equals(Object o) {
if (o != null && o instanceof SystemLogRecord) {
return this.compareTo((SystemLogRecord) o) == 0;
}
return false;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeLong(date);
out.writeUTF(message);
out.writeUTF(type);
}
@Override
public void readData(ObjectDataInput in) throws IOException {
date = in.readLong();
message = in.readUTF();
type = in.readUTF();
}
@Override
public String toString() {
return "SystemLogRecord{"
+ "date=" + date
+ ", node='" + node + '\''
+ ", message='" + message + '\''
+ ", type='" + type + '\''
+ '}';
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_logging_SystemLogRecord.java
|
84 |
public class SpringTransactionManagerTest
{
@Test(timeout = 5000)
public void testDoubleUpdateWithJavaTM() throws Exception
{
GraphDatabaseAPI db = (GraphDatabaseAPI) new TestGraphDatabaseFactory()
.newImpermanentDatabase();
TransactionManager tm = new SpringTransactionManager( db );
tm.begin();
Node node;
node = db.createNode();
node.setProperty( "name", "Foo" );
tm.commit();
Transaction transaction = db.beginTx();
assertEquals( "Foo", db.getNodeById( node.getId() ).getProperty( "name" ) );
node.setProperty( "name", "Bar" );
transaction.success();
transaction.finish();
tm.begin();
node.setProperty( "name", "FooBar" );
assertEquals( "FooBar", db.getNodeById( node.getId() ).getProperty(
"name" ) );
tm.commit();
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_SpringTransactionManagerTest.java
|
3,653 |
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = AllFieldMapper.NAME;
public static final String INDEX_NAME = AllFieldMapper.NAME;
public static final boolean ENABLED = true;
public static final FieldType FIELD_TYPE = new FieldType();
static {
FIELD_TYPE.setIndexed(true);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.freeze();
}
}
| 0true
|
src_main_java_org_elasticsearch_index_mapper_internal_AllFieldMapper.java
|
1,398 |
@XmlRootElement(name = "media")
@XmlAccessorType(value = XmlAccessType.FIELD)
public class MediaWrapper extends BaseWrapper implements APIWrapper<Media> {
/**
* This allows us to control whether the URL should / can be overwritten, for example by the static asset service.
*/
@XmlTransient
protected boolean allowOverrideUrl = true;
@XmlElement
protected Long id;
@XmlElement
protected String title;
@XmlElement
protected String url;
@XmlElement
protected String altText;
@XmlElement
protected String tags;
@Override
public void wrapDetails(Media media, HttpServletRequest request) {
this.id = media.getId();
this.title = media.getTitle();
this.altText = media.getAltText();
this.tags = media.getTags();
this.url = media.getUrl();
}
@Override
public void wrapSummary(Media media, HttpServletRequest request) {
wrapDetails(media, request);
}
public boolean isAllowOverrideUrl() {
return allowOverrideUrl;
}
public void setAllowOverrideUrl(boolean allow) {
this.allowOverrideUrl = allow;
}
/**
* Call this only if allowOverrideUrl is true, and only AFTER you call wrap.
* @param url the URL to expose in place of the wrapped media's URL (ignored when overrides are disallowed)
*/
public void setUrl(String url) {
if (allowOverrideUrl) {
this.url = url;
}
}
}
| 0true
|
core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_api_wrapper_MediaWrapper.java
|
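A short usage sketch of the ordering constraint noted in the comments above (call setUrl only after wrapping, and only when overrides are allowed); the CDN host is a made-up example:
// Hypothetical usage: wrap first, then override the URL only when allowed.
MediaWrapper wrapper = new MediaWrapper();
wrapper.wrapSummary(media, request);
if (wrapper.isAllowOverrideUrl()) {
    // e.g. rewrite to a static-asset/CDN location; the prefix is illustrative only
    wrapper.setUrl("//cdn.example.com" + media.getUrl());
}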
115 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_PAGE_TMPLT")
@Cache(usage= CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blCMSElements")
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "PageTemplateImpl_basePageTemplate")
public class PageTemplateImpl implements PageTemplate, AdminMainEntity {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "PageTemplateId")
@GenericGenerator(
name="PageTemplateId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="PageTemplateImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.cms.page.domain.PageTemplateImpl")
}
)
@Column(name = "PAGE_TMPLT_ID")
@AdminPresentation(friendlyName = "PageTemplateImpl_Template_Id",
visibility = VisibilityEnum.HIDDEN_ALL,
readOnly = true)
protected Long id;
@Column (name = "TMPLT_NAME")
@AdminPresentation(friendlyName = "PageTemplateImpl_Template_Name",
prominent = true, gridOrder = 1)
protected String templateName;
@Column (name = "TMPLT_DESCR")
protected String templateDescription;
@Column (name = "TMPLT_PATH")
@AdminPresentation(friendlyName = "PageTemplateImpl_Template_Path",
visibility = VisibilityEnum.HIDDEN_ALL,
readOnly = true)
protected String templatePath;
@ManyToOne(targetEntity = LocaleImpl.class)
@JoinColumn(name = "LOCALE_CODE")
@AdminPresentation(excluded = true)
protected Locale locale;
@ManyToMany(targetEntity = FieldGroupImpl.class, cascade = {CascadeType.ALL})
@JoinTable(name = "BLC_PGTMPLT_FLDGRP_XREF", joinColumns = @JoinColumn(name = "PAGE_TMPLT_ID", referencedColumnName = "PAGE_TMPLT_ID"), inverseJoinColumns = @JoinColumn(name = "FLD_GROUP_ID", referencedColumnName = "FLD_GROUP_ID"))
@Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blCMSElements")
@OrderColumn(name = "GROUP_ORDER")
@BatchSize(size = 20)
protected List<FieldGroup> fieldGroups;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getTemplateName() {
return templateName;
}
@Override
public void setTemplateName(String templateName) {
this.templateName = templateName;
}
@Override
public String getTemplateDescription() {
return templateDescription;
}
@Override
public void setTemplateDescription(String templateDescription) {
this.templateDescription = templateDescription;
}
@Override
public String getTemplatePath() {
return templatePath;
}
@Override
public void setTemplatePath(String templatePath) {
this.templatePath = templatePath;
}
@Override
public Locale getLocale() {
return locale;
}
@Override
public void setLocale(Locale locale) {
this.locale = locale;
}
@Override
public List<FieldGroup> getFieldGroups() {
return fieldGroups;
}
@Override
public void setFieldGroups(List<FieldGroup> fieldGroups) {
this.fieldGroups = fieldGroups;
}
@Override
public String getMainEntityName() {
return getTemplateName();
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageTemplateImpl.java
|
230 |
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface ODocumentInstance {
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_annotation_ODocumentInstance.java
|
1,610 |
public class FieldMetadataOverride {
//fields everyone depends on
private Boolean excluded;
private String friendlyName;
private String securityLevel;
private Integer order;
public Boolean getExcluded() {
return excluded;
}
public void setExcluded(Boolean excluded) {
this.excluded = excluded;
}
public String getFriendlyName() {
return friendlyName;
}
public void setFriendlyName(String friendlyName) {
this.friendlyName = friendlyName;
}
public String getSecurityLevel() {
return securityLevel;
}
public void setSecurityLevel(String securityLevel) {
this.securityLevel = securityLevel;
}
public Integer getOrder() {
return order;
}
public void setOrder(Integer order) {
this.order = order;
}
//basic fields
private SupportedFieldType fieldType;
private SupportedFieldType secondaryType = SupportedFieldType.INTEGER;
private Integer length;
private Boolean required;
private Boolean unique;
private Integer scale;
private Integer precision;
private String foreignKeyProperty;
private String foreignKeyClass;
private String foreignKeyDisplayValueProperty;
private Boolean foreignKeyCollection;
private MergedPropertyType mergedPropertyType;
private String[][] enumerationValues;
private String enumerationClass;
protected Boolean isDerived;
//@AdminPresentation derived fields
private String name;
private VisibilityEnum visibility;
private String group;
private Integer groupOrder;
protected Integer gridOrder;
private String tab;
private Integer tabOrder;
private Boolean groupCollapsed;
private SupportedFieldType explicitFieldType;
private Boolean largeEntry;
private Boolean prominent;
private String columnWidth;
private String broadleafEnumeration;
private Boolean readOnly;
private Map<String, Map<String, String>> validationConfigurations;
private Boolean requiredOverride;
private String tooltip;
private String helpText;
private String hint;
private String lookupDisplayProperty;
private Boolean forcePopulateChildProperties;
private String optionListEntity;
private String optionValueFieldName;
private String optionDisplayFieldName;
private Boolean optionCanEditValues;
private Serializable[][] optionFilterValues;
private String showIfProperty;
private String ruleIdentifier;
private Boolean translatable;
private LookupType lookupType;
//@AdminPresentationMapField derived fields
private Boolean searchable;
private String mapFieldValueClass;
//Not a user definable field
private Boolean toOneLookupCreatedViaAnnotation;
public Boolean getToOneLookupCreatedViaAnnotation() {
return toOneLookupCreatedViaAnnotation;
}
public void setToOneLookupCreatedViaAnnotation(Boolean toOneLookupCreatedViaAnnotation) {
this.toOneLookupCreatedViaAnnotation = toOneLookupCreatedViaAnnotation;
}
public SupportedFieldType getFieldType() {
return fieldType;
}
public void setFieldType(SupportedFieldType fieldType) {
this.fieldType = fieldType;
}
public SupportedFieldType getSecondaryType() {
return secondaryType;
}
public void setSecondaryType(SupportedFieldType secondaryType) {
this.secondaryType = secondaryType;
}
public Integer getLength() {
return length;
}
public void setLength(Integer length) {
this.length = length;
}
public Boolean getRequired() {
return required;
}
public void setRequired(Boolean required) {
this.required = required;
}
public Integer getScale() {
return scale;
}
public void setScale(Integer scale) {
this.scale = scale;
}
public Integer getPrecision() {
return precision;
}
public void setPrecision(Integer precision) {
this.precision = precision;
}
public Boolean getUnique() {
return unique;
}
public void setUnique(Boolean unique) {
this.unique = unique;
}
public String getForeignKeyProperty() {
return foreignKeyProperty;
}
public void setForeignKeyProperty(String foreignKeyProperty) {
this.foreignKeyProperty = foreignKeyProperty;
}
public String getForeignKeyClass() {
return foreignKeyClass;
}
public void setForeignKeyClass(String foreignKeyClass) {
this.foreignKeyClass = foreignKeyClass;
}
public Boolean getForeignKeyCollection() {
return foreignKeyCollection;
}
public void setForeignKeyCollection(Boolean foreignKeyCollection) {
this.foreignKeyCollection = foreignKeyCollection;
}
public MergedPropertyType getMergedPropertyType() {
return mergedPropertyType;
}
public void setMergedPropertyType(MergedPropertyType mergedPropertyType) {
this.mergedPropertyType = mergedPropertyType;
}
public String[][] getEnumerationValues() {
return enumerationValues;
}
public void setEnumerationValues(String[][] enumerationValues) {
this.enumerationValues = enumerationValues;
}
public String getForeignKeyDisplayValueProperty() {
return foreignKeyDisplayValueProperty;
}
public void setForeignKeyDisplayValueProperty(String foreignKeyDisplayValueProperty) {
this.foreignKeyDisplayValueProperty = foreignKeyDisplayValueProperty;
}
public String getEnumerationClass() {
return enumerationClass;
}
public void setEnumerationClass(String enumerationClass) {
this.enumerationClass = enumerationClass;
}
public Boolean getIsDerived() {
return isDerived;
}
public void setDerived(Boolean isDerived) {
this.isDerived = isDerived;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public SupportedFieldType getExplicitFieldType() {
return explicitFieldType;
}
public void setExplicitFieldType(SupportedFieldType fieldType) {
this.explicitFieldType = fieldType;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public Boolean isLargeEntry() {
return largeEntry;
}
public void setLargeEntry(Boolean largeEntry) {
this.largeEntry = largeEntry;
}
public Boolean isProminent() {
return prominent;
}
public void setProminent(Boolean prominent) {
this.prominent = prominent;
}
public String getColumnWidth() {
return columnWidth;
}
public void setColumnWidth(String columnWidth) {
this.columnWidth = columnWidth;
}
public String getBroadleafEnumeration() {
return broadleafEnumeration;
}
public void setBroadleafEnumeration(String broadleafEnumeration) {
this.broadleafEnumeration = broadleafEnumeration;
}
public Boolean getReadOnly() {
return readOnly;
}
public Boolean getTranslatable() {
return translatable;
}
public void setTranslatable(Boolean translatable) {
this.translatable = translatable;
}
public String getTab() {
return tab;
}
public void setTab(String tab) {
this.tab = tab;
}
public Integer getTabOrder() {
return tabOrder;
}
public void setTabOrder(Integer tabOrder) {
this.tabOrder = tabOrder;
}
public void setReadOnly(Boolean readOnly) {
this.readOnly = readOnly;
}
public Integer getGroupOrder() {
return groupOrder;
}
public void setGroupOrder(Integer groupOrder) {
this.groupOrder = groupOrder;
}
public Integer getGridOrder() {
return gridOrder;
}
public void setGridOrder(Integer gridOrder) {
this.gridOrder = gridOrder;
}
public Map<String, Map<String, String>> getValidationConfigurations() {
return validationConfigurations;
}
public void setValidationConfigurations(Map<String, Map<String, String>> validationConfigurations) {
this.validationConfigurations = validationConfigurations;
}
public Boolean getRequiredOverride() {
return requiredOverride;
}
public void setRequiredOverride(Boolean requiredOverride) {
this.requiredOverride = requiredOverride;
}
public Boolean getGroupCollapsed() {
return groupCollapsed;
}
public void setGroupCollapsed(Boolean groupCollapsed) {
this.groupCollapsed = groupCollapsed;
}
public String getTooltip() {
return tooltip;
}
public void setTooltip(String tooltip) {
this.tooltip = tooltip;
}
public String getHelpText() {
return helpText;
}
public void setHelpText(String helpText) {
this.helpText = helpText;
}
public String getHint() {
return hint;
}
public void setHint(String hint) {
this.hint = hint;
}
public VisibilityEnum getVisibility() {
return visibility;
}
public void setVisibility(VisibilityEnum visibility) {
this.visibility = visibility;
}
public String getLookupDisplayProperty() {
return lookupDisplayProperty;
}
public void setLookupDisplayProperty(String lookupDisplayProperty) {
this.lookupDisplayProperty = lookupDisplayProperty;
}
public Boolean getForcePopulateChildProperties() {
return forcePopulateChildProperties;
}
public void setForcePopulateChildProperties(Boolean forcePopulateChildProperties) {
this.forcePopulateChildProperties = forcePopulateChildProperties;
}
public Boolean getOptionCanEditValues() {
return optionCanEditValues;
}
public void setOptionCanEditValues(Boolean optionCanEditValues) {
this.optionCanEditValues = optionCanEditValues;
}
public String getOptionDisplayFieldName() {
return optionDisplayFieldName;
}
public void setOptionDisplayFieldName(String optionDisplayFieldName) {
this.optionDisplayFieldName = optionDisplayFieldName;
}
public String getOptionListEntity() {
return optionListEntity;
}
public void setOptionListEntity(String optionListEntity) {
this.optionListEntity = optionListEntity;
}
public String getOptionValueFieldName() {
return optionValueFieldName;
}
public void setOptionValueFieldName(String optionValueFieldName) {
this.optionValueFieldName = optionValueFieldName;
}
public Serializable[][] getOptionFilterValues() {
return optionFilterValues;
}
public void setOptionFilterValues(Serializable[][] optionFilterValues) {
this.optionFilterValues = optionFilterValues;
}
public String getRuleIdentifier() {
return ruleIdentifier;
}
public void setRuleIdentifier(String ruleIdentifier) {
this.ruleIdentifier = ruleIdentifier;
}
public Boolean getSearchable() {
return searchable;
}
public void setSearchable(Boolean searchable) {
this.searchable = searchable;
}
public String getMapFieldValueClass() {
return mapFieldValueClass;
}
public void setMapFieldValueClass(String mapFieldValueClass) {
this.mapFieldValueClass = mapFieldValueClass;
}
//collection fields
private String[] customCriteria;
private OperationType addType;
private OperationType removeType;
private OperationType updateType;
private OperationType fetchType;
private OperationType inspectType;
private Boolean useServerSideInspectionCache;
public String[] getCustomCriteria() {
return customCriteria;
}
public void setCustomCriteria(String[] customCriteria) {
this.customCriteria = customCriteria;
}
public Boolean getUseServerSideInspectionCache() {
return useServerSideInspectionCache;
}
public void setUseServerSideInspectionCache(Boolean useServerSideInspectionCache) {
this.useServerSideInspectionCache = useServerSideInspectionCache;
}
public OperationType getAddType() {
return addType;
}
public void setAddType(OperationType addType) {
this.addType = addType;
}
public OperationType getFetchType() {
return fetchType;
}
public void setFetchType(OperationType fetchType) {
this.fetchType = fetchType;
}
public OperationType getInspectType() {
return inspectType;
}
public void setInspectType(OperationType inspectType) {
this.inspectType = inspectType;
}
public OperationType getRemoveType() {
return removeType;
}
public void setRemoveType(OperationType removeType) {
this.removeType = removeType;
}
public OperationType getUpdateType() {
return updateType;
}
public void setUpdateType(OperationType updateType) {
this.updateType = updateType;
}
//basic collection fields
private AddMethodType addMethodType;
private String manyToField;
public AddMethodType getAddMethodType() {
return addMethodType;
}
public void setAddMethodType(AddMethodType addMethodType) {
this.addMethodType = addMethodType;
}
public String getManyToField() {
return manyToField;
}
public void setManyToField(String manyToField) {
this.manyToField = manyToField;
}
//Adorned target fields
private String parentObjectProperty;
private String parentObjectIdProperty;
private String targetObjectProperty;
private String[] maintainedAdornedTargetFields;
private String[] gridVisibleFields;
private String targetObjectIdProperty;
private String joinEntityClass;
private String sortProperty;
private Boolean sortAscending;
private Boolean ignoreAdornedProperties;
public String[] getGridVisibleFields() {
return gridVisibleFields;
}
public void setGridVisibleFields(String[] gridVisibleFields) {
this.gridVisibleFields = gridVisibleFields;
}
public Boolean isIgnoreAdornedProperties() {
return ignoreAdornedProperties;
}
public void setIgnoreAdornedProperties(Boolean ignoreAdornedProperties) {
this.ignoreAdornedProperties = ignoreAdornedProperties;
}
public String[] getMaintainedAdornedTargetFields() {
return maintainedAdornedTargetFields;
}
public void setMaintainedAdornedTargetFields(String[] maintainedAdornedTargetFields) {
this.maintainedAdornedTargetFields = maintainedAdornedTargetFields;
}
public String getParentObjectIdProperty() {
return parentObjectIdProperty;
}
public void setParentObjectIdProperty(String parentObjectIdProperty) {
this.parentObjectIdProperty = parentObjectIdProperty;
}
public String getParentObjectProperty() {
return parentObjectProperty;
}
public void setParentObjectProperty(String parentObjectProperty) {
this.parentObjectProperty = parentObjectProperty;
}
public Boolean isSortAscending() {
return sortAscending;
}
public void setSortAscending(Boolean sortAscending) {
this.sortAscending = sortAscending;
}
public String getSortProperty() {
return sortProperty;
}
public void setSortProperty(String sortProperty) {
this.sortProperty = sortProperty;
}
public String getTargetObjectIdProperty() {
return targetObjectIdProperty;
}
public void setTargetObjectIdProperty(String targetObjectIdProperty) {
this.targetObjectIdProperty = targetObjectIdProperty;
}
public String getJoinEntityClass() {
return joinEntityClass;
}
public void setJoinEntityClass(String joinEntityClass) {
this.joinEntityClass = joinEntityClass;
}
public String getTargetObjectProperty() {
return targetObjectProperty;
}
public void setTargetObjectProperty(String targetObjectProperty) {
this.targetObjectProperty = targetObjectProperty;
}
//Map fields
private String keyClass;
private String keyPropertyFriendlyName;
private String valueClass;
private Boolean deleteEntityUponRemove;
private String valuePropertyFriendlyName;
private UnspecifiedBooleanType isSimpleValue;
private String mediaField;
private String[][] keys;
private String mapKeyValueProperty;
private String mapKeyOptionEntityClass;
private String mapKeyOptionEntityDisplayField;
private String mapKeyOptionEntityValueField;
private String currencyCodeField;
private Boolean forceFreeFormKeys;
public Boolean isDeleteEntityUponRemove() {
return deleteEntityUponRemove;
}
public void setDeleteEntityUponRemove(Boolean deleteEntityUponRemove) {
this.deleteEntityUponRemove = deleteEntityUponRemove;
}
public UnspecifiedBooleanType getSimpleValue() {
return isSimpleValue;
}
public void setSimpleValue(UnspecifiedBooleanType simpleValue) {
isSimpleValue = simpleValue;
}
public String getKeyClass() {
return keyClass;
}
public void setKeyClass(String keyClass) {
this.keyClass = keyClass;
}
public String getKeyPropertyFriendlyName() {
return keyPropertyFriendlyName;
}
public void setKeyPropertyFriendlyName(String keyPropertyFriendlyName) {
this.keyPropertyFriendlyName = keyPropertyFriendlyName;
}
public String[][] getKeys() {
return keys;
}
public void setKeys(String[][] keys) {
this.keys = keys;
}
public String getMapKeyOptionEntityClass() {
return mapKeyOptionEntityClass;
}
public void setMapKeyOptionEntityClass(String mapKeyOptionEntityClass) {
this.mapKeyOptionEntityClass = mapKeyOptionEntityClass;
}
public String getMapKeyOptionEntityDisplayField() {
return mapKeyOptionEntityDisplayField;
}
public void setMapKeyOptionEntityDisplayField(String mapKeyOptionEntityDisplayField) {
this.mapKeyOptionEntityDisplayField = mapKeyOptionEntityDisplayField;
}
public String getMapKeyOptionEntityValueField() {
return mapKeyOptionEntityValueField;
}
public void setMapKeyOptionEntityValueField(String mapKeyOptionEntityValueField) {
this.mapKeyOptionEntityValueField = mapKeyOptionEntityValueField;
}
public String getMediaField() {
return mediaField;
}
public void setMediaField(String mediaField) {
this.mediaField = mediaField;
}
public String getValueClass() {
return valueClass;
}
public void setValueClass(String valueClass) {
this.valueClass = valueClass;
}
public String getValuePropertyFriendlyName() {
return valuePropertyFriendlyName;
}
public void setValuePropertyFriendlyName(String valuePropertyFriendlyName) {
this.valuePropertyFriendlyName = valuePropertyFriendlyName;
}
public String getShowIfProperty() {
return showIfProperty;
}
public void setShowIfProperty(String showIfProperty) {
this.showIfProperty = showIfProperty;
}
public String getCurrencyCodeField() {
return currencyCodeField;
}
public void setCurrencyCodeField(String currencyCodeField) {
this.currencyCodeField = currencyCodeField;
}
public LookupType getLookupType() {
return lookupType;
}
public void setLookupType(LookupType lookupType) {
this.lookupType = lookupType;
}
public Boolean getForceFreeFormKeys() {
return forceFreeFormKeys;
}
public void setForceFreeFormKeys(Boolean forceFreeFormKeys) {
this.forceFreeFormKeys = forceFreeFormKeys;
}
public String getMapKeyValueProperty() {
return mapKeyValueProperty;
}
public void setMapKeyValueProperty(String mapKeyValueProperty) {
this.mapKeyValueProperty = mapKeyValueProperty;
}
}
| 0true
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_override_FieldMetadataOverride.java
|
106 |
REGEX {
@Override
public boolean evaluate(Object value, Object condition) {
this.preevaluate(value,condition);
if (value == null) return false;
return evaluateRaw(value.toString(),(String)condition);
}
public boolean evaluateRaw(String value, String regex) {
return value.matches(regex);
}
@Override
public boolean isValidCondition(Object condition) {
return condition != null && condition instanceof String && StringUtils.isNotBlank(condition.toString());
}
};
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_attribute_Text.java
|
267 |
public class LoggingMailSender extends JavaMailSenderImpl {
private static final Log LOG = LogFactory.getLog(LoggingMailSender.class);
@Override
public void send(MimeMessagePreparator[] mimeMessagePreparators) throws MailException {
for (MimeMessagePreparator preparator : mimeMessagePreparators) {
try {
MimeMessage mimeMessage = createMimeMessage();
preparator.prepare(mimeMessage);
LOG.info("\"Sending\" email: ");
if (mimeMessage.getContent() instanceof MimeMultipart) {
MimeMultipart msg = (MimeMultipart) mimeMessage.getContent();
DataHandler dh = msg.getBodyPart(0).getDataHandler();
ByteArrayOutputStream baos = null;
try {
baos = new ByteArrayOutputStream();
dh.writeTo(baos);
// Log the captured multipart body so the "sent" content is actually visible
LOG.info(baos.toString());
} catch (Exception e) {
// Do nothing
} finally {
try {
baos.close();
} catch (Exception e) {
LOG.error("Couldn't close byte array output stream");
}
}
} else {
LOG.info(mimeMessage.getContent());
}
} catch (Exception e) {
LOG.error("Could not create message", e);
}
}
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_email_service_LoggingMailSender.java
|
2,806 |
private static class DefaultProcessor extends AnalysisBinderProcessor {
@Override
public void processCharFilters(CharFiltersBindings charFiltersBindings) {
charFiltersBindings.processCharFilter("html_strip", HtmlStripCharFilterFactory.class);
charFiltersBindings.processCharFilter("pattern_replace", PatternReplaceCharFilterFactory.class);
}
@Override
public void processTokenFilters(TokenFiltersBindings tokenFiltersBindings) {
tokenFiltersBindings.processTokenFilter("stop", StopTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("reverse", ReverseTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("asciifolding", ASCIIFoldingTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("length", LengthTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("lowercase", LowerCaseTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("porter_stem", PorterStemTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("kstem", KStemTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("standard", StandardTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("nGram", NGramTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("ngram", NGramTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("edgeNGram", EdgeNGramTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("edge_ngram", EdgeNGramTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("shingle", ShingleTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("unique", UniqueTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("truncate", TruncateTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("trim", TrimTokenFilterFactory.class);
tokenFiltersBindings.processTokenFilter("limit", LimitTokenCountFilterFactory.class);
tokenFiltersBindings.processTokenFilter("common_grams", CommonGramsTokenFilterFactory.class);
}
@Override
public void processTokenizers(TokenizersBindings tokenizersBindings) {
tokenizersBindings.processTokenizer("standard", StandardTokenizerFactory.class);
tokenizersBindings.processTokenizer("uax_url_email", UAX29URLEmailTokenizerFactory.class);
tokenizersBindings.processTokenizer("path_hierarchy", PathHierarchyTokenizerFactory.class);
tokenizersBindings.processTokenizer("keyword", KeywordTokenizerFactory.class);
tokenizersBindings.processTokenizer("letter", LetterTokenizerFactory.class);
tokenizersBindings.processTokenizer("lowercase", LowerCaseTokenizerFactory.class);
tokenizersBindings.processTokenizer("whitespace", WhitespaceTokenizerFactory.class);
tokenizersBindings.processTokenizer("nGram", NGramTokenizerFactory.class);
tokenizersBindings.processTokenizer("ngram", NGramTokenizerFactory.class);
tokenizersBindings.processTokenizer("edgeNGram", EdgeNGramTokenizerFactory.class);
tokenizersBindings.processTokenizer("edge_ngram", EdgeNGramTokenizerFactory.class);
}
@Override
public void processAnalyzers(AnalyzersBindings analyzersBindings) {
analyzersBindings.processAnalyzer("default", StandardAnalyzerProvider.class);
analyzersBindings.processAnalyzer("standard", StandardAnalyzerProvider.class);
analyzersBindings.processAnalyzer("standard_html_strip", StandardHtmlStripAnalyzerProvider.class);
analyzersBindings.processAnalyzer("simple", SimpleAnalyzerProvider.class);
analyzersBindings.processAnalyzer("stop", StopAnalyzerProvider.class);
analyzersBindings.processAnalyzer("whitespace", WhitespaceAnalyzerProvider.class);
analyzersBindings.processAnalyzer("keyword", KeywordAnalyzerProvider.class);
}
}
| 0true
|
src_main_java_org_elasticsearch_index_analysis_AnalysisModule.java
|
1,087 |
public class PartitionGroupConfig {
private boolean enabled = false;
private MemberGroupType groupType = MemberGroupType.PER_MEMBER;
private final List<MemberGroupConfig> memberGroupConfigs = new LinkedList<MemberGroupConfig>();
public enum MemberGroupType {
HOST_AWARE, CUSTOM, PER_MEMBER
}
/**
* Checks if this PartitionGroupConfig is enabled.
*
* @return true if enabled, false otherwise.
*/
public boolean isEnabled() {
return enabled;
}
/**
* Enables or disables this PartitionGroupConfig.
*
* @param enabled true if enabled, false if disabled.
* @return the updated PartitionGroupConfig.
*/
public PartitionGroupConfig setEnabled(final boolean enabled) {
this.enabled = enabled;
return this;
}
/**
* Returns the configured MemberGroupType; defaults to {@link MemberGroupType#PER_MEMBER} and is never null.
*
* @return the MemberGroupType.
*/
public MemberGroupType getGroupType() {
return groupType;
}
/**
* Sets the MemberGroupType. A {@link MemberGroupType#CUSTOM} value indicates that custom groups are created.
* With the {@link MemberGroupType#HOST_AWARE} group type, Hazelcast creates one group per host, which prevents
* a single host from holding both a primary and its backup. See {@link MemberGroupConfig} for more information.
*
* @param memberGroupType the MemberGroupType to set.
* @return the updated PartitionGroupConfig
* @throws IllegalArgumentException if memberGroupType is null.
* @see #getGroupType()
*/
public PartitionGroupConfig setGroupType(MemberGroupType memberGroupType) {
this.groupType = isNotNull(memberGroupType, "memberGroupType");
return this;
}
/**
* Adds a {@link MemberGroupConfig}. Duplicate elements are not filtered.
*
* @param memberGroupConfig the MemberGroupConfig to add.
* @return the updated PartitionGroupConfig
* @throws IllegalArgumentException if memberGroupConfig is null.
* @see #addMemberGroupConfig(MemberGroupConfig)
*/
public PartitionGroupConfig addMemberGroupConfig(MemberGroupConfig memberGroupConfig) {
memberGroupConfigs.add(isNotNull(memberGroupConfig, "MemberGroupConfig"));
return this;
}
/**
* Returns an unmodifiable collection containing all {@link MemberGroupConfig} elements.
*
* @return the MemberGroupConfig elements.
* @see #setMemberGroupConfigs(java.util.Collection)
*/
public Collection<MemberGroupConfig> getMemberGroupConfigs() {
return Collections.unmodifiableCollection(memberGroupConfigs);
}
/**
* Removes all the {@link MemberGroupConfig} instances.
*
* @return the updated PartitionGroupConfig.
* @see #setMemberGroupConfigs(java.util.Collection)
*/
public PartitionGroupConfig clear() {
memberGroupConfigs.clear();
return this;
}
/**
     * Sets the collection of MemberGroupConfig elements, replacing any previously configured groups. These
     * MemberGroupConfig elements only have meaning when the group-type is set to {@link MemberGroupType#CUSTOM}.
     * See the {@link PartitionGroupConfig} for more information and examples of how this mechanism works.
*
* @param memberGroupConfigs the collection of MemberGroupConfig to add.
* @return the updated PartitionGroupConfig
* @throws IllegalArgumentException if memberGroupConfigs is null.
* @see #getMemberGroupConfigs()
* @see #clear()
* @see #addMemberGroupConfig(MemberGroupConfig)
*/
public PartitionGroupConfig setMemberGroupConfigs(Collection<MemberGroupConfig> memberGroupConfigs) {
isNotNull(memberGroupConfigs, "memberGroupConfigs");
this.memberGroupConfigs.clear();
this.memberGroupConfigs.addAll(memberGroupConfigs);
return this;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("PartitionGroupConfig");
sb.append("{enabled=").append(enabled);
sb.append(", groupType=").append(groupType);
sb.append(", memberGroupConfigs=").append(memberGroupConfigs);
sb.append('}');
return sb.toString();
}
}
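// Hedged usage sketch (not part of the original class): shows how the CUSTOM group type
// described in the javadoc above could be wired up with two explicit member groups. The
// interface patterns are illustrative assumptions, not values taken from this codebase.
class PartitionGroupConfigUsageSketch {
    static PartitionGroupConfig buildCustomGroups() {
        MemberGroupConfig rackA = new MemberGroupConfig();
        rackA.addInterface("10.10.1.*"); // hypothetical addresses of rack A members
        MemberGroupConfig rackB = new MemberGroupConfig();
        rackB.addInterface("10.10.2.*"); // hypothetical addresses of rack B members
        // with two groups, backups of partitions owned in one group are kept in the other
        return new PartitionGroupConfig()
                .setEnabled(true)
                .setGroupType(PartitionGroupConfig.MemberGroupType.CUSTOM)
                .addMemberGroupConfig(rackA)
                .addMemberGroupConfig(rackB);
    }
}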
| 0true
|
hazelcast_src_main_java_com_hazelcast_config_PartitionGroupConfig.java
|
2,618 |
class JoinResponse extends TransportResponse {
ClusterState clusterState;
JoinResponse() {
}
JoinResponse(ClusterState clusterState) {
this.clusterState = clusterState;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterState = ClusterState.Builder.readFrom(in, nodesProvider.nodes().localNode());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
ClusterState.Builder.writeTo(clusterState, out);
}
}
| 0true
|
src_main_java_org_elasticsearch_discovery_zen_membership_MembershipAction.java
|
1,549 |
public abstract class HazelcastMBean<T> implements DynamicMBean, MBeanRegistration {
protected HashMap<String, BeanInfo> attributeMap = new HashMap<String, BeanInfo>();
protected HashMap<String, BeanInfo> operationMap = new HashMap<String, BeanInfo>();
final T managedObject;
final ManagementService service;
String description;
ObjectName objectName;
protected HazelcastMBean(T managedObject, ManagementService service) {
this.managedObject = managedObject;
this.service = service;
}
public void register(HazelcastMBean mbean){
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
try {
mbs.registerMBean(mbean, mbean.objectName);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private void scan() throws Exception {
ManagedDescription descAnn = getClass().getAnnotation(ManagedDescription.class);
if (descAnn != null){
description = descAnn.value();
}
for (Method method: getClass().getMethods()){
if (method.isAnnotationPresent(ManagedAnnotation.class)){
ManagedAnnotation ann = method.getAnnotation(ManagedAnnotation.class);
String name = ann.value();
if (name.isEmpty()){
throw new IllegalArgumentException("Name cannot be empty!");
}
boolean operation = ann.operation();
HashMap<String, BeanInfo> map = operation ? operationMap : attributeMap;
if (map.containsKey(name)){
throw new IllegalArgumentException("Duplicate name: " + name);
}
descAnn = method.getAnnotation(ManagedDescription.class);
String desc = null;
if (descAnn != null){
desc = descAnn.value();
}
map.put(name, new BeanInfo(name, desc, method));
}
}
}
@Override
public Object getAttribute(String attribute)
throws AttributeNotFoundException, MBeanException, ReflectionException {
if (attribute == null || attribute.length() == 0)
throw new NullPointerException("Invalid null attribute requested");
BeanInfo info = attributeMap.get(attribute);
try {
return info.method.invoke(this);
} catch (Exception e) {
throw new ReflectionException(e);
}
}
public void setObjectName(Hashtable<String,String> properties){
try {
objectName = new ObjectName(ManagementService.DOMAIN, properties);
} catch (MalformedObjectNameException e) {
throw new IllegalArgumentException("Failed to create an ObjectName",e);
}
}
@Override
public void setAttribute(Attribute attribute)
throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
throw new UnsupportedOperationException();
}
@Override
public AttributeList getAttributes(String[] attributes) {
AttributeList list = new AttributeList(attributes.length);
try {
for (String attribute : attributes) {
list.add(new Attribute(attribute, getAttribute(attribute)));
}
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
return list;
}
@Override
public AttributeList setAttributes(AttributeList attributes) {
throw new UnsupportedOperationException();
}
@Override
public Object invoke(String actionName, Object[] params, String[] signature)
throws MBeanException, ReflectionException {
if (actionName == null || actionName.isEmpty()){
throw new IllegalArgumentException("Empty actionName");
}
BeanInfo info = operationMap.get(actionName);
if (info == null){
throw new UnsupportedOperationException("Operation: " + actionName + " not registered");
}
try {
return info.method.invoke(this, params);
} catch (Exception e) {
throw new ReflectionException(e);
}
}
@Override
public MBeanInfo getMBeanInfo() {
String className = managedObject.getClass().getName();
return new MBeanInfo(className, description, attributeInfos(), null, operationInfos(), null);
}
private MBeanAttributeInfo[] attributeInfos(){
MBeanAttributeInfo[] array = new MBeanAttributeInfo[attributeMap.size()];
int i = 0;
for (BeanInfo beanInfo: attributeMap.values()){
array[i++] = beanInfo.getAttributeInfo();
}
return array;
}
private MBeanOperationInfo[] operationInfos(){
MBeanOperationInfo[] array = new MBeanOperationInfo[operationMap.size()];
int i = 0;
for (BeanInfo beanInfo: operationMap.values()){
array[i++] = beanInfo.getOperationInfo();
}
return array;
}
private class BeanInfo {
final String name;
final String description;
transient Method method;
public BeanInfo(String name, String description, Method method){
this.name = name;
this.description = description;
this.method = method;
}
public MBeanAttributeInfo getAttributeInfo() {
try {
return new MBeanAttributeInfo(name, description, method, null);
} catch (IntrospectionException e) {
throw new IllegalArgumentException();
}
}
public MBeanOperationInfo getOperationInfo(){
return new MBeanOperationInfo(description, method);
}
}
@Override
public ObjectName preRegister(MBeanServer server, ObjectName name) throws Exception {
try {
scan();
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
return objectName;
}
@Override
public void postRegister(Boolean registrationDone) {
}
@Override
public void preDeregister() throws Exception {
}
@Override
public void postDeregister() {
}
}
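// Hedged sketch (not from the original file): illustrates how the annotation scan in scan()
// above is expected to pick up attributes and operations on a concrete MBean. The managed
// object type, method names, and descriptions are assumptions made for the example.
@ManagedDescription("example bean")
class ExampleMBeanSketch extends HazelcastMBean<Object> {
    protected ExampleMBeanSketch(Object managedObject, ManagementService service) {
        super(managedObject, service);
    }
    @ManagedAnnotation("size") // exposed as the read-only attribute "size"
    @ManagedDescription("number of entries")
    public int size() {
        return 42; // placeholder value for the sketch
    }
    @ManagedAnnotation(value = "clear", operation = true) // exposed as the JMX operation "clear"
    public void clear() {
        // no-op in this sketch
    }
}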
| 0true
|
hazelcast_src_main_java_com_hazelcast_jmx_HazelcastMBean.java
|
2,476 |
static class Wrapped extends PrioritizedRunnable {
private final Runnable runnable;
private Wrapped(Runnable runnable, Priority priority) {
super(priority);
this.runnable = runnable;
}
@Override
public void run() {
runnable.run();
}
}
| 0true
|
src_main_java_org_elasticsearch_common_util_concurrent_PrioritizedRunnable.java
|
2,863 |
public class GalicianAnalyzerProvider extends AbstractIndexAnalyzerProvider<GalicianAnalyzer> {
private final GalicianAnalyzer analyzer;
@Inject
public GalicianAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
analyzer = new GalicianAnalyzer(version,
Analysis.parseStopWords(env, settings, GalicianAnalyzer.getDefaultStopSet(), version),
Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET, version));
}
@Override
public GalicianAnalyzer get() {
return this.analyzer;
}
}
| 0true
|
src_main_java_org_elasticsearch_index_analysis_GalicianAnalyzerProvider.java
|
2,586 |
class RejoinClusterRequestHandler extends BaseTransportRequestHandler<RejoinClusterRequest> {
static final String ACTION = "discovery/zen/rejoin";
@Override
public RejoinClusterRequest newInstance() {
return new RejoinClusterRequest();
}
@Override
public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception {
clusterService.submitStateUpdateTask("received a request to rejoin the cluster from [" + request.fromNodeId + "]", Priority.URGENT, new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
try {
channel.sendResponse(TransportResponse.Empty.INSTANCE);
} catch (Exception e) {
logger.warn("failed to send response on rejoin cluster request handling", e);
}
return rejoin(currentState, "received a request to rejoin the cluster from [" + request.fromNodeId + "]");
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java
|
625 |
indexEngine.getEntriesMinor(iRangeTo, isInclusive, null, new OIndexEngine.EntriesResultListener() {
@Override
public boolean addResult(ODocument entry) {
return entriesResultListener.addResult(entry);
}
});
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_index_OIndexOneValue.java
|
284 |
public abstract class ActionRequestBuilder<Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> {
protected final Request request;
protected final InternalGenericClient client;
protected ActionRequestBuilder(InternalGenericClient client, Request request) {
this.client = client;
this.request = request;
}
public Request request() {
return this.request;
}
@SuppressWarnings("unchecked")
public final RequestBuilder setListenerThreaded(boolean listenerThreaded) {
request.listenerThreaded(listenerThreaded);
return (RequestBuilder) this;
}
@SuppressWarnings("unchecked")
public final RequestBuilder putHeader(String key, Object value) {
request.putHeader(key, value);
return (RequestBuilder) this;
}
public ListenableActionFuture<Response> execute() {
PlainListenableActionFuture<Response> future = new PlainListenableActionFuture<Response>(request.listenerThreaded(), client.threadPool());
execute(future);
return future;
}
/**
* Short version of execute().actionGet().
*/
public Response get() throws ElasticsearchException {
return execute().actionGet();
}
/**
* Short version of execute().actionGet().
*/
public Response get(TimeValue timeout) throws ElasticsearchException {
return execute().actionGet(timeout);
}
/**
* Short version of execute().actionGet().
*/
public Response get(String timeout) throws ElasticsearchException {
return execute().actionGet(timeout);
}
public void execute(ActionListener<Response> listener) {
doExecute(listener);
}
protected abstract void doExecute(ActionListener<Response> listener);
}
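// Hedged usage sketch (not part of the original class): shows the equivalent call styles a
// concrete builder exposes, here a search builder obtained from an assumed Client instance.
// The index name and timeout are illustrative assumptions.
class ActionRequestBuilderUsageSketch {
    SearchResponse run(Client client) {
        // get() is the blocking shorthand for execute().actionGet()
        SearchResponse direct = client.prepareSearch("my-index").setSize(10).get();
        // the asynchronous form returns a future that can be waited on with an explicit timeout
        ListenableActionFuture<SearchResponse> future = client.prepareSearch("my-index").execute();
        SearchResponse viaFuture = future.actionGet(TimeValue.timeValueSeconds(5));
        return viaFuture != null ? viaFuture : direct;
    }
}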
| 0true
|
src_main_java_org_elasticsearch_action_ActionRequestBuilder.java
|
1,301 |
public interface Field {
/**
* Gets the id
* @return the id
*/
public Long getId();
/**
* Sets the id
* @param id
*/
public void setId(Long id);
/**
* Gets the entityType of this Field
* @return the entityType
*/
public FieldEntity getEntityType();
/**
* Sets the entityType
* @param entityType
*/
public void setEntityType(FieldEntity entityType);
/**
* Gets the propertyName of this Field. This would be something like "manufacturer" or "defaultSku.price"
* if the EntityType was "product"
* @return the propertyName
*/
public String getPropertyName();
/**
* Sets the propertyName
* @param propertyName
*/
public void setPropertyName(String propertyName);
/**
* Gets the abbreviation of this Field. This will be used in URL query string parameters for sorting and
* filtering
* @return the abbreviation
*/
public String getAbbreviation();
/**
* Sets the abbreviation
* @param abbreviation
*/
public void setAbbreviation(String abbreviation);
/**
* Gets the searchable flag
* @return whether or not this Field is searchable
*/
public Boolean getSearchable();
/**
* Sets the searchable flag
* @param searchable
*/
public void setSearchable(Boolean searchable);
/**
* Sets the facet field type
* @param facetFieldType
*/
public void setFacetFieldType(FieldType facetFieldType);
/**
     * Gets the facet field type. Note that the facet field type is also the type used to perform sorting.
     * Any field that should support faceting or sorting should have this FieldType specified.
*
* @see #getSearchableFieldTypes()
* @return the facet field type
*/
public FieldType getFacetFieldType();
/**
* Sets the searchableFieldTypes
*
* @see #getSearchableFieldTypes()
* @param searchableFieldTypes
*/
public void setSearchableFieldTypes(List<FieldType> searchableFieldTypes);
/**
* Gets the dynamic searchable field types. For example, in solr, if you wanted to index a field as both
* text and string, you would have two searchable field types, String and Text
*
* @return the searchable types for this field
*/
public List<FieldType> getSearchableFieldTypes();
/**
* Gets the searchConfigs. Note that a concrete implementation or usage of this class is not available
* in the community version of Broadleaf Commerce.
* @return the searchConfigs
*/
public List<SearchConfig> getSearchConfigs();
/**
* Sets the searchConfigs.
* @param searchConfigs
*/
public void setSearchConfigs (List<SearchConfig> searchConfigs);
/**
* Returns the qualified name of this Field. The default implementation returns the entityType joined
     * with the propertyName by a "."
* @return the qualifiedFieldName
*/
public String getQualifiedFieldName();
/**
* Returns whether or not this field should be considered translatable
* @return translatable
*/
public Boolean getTranslatable();
/**
* Sets whether or not this field should be considered translatable
* @param translatable
*/
public void setTranslatable(Boolean translatable);
}
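// Hedged illustration (not part of the interface): a minimal sketch of how a default
// implementation of getQualifiedFieldName() could compose its parts, per the javadoc above.
// The entity type and property name values are assumptions.
class FieldQualifiedNameSketch {
    static String qualify(String entityType, String propertyName) {
        return entityType + "." + propertyName; // e.g. "PRODUCT.defaultSku.price"
    }
}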
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_search_domain_Field.java
|
59 |
public interface FieldDefinition extends Serializable {
public Long getId();
public void setId(Long id);
public String getName();
public void setName(String name);
public SupportedFieldType getFieldType();
public void setFieldType(SupportedFieldType fieldType);
public String getSecurityLevel();
public void setSecurityLevel(String securityLevel);
public Boolean getHiddenFlag();
public void setHiddenFlag(Boolean hiddenFlag);
public String getValidationRegEx();
public void setValidationRegEx(String validationRegEx);
public Integer getMaxLength();
public void setMaxLength(Integer maxLength);
public String getColumnWidth();
public void setColumnWidth(String columnWidth);
public Boolean getTextAreaFlag();
public void setTextAreaFlag(Boolean textAreaFlag);
public FieldEnumeration getFieldEnumeration();
public void setFieldEnumeration(FieldEnumeration fieldEnumeration);
public Boolean getAllowMultiples();
public void setAllowMultiples(Boolean allowMultiples);
public String getFriendlyName();
public void setFriendlyName(String friendlyName);
public String getValidationErrorMesageKey();
public void setValidationErrorMesageKey(String validationErrorMesageKey);
public FieldGroup getFieldGroup();
public void setFieldGroup(FieldGroup fieldGroup);
public int getFieldOrder();
public void setFieldOrder(int fieldOrder);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_field_domain_FieldDefinition.java
|
1,749 |
public static class Issue1764UpdatingEntryProcessor
extends AbstractEntryProcessor<String, Issue1764Data> {
private static final long serialVersionUID = 1L;
private String newValue;
public Issue1764UpdatingEntryProcessor(String newValue) {
this.newValue = newValue;
}
public Object process(Map.Entry<String, Issue1764Data> entry) {
Issue1764Data data = entry.getValue();
data.setAttr1(newValue);
entry.setValue(data);
return true;
}
}
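// Hedged usage sketch (not part of the original test): shows how the entry processor above
// would typically be applied to a single key of an IMap. The value type comes from the
// snippet; the map, key, and new value are assumptions.
class Issue1764EntryProcessorUsageSketch {
    static void updateAttr(com.hazelcast.core.IMap<String, Issue1764Data> map) {
        // runs Issue1764UpdatingEntryProcessor.process on the member that owns "some-key"
        map.executeOnKey("some-key", new Issue1764UpdatingEntryProcessor("newValue"));
    }
}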
| 0true
|
hazelcast_src_test_java_com_hazelcast_map_EntryProcessorTest.java
|
2,053 |
public class GetEntryViewOperation extends KeyBasedMapOperation {
private EntryView<Data, Data> result;
public GetEntryViewOperation(String name, Data dataKey) {
super(name, dataKey);
}
public GetEntryViewOperation() {
}
public void run() {
MapService mapService = getService();
RecordStore recordStore = mapService.getRecordStore(getPartitionId(), name);
Record record = recordStore.getRecord(dataKey);
if (record != null) {
result = mapService.createSimpleEntryView(record.getKey(), mapService.toData(record.getValue()), record);
}
}
@Override
public Object getResponse() {
return result;
}
@Override
public String toString() {
return "GetEntryViewOperation{" +
'}';
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_operation_GetEntryViewOperation.java
|
1,100 |
public class OSQLFunctionFirst extends OSQLFunctionConfigurableAbstract {
public static final String NAME = "first";
private Object first = this;
public OSQLFunctionFirst() {
super(NAME, 1, 1);
}
public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters,
final OCommandContext iContext) {
Object value = iParameters[0];
if (value instanceof OSQLFilterItem)
value = ((OSQLFilterItem) value).getValue(iCurrentRecord, iContext);
if (OMultiValue.isMultiValue(value))
value = OMultiValue.getFirstValue(value);
if (first == this)
// ONLY THE FIRST TIME
first = value;
return value;
}
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
@Override
public Object getResult() {
return first;
}
@Override
public boolean filterResult() {
return true;
}
public String getSyntax() {
return "Syntax error: first(<field>)";
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_coll_OSQLFunctionFirst.java
|
1,209 |
public interface PaymentResponse {
public void addPaymentResponseItem(PaymentInfo paymentInfo, PaymentResponseItem paymentResponseItem);
public PaymentResponseItem getPaymentResponseItem(PaymentInfo paymentInfo);
public Map<PaymentInfo, PaymentResponseItem> getResponseItems();
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_module_PaymentResponse.java
|
1,614 |
public class PersistencePerspectiveItemVisitorAdapter implements PersistencePerspectiveItemVisitor {
@Override
public void visit(AdornedTargetList adornedTargetList) {
//do nothing
}
@Override
public void visit(MapStructure mapStructure) {
//do nothing
}
@Override
public void visit(SimpleValueMapStructure simpleValueMapStructure) {
//do nothing
}
@Override
public void visit(ForeignKey foreignKey) {
//do nothing
}
}
| 0true
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_visitor_PersistencePerspectiveItemVisitorAdapter.java
|
2,598 |
class SocketPacketWriter implements SocketWriter<Packet> {
private static final int CONST_BUFFER_NO = 4;
final TcpIpConnection connection;
final IOService ioService;
final ILogger logger;
private final PacketWriter packetWriter;
SocketPacketWriter(TcpIpConnection connection) {
this.connection = connection;
this.ioService = connection.getConnectionManager().ioService;
this.logger = ioService.getLogger(SocketPacketWriter.class.getName());
boolean symmetricEncryptionEnabled = CipherHelper.isSymmetricEncryptionEnabled(ioService);
if (symmetricEncryptionEnabled) {
packetWriter = new SymmetricCipherPacketWriter();
logger.info("Writer started with SymmetricEncryption");
} else {
packetWriter = new DefaultPacketWriter();
}
}
public boolean write(Packet socketWritable, ByteBuffer socketBuffer) throws Exception {
return packetWriter.writePacket(socketWritable, socketBuffer);
}
private interface PacketWriter {
boolean writePacket(Packet packet, ByteBuffer socketBB) throws Exception;
}
private static class DefaultPacketWriter implements PacketWriter {
public boolean writePacket(Packet packet, ByteBuffer socketBB) {
return packet.writeTo(socketBB);
}
}
private class SymmetricCipherPacketWriter implements PacketWriter {
final Cipher cipher;
ByteBuffer packetBuffer = ByteBuffer.allocate(ioService.getSocketSendBufferSize() * IOService.KILO_BYTE);
boolean packetWritten;
SymmetricCipherPacketWriter() {
cipher = init();
}
private Cipher init() {
Cipher c;
try {
c = CipherHelper.createSymmetricWriterCipher(ioService.getSymmetricEncryptionConfig());
} catch (Exception e) {
logger.severe("Symmetric Cipher for WriteHandler cannot be initialized.", e);
CipherHelper.handleCipherException(e, connection);
throw ExceptionUtil.rethrow(e);
}
return c;
}
public boolean writePacket(Packet packet, ByteBuffer socketBuffer) throws Exception {
if (!packetWritten) {
if (socketBuffer.remaining() < CONST_BUFFER_NO) {
return false;
}
int size = cipher.getOutputSize(packet.size());
socketBuffer.putInt(size);
if (packetBuffer.capacity() < packet.size()) {
packetBuffer = ByteBuffer.allocate(packet.size());
}
if (!packet.writeTo(packetBuffer)) {
throw new HazelcastException("Packet didn't fit into the buffer!");
}
packetBuffer.flip();
packetWritten = true;
}
if (socketBuffer.hasRemaining()) {
int outputSize = cipher.getOutputSize(packetBuffer.remaining());
if (outputSize <= socketBuffer.remaining()) {
cipher.update(packetBuffer, socketBuffer);
} else {
int min = Math.min(packetBuffer.remaining(), socketBuffer.remaining());
int len = min / 2;
if (len > 0) {
int limitOld = packetBuffer.limit();
packetBuffer.limit(packetBuffer.position() + len);
cipher.update(packetBuffer, socketBuffer);
packetBuffer.limit(limitOld);
}
}
if (!packetBuffer.hasRemaining()) {
if (socketBuffer.remaining() >= cipher.getOutputSize(0)) {
socketBuffer.put(cipher.doFinal());
packetWritten = false;
packetBuffer.clear();
return true;
}
}
}
return false;
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_SocketPacketWriter.java
|
2,697 |
public class LocalGatewayAllocator extends AbstractComponent implements GatewayAllocator {
public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards";
private final TransportNodesListGatewayStartedShards listGatewayStartedShards;
private final TransportNodesListShardStoreMetaData listShardStoreMetaData;
private final ConcurrentMap<ShardId, Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData>> cachedStores = ConcurrentCollections.newConcurrentMap();
private final ConcurrentMap<ShardId, ObjectLongOpenHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();
private final TimeValue listTimeout;
private final String initialShards;
@Inject
public LocalGatewayAllocator(Settings settings,
TransportNodesListGatewayStartedShards listGatewayStartedShards, TransportNodesListShardStoreMetaData listShardStoreMetaData) {
super(settings);
this.listGatewayStartedShards = listGatewayStartedShards;
this.listShardStoreMetaData = listShardStoreMetaData;
this.listTimeout = componentSettings.getAsTime("list_timeout", TimeValue.timeValueSeconds(30));
this.initialShards = componentSettings.get("initial_shards", "quorum");
logger.debug("using initial_shards [{}], list_timeout [{}]", initialShards, listTimeout);
}
@Override
public void applyStartedShards(StartedRerouteAllocation allocation) {
for (ShardRouting shardRouting : allocation.startedShards()) {
cachedStores.remove(shardRouting.shardId());
cachedShardsState.remove(shardRouting.shardId());
}
}
@Override
public void applyFailedShards(FailedRerouteAllocation allocation) {
for (ShardRouting failedShard : allocation.failedShards()) {
cachedStores.remove(failedShard.shardId());
cachedShardsState.remove(failedShard.shardId());
}
}
@Override
public boolean allocateUnassigned(RoutingAllocation allocation) {
boolean changed = false;
DiscoveryNodes nodes = allocation.nodes();
RoutingNodes routingNodes = allocation.routingNodes();
// First, handle primaries, they must find a place to be allocated on here
Iterator<MutableShardRouting> unassignedIterator = routingNodes.unassigned().iterator();
while (unassignedIterator.hasNext()) {
MutableShardRouting shard = unassignedIterator.next();
if (!shard.primary()) {
continue;
}
// this is an API allocation, ignore since we know there is no data...
if (!routingNodes.routingTable().index(shard.index()).shard(shard.id()).primaryAllocatedPostApi()) {
continue;
}
ObjectLongOpenHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard);
int numberOfAllocationsFound = 0;
long highestVersion = -1;
Set<DiscoveryNode> nodesWithHighestVersion = Sets.newHashSet();
final boolean[] states = nodesState.allocated;
final Object[] keys = nodesState.keys;
final long[] values = nodesState.values;
for (int i = 0; i < states.length; i++) {
if (!states[i]) {
continue;
}
DiscoveryNode node = (DiscoveryNode) keys[i];
long version = values[i];
// since we don't check in NO allocation, we need to double check here
if (allocation.shouldIgnoreShardForNode(shard.shardId(), node.id())) {
continue;
}
if (version != -1) {
numberOfAllocationsFound++;
if (highestVersion == -1) {
nodesWithHighestVersion.add(node);
highestVersion = version;
} else {
if (version > highestVersion) {
nodesWithHighestVersion.clear();
nodesWithHighestVersion.add(node);
highestVersion = version;
} else if (version == highestVersion) {
nodesWithHighestVersion.add(node);
}
}
}
}
// check if the counts meets the minimum set
int requiredAllocation = 1;
try {
IndexMetaData indexMetaData = routingNodes.metaData().index(shard.index());
String initialShards = indexMetaData.settings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
if ("quorum".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 1) {
requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2) + 1;
}
} else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 2) {
requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2);
}
} else if ("one".equals(initialShards)) {
requiredAllocation = 1;
} else if ("full".equals(initialShards) || "all".equals(initialShards)) {
requiredAllocation = indexMetaData.numberOfReplicas() + 1;
} else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 1) {
requiredAllocation = indexMetaData.numberOfReplicas();
}
} else {
requiredAllocation = Integer.parseInt(initialShards);
}
} catch (Exception e) {
logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
}
// not enough found for this shard, continue...
if (numberOfAllocationsFound < requiredAllocation) {
// we can't really allocate, so ignore it and continue
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}], required_number [{}]", shard.index(), shard.id(), numberOfAllocationsFound, requiredAllocation);
}
continue;
}
Set<DiscoveryNode> throttledNodes = Sets.newHashSet();
Set<DiscoveryNode> noNodes = Sets.newHashSet();
for (DiscoveryNode discoNode : nodesWithHighestVersion) {
RoutingNode node = routingNodes.node(discoNode.id());
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.THROTTLE) {
throttledNodes.add(discoNode);
} else if (decision.type() == Decision.Type.NO) {
noNodes.add(discoNode);
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
}
// we found a match
changed = true;
// make sure we create one with the version from the recovered state
allocation.routingNodes().assign(new MutableShardRouting(shard, highestVersion), node.nodeId());
unassignedIterator.remove();
// found a node, so no throttling, no "no", and break out of the loop
throttledNodes.clear();
noNodes.clear();
break;
}
}
if (throttledNodes.isEmpty()) {
// if we have a node that we "can't" allocate to, force allocation, since this is our master data!
if (!noNodes.isEmpty()) {
DiscoveryNode discoNode = noNodes.iterator().next();
RoutingNode node = routingNodes.node(discoNode.id());
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: forcing allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
}
// we found a match
changed = true;
// make sure we create one with the version from the recovered state
allocation.routingNodes().assign(new MutableShardRouting(shard, highestVersion), node.nodeId());
unassignedIterator.remove();
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, throttledNodes);
}
// we are throttling this, but we have enough to allocate to this node, ignore it for now
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
}
}
if (!routingNodes.hasUnassigned()) {
return changed;
}
// Now, handle replicas, try to assign them to nodes that are similar to the one the primary was allocated on
unassignedIterator = routingNodes.unassigned().iterator();
while (unassignedIterator.hasNext()) {
MutableShardRouting shard = unassignedIterator.next();
// pre-check if it can be allocated to any node that currently exists, so we won't list the store for it for nothing
boolean canBeAllocatedToAtLeastOneNode = false;
for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
RoutingNode node = routingNodes.node(cursor.value.id());
if (node == null) {
continue;
}
// if we can't allocate it on a node, ignore it, for example, this handles
// cases for only allocating a replica after a primary
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.YES) {
canBeAllocatedToAtLeastOneNode = true;
break;
}
}
if (!canBeAllocatedToAtLeastOneNode) {
continue;
}
Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = buildShardStores(nodes, shard);
long lastSizeMatched = 0;
DiscoveryNode lastDiscoNodeMatched = null;
RoutingNode lastNodeMatched = null;
for (Map.Entry<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> nodeStoreEntry : shardStores.entrySet()) {
DiscoveryNode discoNode = nodeStoreEntry.getKey();
TransportNodesListShardStoreMetaData.StoreFilesMetaData storeFilesMetaData = nodeStoreEntry.getValue();
logger.trace("{}: checking node [{}]", shard, discoNode);
if (storeFilesMetaData == null) {
// already allocated on that node...
continue;
}
RoutingNode node = routingNodes.node(discoNode.id());
if (node == null) {
continue;
}
// check if we can allocate on that node...
// we only check for NO, since if this node is THROTTLING and it has enough "same data"
// then we will try and assign it next time
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.NO) {
continue;
}
// if it is already allocated, we can't assign to it...
if (storeFilesMetaData.allocated()) {
continue;
}
if (!shard.primary()) {
MutableShardRouting primaryShard = routingNodes.activePrimary(shard);
if (primaryShard != null) {
assert primaryShard.active();
DiscoveryNode primaryNode = nodes.get(primaryShard.currentNodeId());
if (primaryNode != null) {
TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryNodeStore = shardStores.get(primaryNode);
if (primaryNodeStore != null && primaryNodeStore.allocated()) {
long sizeMatched = 0;
for (StoreFileMetaData storeFileMetaData : storeFilesMetaData) {
if (primaryNodeStore.fileExists(storeFileMetaData.name()) && primaryNodeStore.file(storeFileMetaData.name()).isSame(storeFileMetaData)) {
sizeMatched += storeFileMetaData.length();
}
}
if (sizeMatched > lastSizeMatched) {
lastSizeMatched = sizeMatched;
lastDiscoNodeMatched = discoNode;
lastNodeMatched = node;
}
}
}
}
}
}
if (lastNodeMatched != null) {
                // we only check on THROTTLE since we already checked on NO above
Decision decision = allocation.deciders().canAllocate(shard, lastNodeMatched, allocation);
if (decision.type() == Decision.Type.THROTTLE) {
                if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: throttling allocation [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
}
// we are throttling this, but we have enough to allocate to this node, ignore it for now
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: allocating [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
}
// we found a match
changed = true;
allocation.routingNodes().assign(shard, lastNodeMatched.nodeId());
unassignedIterator.remove();
}
}
}
return changed;
}
private ObjectLongOpenHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard) {
ObjectLongOpenHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
ObjectOpenHashSet<String> nodeIds;
if (shardStates == null) {
shardStates = new ObjectLongOpenHashMap<DiscoveryNode>();
cachedShardsState.put(shard.shardId(), shardStates);
nodeIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
} else {
// clean nodes that have failed
shardStates.keys().removeAll(new ObjectPredicate<DiscoveryNode>() {
@Override
public boolean apply(DiscoveryNode node) {
return !nodes.nodeExists(node.id());
}
});
nodeIds = ObjectOpenHashSet.newInstance();
// we have stored cached from before, see if the nodes changed, if they have, go fetch again
for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
DiscoveryNode node = cursor.value;
if (!shardStates.containsKey(node)) {
nodeIds.add(node.id());
}
}
}
if (nodeIds.isEmpty()) {
return shardStates;
}
String[] nodesIdsArray = nodeIds.toArray(String.class);
TransportNodesListGatewayStartedShards.NodesLocalGatewayStartedShards response = listGatewayStartedShards.list(shard.shardId(), nodesIdsArray, listTimeout).actionGet();
if (logger.isDebugEnabled()) {
if (response.failures().length > 0) {
StringBuilder sb = new StringBuilder(shard + ": failures when trying to list shards on nodes:");
for (int i = 0; i < response.failures().length; i++) {
Throwable cause = ExceptionsHelper.unwrapCause(response.failures()[i]);
if (cause instanceof ConnectTransportException) {
continue;
}
sb.append("\n -> ").append(response.failures()[i].getDetailedMessage());
}
logger.debug(sb.toString());
}
}
for (TransportNodesListGatewayStartedShards.NodeLocalGatewayStartedShards nodeShardState : response) {
            // -1 version means it does not exist, which is what the API returns and what we expect
shardStates.put(nodeShardState.getNode(), nodeShardState.version());
}
return shardStates;
}
private Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> buildShardStores(DiscoveryNodes nodes, MutableShardRouting shard) {
Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = cachedStores.get(shard.shardId());
ObjectOpenHashSet<String> nodesIds;
if (shardStores == null) {
shardStores = Maps.newHashMap();
cachedStores.put(shard.shardId(), shardStores);
nodesIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
} else {
nodesIds = ObjectOpenHashSet.newInstance();
// clean nodes that have failed
for (Iterator<DiscoveryNode> it = shardStores.keySet().iterator(); it.hasNext(); ) {
DiscoveryNode node = it.next();
if (!nodes.nodeExists(node.id())) {
it.remove();
}
}
for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
DiscoveryNode node = cursor.value;
if (!shardStores.containsKey(node)) {
nodesIds.add(node.id());
}
}
}
if (!nodesIds.isEmpty()) {
String[] nodesIdsArray = nodesIds.toArray(String.class);
TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData nodesStoreFilesMetaData = listShardStoreMetaData.list(shard.shardId(), false, nodesIdsArray, listTimeout).actionGet();
if (logger.isTraceEnabled()) {
if (nodesStoreFilesMetaData.failures().length > 0) {
StringBuilder sb = new StringBuilder(shard + ": failures when trying to list stores on nodes:");
for (int i = 0; i < nodesStoreFilesMetaData.failures().length; i++) {
Throwable cause = ExceptionsHelper.unwrapCause(nodesStoreFilesMetaData.failures()[i]);
if (cause instanceof ConnectTransportException) {
continue;
}
sb.append("\n -> ").append(nodesStoreFilesMetaData.failures()[i].getDetailedMessage());
}
logger.trace(sb.toString());
}
}
for (TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData nodeStoreFilesMetaData : nodesStoreFilesMetaData) {
if (nodeStoreFilesMetaData.storeFilesMetaData() != null) {
shardStores.put(nodeStoreFilesMetaData.getNode(), nodeStoreFilesMetaData.storeFilesMetaData());
}
}
}
return shardStores;
}
}
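// Hedged worked example (not from the original file): reproduces the "quorum" branch of the
// initial_shards arithmetic above for a few hypothetical replica counts, so the required
// allocation counts are easy to verify by hand.
class InitialShardsQuorumSketch {
    // mirrors the "quorum" branch: it only raises the requirement when there is more than one replica
    static int requiredAllocation(int numberOfReplicas) {
        return numberOfReplicas > 1 ? ((1 + numberOfReplicas) / 2) + 1 : 1;
    }
    public static void main(String[] args) {
        System.out.println(requiredAllocation(1)); // 1 (a single found copy is enough)
        System.out.println(requiredAllocation(2)); // 2 = ((1 + 2) / 2) + 1
        System.out.println(requiredAllocation(4)); // 3 = ((1 + 4) / 2) + 1
    }
}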
| 0true
|
src_main_java_org_elasticsearch_gateway_local_LocalGatewayAllocator.java
|
1,091 |
public final class ODefaultSQLFunctionFactory implements OSQLFunctionFactory {
private static final Map<String, Object> FUNCTIONS = new HashMap<String, Object>();
static {
// MISC FUNCTIONS
FUNCTIONS.put(OSQLFunctionCoalesce.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionCoalesce());
FUNCTIONS.put(OSQLFunctionIf.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionIf());
FUNCTIONS.put(OSQLFunctionIfNull.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionIfNull());
FUNCTIONS.put(OSQLFunctionFormat.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionFormat());
FUNCTIONS.put(OSQLFunctionDate.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionDate.class);
FUNCTIONS.put(OSQLFunctionSysdate.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionSysdate.class);
FUNCTIONS.put(OSQLFunctionCount.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionCount.class);
FUNCTIONS.put(OSQLFunctionDocument.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionDocument.class);
FUNCTIONS.put(OSQLFunctionDistinct.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionDistinct.class);
FUNCTIONS.put(OSQLFunctionUnion.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionUnion.class);
FUNCTIONS.put(OSQLFunctionIntersect.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionIntersect.class);
FUNCTIONS.put(OSQLFunctionDifference.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionDifference.class);
FUNCTIONS.put(OSQLFunctionFirst.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionFirst.class);
FUNCTIONS.put(OSQLFunctionLast.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionLast.class);
FUNCTIONS.put(OSQLFunctionList.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionList.class);
FUNCTIONS.put(OSQLFunctionSet.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionSet.class);
FUNCTIONS.put(OSQLFunctionMap.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionMap.class);
FUNCTIONS.put(OSQLFunctionEncode.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionEncode());
FUNCTIONS.put(OSQLFunctionDecode.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionDecode());
// MATH FUNCTIONS
FUNCTIONS.put(OSQLFunctionMin.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionMin.class);
FUNCTIONS.put(OSQLFunctionMax.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionMax.class);
FUNCTIONS.put(OSQLFunctionSum.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionSum.class);
FUNCTIONS.put(OSQLFunctionAverage.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionAverage.class);
FUNCTIONS.put(OSQLFunctionEval.NAME.toUpperCase(Locale.ENGLISH), OSQLFunctionEval.class);
// GEO FUNCTIONS
FUNCTIONS.put(OSQLFunctionDistance.NAME.toUpperCase(Locale.ENGLISH), new OSQLFunctionDistance());
}
@Override
public Set<String> getFunctionNames() {
return FUNCTIONS.keySet();
}
@Override
public boolean hasFunction(final String name) {
return FUNCTIONS.containsKey(name);
}
@Override
public OSQLFunction createFunction(final String name) {
final Object obj = FUNCTIONS.get(name);
if (obj == null)
throw new OCommandExecutionException("Unknowned function name :" + name);
if (obj instanceof OSQLFunction)
return (OSQLFunction) obj;
else {
// it's a class
final Class<?> clazz = (Class<?>) obj;
try {
return (OSQLFunction) clazz.newInstance();
} catch (Exception e) {
throw new OCommandExecutionException("Error in creation of function " + name
+ "(). Probably there is not an empty constructor or the constructor generates errors", e);
}
}
}
}
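// Hedged usage sketch (not from the original file): the registry above keys functions by their
// upper-case names, so a lookup is expected to pass the name already upper-cased.
class SqlFunctionFactoryUsageSketch {
    static OSQLFunction firstFunction() {
        ODefaultSQLFunctionFactory factory = new ODefaultSQLFunctionFactory();
        String key = OSQLFunctionFirst.NAME.toUpperCase(java.util.Locale.ENGLISH); // "FIRST"
        return factory.hasFunction(key) ? factory.createFunction(key) : null;
    }
}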
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_functions_ODefaultSQLFunctionFactory.java
|
305 |
class StreamUtils {
private StreamUtils() { }
/**
* Given an IStreamContentAccessor, determines the appropriate encoding to use
* and reads the stream's contents as a String. Specifically, if the IStreamContentAccessor
* is also an IEncodedStreamContentAccessor, that encoding is used. Otherwise, the
* platform default encoding is used.
*/
public static String readStreamContents(IStreamContentAccessor sca) throws CoreException {
InputStream is= sca.getContents();
if (is != null) {
String encoding= null;
if (sca instanceof IEncodedStreamContentAccessor) {
try {
encoding= ((IEncodedStreamContentAccessor) sca).getCharset();
				} catch (Exception e) {
					// ignore and fall back to the platform default encoding below
				}
}
if (encoding == null)
encoding= ResourcesPlugin.getEncoding();
return readStreamContents(is, encoding);
}
return null;
}
/**
* Reads the contents of the given reader into a string using the encoding
* associated with the reader. Returns null if an error occurred.
*/
public static String readReaderContents(Reader r) {
BufferedReader reader= null;
try {
StringBuffer buffer= new StringBuffer();
char[] part= new char[2048];
int read= 0;
reader= new BufferedReader(r);
while ((read= reader.read(part)) != -1)
buffer.append(part, 0, read);
return buffer.toString();
} catch (IOException ex) {
System.err.println("I/O Exception: " + ex.getMessage());
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException ex) {
// silently ignored
}
}
}
return null;
}
/**
* Reads the contents of the given input stream into a string using the given encoding.
* Returns null if an error occurred.
*/
public static String readStreamContents(InputStream is, String encoding) {
try {
return readReaderContents(new InputStreamReader(is, encoding));
} catch (UnsupportedEncodingException e) {
return null;
}
}
public static String readStreamContents(InputStream is) {
return readStreamContents(is, ResourcesPlugin.getEncoding());
}
}
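// Hedged usage sketch (not from the original file, and assumed to live in the same package as
// the package-private StreamUtils class): reads a small in-memory stream with an explicit
// encoding, the same path readStreamContents(InputStream, String) takes above.
class StreamUtilsUsageSketch {
    static String readUtf8(byte[] bytes) {
        return StreamUtils.readStreamContents(new java.io.ByteArrayInputStream(bytes), "UTF-8");
    }
}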
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_editor_StreamUtils.java
|
781 |
public class TestOfferTimeZoneProcessorImpl implements OfferTimeZoneProcessor {
private static final Log LOG = LogFactory.getLog(OfferTimeZoneProcessorImpl.class);
public TimeZone getTimeZone(Offer offer) {
return TimeZone.getTimeZone("CST");
}
}
| 0true
|
integration_src_test_java_org_broadleafcommerce_core_offer_TestOfferTimeZoneProcessorImpl.java
|
5,127 |
public class AggregatorFactories {
public static final AggregatorFactories EMPTY = new Empty();
private final AggregatorFactory[] factories;
public static Builder builder() {
return new Builder();
}
private AggregatorFactories(AggregatorFactory[] factories) {
this.factories = factories;
}
private static Aggregator createAndRegisterContextAware(AggregationContext context, AggregatorFactory factory, Aggregator parent, long estimatedBucketsCount) {
final Aggregator aggregator = factory.create(context, parent, estimatedBucketsCount);
if (aggregator.shouldCollect()) {
context.registerReaderContextAware(aggregator);
}
return aggregator;
}
/**
* Create all aggregators so that they can be consumed with multiple buckets.
*/
public Aggregator[] createSubAggregators(Aggregator parent, final long estimatedBucketsCount) {
Aggregator[] aggregators = new Aggregator[count()];
for (int i = 0; i < factories.length; ++i) {
final AggregatorFactory factory = factories[i];
final Aggregator first = createAndRegisterContextAware(parent.context(), factory, parent, estimatedBucketsCount);
if (first.bucketAggregationMode() == BucketAggregationMode.MULTI_BUCKETS) {
// This aggregator already supports multiple bucket ordinals, can be used directly
aggregators[i] = first;
continue;
}
// the aggregator doesn't support multiple ordinals, let's wrap it so that it does.
aggregators[i] = new Aggregator(first.name(), BucketAggregationMode.MULTI_BUCKETS, AggregatorFactories.EMPTY, 1, first.context(), first.parent()) {
ObjectArray<Aggregator> aggregators;
{
aggregators = BigArrays.newObjectArray(estimatedBucketsCount, context.pageCacheRecycler());
aggregators.set(0, first);
for (long i = 1; i < estimatedBucketsCount; ++i) {
aggregators.set(i, createAndRegisterContextAware(parent.context(), factory, parent, estimatedBucketsCount));
}
}
@Override
public boolean shouldCollect() {
return first.shouldCollect();
}
@Override
protected void doPostCollection() {
for (long i = 0; i < aggregators.size(); ++i) {
final Aggregator aggregator = aggregators.get(i);
if (aggregator != null) {
aggregator.postCollection();
}
}
}
@Override
public void collect(int doc, long owningBucketOrdinal) throws IOException {
aggregators = BigArrays.grow(aggregators, owningBucketOrdinal + 1);
Aggregator aggregator = aggregators.get(owningBucketOrdinal);
if (aggregator == null) {
aggregator = createAndRegisterContextAware(parent.context(), factory, parent, estimatedBucketsCount);
aggregators.set(owningBucketOrdinal, aggregator);
}
aggregator.collect(doc, 0);
}
@Override
public void setNextReader(AtomicReaderContext reader) {
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
return aggregators.get(owningBucketOrdinal).buildAggregation(0);
}
@Override
public InternalAggregation buildEmptyAggregation() {
return first.buildEmptyAggregation();
}
@Override
public void doRelease() {
Releasables.release(aggregators);
}
};
}
return aggregators;
}
public Aggregator[] createTopLevelAggregators(AggregationContext ctx) {
// These aggregators are going to be used with a single bucket ordinal, no need to wrap the PER_BUCKET ones
Aggregator[] aggregators = new Aggregator[factories.length];
for (int i = 0; i < factories.length; i++) {
aggregators[i] = createAndRegisterContextAware(ctx, factories[i], null, 0);
}
return aggregators;
}
public int count() {
return factories.length;
}
void setParent(AggregatorFactory parent) {
for (AggregatorFactory factory : factories) {
factory.parent = parent;
}
}
public void validate() {
for (AggregatorFactory factory : factories) {
factory.validate();
}
}
private final static class Empty extends AggregatorFactories {
private static final AggregatorFactory[] EMPTY_FACTORIES = new AggregatorFactory[0];
private static final Aggregator[] EMPTY_AGGREGATORS = new Aggregator[0];
private Empty() {
super(EMPTY_FACTORIES);
}
@Override
public Aggregator[] createSubAggregators(Aggregator parent, long estimatedBucketsCount) {
return EMPTY_AGGREGATORS;
}
@Override
public Aggregator[] createTopLevelAggregators(AggregationContext ctx) {
return EMPTY_AGGREGATORS;
}
}
public static class Builder {
private List<AggregatorFactory> factories = new ArrayList<AggregatorFactory>();
public Builder add(AggregatorFactory factory) {
factories.add(factory);
return this;
}
public AggregatorFactories build() {
if (factories.isEmpty()) {
return EMPTY;
}
return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]));
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_AggregatorFactories.java
|
2,199 |
public class OrFilter extends Filter {
private final List<? extends Filter> filters;
public OrFilter(List<? extends Filter> filters) {
this.filters = filters;
}
public List<? extends Filter> filters() {
return filters;
}
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (filters.size() == 1) {
return filters.get(0).getDocIdSet(context, acceptDocs);
}
List<DocIdSet> sets = new ArrayList<DocIdSet>(filters.size());
for (int i = 0; i < filters.size(); i++) {
DocIdSet set = filters.get(i).getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(set)) { // none matching for this filter, continue
continue;
}
sets.add(set);
}
if (sets.size() == 0) {
return null;
}
if (sets.size() == 1) {
return sets.get(0);
}
return new OrDocIdSet(sets.toArray(new DocIdSet[sets.size()]));
}
@Override
public int hashCode() {
int hash = 7;
hash = 31 * hash + (null == filters ? 0 : filters.hashCode());
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if ((obj == null) || (obj.getClass() != this.getClass()))
return false;
OrFilter other = (OrFilter) obj;
return equalFilters(filters, other.filters);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
for (Filter filter : filters) {
if (builder.length() > 0) {
builder.append(' ');
}
builder.append(filter);
}
return builder.toString();
}
private boolean equalFilters(List<? extends Filter> filters1, List<? extends Filter> filters2) {
return (filters1 == filters2) || ((filters1 != null) && filters1.equals(filters2));
}
}
| 0true
|
src_main_java_org_elasticsearch_common_lucene_search_OrFilter.java
|
3,696 |
public abstract class AbstractExecutorThreadFactory implements ThreadFactory {
protected final ClassLoader classLoader;
protected final ThreadGroup threadGroup;
public AbstractExecutorThreadFactory(ThreadGroup threadGroup, ClassLoader classLoader) {
this.threadGroup = threadGroup;
this.classLoader = classLoader;
}
@Override
public final Thread newThread(Runnable r) {
final Thread t = createThread(r);
ClassLoader cl = classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader();
t.setContextClassLoader(cl);
if (t.isDaemon()) {
t.setDaemon(false);
}
if (t.getPriority() != Thread.NORM_PRIORITY) {
t.setPriority(Thread.NORM_PRIORITY);
}
return t;
}
protected abstract Thread createThread(Runnable r);
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_util_executor_AbstractExecutorThreadFactory.java
|
4,943 |
public class RestSearchAction extends BaseRestHandler {
@Inject
public RestSearchAction(Settings settings, Client client, RestController controller) {
super(settings, client);
controller.registerHandler(GET, "/_search", this);
controller.registerHandler(POST, "/_search", this);
controller.registerHandler(GET, "/{index}/_search", this);
controller.registerHandler(POST, "/{index}/_search", this);
controller.registerHandler(GET, "/{index}/{type}/_search", this);
controller.registerHandler(POST, "/{index}/{type}/_search", this);
}
@Override
public void handleRequest(final RestRequest request, final RestChannel channel) {
SearchRequest searchRequest;
try {
searchRequest = RestSearchAction.parseSearchRequest(request);
searchRequest.listenerThreaded(false);
SearchOperationThreading operationThreading = SearchOperationThreading.fromString(request.param("operation_threading"), null);
if (operationThreading != null) {
if (operationThreading == SearchOperationThreading.NO_THREADS) {
// since we don't spawn, don't allow no_threads, but change it to a single thread
operationThreading = SearchOperationThreading.SINGLE_THREAD;
}
searchRequest.operationThreading(operationThreading);
}
} catch (Exception e) {
if (logger.isDebugEnabled()) {
logger.debug("failed to parse search request parameters", e);
}
try {
XContentBuilder builder = restContentBuilder(request);
channel.sendResponse(new XContentRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
}
return;
}
client.search(searchRequest, new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse response) {
try {
XContentBuilder builder = restContentBuilder(request);
builder.startObject();
response.toXContent(builder, request);
builder.endObject();
channel.sendResponse(new XContentRestResponse(request, response.status(), builder));
} catch (Exception e) {
if (logger.isDebugEnabled()) {
logger.debug("failed to execute search (building response)", e);
}
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(new XContentThrowableRestResponse(request, e));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
}
}
});
}
public static SearchRequest parseSearchRequest(RestRequest request) {
String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
SearchRequest searchRequest = new SearchRequest(indices);
// get the content, and put it in the body
if (request.hasContent()) {
searchRequest.source(request.content(), request.contentUnsafe());
} else {
String source = request.param("source");
if (source != null) {
searchRequest.source(source);
}
}
// add extra source based on the request parameters
searchRequest.extraSource(parseSearchSource(request));
searchRequest.searchType(request.param("search_type"));
String scroll = request.param("scroll");
if (scroll != null) {
searchRequest.scroll(new Scroll(parseTimeValue(scroll, null)));
}
searchRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
searchRequest.routing(request.param("routing"));
searchRequest.preference(request.param("preference"));
searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions()));
return searchRequest;
}
public static SearchSourceBuilder parseSearchSource(RestRequest request) {
SearchSourceBuilder searchSourceBuilder = null;
String queryString = request.param("q");
if (queryString != null) {
QueryStringQueryBuilder queryBuilder = QueryBuilders.queryString(queryString);
queryBuilder.defaultField(request.param("df"));
queryBuilder.analyzer(request.param("analyzer"));
queryBuilder.analyzeWildcard(request.paramAsBoolean("analyze_wildcard", false));
queryBuilder.lowercaseExpandedTerms(request.paramAsBoolean("lowercase_expanded_terms", true));
queryBuilder.lenient(request.paramAsBoolean("lenient", null));
String defaultOperator = request.param("default_operator");
if (defaultOperator != null) {
if ("OR".equals(defaultOperator)) {
queryBuilder.defaultOperator(QueryStringQueryBuilder.Operator.OR);
} else if ("AND".equals(defaultOperator)) {
queryBuilder.defaultOperator(QueryStringQueryBuilder.Operator.AND);
} else {
throw new ElasticsearchIllegalArgumentException("Unsupported defaultOperator [" + defaultOperator + "], can either be [OR] or [AND]");
}
}
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.query(queryBuilder);
}
int from = request.paramAsInt("from", -1);
if (from != -1) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.from(from);
}
int size = request.paramAsInt("size", -1);
if (size != -1) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.size(size);
}
if (request.hasParam("explain")) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.explain(request.paramAsBoolean("explain", null));
}
if (request.hasParam("version")) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.version(request.paramAsBoolean("version", null));
}
if (request.hasParam("timeout")) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.timeout(request.paramAsTime("timeout", null));
}
String sField = request.param("fields");
if (sField != null) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
if (!Strings.hasText(sField)) {
searchSourceBuilder.noFields();
} else {
String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) {
for (String field : sFields) {
searchSourceBuilder.field(field);
}
}
}
}
FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
if (fetchSourceContext != null) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.fetchSource(fetchSourceContext);
}
if (request.hasParam("track_scores")) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.trackScores(request.paramAsBoolean("track_scores", false));
}
String sSorts = request.param("sort");
if (sSorts != null) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
String[] sorts = Strings.splitStringByCommaToArray(sSorts);
for (String sort : sorts) {
int delimiter = sort.lastIndexOf(":");
if (delimiter != -1) {
String sortField = sort.substring(0, delimiter);
String reverse = sort.substring(delimiter + 1);
if ("asc".equals(reverse)) {
searchSourceBuilder.sort(sortField, SortOrder.ASC);
} else if ("desc".equals(reverse)) {
searchSourceBuilder.sort(sortField, SortOrder.DESC);
}
} else {
searchSourceBuilder.sort(sort);
}
}
}
String sIndicesBoost = request.param("indices_boost");
if (sIndicesBoost != null) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
String[] indicesBoost = Strings.splitStringByCommaToArray(sIndicesBoost);
for (String indexBoost : indicesBoost) {
int divisor = indexBoost.indexOf(',');
if (divisor == -1) {
throw new ElasticsearchIllegalArgumentException("Illegal index boost [" + indexBoost + "], no ','");
}
String indexName = indexBoost.substring(0, divisor);
String sBoost = indexBoost.substring(divisor + 1);
try {
searchSourceBuilder.indexBoost(indexName, Float.parseFloat(sBoost));
} catch (NumberFormatException e) {
throw new ElasticsearchIllegalArgumentException("Illegal index boost [" + indexBoost + "], boost not a float number");
}
}
}
String sStats = request.param("stats");
if (sStats != null) {
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
searchSourceBuilder.stats(Strings.splitStringByCommaToArray(sStats));
}
String suggestField = request.param("suggest_field");
if (suggestField != null) {
String suggestText = request.param("suggest_text", queryString);
int suggestSize = request.paramAsInt("suggest_size", 5);
if (searchSourceBuilder == null) {
searchSourceBuilder = new SearchSourceBuilder();
}
String suggestMode = request.param("suggest_mode");
searchSourceBuilder.suggest().addSuggestion(
termSuggestion(suggestField).field(suggestField).text(suggestText).size(suggestSize)
.suggestMode(suggestMode)
);
}
return searchSourceBuilder;
}
}
| 1no label
|
src_main_java_org_elasticsearch_rest_action_search_RestSearchAction.java
|
1,400 |
public abstract class AbstractAccessDelegate<T extends HazelcastRegion> implements AccessDelegate<T> {
protected final ILogger LOG;
protected final T hazelcastRegion;
protected final RegionCache cache;
protected final Comparator<Object> versionComparator;
protected AbstractAccessDelegate(final T hazelcastRegion, final Properties props) {
super();
this.hazelcastRegion = hazelcastRegion;
LOG = hazelcastRegion.getLogger();
if (hazelcastRegion instanceof AbstractTransactionalDataRegion) {
this.versionComparator = ((AbstractTransactionalDataRegion) hazelcastRegion)
.getCacheDataDescription().getVersionComparator();
} else {
this.versionComparator = null;
}
cache = hazelcastRegion.getCache();
}
public final T getHazelcastRegion() {
return hazelcastRegion;
}
protected boolean put(final Object key, final Object value, final Object currentVersion) {
try {
return cache.put(key, value, currentVersion);
} catch (HazelcastException e) {
if(LOG.isFinestEnabled()){
LOG.finest( "Could not put into Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
}
return false;
}
}
protected boolean update(final Object key, final Object value,
final Object currentVersion, final Object previousVersion, final SoftLock lock) {
try {
return cache.update(key, value, currentVersion, previousVersion, lock);
} catch (HazelcastException e) {
if(LOG.isFinestEnabled()){
LOG.finest( "Could not update Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
}
return false;
}
}
public Object get(final Object key, final long txTimestamp) throws CacheException {
try {
return cache.get(key);
} catch (HazelcastException e) {
if(LOG.isFinestEnabled()){
LOG.finest( "Could not read from Cache[" + hazelcastRegion.getName() + "]: " + e.getMessage());
}
return null;
}
}
public boolean putFromLoad(final Object key, final Object value, final long txTimestamp,
final Object version) throws CacheException {
return putFromLoad(key, value, txTimestamp, version, true);
}
public void remove(final Object key) throws CacheException {
try {
cache.remove(key);
} catch (HazelcastException e) {
throw new CacheException("Operation timeout during remove operation from cache!", e);
}
}
public void removeAll() throws CacheException {
cache.clear();
}
public void evict(final Object key) throws CacheException {
remove(key);
}
public void evictAll() throws CacheException {
cache.clear();
}
/**
* NO-OP
*/
public SoftLock lockRegion() throws CacheException {
return null;
}
/**
* NO-OP
*/
public void unlockRegion(final SoftLock lock) throws CacheException {
}
/**
* This is an asynchronous cache access strategy.
* NO-OP
*/
public boolean insert(final Object key, final Object value, final Object version) throws CacheException {
return false;
}
/**
* This is an asynchronous cache access strategy.
* NO-OP
*/
public boolean update(final Object key, final Object value, final Object currentVersion, final Object previousVersion)
throws CacheException {
return false;
}
}
| 0true
|
hazelcast-hibernate_hazelcast-hibernate4_src_main_java_com_hazelcast_hibernate_access_AbstractAccessDelegate.java
|
784 |
public class MetricInstrumentedSchemaCache implements SchemaCache {
public static final String METRICS_NAME = "schemacache";
public static final String METRICS_TYPENAME = "name";
public static final String METRICS_RELATIONS = "relations";
private final SchemaCache cache;
public MetricInstrumentedSchemaCache(final StoreRetrieval retriever) {
cache = new StandardSchemaCache(new StoreRetrieval() {
@Override
public Long retrieveSchemaByName(String typeName, StandardTitanTx tx) {
incAction(METRICS_TYPENAME,CacheMetricsAction.MISS,tx);
return retriever.retrieveSchemaByName(typeName, tx);
}
@Override
public EntryList retrieveSchemaRelations(long schemaId, BaseRelationType type, Direction dir, StandardTitanTx tx) {
incAction(METRICS_RELATIONS,CacheMetricsAction.MISS,tx);
return retriever.retrieveSchemaRelations(schemaId, type, dir, tx);
}
});
}
private void incAction(String type, CacheMetricsAction action, StandardTitanTx tx) {
if (tx.getConfiguration().getGroupName()!=null) {
MetricManager.INSTANCE.getCounter(tx.getConfiguration().getGroupName(), METRICS_NAME, type, action.getName()).inc();
}
}
@Override
public Long getSchemaId(String schemaName, StandardTitanTx tx) {
incAction(METRICS_TYPENAME,CacheMetricsAction.RETRIEVAL,tx);
return cache.getSchemaId(schemaName, tx);
}
@Override
public EntryList getSchemaRelations(long schemaId, BaseRelationType type, Direction dir, StandardTitanTx tx) {
incAction(METRICS_RELATIONS,CacheMetricsAction.RETRIEVAL,tx);
return cache.getSchemaRelations(schemaId, type, dir, tx);
}
@Override
public void expireSchemaElement(long schemaId) {
cache.expireSchemaElement(schemaId);
}
}
| 1no label
|
titan-core_src_main_java_com_thinkaurelius_titan_graphdb_database_cache_MetricInstrumentedSchemaCache.java
|
2,061 |
public interface UntargettedBinding<T> extends Binding<T> {
}
| 0true
|
src_main_java_org_elasticsearch_common_inject_spi_UntargettedBinding.java
|
82 |
public class OConsoleApplication {
protected enum RESULT {
OK, ERROR, EXIT
};
protected InputStream in = System.in; // System.in;
protected PrintStream out = System.out;
protected PrintStream err = System.err;
protected String wordSeparator = " ";
protected String[] helpCommands = { "help", "?" };
protected String[] exitCommands = { "exit", "bye", "quit" };
protected Map<String, String> properties = new HashMap<String, String>();
// protected OConsoleReader reader = new TTYConsoleReader();
protected OConsoleReader reader = new DefaultConsoleReader();
protected boolean interactiveMode;
protected String[] args;
protected static final String[] COMMENT_PREFIXS = new String[] { "#", "--", "//" };
public void setReader(OConsoleReader iReader) {
this.reader = iReader;
reader.setConsole(this);
}
public OConsoleApplication(String[] iArgs) {
this.args = iArgs;
}
public int run() {
interactiveMode = isInteractiveMode(args);
onBefore();
int result = 0;
if (interactiveMode) {
// EXECUTE IN INTERACTIVE MODE
// final BufferedReader reader = new BufferedReader(new InputStreamReader(in));
String consoleInput;
while (true) {
out.println();
out.print("orientdb> ");
consoleInput = reader.readLine();
if (consoleInput == null || consoleInput.length() == 0)
continue;
if (!executeCommands(new ODFACommandStream(consoleInput), false))
break;
}
} else {
// EXECUTE IN BATCH MODE
result = executeBatch(getCommandLine(args)) ? 0 : 1;
}
onAfter();
return result;
}
protected boolean isInteractiveMode(String[] args) {
return args.length == 0;
}
protected boolean executeBatch(final String commandLine) {
final File commandFile = new File(commandLine);
OCommandStream scanner;
try {
scanner = new ODFACommandStream(commandFile);
} catch (FileNotFoundException e) {
scanner = new ODFACommandStream(commandLine);
}
return executeCommands(scanner, true);
}
protected boolean executeCommands(final OCommandStream commandStream, final boolean iExitOnException) {
final StringBuilder commandBuffer = new StringBuilder();
try {
while (commandStream.hasNext()) {
String commandLine = commandStream.nextCommand();
if (commandLine.isEmpty())
// EMPTY LINE
continue;
if (isComment(commandLine))
continue;
// SCRIPT CASE: MANAGE ENSEMBLING ALL TOGETHER
if (isCollectingCommands(commandLine)) {
// BEGIN: START TO COLLECT
commandBuffer.append(commandLine);
commandLine = null;
} else if (commandLine.startsWith("end") && commandBuffer.length() > 0) {
// END: FLUSH IT
commandLine = commandBuffer.toString();
commandBuffer.setLength(0);
} else if (commandBuffer.length() > 0) {
// BUFFER IT
commandBuffer.append(';');
commandBuffer.append(commandLine);
commandLine = null;
}
if (commandLine != null) {
final RESULT status = execute(commandLine);
commandLine = null;
if (status == RESULT.EXIT || status == RESULT.ERROR && iExitOnException)
return false;
}
}
if (commandBuffer.length() > 0) {
final RESULT status = execute(commandBuffer.toString());
if (status == RESULT.EXIT || status == RESULT.ERROR && iExitOnException)
return false;
}
} finally {
commandStream.close();
}
return true;
}
protected boolean isComment(final String commandLine) {
for (String comment : COMMENT_PREFIXS)
if (commandLine.startsWith(comment))
return true;
return false;
}
protected boolean isCollectingCommands(final String iLine) {
return false;
}
protected RESULT execute(String iCommand) {
iCommand = iCommand.trim();
if (iCommand.length() == 0)
// NULL LINE: JUMP IT
return RESULT.OK;
if (isComment(iCommand))
// COMMENT: JUMP IT
return RESULT.OK;
String[] commandWords = OStringParser.getWords(iCommand, wordSeparator);
for (String cmd : helpCommands)
if (cmd.equals(commandWords[0])) {
help();
return RESULT.OK;
}
for (String cmd : exitCommands)
if (cmd.equals(commandWords[0])) {
return RESULT.EXIT;
}
Method lastMethodInvoked = null;
final StringBuilder lastCommandInvoked = new StringBuilder();
final String commandLowerCase = iCommand.toLowerCase();
for (Entry<Method, Object> entry : getConsoleMethods().entrySet()) {
final Method m = entry.getKey();
final String methodName = m.getName();
final ConsoleCommand ann = m.getAnnotation(ConsoleCommand.class);
final StringBuilder commandName = new StringBuilder();
char ch;
int commandWordCount = 1;
for (int i = 0; i < methodName.length(); ++i) {
ch = methodName.charAt(i);
if (Character.isUpperCase(ch)) {
commandName.append(" ");
ch = Character.toLowerCase(ch);
commandWordCount++;
}
commandName.append(ch);
}
if (!commandLowerCase.equals(commandName.toString()) && !commandLowerCase.startsWith(commandName.toString() + " ")) {
if (ann == null)
continue;
String[] aliases = ann.aliases();
if (aliases == null || aliases.length == 0)
continue;
boolean aliasMatch = false;
for (String alias : aliases) {
if (iCommand.startsWith(alias.split(" ")[0])) {
aliasMatch = true;
commandWordCount = 1;
break;
}
}
if (!aliasMatch)
continue;
}
Object[] methodArgs;
// BUILD PARAMETERS
if (ann != null && !ann.splitInWords()) {
methodArgs = new String[] { iCommand.substring(iCommand.indexOf(' ') + 1) };
} else {
if (m.getParameterTypes().length > commandWords.length - commandWordCount) {
// METHOD PARAMS AND USED PARAMS MISMATCH: CHECK FOR OPTIONALS
for (int paramNum = m.getParameterAnnotations().length - 1; paramNum > -1; paramNum--) {
final Annotation[] paramAnn = m.getParameterAnnotations()[paramNum];
if (paramAnn != null)
for (int annNum = paramAnn.length - 1; annNum > -1; annNum--) {
if (paramAnn[annNum] instanceof ConsoleParameter) {
final ConsoleParameter annotation = (ConsoleParameter) paramAnn[annNum];
if (annotation.optional())
commandWords = OArrays.copyOf(commandWords, commandWords.length + 1);
break;
}
}
}
}
methodArgs = OArrays.copyOfRange(commandWords, commandWordCount, commandWords.length);
}
try {
m.invoke(entry.getValue(), methodArgs);
} catch (IllegalArgumentException e) {
lastMethodInvoked = m;
// GET THE COMMAND NAME
lastCommandInvoked.setLength(0);
for (int i = 0; i < commandWordCount; ++i) {
if (lastCommandInvoked.length() > 0)
lastCommandInvoked.append(" ");
lastCommandInvoked.append(commandWords[i]);
}
continue;
} catch (Exception e) {
// e.printStackTrace();
// err.println();
if (e.getCause() != null)
onException(e.getCause());
else
e.printStackTrace();
return RESULT.ERROR;
}
return RESULT.OK;
}
if (lastMethodInvoked != null)
syntaxError(lastCommandInvoked.toString(), lastMethodInvoked);
error("\n!Unrecognized command: '%s'", iCommand);
return RESULT.ERROR;
}
protected void syntaxError(String iCommand, Method m) {
error(
"\n!Wrong syntax. If you're using a file make sure all commands are delimited by semicolon (;) or a linefeed (\\n)\n\r\n\r Expected: %s ",
iCommand);
String paramName = null;
String paramDescription = null;
boolean paramOptional = false;
StringBuilder buffer = new StringBuilder("\n\nWhere:\n\n");
for (Annotation[] annotations : m.getParameterAnnotations()) {
for (Annotation ann : annotations) {
if (ann instanceof com.orientechnologies.common.console.annotation.ConsoleParameter) {
paramName = ((com.orientechnologies.common.console.annotation.ConsoleParameter) ann).name();
paramDescription = ((com.orientechnologies.common.console.annotation.ConsoleParameter) ann).description();
paramOptional = ((com.orientechnologies.common.console.annotation.ConsoleParameter) ann).optional();
break;
}
}
if (paramName == null)
paramName = "?";
if (paramOptional)
message("[<%s>] ", paramName);
else
message("<%s> ", paramName);
buffer.append("* ");
buffer.append(String.format("%-15s", paramName));
if (paramDescription != null)
buffer.append(String.format("%-15s", paramDescription));
buffer.append("\n");
}
message(buffer.toString());
}
/**
* Returns a map of all console method and the object they can be called on.
*
* @return Map<Method,Object>
*/
protected Map<Method, Object> getConsoleMethods() {
// search for declared command collections
final Iterator<OConsoleCommandCollection> ite = ServiceRegistry.lookupProviders(OConsoleCommandCollection.class);
final Collection<Object> candidates = new ArrayList<Object>();
candidates.add(this);
while (ite.hasNext()) {
try {
// make a copy and set its context
final OConsoleCommandCollection cc = ite.next().getClass().newInstance();
cc.setContext(this);
candidates.add(cc);
} catch (InstantiationException ex) {
Logger.getLogger(OConsoleApplication.class.getName()).log(Level.WARNING, ex.getMessage());
} catch (IllegalAccessException ex) {
Logger.getLogger(OConsoleApplication.class.getName()).log(Level.WARNING, ex.getMessage());
}
}
final Map<Method, Object> consoleMethods = new TreeMap<Method, Object>(new Comparator<Method>() {
public int compare(Method o1, Method o2) {
int res = o1.getName().compareTo(o2.getName());
if (res == 0)
res = o1.toString().compareTo(o2.toString());
return res;
}
});
for (final Object candidate : candidates) {
final Method[] methods = candidate.getClass().getMethods();
for (Method m : methods) {
if (Modifier.isAbstract(m.getModifiers()) || Modifier.isStatic(m.getModifiers()) || !Modifier.isPublic(m.getModifiers())) {
continue;
}
if (m.getReturnType() != Void.TYPE) {
continue;
}
consoleMethods.put(m, candidate);
}
}
return consoleMethods;
}
protected Map<String, Object> addCommand(Map<String, Object> commandsTree, String commandLine) {
return commandsTree;
}
protected void help() {
message("\nAVAILABLE COMMANDS:\n");
for (Method m : getConsoleMethods().keySet()) {
com.orientechnologies.common.console.annotation.ConsoleCommand annotation = m
.getAnnotation(com.orientechnologies.common.console.annotation.ConsoleCommand.class);
if (annotation == null)
continue;
message("* %-70s%s\n", getCorrectMethodName(m), annotation.description());
}
message("* %-70s%s\n", getClearName("help"), "Print this help");
message("* %-70s%s\n", getClearName("exit"), "Close the console");
}
public static String getCorrectMethodName(Method m) {
StringBuilder buffer = new StringBuilder();
buffer.append(getClearName(m.getName()));
for (int i = 0; i < m.getParameterAnnotations().length; i++) {
for (int j = 0; j < m.getParameterAnnotations()[i].length; j++) {
if (m.getParameterAnnotations()[i][j] instanceof com.orientechnologies.common.console.annotation.ConsoleParameter) {
buffer
.append(" <"
+ ((com.orientechnologies.common.console.annotation.ConsoleParameter) m.getParameterAnnotations()[i][j]).name()
+ ">");
}
}
}
return buffer.toString();
}
public static String getClearName(String iJavaName) {
StringBuilder buffer = new StringBuilder();
char c;
if (iJavaName != null) {
buffer.append(iJavaName.charAt(0));
for (int i = 1; i < iJavaName.length(); ++i) {
c = iJavaName.charAt(i);
if (Character.isUpperCase(c)) {
buffer.append(' ');
}
buffer.append(Character.toLowerCase(c));
}
}
return buffer.toString();
}
protected String getCommandLine(String[] iArguments) {
StringBuilder command = new StringBuilder();
for (int i = 0; i < iArguments.length; ++i) {
if (i > 0)
command.append(" ");
command.append(iArguments[i]);
}
return command.toString();
}
protected void onBefore() {
}
protected void onAfter() {
}
protected void onException(Throwable throwable) {
throwable.printStackTrace();
}
public void message(final String iMessage, final Object... iArgs) {
final int verboseLevel = getVerboseLevel();
if (verboseLevel > 1)
out.printf(iMessage, iArgs);
}
public void error(final String iMessage, final Object... iArgs) {
final int verboseLevel = getVerboseLevel();
if (verboseLevel > 0)
out.printf(iMessage, iArgs);
}
public int getVerboseLevel() {
final String v = properties.get("verbose");
final int verboseLevel = v != null ? Integer.parseInt(v) : 2;
return verboseLevel;
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_console_OConsoleApplication.java
|
389 |
new Thread(){
public void run() {
mm.forceUnlock(key);
forceUnlock.countDown();
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapLockTest.java
|
3,221 |
public abstract class AbstractReplicatedRecordStore<K, V>
extends AbstractBaseReplicatedRecordStore<K, V> {
static final String CLEAR_REPLICATION_MAGIC_KEY = ReplicatedMapService.SERVICE_NAME + "$CLEAR$MESSAGE$";
public AbstractReplicatedRecordStore(String name, NodeEngine nodeEngine, CleanerRegistrator cleanerRegistrator,
ReplicatedMapService replicatedMapService) {
super(name, nodeEngine, cleanerRegistrator, replicatedMapService);
}
@Override
public Object remove(Object key) {
ValidationUtil.isNotNull(key, "key");
long time = System.currentTimeMillis();
storage.checkState();
V oldValue;
K marshalledKey = (K) marshallKey(key);
synchronized (getMutex(marshalledKey)) {
final ReplicatedRecord current = storage.get(marshalledKey);
final VectorClock vectorClock;
if (current == null) {
oldValue = null;
} else {
vectorClock = current.getVectorClock();
oldValue = (V) current.getValue();
// Force removal of the underlying stored entry
storage.remove(marshalledKey, current);
vectorClock.incrementClock(localMember);
ReplicationMessage message = buildReplicationMessage(key, null, vectorClock, -1);
replicationPublisher.publishReplicatedMessage(message);
}
cancelTtlEntry(marshalledKey);
}
Object unmarshalledOldValue = unmarshallValue(oldValue);
fireEntryListenerEvent(key, unmarshalledOldValue, null);
if (replicatedMapConfig.isStatisticsEnabled()) {
mapStats.incrementRemoves(System.currentTimeMillis() - time);
}
return unmarshalledOldValue;
}
@Override
public Object get(Object key) {
ValidationUtil.isNotNull(key, "key");
long time = System.currentTimeMillis();
storage.checkState();
ReplicatedRecord replicatedRecord = storage.get(marshallKey(key));
// Force return null on ttl expiration (but before cleanup thread run)
long ttlMillis = replicatedRecord == null ? 0 : replicatedRecord.getTtlMillis();
if (ttlMillis > 0 && System.currentTimeMillis() - replicatedRecord.getUpdateTime() >= ttlMillis) {
replicatedRecord = null;
}
Object value = replicatedRecord == null ? null : unmarshallValue(replicatedRecord.getValue());
if (replicatedMapConfig.isStatisticsEnabled()) {
mapStats.incrementGets(System.currentTimeMillis() - time);
}
return value;
}
@Override
public Object put(Object key, Object value) {
ValidationUtil.isNotNull(key, "key");
ValidationUtil.isNotNull(value, "value");
storage.checkState();
return put(key, value, 0, TimeUnit.MILLISECONDS);
}
@Override
public Object put(Object key, Object value, long ttl, TimeUnit timeUnit) {
ValidationUtil.isNotNull(key, "key");
ValidationUtil.isNotNull(value, "value");
ValidationUtil.isNotNull(timeUnit, "timeUnit");
if (ttl < 0) {
throw new IllegalArgumentException("ttl must be a positive integer");
}
long time = System.currentTimeMillis();
storage.checkState();
V oldValue = null;
K marshalledKey = (K) marshallKey(key);
V marshalledValue = (V) marshallValue(value);
synchronized (getMutex(marshalledKey)) {
final long ttlMillis = ttl == 0 ? 0 : timeUnit.toMillis(ttl);
final ReplicatedRecord old = storage.get(marshalledKey);
final VectorClock vectorClock;
if (old == null) {
vectorClock = new VectorClock();
ReplicatedRecord<K, V> record = buildReplicatedRecord(marshalledKey, marshalledValue, vectorClock, ttlMillis);
storage.put(marshalledKey, record);
} else {
oldValue = (V) old.getValue();
vectorClock = old.getVectorClock();
storage.get(marshalledKey).setValue(marshalledValue, localMemberHash, ttlMillis);
}
if (ttlMillis > 0) {
scheduleTtlEntry(ttlMillis, marshalledKey, null);
} else {
cancelTtlEntry(marshalledKey);
}
vectorClock.incrementClock(localMember);
ReplicationMessage message = buildReplicationMessage(key, value, vectorClock, ttlMillis);
replicationPublisher.publishReplicatedMessage(message);
}
Object unmarshalledOldValue = unmarshallValue(oldValue);
fireEntryListenerEvent(key, unmarshalledOldValue, value);
if (replicatedMapConfig.isStatisticsEnabled()) {
mapStats.incrementPuts(System.currentTimeMillis() - time);
}
return unmarshalledOldValue;
}
@Override
public boolean containsKey(Object key) {
ValidationUtil.isNotNull(key, "key");
storage.checkState();
mapStats.incrementOtherOperations();
return storage.containsKey(marshallKey(key));
}
@Override
public boolean containsValue(Object value) {
ValidationUtil.isNotNull(value, "value");
storage.checkState();
mapStats.incrementOtherOperations();
for (Map.Entry<K, ReplicatedRecord<K, V>> entry : storage.entrySet()) {
V entryValue = entry.getValue().getValue();
if (value == entryValue || (entryValue != null && unmarshallValue(entryValue).equals(value))) {
return true;
}
}
return false;
}
@Override
public Set keySet() {
storage.checkState();
Set keySet = new HashSet(storage.size());
for (K key : storage.keySet()) {
keySet.add(unmarshallKey(key));
}
mapStats.incrementOtherOperations();
return keySet;
}
@Override
public Collection values() {
storage.checkState();
List values = new ArrayList(storage.size());
for (ReplicatedRecord record : storage.values()) {
values.add(unmarshallValue(record.getValue()));
}
mapStats.incrementOtherOperations();
return values;
}
@Override
public Collection values(Comparator comparator) {
List values = (List) values();
Collections.sort(values, comparator);
return values;
}
@Override
public Set entrySet() {
storage.checkState();
Set entrySet = new HashSet(storage.size());
for (Map.Entry<K, ReplicatedRecord<K, V>> entry : storage.entrySet()) {
Object key = unmarshallKey(entry.getKey());
Object value = unmarshallValue(entry.getValue().getValue());
entrySet.add(new AbstractMap.SimpleEntry(key, value));
}
mapStats.incrementOtherOperations();
return entrySet;
}
@Override
public ReplicatedRecord getReplicatedRecord(Object key) {
ValidationUtil.isNotNull(key, "key");
storage.checkState();
return storage.get(marshallKey(key));
}
@Override
public boolean isEmpty() {
mapStats.incrementOtherOperations();
return storage.isEmpty();
}
@Override
public int size() {
mapStats.incrementOtherOperations();
return storage.size();
}
@Override
public void clear(boolean distribute, boolean emptyReplicationQueue) {
storage.checkState();
if (emptyReplicationQueue) {
replicationPublisher.emptyReplicationQueue();
}
storage.clear();
if (distribute) {
replicationPublisher.distributeClear(emptyReplicationQueue);
}
mapStats.incrementOtherOperations();
}
@Override
public String addEntryListener(EntryListener listener, Object key) {
ValidationUtil.isNotNull(listener, "listener");
EventFilter eventFilter = new ReplicatedEntryEventFilter(marshallKey(key));
mapStats.incrementOtherOperations();
return replicatedMapService.addEventListener(listener, eventFilter, getName());
}
@Override
public String addEntryListener(EntryListener listener, Predicate predicate, Object key) {
ValidationUtil.isNotNull(listener, "listener");
EventFilter eventFilter = new ReplicatedQueryEventFilter(marshallKey(key), predicate);
mapStats.incrementOtherOperations();
return replicatedMapService.addEventListener(listener, eventFilter, getName());
}
@Override
public boolean removeEntryListenerInternal(String id) {
ValidationUtil.isNotNull(id, "id");
mapStats.incrementOtherOperations();
return replicatedMapService.removeEventListener(getName(), id);
}
private ReplicationMessage buildReplicationMessage(Object key, Object value, VectorClock vectorClock, long ttlMillis) {
return new ReplicationMessage(getName(), key, value, vectorClock, localMember, localMemberHash, ttlMillis);
}
private ReplicatedRecord buildReplicatedRecord(Object key, Object value, VectorClock vectorClock, long ttlMillis) {
return new ReplicatedRecord(key, value, vectorClock, localMemberHash, ttlMillis);
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_replicatedmap_record_AbstractReplicatedRecordStore.java
|
1,335 |
public interface ClusterStateUpdateListener {
/**
* Called when the cluster state update is acknowledged
*/
void onResponse(ClusterStateUpdateResponse response);
/**
* Called when any error is thrown during the cluster state update processing
*/
void onFailure(Throwable t);
}
| 0true
|
src_main_java_org_elasticsearch_cluster_ack_ClusterStateUpdateListener.java
|
1,214 |
CONCURRENT {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int availableProcessors) {
return concurrent(dequeFactory(c, limit), availableProcessors);
}
},
| 0true
|
src_main_java_org_elasticsearch_cache_recycler_CacheRecycler.java
|
20 |
static final class CompletionNode {
final Completion completion;
volatile CompletionNode next;
CompletionNode(Completion completion) { this.completion = completion; }
}
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
522 |
public class ORecordNotFoundException extends OException {
private static final long serialVersionUID = -265573123216968L;
public ORecordNotFoundException(final String string) {
super(string);
}
public ORecordNotFoundException(final String message, final Throwable cause) {
super(message, cause);
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_exception_ORecordNotFoundException.java
|
1,256 |
"Number of times a memory mapped page has been reused in short time", METRIC_TYPE.COUNTER, new OProfilerHookValue() {
public Object getValue() {
return metricReusedPagesBetweenLast;
}
});
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_storage_fs_OMMapManagerOld.java
|
52 |
final class NestedCompletionProposal implements ICompletionProposal,
ICompletionProposalExtension2 {
private final Declaration dec;
private final int offset;
public NestedCompletionProposal(Declaration dec, int offset) {
super();
this.dec = dec;
this.offset = offset;
}
@Override
public void apply(IDocument document) {
try {
int len = 0;
while (isJavaIdentifierPart(document.getChar(offset+len))) {
len++;
}
document.replace(offset, len, getText(false));
}
catch (BadLocationException e) {
e.printStackTrace();
}
}
@Override
public Point getSelection(IDocument document) {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public String getDisplayString() {
return getText(true);
}
@Override
public Image getImage() {
return getImageForDeclaration(dec);
}
@Override
public IContextInformation getContextInformation() {
return null;
}
private String getText(boolean description) {
StringBuilder sb = new StringBuilder()
.append(dec.getName());
if (dec instanceof Functional) {
appendPositionalArgs(dec, getUnit(),
sb, false, description);
}
return sb.toString();
}
@Override
public void apply(ITextViewer viewer, char trigger,
int stateMask, int offset) {
apply(viewer.getDocument());
}
@Override
public void selected(ITextViewer viewer, boolean smartToggle) {}
@Override
public void unselected(ITextViewer viewer) {}
@Override
public boolean validate(IDocument document, int currentOffset,
DocumentEvent event) {
if (event==null) {
return true;
}
else {
try {
String content = document.get(offset,
currentOffset-offset);
String filter = content.trim().toLowerCase();
if ((dec.getName().toLowerCase())
.startsWith(filter)) {
return true;
}
}
catch (BadLocationException e) {
// ignore concurrently modified document
}
return false;
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_RefinementCompletionProposal.java
|
3,532 |
public class BinaryMappingTests extends ElasticsearchTestCase {
@Test
public void testDefaultMapping() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "binary")
.endObject()
.endObject()
.endObject().endObject().string();
DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class));
assertThat(fieldMapper.fieldType().stored(), equalTo(false));
}
}
| 0true
|
src_test_java_org_elasticsearch_index_mapper_binary_BinaryMappingTests.java
|
254 |
private class FilterDeletedColumns implements Predicate<Column> {
private final long ts;
private FilterDeletedColumns(long ts) {
this.ts = ts;
}
@Override
public boolean apply(Column input) {
return !input.isMarkedForDelete(ts);
}
}
| 0true
|
titan-cassandra_src_main_java_com_thinkaurelius_titan_diskstorage_cassandra_embedded_CassandraEmbeddedKeyColumnValueStore.java
|
314 |
public enum Type {
/**
* Once the database has been opened, these configuration options cannot
* be changed for the entire life of the database
*/
FIXED,
/**
* These options can only be changed for the entire database cluster at
* once when all instances are shut down
*/
GLOBAL_OFFLINE,
/**
* These options can only be changed globally across the entire database
* cluster
*/
GLOBAL,
/**
* These options are global but can be overwritten by a local
* configuration file
*/
MASKABLE,
/**
* These options can ONLY be provided through a local configuration file
*/
LOCAL;
}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_configuration_ConfigOption.java
|
153 |
return Maps.transformValues(indexes,new Function<IndexProvider, IndexFeatures>() {
@Nullable
@Override
public IndexFeatures apply(@Nullable IndexProvider indexProvider) {
return indexProvider.getFeatures();
}
});
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_diskstorage_Backend.java
|
1,134 |
public enum ClientType {
JAVA,
CSHARP,
CPP,
PYTHON,
RUBY,
OTHER
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_core_ClientType.java
|
1,263 |
addOperation(operations, new Runnable() {
public void run() {
IMap map = hazelcast.getMap("myMap");
map.addIndex("year", true);
}
}, 1);
| 0true
|
hazelcast_src_main_java_com_hazelcast_examples_AllTest.java
|
420 |
runConflictingTx(new TxJob() {
@Override
public void run(IndexTransaction tx) {
tx.delete(defStore, defDoc, TEXT, ImmutableMap.of(), false);
}
}, new TxJob() {
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_indexing_IndexProviderTest.java
|
838 |
public class OfferContext {
private static final ThreadLocal<OfferContext> OFFERCONTEXT = ThreadLocalManager.createThreadLocal(OfferContext.class);
public static OfferContext getOfferContext() {
return OFFERCONTEXT.get();
}
public static void setOfferContext(OfferContext offerContext) {
OFFERCONTEXT.set(offerContext);
}
protected Boolean executePromotionCalculation = true;
public Boolean getExecutePromotionCalculation() {
return executePromotionCalculation;
}
public void setExecutePromotionCalculation(Boolean executePromotionCalculation) {
this.executePromotionCalculation = executePromotionCalculation;
}
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_service_OfferContext.java
|
215 |
public class HydrationItemDescriptor {
private String factoryMethod;
private Method[] mutators;
public String getFactoryMethod() {
return factoryMethod;
}
public void setFactoryMethod(String factoryMethod) {
this.factoryMethod = factoryMethod;
}
public Method[] getMutators() {
return mutators;
}
public void setMutators(Method[] mutators) {
this.mutators = mutators;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_cache_engine_HydrationItemDescriptor.java
|
1,244 |
autoFlushTask = new TimerTask() {
@Override
public void run() {
flush();
}
};
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_storage_fs_OMMapManagerNew.java
|
1,891 |
public class Scopes {
private Scopes() {
}
/**
* One instance per {@link Injector}. Also see {@code @}{@link Singleton}.
*/
public static final Scope SINGLETON = new Scope() {
public <T> Provider<T> scope(Key<T> key, final Provider<T> creator) {
return new Provider<T>() {
private volatile T instance;
// DCL on a volatile is safe as of Java 5, which we obviously require.
@SuppressWarnings("DoubleCheckedLocking")
public T get() {
if (instance == null) {
/*
* Use a pretty coarse lock. We don't want to run into deadlocks
* when two threads try to load circularly-dependent objects.
* Maybe one of these days we will identify independent graphs of
* objects and offer to load them in parallel.
*/
synchronized (InjectorImpl.class) {
if (instance == null) {
instance = creator.get();
}
}
}
return instance;
}
public String toString() {
return String.format(Locale.ROOT, "%s[%s]", creator, SINGLETON);
}
};
}
@Override
public String toString() {
return "Scopes.SINGLETON";
}
};
/**
* No scope; the same as not applying any scope at all. Each time the
* Injector obtains an instance of an object with "no scope", it injects this
* instance then immediately forgets it. When the next request for the same
* binding arrives it will need to obtain the instance over again.
* <p/>
* <p>This exists only in case a class has been annotated with a scope
* annotation such as {@link Singleton @Singleton}, and you need to override
* this to "no scope" in your binding.
*
* @since 2.0
*/
public static final Scope NO_SCOPE = new Scope() {
public <T> Provider<T> scope(Key<T> key, Provider<T> unscoped) {
return unscoped;
}
@Override
public String toString() {
return "Scopes.NO_SCOPE";
}
};
/**
* Scopes an internal factory.
*/
static <T> InternalFactory<? extends T> scope(Key<T> key, InjectorImpl injector,
InternalFactory<? extends T> creator, Scoping scoping) {
if (scoping.isNoScope()) {
return creator;
}
Scope scope = scoping.getScopeInstance();
Provider<T> scoped
= scope.scope(key, new ProviderToInternalFactoryAdapter<T>(injector, creator));
return new InternalFactoryToProviderAdapter<T>(
Initializables.<Provider<? extends T>>of(scoped));
}
/**
* Replaces annotation scopes with instance scopes using the Injector's annotation-to-instance
* map. If the scope annotation has no corresponding instance, an error will be added and unscoped
* will be returned.
*/
static Scoping makeInjectable(Scoping scoping, InjectorImpl injector, Errors errors) {
Class<? extends Annotation> scopeAnnotation = scoping.getScopeAnnotation();
if (scopeAnnotation == null) {
return scoping;
}
Scope scope = injector.state.getScope(scopeAnnotation);
if (scope != null) {
return Scoping.forInstance(scope);
}
errors.scopeNotFound(scopeAnnotation);
return Scoping.UNSCOPED;
}
}
| 0true
|
src_main_java_org_elasticsearch_common_inject_Scopes.java
|
3,733 |
public static class Nested {
public static final Nested NO = new Nested(false, false, false);
public static Nested newNested(boolean includeInParent, boolean includeInRoot) {
return new Nested(true, includeInParent, includeInRoot);
}
private final boolean nested;
private final boolean includeInParent;
private final boolean includeInRoot;
private Nested(boolean nested, boolean includeInParent, boolean includeInRoot) {
this.nested = nested;
this.includeInParent = includeInParent;
this.includeInRoot = includeInRoot;
}
public boolean isNested() {
return nested;
}
public boolean isIncludeInParent() {
return includeInParent;
}
public boolean isIncludeInRoot() {
return includeInRoot;
}
}
| 0true
|
src_main_java_org_elasticsearch_index_mapper_object_ObjectMapper.java
|
3,104 |
public final class EngineSearcherTotalHitsMatcher extends TypeSafeMatcher<Engine.Searcher> {
private final Query query;
private final int totalHits;
public EngineSearcherTotalHitsMatcher(Query query, int totalHits) {
this.query = query;
this.totalHits = totalHits;
}
@Override
public boolean matchesSafely(Engine.Searcher searcher) {
try {
long count = Lucene.count(searcher.searcher(), query);
return count == totalHits;
} catch (IOException e) {
return false;
}
}
@Override
public void describeTo(Description description) {
description.appendText("total hits of size ").appendValue(totalHits).appendText(" with query ").appendValue(query);
}
public static Matcher<Engine.Searcher> engineSearcherTotalHits(Query query, int totalHits) {
return new EngineSearcherTotalHitsMatcher(query, totalHits);
}
public static Matcher<Engine.Searcher> engineSearcherTotalHits(int totalHits) {
return new EngineSearcherTotalHitsMatcher(Queries.newMatchAllQuery(), totalHits);
}
}
| 1no label
|
src_test_java_org_elasticsearch_index_engine_EngineSearcherTotalHitsMatcher.java
|
964 |
public abstract class NodesOperationResponse<NodeResponse extends NodeOperationResponse> extends ActionResponse implements Iterable<NodeResponse> {
private ClusterName clusterName;
protected NodeResponse[] nodes;
private Map<String, NodeResponse> nodesMap;
protected NodesOperationResponse() {
}
protected NodesOperationResponse(ClusterName clusterName, NodeResponse[] nodes) {
this.clusterName = clusterName;
this.nodes = nodes;
}
public ClusterName getClusterName() {
return this.clusterName;
}
public String getClusterNameAsString() {
return this.clusterName.value();
}
public NodeResponse[] getNodes() {
return nodes;
}
public NodeResponse getAt(int position) {
return nodes[position];
}
@Override
public Iterator<NodeResponse> iterator() {
return getNodesMap().values().iterator();
}
public Map<String, NodeResponse> getNodesMap() {
if (nodesMap == null) {
nodesMap = Maps.newHashMap();
for (NodeResponse nodeResponse : nodes) {
nodesMap.put(nodeResponse.getNode().id(), nodeResponse);
}
}
return nodesMap;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = ClusterName.readClusterName(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
clusterName.writeTo(out);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_nodes_NodesOperationResponse.java
|
980 |
public interface ORecordSerializer {
public ORecordInternal<?> fromStream(byte[] iSource, ORecordInternal<?> iRecord, String[] iFields);
public byte[] toStream(ORecordInternal<?> iSource, boolean iOnlyDelta);
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_serialization_serializer_record_ORecordSerializer.java
|
2,624 |
public interface ZenPing extends LifecycleComponent<ZenPing> {
void setNodesProvider(DiscoveryNodesProvider nodesProvider);
void ping(PingListener listener, TimeValue timeout) throws ElasticsearchException;
public interface PingListener {
void onPing(PingResponse[] pings);
}
public static class PingResponse implements Streamable {
public static PingResponse[] EMPTY = new PingResponse[0];
private ClusterName clusterName;
private DiscoveryNode target;
private DiscoveryNode master;
private PingResponse() {
}
public PingResponse(DiscoveryNode target, DiscoveryNode master, ClusterName clusterName) {
this.target = target;
this.master = master;
this.clusterName = clusterName;
}
public ClusterName clusterName() {
return this.clusterName;
}
public DiscoveryNode target() {
return target;
}
public DiscoveryNode master() {
return master;
}
public static PingResponse readPingResponse(StreamInput in) throws IOException {
PingResponse response = new PingResponse();
response.readFrom(in);
return response;
}
@Override
public void readFrom(StreamInput in) throws IOException {
clusterName = readClusterName(in);
target = readNode(in);
if (in.readBoolean()) {
master = readNode(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
clusterName.writeTo(out);
target.writeTo(out);
if (master == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
master.writeTo(out);
}
}
@Override
public String toString() {
return "ping_response{target [" + target + "], master [" + master + "], cluster_name[" + clusterName.value() + "]}";
}
}
}
| 0true
|
src_main_java_org_elasticsearch_discovery_zen_ping_ZenPing.java
|
2,030 |
= new DefaultBindingTargetVisitor<Object, Object>() {
@Override
public Object visit(InstanceBinding<?> binding) {
return binding.getInstance();
}
@Override
protected Object visitOther(Binding<?> binding) {
throw new IllegalArgumentException();
}
};
| 0true
|
src_main_java_org_elasticsearch_common_inject_spi_Elements.java
|
883 |
public final class CountDownLatchPortableHook implements PortableHook {
static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.CDL_PORTABLE_FACTORY, -14);
static final int COUNT_DOWN = 1;
static final int AWAIT = 2;
static final int SET_COUNT = 3;
static final int GET_COUNT = 4;
@Override
public int getFactoryId() {
return F_ID;
}
@Override
public PortableFactory createFactory() {
return new PortableFactory() {
@Override
public Portable create(int classId) {
switch (classId) {
case COUNT_DOWN:
return new CountDownRequest();
case AWAIT:
return new AwaitRequest();
case SET_COUNT:
return new SetCountRequest();
case GET_COUNT:
return new GetCountRequest();
default:
return null;
}
}
};
}
@Override
public Collection<ClassDefinition> getBuiltinDefinitions() {
return null;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_concurrent_countdownlatch_client_CountDownLatchPortableHook.java
|