Unnamed: 0
int64 0
6.45k
| func
stringlengths 37
143k
| target
class label 2
classes | project
stringlengths 33
157
|
---|---|---|---|
3,709 |
/**
 * Pool worker thread that routes {@link OutOfMemoryError}s to the central
 * dispatcher and returns its numeric id to {@code idQ} when it terminates,
 * so the factory can recycle the id for a future thread.
 */
private class ManagedThread extends Thread {

    /** Pool-local id of this thread; offered back to {@code idQ} on exit. */
    protected final int id;

    public ManagedThread(Runnable target, String name, int id) {
        super(threadGroup, target, name);
        this.id = id;
    }

    @Override
    public void run() {
        try {
            super.run();
        } catch (OutOfMemoryError e) {
            OutOfMemoryErrorDispatcher.onOutOfMemory(e);
        } finally {
            try {
                // Recycle the id; best-effort only.
                idQ.offer(id);
            } catch (Throwable ignored) {
                // Never let id-recycling failures escape the dying thread.
            }
        }
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_util_executor_PoolExecutorThreadFactory.java
|
287 |
/**
 * Thrown when a node-level action fails; carries the id of the node on
 * which the failure occurred, in addition to the message and cause.
 */
public class FailedNodeException extends ElasticsearchException {

    private final String nodeId;

    public FailedNodeException(String nodeId, String msg, Throwable cause) {
        super(msg, cause);
        this.nodeId = nodeId;
    }

    /** @return the id of the node on which the action failed */
    public String nodeId() {
        return nodeId;
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_FailedNodeException.java
|
20 |
/**
 * Singly-linked node holding one {@code Completion}. {@code next} is volatile
 * because the list is read and linked by multiple threads.
 * NOTE(review): the push/traversal logic lives elsewhere in this file —
 * confirm the intended publication protocol there.
 */
static final class CompletionNode {
    final Completion completion;
    volatile CompletionNode next;
    CompletionNode(Completion completion) { this.completion = completion; }
}
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
2,423 |
/**
 * Event filter attached to multimap listener registrations. Carries whether
 * event values should be included and an optional key to restrict events to.
 * {@link #eval(Object)} always returns {@code false}; the flag and key are
 * read via the getters instead — presumably by the event dispatching code
 * (not visible here; confirm against callers).
 */
public class MultiMapEventFilter implements EventFilter, DataSerializable {

    boolean includeValue;
    Data key;

    // No-arg constructor required for deserialization.
    public MultiMapEventFilter() {
    }

    public MultiMapEventFilter(boolean includeValue, Data key) {
        this.includeValue = includeValue;
        this.key = key;
    }

    /** @return true if listener events should carry the value */
    public boolean isIncludeValue() {
        return includeValue;
    }

    /** @return the key to filter on, or null for all keys */
    public Data getKey() {
        return key;
    }

    public void writeData(ObjectDataOutput out) throws IOException {
        // Wire order must mirror readData exactly: flag first, then nullable key.
        out.writeBoolean(includeValue);
        IOUtil.writeNullableData(out, key);
    }

    public void readData(ObjectDataInput in) throws IOException {
        includeValue = in.readBoolean();
        key = IOUtil.readNullableData(in);
    }

    public boolean eval(Object arg) {
        return false;
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_multimap_MultiMapEventFilter.java
|
2,516 |
/**
 * JSON implementation of {@code XContentGenerator}, a thin adapter over a
 * Jackson {@link JsonGenerator}. Most methods delegate one-to-one; each
 * {@code XContentString} overload writes the (pre-encoded) field name first
 * and then the value. The {@code writeRawField} variants splice raw bytes
 * into the output stream after flushing the generator.
 */
public class JsonXContentGenerator implements XContentGenerator {

    protected final JsonGenerator generator;

    // When set, close() emits a trailing line feed before closing.
    private boolean writeLineFeedAtEnd;

    public JsonXContentGenerator(JsonGenerator generator) {
        this.generator = generator;
    }

    @Override
    public XContentType contentType() {
        return XContentType.JSON;
    }

    @Override
    public void usePrettyPrint() {
        generator.useDefaultPrettyPrinter();
    }

    @Override
    public void usePrintLineFeedAtEnd() {
        writeLineFeedAtEnd = true;
    }

    @Override
    public void writeStartArray() throws IOException {
        generator.writeStartArray();
    }

    @Override
    public void writeEndArray() throws IOException {
        generator.writeEndArray();
    }

    @Override
    public void writeStartObject() throws IOException {
        generator.writeStartObject();
    }

    @Override
    public void writeEndObject() throws IOException {
        generator.writeEndObject();
    }

    @Override
    public void writeFieldName(String name) throws IOException {
        generator.writeFieldName(name);
    }

    @Override
    public void writeFieldName(XContentString name) throws IOException {
        generator.writeFieldName(name);
    }

    @Override
    public void writeString(String text) throws IOException {
        generator.writeString(text);
    }

    @Override
    public void writeString(char[] text, int offset, int len) throws IOException {
        generator.writeString(text, offset, len);
    }

    @Override
    public void writeUTF8String(byte[] text, int offset, int length) throws IOException {
        generator.writeUTF8String(text, offset, length);
    }

    @Override
    public void writeBinary(byte[] data, int offset, int len) throws IOException {
        generator.writeBinary(data, offset, len);
    }

    @Override
    public void writeBinary(byte[] data) throws IOException {
        generator.writeBinary(data);
    }

    @Override
    public void writeNumber(int v) throws IOException {
        generator.writeNumber(v);
    }

    @Override
    public void writeNumber(long v) throws IOException {
        generator.writeNumber(v);
    }

    @Override
    public void writeNumber(double d) throws IOException {
        generator.writeNumber(d);
    }

    @Override
    public void writeNumber(float f) throws IOException {
        generator.writeNumber(f);
    }

    @Override
    public void writeBoolean(boolean state) throws IOException {
        generator.writeBoolean(state);
    }

    @Override
    public void writeNull() throws IOException {
        generator.writeNull();
    }

    @Override
    public void writeStringField(String fieldName, String value) throws IOException {
        generator.writeStringField(fieldName, value);
    }

    @Override
    public void writeStringField(XContentString fieldName, String value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeString(value);
    }

    @Override
    public void writeBooleanField(String fieldName, boolean value) throws IOException {
        generator.writeBooleanField(fieldName, value);
    }

    @Override
    public void writeBooleanField(XContentString fieldName, boolean value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeBoolean(value);
    }

    @Override
    public void writeNullField(String fieldName) throws IOException {
        generator.writeNullField(fieldName);
    }

    @Override
    public void writeNullField(XContentString fieldName) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeNull();
    }

    @Override
    public void writeNumberField(String fieldName, int value) throws IOException {
        generator.writeNumberField(fieldName, value);
    }

    @Override
    public void writeNumberField(XContentString fieldName, int value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeNumber(value);
    }

    @Override
    public void writeNumberField(String fieldName, long value) throws IOException {
        generator.writeNumberField(fieldName, value);
    }

    @Override
    public void writeNumberField(XContentString fieldName, long value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeNumber(value);
    }

    @Override
    public void writeNumberField(String fieldName, double value) throws IOException {
        generator.writeNumberField(fieldName, value);
    }

    @Override
    public void writeNumberField(XContentString fieldName, double value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeNumber(value);
    }

    @Override
    public void writeNumberField(String fieldName, float value) throws IOException {
        generator.writeNumberField(fieldName, value);
    }

    @Override
    public void writeNumberField(XContentString fieldName, float value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeNumber(value);
    }

    @Override
    public void writeBinaryField(String fieldName, byte[] data) throws IOException {
        generator.writeBinaryField(fieldName, data);
    }

    @Override
    public void writeBinaryField(XContentString fieldName, byte[] value) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeBinary(value);
    }

    @Override
    public void writeArrayFieldStart(String fieldName) throws IOException {
        generator.writeArrayFieldStart(fieldName);
    }

    @Override
    public void writeArrayFieldStart(XContentString fieldName) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeStartArray();
    }

    @Override
    public void writeObjectFieldStart(String fieldName) throws IOException {
        generator.writeObjectFieldStart(fieldName);
    }

    @Override
    public void writeObjectFieldStart(XContentString fieldName) throws IOException {
        generator.writeFieldName(fieldName);
        generator.writeStartObject();
    }

    // NOTE(review): the raw-field writers emit a leading ", \"" — they assume
    // the generator is mid-object with at least one field already written.
    // Confirm all call sites satisfy that precondition.
    @Override
    public void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException {
        generator.writeRaw(", \"");
        generator.writeRaw(fieldName);
        generator.writeRaw("\" : ");
        // Flush buffered generator output before writing directly to the stream,
        // otherwise the raw bytes would appear out of order.
        flush();
        bos.write(content);
    }

    @Override
    public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException {
        generator.writeRaw(", \"");
        generator.writeRaw(fieldName);
        generator.writeRaw("\" : ");
        flush();
        bos.write(content, offset, length);
    }

    @Override
    public void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException {
        generator.writeRaw(", \"");
        generator.writeRaw(fieldName);
        generator.writeRaw("\" : ");
        flush();
        Streams.copy(content, bos);
    }

    @Override
    public final void writeRawField(String fieldName, BytesReference content, OutputStream bos) throws IOException {
        // If the bytes are recognizable structured content, splice them raw;
        // otherwise fall back to writing the value as a number or string.
        XContentType contentType = XContentFactory.xContentType(content);
        if (contentType != null) {
            writeObjectRaw(fieldName, content, bos);
        } else {
            writeFieldName(fieldName);
            // we could potentially optimize this to not rely on exception logic...
            String sValue = content.toUtf8();
            try {
                writeNumber(Long.parseLong(sValue));
            } catch (NumberFormatException e) {
                try {
                    writeNumber(Double.parseDouble(sValue));
                } catch (NumberFormatException e1) {
                    writeString(sValue);
                }
            }
        }
    }

    protected void writeObjectRaw(String fieldName, BytesReference content, OutputStream bos) throws IOException {
        generator.writeRaw(", \"");
        generator.writeRaw(fieldName);
        generator.writeRaw("\" : ");
        flush();
        content.writeTo(bos);
    }

    @Override
    public void copyCurrentStructure(XContentParser parser) throws IOException {
        // the start of the parser
        if (parser.currentToken() == null) {
            parser.nextToken();
        }
        // Fast path: JSON-to-JSON copies go straight through Jackson.
        if (parser instanceof JsonXContentParser) {
            generator.copyCurrentStructure(((JsonXContentParser) parser).parser);
        } else {
            XContentHelper.copyCurrentStructure(this, parser);
        }
    }

    @Override
    public void flush() throws IOException {
        generator.flush();
    }

    @Override
    public void close() throws IOException {
        if (generator.isClosed()) {
            return;
        }
        if (writeLineFeedAtEnd) {
            // Flush first so the LF lands after all buffered content.
            flush();
            generator.writeRaw(LF);
        }
        generator.close();
    }

    // Pre-encoded line feed appended when usePrintLineFeedAtEnd() was called.
    private static final SerializedString LF = new SerializedString("\n");
}
| 1no label
|
src_main_java_org_elasticsearch_common_xcontent_json_JsonXContentGenerator.java
|
731 |
/**
 * Action definition for the delete-by-query API: a singleton that names the
 * action and produces its response and request-builder objects.
 */
public class DeleteByQueryAction extends Action<DeleteByQueryRequest, DeleteByQueryResponse, DeleteByQueryRequestBuilder> {

    public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction();
    public static final String NAME = "deleteByQuery";

    // Singleton: callers use INSTANCE instead of constructing the action.
    private DeleteByQueryAction() {
        super(NAME);
    }

    @Override
    public DeleteByQueryResponse newResponse() {
        return new DeleteByQueryResponse();
    }

    @Override
    public DeleteByQueryRequestBuilder newRequestBuilder(Client client) {
        return new DeleteByQueryRequestBuilder(client);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_deletebyquery_DeleteByQueryAction.java
|
3,749 |
/**
 * Servlet filter that clusters HTTP sessions through a Hazelcast IMap.
 * Requests are wrapped so that getSession() returns a {@code HazelcastHttpSession}
 * backed by both the container session and the cluster map. With the
 * "deferred-write" option enabled, attribute changes are cached locally and
 * written to the cluster once at the end of the request.
 */
@SuppressWarnings("deprecation")
public class WebFilter implements Filter {

    protected static final String HAZELCAST_SESSION_ATTRIBUTE_SEPARATOR = "::hz::";

    private static final ILogger LOGGER = Logger.getLogger(WebFilter.class);
    // Sentinel meaning "attribute known to be absent" in the local cache.
    private static final LocalCacheEntry NULL_ENTRY = new LocalCacheEntry();
    private static final String HAZELCAST_REQUEST = "*hazelcast-request";
    private static final String HAZELCAST_SESSION_COOKIE_NAME = "hazelcast.sessionId";
    // container session id -> hazelcast session id
    private static final ConcurrentMap<String, String> MAP_ORIGINAL_SESSIONS = new ConcurrentHashMap<String, String>(1000);
    // hazelcast session id -> local session wrapper
    private static final ConcurrentMap<String, HazelcastHttpSession> MAP_SESSIONS =
            new ConcurrentHashMap<String, HazelcastHttpSession>(1000);

    protected ServletContext servletContext;
    protected FilterConfig filterConfig;

    private String sessionCookieName = HAZELCAST_SESSION_COOKIE_NAME;
    private HazelcastInstance hazelcastInstance;
    private String clusterMapName = "none";
    private String sessionCookieDomain;
    private boolean sessionCookieSecure;
    private boolean sessionCookieHttpOnly;
    private boolean stickySession = true;
    private boolean shutdownOnDestroy = true;
    private boolean deferredWrite;
    private Properties properties;

    public WebFilter() {
    }

    public WebFilter(Properties properties) {
        this();
        this.properties = properties;
    }

    // Invoked when the container destroys its own session: removes the paired
    // Hazelcast session locally only (not cluster-wide).
    static void destroyOriginalSession(HttpSession originalSession) {
        String hazelcastSessionId = MAP_ORIGINAL_SESSIONS.remove(originalSession.getId());
        if (hazelcastSessionId != null) {
            HazelcastHttpSession hazelSession = MAP_SESSIONS.remove(hazelcastSessionId);
            if (hazelSession != null) {
                hazelSession.webFilter.destroySession(hazelSession, false);
            }
        }
    }

    // Builds an "HZ"-prefixed session id from a random UUID: dashes dropped,
    // letters upper-cased.
    private static synchronized String generateSessionId() {
        final String id = UuidUtil.buildRandomUuidString();
        final StringBuilder sb = new StringBuilder("HZ");
        final char[] chars = id.toCharArray();
        for (final char c : chars) {
            if (c != '-') {
                if (Character.isLetter(c)) {
                    sb.append(Character.toUpperCase(c));
                } else {
                    sb.append(c);
                }
            }
        }
        return sb.toString();
    }

    /**
     * Filter lifecycle entry point: resolves the cluster map name, applies the
     * optional session TTL, reads cookie/behavior parameters, and — for
     * non-sticky sessions — listens for remote removals/evictions so local
     * copies are dropped.
     */
    public final void init(final FilterConfig config)
            throws ServletException {
        filterConfig = config;
        servletContext = config.getServletContext();
        initInstance();
        String mapName = getParam("map-name");
        if (mapName != null) {
            clusterMapName = mapName;
        } else {
            clusterMapName = "_web_" + servletContext.getServletContextName();
        }
        try {
            Config hzConfig = hazelcastInstance.getConfig();
            String sessionTTL = getParam("session-ttl-seconds");
            if (sessionTTL != null) {
                MapConfig mapConfig = hzConfig.getMapConfig(clusterMapName);
                mapConfig.setTimeToLiveSeconds(Integer.parseInt(sessionTTL));
                hzConfig.addMapConfig(mapConfig);
            }
        } catch (UnsupportedOperationException ignored) {
            // Clients cannot mutate cluster config; the TTL setting is skipped.
            LOGGER.info("client cannot access Config.");
        }
        initCookieParams();
        initParams();
        if (!stickySession) {
            getClusterMap().addEntryListener(new EntryListener<String, Object>() {
                public void entryAdded(EntryEvent<String, Object> entryEvent) {
                }
                public void entryRemoved(EntryEvent<String, Object> entryEvent) {
                    // Only react to removals originating on other members.
                    if (entryEvent.getMember() == null || !entryEvent.getMember().localMember()) {
                        removeSessionLocally(entryEvent.getKey());
                    }
                }
                public void entryUpdated(EntryEvent<String, Object> entryEvent) {
                }
                public void entryEvicted(EntryEvent<String, Object> entryEvent) {
                    entryRemoved(entryEvent);
                }
            }, false);
        }
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest("sticky:" + stickySession + ", shutdown-on-destroy: " + shutdownOnDestroy
                    + ", map-name: " + clusterMapName);
        }
    }

    // Reads behavior flags (sticky-session, shutdown-on-destroy, deferred-write)
    // from init parameters, keeping defaults when absent.
    private void initParams() {
        String stickySessionParam = getParam("sticky-session");
        if (stickySessionParam != null) {
            stickySession = Boolean.valueOf(stickySessionParam);
        }
        String shutdownOnDestroyParam = getParam("shutdown-on-destroy");
        if (shutdownOnDestroyParam != null) {
            shutdownOnDestroy = Boolean.valueOf(shutdownOnDestroyParam);
        }
        String deferredWriteParam = getParam("deferred-write");
        if (deferredWriteParam != null) {
            deferredWrite = Boolean.parseBoolean(deferredWriteParam);
        }
    }

    // Reads session-cookie settings (name, domain, secure, http-only).
    private void initCookieParams() {
        String cookieName = getParam("cookie-name");
        if (cookieName != null) {
            sessionCookieName = cookieName;
        }
        String cookieDomain = getParam("cookie-domain");
        if (cookieDomain != null) {
            sessionCookieDomain = cookieDomain;
        }
        String cookieSecure = getParam("cookie-secure");
        if (cookieSecure != null) {
            sessionCookieSecure = Boolean.valueOf(cookieSecure);
        }
        String cookieHttpOnly = getParam("cookie-http-only");
        if (cookieHttpOnly != null) {
            sessionCookieHttpOnly = Boolean.valueOf(cookieHttpOnly);
        }
    }

    // Collects instance-loader properties and creates/looks up the Hazelcast instance.
    private void initInstance() throws ServletException {
        if (properties == null) {
            properties = new Properties();
        }
        setProperty(HazelcastInstanceLoader.CONFIG_LOCATION);
        setProperty(HazelcastInstanceLoader.INSTANCE_NAME);
        setProperty(HazelcastInstanceLoader.USE_CLIENT);
        setProperty(HazelcastInstanceLoader.CLIENT_CONFIG_LOCATION);
        hazelcastInstance = getInstance(properties);
    }

    // Copies a filter init parameter into properties when present.
    private void setProperty(String propertyName) {
        String value = getParam(propertyName);
        if (value != null) {
            properties.setProperty(propertyName, value);
        }
    }

    // Drops a session from the local maps without touching the cluster
    // (used when another member already removed it cluster-wide).
    private void removeSessionLocally(String sessionId) {
        HazelcastHttpSession hazelSession = MAP_SESSIONS.remove(sessionId);
        if (hazelSession != null) {
            MAP_ORIGINAL_SESSIONS.remove(hazelSession.originalSession.getId());
            if (LOGGER.isLoggable(Level.FINEST)) {
                LOGGER.finest("Destroying session locally " + hazelSession);
            }
            hazelSession.destroy();
        }
    }

    // Strips the "<sessionId>::hz::" prefix from a qualified cluster-map key.
    private String extractAttributeKey(String key) {
        return key.substring(key.indexOf(HAZELCAST_SESSION_ATTRIBUTE_SEPARATOR) + HAZELCAST_SESSION_ATTRIBUTE_SEPARATOR.length());
    }

    // Creates (or re-attaches to) a Hazelcast session, registers it in the
    // local maps, sets the session cookie, and — in deferred-write mode —
    // pre-loads the attribute cache from the cluster.
    private HazelcastHttpSession createNewSession(RequestWrapper requestWrapper, String existingSessionId) {
        String id = existingSessionId != null ? existingSessionId : generateSessionId();
        if (requestWrapper.getOriginalSession(false) != null) {
            LOGGER.finest("Original session exists!!!");
        }
        HttpSession originalSession = requestWrapper.getOriginalSession(true);
        HazelcastHttpSession hazelcastSession = new HazelcastHttpSession(WebFilter.this, id, originalSession, deferredWrite);
        MAP_SESSIONS.put(hazelcastSession.getId(), hazelcastSession);
        String oldHazelcastSessionId = MAP_ORIGINAL_SESSIONS.put(originalSession.getId(), hazelcastSession.getId());
        if (oldHazelcastSessionId != null) {
            if (LOGGER.isFinestEnabled()) {
                LOGGER.finest("!!! Overriding an existing hazelcastSessionId " + oldHazelcastSessionId);
            }
        }
        if (LOGGER.isFinestEnabled()) {
            LOGGER.finest("Created new session with id: " + id);
            LOGGER.finest(MAP_SESSIONS.size() + " is sessions.size and originalSessions.size: " + MAP_ORIGINAL_SESSIONS.size());
        }
        addSessionCookie(requestWrapper, id);
        if (deferredWrite) {
            loadHazelcastSession(hazelcastSession);
        }
        return hazelcastSession;
    }

    // Pulls this session's attributes from the cluster into the local cache,
    // marking them clean (dirty = false).
    private void loadHazelcastSession(HazelcastHttpSession hazelcastSession) {
        Set<Entry<String, Object>> entrySet = getClusterMap().entrySet(new SessionAttributePredicate(hazelcastSession.getId()));
        Map<String, LocalCacheEntry> cache = hazelcastSession.localCache;
        for (Entry<String, Object> entry : entrySet) {
            String attributeKey = extractAttributeKey(entry.getKey());
            LocalCacheEntry cacheEntry = cache.get(attributeKey);
            if (cacheEntry == null) {
                cacheEntry = new LocalCacheEntry();
                cache.put(attributeKey, cacheEntry);
            }
            if (LOGGER.isFinestEnabled()) {
                LOGGER.finest("Storing " + attributeKey + " on session " + hazelcastSession.getId());
            }
            cacheEntry.value = entry.getValue();
            cacheEntry.dirty = false;
        }
    }

    // Flags all cached attributes for reload so the next read re-fetches
    // them from the cluster (deferred-write mode only).
    private void prepareReloadingSession(HazelcastHttpSession hazelcastSession) {
        if (deferredWrite && hazelcastSession != null) {
            Map<String, LocalCacheEntry> cache = hazelcastSession.localCache;
            for (LocalCacheEntry cacheEntry : cache.values()) {
                cacheEntry.reload = true;
            }
        }
    }

    /**
     * Destroys a session, determining if it should be destroyed clusterwide automatically or via expiry.
     *
     * @param session The session to be destroyed
     * @param removeGlobalSession boolean value - true if the session should be destroyed irrespective of active time
     */
    private void destroySession(HazelcastHttpSession session, boolean removeGlobalSession) {
        if (LOGGER.isFinestEnabled()) {
            LOGGER.finest("Destroying local session: " + session.getId());
        }
        MAP_SESSIONS.remove(session.getId());
        MAP_ORIGINAL_SESSIONS.remove(session.originalSession.getId());
        session.destroy();
        if (removeGlobalSession) {
            if (LOGGER.isFinestEnabled()) {
                LOGGER.finest("Destroying cluster session: " + session.getId() + " => Ignore-timeout: true");
            }
            IMap<String, Object> clusterMap = getClusterMap();
            clusterMap.delete(session.getId());
            // Also invalidate all of the session's attribute entries cluster-wide.
            clusterMap.executeOnEntries(new InvalidateEntryProcessor(session.getId()));
        }
    }

    private IMap<String, Object> getClusterMap() {
        return hazelcastInstance.getMap(clusterMapName);
    }

    // Returns the locally-known session for the id, destroying it (cluster-wide)
    // first if it has become invalid.
    private HazelcastHttpSession getSessionWithId(final String sessionId) {
        HazelcastHttpSession session = MAP_SESSIONS.get(sessionId);
        if (session != null && !session.isValid()) {
            destroySession(session, true);
            session = null;
        }
        return session;
    }

    // Attaches the Hazelcast session-id cookie to the response, honoring the
    // configured domain/secure/http-only settings.
    private void addSessionCookie(final RequestWrapper req, final String sessionId) {
        final Cookie sessionCookie = new Cookie(sessionCookieName, sessionId);
        String path = req.getContextPath();
        if ("".equals(path)) {
            path = "/";
        }
        sessionCookie.setPath(path);
        sessionCookie.setMaxAge(-1);
        if (sessionCookieDomain != null) {
            sessionCookie.setDomain(sessionCookieDomain);
        }
        try {
            sessionCookie.setHttpOnly(sessionCookieHttpOnly);
        } catch (NoSuchMethodError e) {
            // setHttpOnly only exists from Servlet 3.0 on; ignore on older containers.
            LOGGER.info("must be servlet spec before 3.0, don't worry about it!");
        }
        sessionCookie.setSecure(sessionCookieSecure);
        req.res.addCookie(sessionCookie);
    }

    // Returns the value of the Hazelcast session cookie, or null if absent.
    private String getSessionCookie(final RequestWrapper req) {
        final Cookie[] cookies = req.getCookies();
        if (cookies != null) {
            for (final Cookie cookie : cookies) {
                final String name = cookie.getName();
                final String value = cookie.getValue();
                if (name.equalsIgnoreCase(sessionCookieName)) {
                    return value;
                }
            }
        }
        return null;
    }

    /**
     * Wraps HTTP requests/responses, runs the chain, and afterwards flushes
     * the session's deferred writes (or unconditionally re-puts it when
     * deferred-write is off). Non-HTTP and already-wrapped requests pass
     * straight through.
     */
    public final void doFilter(ServletRequest req, ServletResponse res, final FilterChain chain)
            throws IOException, ServletException {
        if (!(req instanceof HttpServletRequest)) {
            chain.doFilter(req, res);
        } else {
            if (req instanceof RequestWrapper) {
                LOGGER.finest("Request is instance of RequestWrapper! Continue...");
                chain.doFilter(req, res);
                return;
            }
            HttpServletRequest httpReq = (HttpServletRequest) req;
            RequestWrapper existingReq = (RequestWrapper) req.getAttribute(HAZELCAST_REQUEST);
            final ResponseWrapper resWrapper = new ResponseWrapper((HttpServletResponse) res);
            final RequestWrapper reqWrapper = new RequestWrapper(httpReq, resWrapper);
            if (existingReq != null) {
                reqWrapper.setHazelcastSession(existingReq.hazelcastSession, existingReq.requestedSessionId);
            }
            chain.doFilter(reqWrapper, resWrapper);
            if (existingReq != null) {
                // A dispatched/forwarded request: the outermost wrapper will do the write-back.
                return;
            }
            HazelcastHttpSession session = reqWrapper.getSession(false);
            if (session != null && session.isValid() && (session.sessionChanged() || !deferredWrite)) {
                if (LOGGER.isFinestEnabled()) {
                    LOGGER.finest("PUTTING SESSION " + session.getId());
                }
                session.sessionDeferredWrite();
            }
        }
    }

    public final void destroy() {
        MAP_SESSIONS.clear();
        MAP_ORIGINAL_SESSIONS.clear();
        shutdownInstance();
    }

    protected HazelcastInstance getInstance(Properties properties) throws ServletException {
        return HazelcastInstanceLoader.createInstance(filterConfig, properties);
    }

    protected void shutdownInstance() {
        if (shutdownOnDestroy && hazelcastInstance != null) {
            hazelcastInstance.getLifecycleService().shutdown();
        }
    }

    // Properties take precedence over filter init parameters.
    private String getParam(String name) {
        if (properties != null && properties.containsKey(name)) {
            return properties.getProperty(name);
        } else {
            return filterConfig.getInitParameter(name);
        }
    }

    private static class ResponseWrapper extends HttpServletResponseWrapper {
        public ResponseWrapper(final HttpServletResponse original) {
            super(original);
        }
    }

    // One cached attribute value. dirty/reload are volatile because they are
    // the flags other code reads to decide whether to write back or refetch.
    private static class LocalCacheEntry {
        volatile boolean dirty;
        volatile boolean reload;
        boolean removed;
        private Object value;
    }

    // Request wrapper that substitutes HazelcastHttpSession for the
    // container session.
    private class RequestWrapper extends HttpServletRequestWrapper {
        final ResponseWrapper res;
        HazelcastHttpSession hazelcastSession;
        String requestedSessionId;

        public RequestWrapper(final HttpServletRequest req,
                              final ResponseWrapper res) {
            super(req);
            this.res = res;
            // Mark the raw request so nested dispatches reuse this wrapper's session.
            req.setAttribute(HAZELCAST_REQUEST, this);
        }

        public void setHazelcastSession(HazelcastHttpSession hazelcastSession, String requestedSessionId) {
            this.hazelcastSession = hazelcastSession;
            this.requestedSessionId = requestedSessionId;
        }

        // Access to the container's own session (bypassing the Hazelcast wrapper).
        HttpSession getOriginalSession(boolean create) {
            return super.getSession(create);
        }

        @Override
        public RequestDispatcher getRequestDispatcher(final String path) {
            // Dispatch against the original (unwrapped) request.
            final ServletRequest original = getRequest();
            return new RequestDispatcher() {
                public void forward(ServletRequest servletRequest, ServletResponse servletResponse)
                        throws ServletException, IOException {
                    original.getRequestDispatcher(path).forward(servletRequest, servletResponse);
                }
                public void include(ServletRequest servletRequest, ServletResponse servletResponse)
                        throws ServletException, IOException {
                    original.getRequestDispatcher(path).include(servletRequest, servletResponse);
                }
            };
        }

        // Resolves the requested session id (cookie, then request parameter)
        // and looks it up locally; if unknown locally but present in the
        // cluster, re-creates the local wrapper for it.
        public HazelcastHttpSession fetchHazelcastSession() {
            if (requestedSessionId == null) {
                requestedSessionId = getSessionCookie(this);
            }
            if (requestedSessionId == null) {
                requestedSessionId = getParameter(HAZELCAST_SESSION_COOKIE_NAME);
            }
            if (requestedSessionId != null) {
                hazelcastSession = getSessionWithId(requestedSessionId);
                if (hazelcastSession == null) {
                    final Boolean existing = (Boolean) getClusterMap().get(requestedSessionId);
                    if (existing != null && existing) {
                        // we already have the session in the cluster loading it...
                        hazelcastSession = createNewSession(RequestWrapper.this, requestedSessionId);
                    }
                }
            }
            return hazelcastSession;
        }

        @Override
        public HttpSession getSession() {
            return getSession(true);
        }

        @Override
        public HazelcastHttpSession getSession(final boolean create) {
            // Drop an invalidated cached session; reuse a still-valid one.
            if (hazelcastSession != null && !hazelcastSession.isValid()) {
                LOGGER.finest("Session is invalid!");
                destroySession(hazelcastSession, true);
                hazelcastSession = null;
            } else if (hazelcastSession != null) {
                return hazelcastSession;
            }
            HttpSession originalSession = getOriginalSession(false);
            if (originalSession != null) {
                String hazelcastSessionId = MAP_ORIGINAL_SESSIONS.get(originalSession.getId());
                if (hazelcastSessionId != null) {
                    hazelcastSession = MAP_SESSIONS.get(hazelcastSessionId);
                    return hazelcastSession;
                }
                // Container session with no Hazelcast pairing: discard it.
                MAP_ORIGINAL_SESSIONS.remove(originalSession.getId());
                originalSession.invalidate();
            }
            hazelcastSession = fetchHazelcastSession();
            if (hazelcastSession == null && create) {
                hazelcastSession = createNewSession(RequestWrapper.this, null);
            }
            if (deferredWrite) {
                prepareReloadingSession(hazelcastSession);
            }
            return hazelcastSession;
        }
    } // END of RequestWrapper

    // HttpSession facade whose attributes live in the cluster map (optionally
    // via the deferred-write local cache); lifecycle data delegates to the
    // container session.
    private class HazelcastHttpSession implements HttpSession {
        volatile boolean valid = true;
        final String id;
        final HttpSession originalSession;
        final WebFilter webFilter;
        // Only populated in deferred-write mode; null otherwise.
        private final Map<String, LocalCacheEntry> localCache;
        private final boolean deferredWrite;

        public HazelcastHttpSession(WebFilter webFilter, final String sessionId,
                                    HttpSession originalSession, boolean deferredWrite) {
            this.webFilter = webFilter;
            this.id = sessionId;
            this.originalSession = originalSession;
            this.deferredWrite = deferredWrite;
            this.localCache = deferredWrite ? new ConcurrentHashMap<String, LocalCacheEntry>() : null;
        }

        public Object getAttribute(final String name) {
            IMap<String, Object> clusterMap = getClusterMap();
            if (deferredWrite) {
                LocalCacheEntry cacheEntry = localCache.get(name);
                if (cacheEntry == null || cacheEntry.reload) {
                    Object value = clusterMap.get(buildAttributeName(name));
                    if (value == null) {
                        // Cache the miss so we do not re-query the cluster.
                        cacheEntry = NULL_ENTRY;
                    } else {
                        cacheEntry = new LocalCacheEntry();
                        cacheEntry.value = value;
                        cacheEntry.reload = false;
                    }
                    localCache.put(name, cacheEntry);
                }
                return cacheEntry != NULL_ENTRY ? cacheEntry.value : null;
            }
            return clusterMap.get(buildAttributeName(name));
        }

        public Enumeration<String> getAttributeNames() {
            final Set<String> keys = selectKeys();
            // Snapshot enumeration over the current key set.
            return new Enumeration<String>() {
                private final String[] elements = keys.toArray(new String[keys.size()]);
                private int index;
                @Override
                public boolean hasMoreElements() {
                    return index < elements.length;
                }
                @Override
                public String nextElement() {
                    return elements[index++];
                }
            };
        }

        public String getId() {
            return id;
        }

        public ServletContext getServletContext() {
            return servletContext;
        }

        public HttpSessionContext getSessionContext() {
            return originalSession.getSessionContext();
        }

        public Object getValue(final String name) {
            return getAttribute(name);
        }

        public String[] getValueNames() {
            final Set<String> keys = selectKeys();
            return keys.toArray(new String[keys.size()]);
        }

        public void invalidate() {
            originalSession.invalidate();
            destroySession(this, true);
        }

        public boolean isNew() {
            return originalSession.isNew();
        }

        public void putValue(final String name, final Object value) {
            setAttribute(name, value);
        }

        public void removeAttribute(final String name) {
            if (deferredWrite) {
                LocalCacheEntry entry = localCache.get(name);
                if (entry != null && entry != NULL_ENTRY) {
                    entry.value = null;
                    entry.removed = true;
                    // dirty needs to be set as last value for memory visibility reasons!
                    entry.dirty = true;
                }
            } else {
                getClusterMap().delete(buildAttributeName(name));
            }
        }

        public void setAttribute(final String name, final Object value) {
            if (name == null) {
                throw new NullPointerException("name must not be null");
            }
            if (value == null) {
                throw new IllegalArgumentException("value must not be null");
            }
            if (deferredWrite) {
                LocalCacheEntry entry = localCache.get(name);
                if (entry == null || entry == NULL_ENTRY) {
                    entry = new LocalCacheEntry();
                    localCache.put(name, entry);
                }
                entry.value = value;
                entry.dirty = true;
            } else {
                getClusterMap().put(buildAttributeName(name), value);
            }
        }

        public void removeValue(final String name) {
            removeAttribute(name);
        }

        // True when at least one cached attribute has pending changes.
        public boolean sessionChanged() {
            if (!deferredWrite) {
                return false;
            }
            for (Entry<String, LocalCacheEntry> entry : localCache.entrySet()) {
                if (entry.getValue().dirty) {
                    return true;
                }
            }
            return false;
        }

        public long getCreationTime() {
            return originalSession.getCreationTime();
        }

        public long getLastAccessedTime() {
            return originalSession.getLastAccessedTime();
        }

        public int getMaxInactiveInterval() {
            return originalSession.getMaxInactiveInterval();
        }

        public void setMaxInactiveInterval(int maxInactiveSeconds) {
            originalSession.setMaxInactiveInterval(maxInactiveSeconds);
        }

        void destroy() {
            valid = false;
        }

        public boolean isValid() {
            return valid;
        }

        // Qualified cluster-map key: "<sessionId>::hz::<attributeName>".
        private String buildAttributeName(String name) {
            return id + HAZELCAST_SESSION_ATTRIBUTE_SEPARATOR + name;
        }

        // Flushes dirty cache entries to the cluster (deletes removed ones),
        // and ensures the session-id marker entry exists in the map.
        private void sessionDeferredWrite() {
            IMap<String, Object> clusterMap = getClusterMap();
            if (deferredWrite) {
                Iterator<Entry<String, LocalCacheEntry>> iterator = localCache.entrySet().iterator();
                while (iterator.hasNext()) {
                    Entry<String, LocalCacheEntry> entry = iterator.next();
                    if (entry.getValue().dirty) {
                        LocalCacheEntry cacheEntry = entry.getValue();
                        if (cacheEntry.removed) {
                            clusterMap.delete(buildAttributeName(entry.getKey()));
                            iterator.remove();
                        } else {
                            clusterMap.put(buildAttributeName(entry.getKey()), cacheEntry.value);
                            cacheEntry.dirty = false;
                        }
                    }
                }
            }
            if (!clusterMap.containsKey(id)) {
                clusterMap.put(id, Boolean.TRUE);
            }
        }

        // Attribute names: from the cluster map normally, from the local cache
        // (excluding removed entries) in deferred-write mode.
        private Set<String> selectKeys() {
            Set<String> keys = new HashSet<String>();
            if (!deferredWrite) {
                for (String qualifiedAttributeKey : getClusterMap().keySet(new SessionAttributePredicate(id))) {
                    keys.add(extractAttributeKey(qualifiedAttributeKey));
                }
            } else {
                for (Entry<String, LocalCacheEntry> entry : localCache.entrySet()) {
                    if (!entry.getValue().removed) {
                        keys.add(entry.getKey());
                    }
                }
            }
            return keys;
        }
    } // END of HazelSession
} // END of WebFilter
| 1no label
|
hazelcast-wm_src_main_java_com_hazelcast_web_WebFilter.java
|
23 |
/** Supplies a value of type {@code T}; no-arg factory used by the async completion methods. */
public interface Generator<T> { T get(); }
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
36 |
/**
 * Mutable cursor pairing a tree entry (page) with an index inside it.
 * Used to iterate multi-key tree entries position by position.
 */
public class OMVRBTreeEntryPosition<K, V> {
    public OMVRBTreeEntry<K, V> entry;
    public int position;

    /** Copy constructor: duplicates another cursor's entry and position. */
    public OMVRBTreeEntryPosition(final OMVRBTreeEntryPosition<K, V> entryPosition) {
        this(entryPosition.entry, entryPosition.position);
    }

    /** Points at the entry, deriving the position from the tree's current page index. */
    public OMVRBTreeEntryPosition(final OMVRBTreeEntry<K, V> entry) {
        assign(entry);
    }

    public OMVRBTreeEntryPosition(final OMVRBTreeEntry<K, V> entry, final int iPosition) {
        assign(entry, iPosition);
    }

    public void assign(final OMVRBTreeEntry<K, V> entry, final int iPosition) {
        this.entry = entry;
        this.position = iPosition;
    }

    public void assign(final OMVRBTreeEntry<K, V> entry) {
        this.entry = entry;
        // -1 marks "no position" when there is no entry.
        if (entry != null) {
            this.position = entry.getTree().getPageIndex();
        } else {
            this.position = -1;
        }
    }

    /** @return the key at the current position, or null when unset */
    public K getKey() {
        if (entry == null) {
            return null;
        }
        return entry.getKey(position);
    }

    /** @return the value at the current position, or null when unset */
    public V getValue() {
        if (entry == null) {
            return null;
        }
        return entry.getValue(position);
    }
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_OMVRBTreeEntryPosition.java
|
42 |
/**
 * Implemented by entities that carry a unique {@code long} identifier.
 */
public interface Idfiable {

    /**
     * Unique identifier for this entity.
     *
     * @return Unique long id for this entity
     */
    long getLongId();

}
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_Idfiable.java
|
416 |
/**
 * Client-side IdGenerator proxy. Ids are claimed from a cluster-wide
 * {@code IAtomicLong} in blocks of {@code BLOCK_SIZE}, then handed out
 * locally without further network round-trips until the block is exhausted.
 */
public class ClientIdGeneratorProxy extends ClientProxy implements IdGenerator {

    // Number of ids served locally per claim of the cluster counter.
    private static final int BLOCK_SIZE = 10000;

    final String name;
    final IAtomicLong atomicLong;
    // Offset consumed within the current block; starts exhausted to force a first claim.
    AtomicInteger residue;
    // Index of the currently owned block (a past value of the cluster counter); -1 = none yet.
    AtomicLong local;

    public ClientIdGeneratorProxy(String instanceName, String serviceName, String objectId, IAtomicLong atomicLong) {
        super(instanceName, serviceName, objectId);
        this.atomicLong = atomicLong;
        this.name = objectId;
        residue = new AtomicInteger(BLOCK_SIZE);
        local = new AtomicLong(-1);
    }

    /**
     * Initializes the generator so future ids are greater than {@code id}.
     * Only succeeds while the cluster counter is still at its initial value 0.
     *
     * @return true if initialization took effect, false otherwise
     */
    public boolean init(long id) {
        if (id <= 0) {
            return false;
        }
        long step = (id / BLOCK_SIZE);
        synchronized (this) {
            boolean init = atomicLong.compareAndSet(0, step + 1);
            if (init) {
                local.set(step);
                // +1 so the next id handed out is strictly greater than `id`.
                residue.set((int) (id % BLOCK_SIZE) + 1);
            }
            return init;
        }
    }

    /**
     * Returns the next id: block index * BLOCK_SIZE + offset. When the block
     * is exhausted, claims a fresh block from the cluster counter and retries.
     */
    public long newId() {
        int value = residue.getAndIncrement();
        if (value >= BLOCK_SIZE) {
            synchronized (this) {
                // Double-check under the lock: another thread may have already
                // claimed a new block while we were waiting.
                value = residue.get();
                if (value >= BLOCK_SIZE) {
                    local.set(atomicLong.getAndIncrement());
                    residue.set(0);
                }
                return newId();
            }
        }
        return local.get() * BLOCK_SIZE + value;
    }

    protected void onDestroy() {
        atomicLong.destroy();
        residue = null;
        local = null;
    }

    @Override
    public String toString() {
        return "IdGenerator{" + "name='" + getName() + '\'' + '}';
    }
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientIdGeneratorProxy.java
|
434 |
public class ClientSetProxy<E> extends AbstractClientCollectionProxy<E> implements ISet<E> {
public ClientSetProxy(String instanceName, String serviceName, String name) {
super(instanceName, serviceName, name);
}
@Override
public String toString() {
return "ISet{" + "name='" + getName() + '\'' + '}';
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientSetProxy.java
|
502 |
public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequest<CreateIndexClusterStateUpdateRequest> {
final String cause;
final String index;
private IndexMetaData.State state = IndexMetaData.State.OPEN;
private Settings settings = ImmutableSettings.Builder.EMPTY_SETTINGS;
private Map<String, String> mappings = Maps.newHashMap();
private Map<String, IndexMetaData.Custom> customs = newHashMap();
private Set<ClusterBlock> blocks = Sets.newHashSet();
CreateIndexClusterStateUpdateRequest(String cause, String index) {
this.cause = cause;
this.index = index;
}
public CreateIndexClusterStateUpdateRequest settings(Settings settings) {
this.settings = settings;
return this;
}
public CreateIndexClusterStateUpdateRequest mappings(Map<String, String> mappings) {
this.mappings.putAll(mappings);
return this;
}
public CreateIndexClusterStateUpdateRequest customs(Map<String, IndexMetaData.Custom> customs) {
this.customs.putAll(customs);
return this;
}
public CreateIndexClusterStateUpdateRequest blocks(Set<ClusterBlock> blocks) {
this.blocks.addAll(blocks);
return this;
}
public CreateIndexClusterStateUpdateRequest state(IndexMetaData.State state) {
this.state = state;
return this;
}
public String cause() {
return cause;
}
public String index() {
return index;
}
public IndexMetaData.State state() {
return state;
}
public Settings settings() {
return settings;
}
public Map<String, String> mappings() {
return mappings;
}
public Map<String, IndexMetaData.Custom> customs() {
return customs;
}
public Set<ClusterBlock> blocks() {
return blocks;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_create_CreateIndexClusterStateUpdateRequest.java
|
5,843 |
public class InternalSearchHit implements SearchHit {
private static final Object[] EMPTY_SORT_VALUES = new Object[0];
private static final Text MAX_TERM_AS_TEXT = new StringAndBytesText(BytesRefFieldComparatorSource.MAX_TERM.utf8ToString());
private transient int docId;
private float score = Float.NEGATIVE_INFINITY;
private Text id;
private Text type;
private long version = -1;
private BytesReference source;
private Map<String, SearchHitField> fields = ImmutableMap.of();
private Map<String, HighlightField> highlightFields = null;
private Object[] sortValues = EMPTY_SORT_VALUES;
private String[] matchedQueries = Strings.EMPTY_ARRAY;
private Explanation explanation;
@Nullable
private SearchShardTarget shard;
private Map<String, Object> sourceAsMap;
private byte[] sourceAsBytes;
private InternalSearchHit() {
}
public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
this.docId = docId;
this.id = new StringAndBytesText(id);
this.type = type;
this.fields = fields;
}
public int docId() {
return this.docId;
}
public void shardTarget(SearchShardTarget shardTarget) {
this.shard = shardTarget;
}
public void score(float score) {
this.score = score;
}
@Override
public float score() {
return this.score;
}
@Override
public float getScore() {
return score();
}
public void version(long version) {
this.version = version;
}
@Override
public long version() {
return this.version;
}
@Override
public long getVersion() {
return this.version;
}
@Override
public String index() {
return shard.index();
}
@Override
public String getIndex() {
return index();
}
@Override
public String id() {
return id.string();
}
@Override
public String getId() {
return id();
}
@Override
public String type() {
return type.string();
}
@Override
public String getType() {
return type();
}
/**
* Returns bytes reference, also un compress the source if needed.
*/
public BytesReference sourceRef() {
try {
this.source = CompressorFactory.uncompressIfNeeded(this.source);
return this.source;
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
/**
* Sets representation, might be compressed....
*/
public InternalSearchHit sourceRef(BytesReference source) {
this.source = source;
this.sourceAsBytes = null;
this.sourceAsMap = null;
return this;
}
@Override
public BytesReference getSourceRef() {
return sourceRef();
}
/**
* Internal source representation, might be compressed....
*/
public BytesReference internalSourceRef() {
return source;
}
@Override
public byte[] source() {
if (source == null) {
return null;
}
if (sourceAsBytes != null) {
return sourceAsBytes;
}
this.sourceAsBytes = sourceRef().toBytes();
return this.sourceAsBytes;
}
@Override
public boolean isSourceEmpty() {
return source == null;
}
@Override
public Map<String, Object> getSource() {
return sourceAsMap();
}
@Override
public String sourceAsString() {
if (source == null) {
return null;
}
try {
return XContentHelper.convertToJson(sourceRef(), false);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to convert source to a json string");
}
}
@Override
public String getSourceAsString() {
return sourceAsString();
}
@SuppressWarnings({"unchecked"})
@Override
public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
if (source == null) {
return null;
}
if (sourceAsMap != null) {
return sourceAsMap;
}
sourceAsMap = SourceLookup.sourceAsMap(source);
return sourceAsMap;
}
@Override
public Iterator<SearchHitField> iterator() {
return fields.values().iterator();
}
@Override
public SearchHitField field(String fieldName) {
return fields().get(fieldName);
}
@Override
public Map<String, SearchHitField> fields() {
if (fields == null) {
return ImmutableMap.of();
}
return fields;
}
// returns the fields without handling null cases
public Map<String, SearchHitField> fieldsOrNull() {
return this.fields;
}
@Override
public Map<String, SearchHitField> getFields() {
return fields();
}
public void fields(Map<String, SearchHitField> fields) {
this.fields = fields;
}
public Map<String, HighlightField> internalHighlightFields() {
return highlightFields;
}
@Override
public Map<String, HighlightField> highlightFields() {
if (highlightFields == null) {
return ImmutableMap.of();
}
return this.highlightFields;
}
@Override
public Map<String, HighlightField> getHighlightFields() {
return highlightFields();
}
public void highlightFields(Map<String, HighlightField> highlightFields) {
this.highlightFields = highlightFields;
}
public void sortValues(Object[] sortValues) {
// LUCENE 4 UPGRADE: There must be a better way
// we want to convert to a Text object here, and not BytesRef
if (sortValues != null) {
for (int i = 0; i < sortValues.length; i++) {
if (sortValues[i] instanceof BytesRef) {
sortValues[i] = new StringAndBytesText(new BytesArray((BytesRef) sortValues[i]));
}
}
}
this.sortValues = sortValues;
}
@Override
public Object[] sortValues() {
return sortValues;
}
@Override
public Object[] getSortValues() {
return sortValues();
}
@Override
public Explanation explanation() {
return explanation;
}
@Override
public Explanation getExplanation() {
return explanation();
}
public void explanation(Explanation explanation) {
this.explanation = explanation;
}
@Override
public SearchShardTarget shard() {
return shard;
}
@Override
public SearchShardTarget getShard() {
return shard();
}
public void shard(SearchShardTarget target) {
this.shard = target;
}
public void matchedQueries(String[] matchedQueries) {
this.matchedQueries = matchedQueries;
}
@Override
public String[] matchedQueries() {
return this.matchedQueries;
}
@Override
public String[] getMatchedQueries() {
return this.matchedQueries;
}
public static class Fields {
static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _ID = new XContentBuilderString("_id");
static final XContentBuilderString _VERSION = new XContentBuilderString("_version");
static final XContentBuilderString _SCORE = new XContentBuilderString("_score");
static final XContentBuilderString FIELDS = new XContentBuilderString("fields");
static final XContentBuilderString HIGHLIGHT = new XContentBuilderString("highlight");
static final XContentBuilderString SORT = new XContentBuilderString("sort");
static final XContentBuilderString MATCHED_QUERIES = new XContentBuilderString("matched_queries");
static final XContentBuilderString _EXPLANATION = new XContentBuilderString("_explanation");
static final XContentBuilderString VALUE = new XContentBuilderString("value");
static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
static final XContentBuilderString DETAILS = new XContentBuilderString("details");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (explanation() != null) {
builder.field("_shard", shard.shardId());
builder.field("_node", shard.nodeIdText());
}
builder.field(Fields._INDEX, shard.indexText());
builder.field(Fields._TYPE, type);
builder.field(Fields._ID, id);
if (version != -1) {
builder.field(Fields._VERSION, version);
}
if (Float.isNaN(score)) {
builder.nullField(Fields._SCORE);
} else {
builder.field(Fields._SCORE, score);
}
if (source != null) {
RestXContentBuilder.restDocumentSource(source, builder, params);
}
if (fields != null && !fields.isEmpty()) {
builder.startObject(Fields.FIELDS);
for (SearchHitField field : fields.values()) {
if (field.values().isEmpty()) {
continue;
}
String fieldName = field.getName();
if (field.isMetadataField()) {
builder.field(fieldName, field.value());
} else {
builder.startArray(fieldName);
for (Object value : field.getValues()) {
builder.value(value);
}
builder.endArray();
}
}
builder.endObject();
}
if (highlightFields != null && !highlightFields.isEmpty()) {
builder.startObject(Fields.HIGHLIGHT);
for (HighlightField field : highlightFields.values()) {
builder.field(field.name());
if (field.fragments() == null) {
builder.nullValue();
} else {
builder.startArray();
for (Text fragment : field.fragments()) {
builder.value(fragment);
}
builder.endArray();
}
}
builder.endObject();
}
if (sortValues != null && sortValues.length > 0) {
builder.startArray(Fields.SORT);
for (Object sortValue : sortValues) {
if (sortValue != null && sortValue.equals(MAX_TERM_AS_TEXT)) {
// We don't display MAX_TERM in JSON responses in case some clients have UTF-8 parsers that wouldn't accept a
// non-character in the response, even though this is valid UTF-8
builder.nullValue();
} else {
builder.value(sortValue);
}
}
builder.endArray();
}
if (matchedQueries.length > 0) {
builder.startArray(Fields.MATCHED_QUERIES);
for (String matchedFilter : matchedQueries) {
builder.value(matchedFilter);
}
builder.endArray();
}
if (explanation() != null) {
builder.field(Fields._EXPLANATION);
buildExplanation(builder, explanation());
}
builder.endObject();
return builder;
}
private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
builder.startObject();
builder.field(Fields.VALUE, explanation.getValue());
builder.field(Fields.DESCRIPTION, explanation.getDescription());
Explanation[] innerExps = explanation.getDetails();
if (innerExps != null) {
builder.startArray(Fields.DETAILS);
for (Explanation exp : innerExps) {
buildExplanation(builder, exp);
}
builder.endArray();
}
builder.endObject();
}
public static InternalSearchHit readSearchHit(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
InternalSearchHit hit = new InternalSearchHit();
hit.readFrom(in, context);
return hit;
}
@Override
public void readFrom(StreamInput in) throws IOException {
readFrom(in, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM));
}
public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
score = in.readFloat();
id = in.readText();
type = in.readSharedText();
version = in.readLong();
source = in.readBytesReference();
if (source.length() == 0) {
source = null;
}
if (in.readBoolean()) {
explanation = readExplanation(in);
}
int size = in.readVInt();
if (size == 0) {
fields = ImmutableMap.of();
} else if (size == 1) {
SearchHitField hitField = readSearchHitField(in);
fields = ImmutableMap.of(hitField.name(), hitField);
} else if (size == 2) {
SearchHitField hitField1 = readSearchHitField(in);
SearchHitField hitField2 = readSearchHitField(in);
fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2);
} else if (size == 3) {
SearchHitField hitField1 = readSearchHitField(in);
SearchHitField hitField2 = readSearchHitField(in);
SearchHitField hitField3 = readSearchHitField(in);
fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3);
} else if (size == 4) {
SearchHitField hitField1 = readSearchHitField(in);
SearchHitField hitField2 = readSearchHitField(in);
SearchHitField hitField3 = readSearchHitField(in);
SearchHitField hitField4 = readSearchHitField(in);
fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3, hitField4.name(), hitField4);
} else if (size == 5) {
SearchHitField hitField1 = readSearchHitField(in);
SearchHitField hitField2 = readSearchHitField(in);
SearchHitField hitField3 = readSearchHitField(in);
SearchHitField hitField4 = readSearchHitField(in);
SearchHitField hitField5 = readSearchHitField(in);
fields = ImmutableMap.of(hitField1.name(), hitField1, hitField2.name(), hitField2, hitField3.name(), hitField3, hitField4.name(), hitField4, hitField5.name(), hitField5);
} else {
ImmutableMap.Builder<String, SearchHitField> builder = ImmutableMap.builder();
for (int i = 0; i < size; i++) {
SearchHitField hitField = readSearchHitField(in);
builder.put(hitField.name(), hitField);
}
fields = builder.build();
}
size = in.readVInt();
if (size == 0) {
highlightFields = ImmutableMap.of();
} else if (size == 1) {
HighlightField field = readHighlightField(in);
highlightFields = ImmutableMap.of(field.name(), field);
} else if (size == 2) {
HighlightField field1 = readHighlightField(in);
HighlightField field2 = readHighlightField(in);
highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2);
} else if (size == 3) {
HighlightField field1 = readHighlightField(in);
HighlightField field2 = readHighlightField(in);
HighlightField field3 = readHighlightField(in);
highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3);
} else if (size == 4) {
HighlightField field1 = readHighlightField(in);
HighlightField field2 = readHighlightField(in);
HighlightField field3 = readHighlightField(in);
HighlightField field4 = readHighlightField(in);
highlightFields = ImmutableMap.of(field1.name(), field1, field2.name(), field2, field3.name(), field3, field4.name(), field4);
} else {
ImmutableMap.Builder<String, HighlightField> builder = ImmutableMap.builder();
for (int i = 0; i < size; i++) {
HighlightField field = readHighlightField(in);
builder.put(field.name(), field);
}
highlightFields = builder.build();
}
size = in.readVInt();
if (size > 0) {
sortValues = new Object[size];
for (int i = 0; i < sortValues.length; i++) {
byte type = in.readByte();
if (type == 0) {
sortValues[i] = null;
} else if (type == 1) {
sortValues[i] = in.readString();
} else if (type == 2) {
sortValues[i] = in.readInt();
} else if (type == 3) {
sortValues[i] = in.readLong();
} else if (type == 4) {
sortValues[i] = in.readFloat();
} else if (type == 5) {
sortValues[i] = in.readDouble();
} else if (type == 6) {
sortValues[i] = in.readByte();
} else if (type == 7) {
sortValues[i] = in.readShort();
} else if (type == 8) {
sortValues[i] = in.readBoolean();
} else if (type == 9) {
sortValues[i] = in.readText();
} else {
throw new IOException("Can't match type [" + type + "]");
}
}
}
size = in.readVInt();
if (size > 0) {
matchedQueries = new String[size];
for (int i = 0; i < size; i++) {
matchedQueries[i] = in.readString();
}
}
if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) {
if (in.readBoolean()) {
shard = readSearchShardTarget(in);
}
} else if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) {
int lookupId = in.readVInt();
if (lookupId > 0) {
shard = context.handleShardLookup().get(lookupId);
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
writeTo(out, InternalSearchHits.streamContext().streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM));
}
public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException {
out.writeFloat(score);
out.writeText(id);
out.writeSharedText(type);
out.writeLong(version);
out.writeBytesReference(source);
if (explanation == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
writeExplanation(out, explanation);
}
if (fields == null) {
out.writeVInt(0);
} else {
out.writeVInt(fields.size());
for (SearchHitField hitField : fields().values()) {
hitField.writeTo(out);
}
}
if (highlightFields == null) {
out.writeVInt(0);
} else {
out.writeVInt(highlightFields.size());
for (HighlightField highlightField : highlightFields.values()) {
highlightField.writeTo(out);
}
}
if (sortValues.length == 0) {
out.writeVInt(0);
} else {
out.writeVInt(sortValues.length);
for (Object sortValue : sortValues) {
if (sortValue == null) {
out.writeByte((byte) 0);
} else {
Class type = sortValue.getClass();
if (type == String.class) {
out.writeByte((byte) 1);
out.writeString((String) sortValue);
} else if (type == Integer.class) {
out.writeByte((byte) 2);
out.writeInt((Integer) sortValue);
} else if (type == Long.class) {
out.writeByte((byte) 3);
out.writeLong((Long) sortValue);
} else if (type == Float.class) {
out.writeByte((byte) 4);
out.writeFloat((Float) sortValue);
} else if (type == Double.class) {
out.writeByte((byte) 5);
out.writeDouble((Double) sortValue);
} else if (type == Byte.class) {
out.writeByte((byte) 6);
out.writeByte((Byte) sortValue);
} else if (type == Short.class) {
out.writeByte((byte) 7);
out.writeShort((Short) sortValue);
} else if (type == Boolean.class) {
out.writeByte((byte) 8);
out.writeBoolean((Boolean) sortValue);
} else if (sortValue instanceof Text) {
out.writeByte((byte) 9);
out.writeText((Text) sortValue);
} else {
throw new IOException("Can't handle sort field value of type [" + type + "]");
}
}
}
}
if (matchedQueries.length == 0) {
out.writeVInt(0);
} else {
out.writeVInt(matchedQueries.length);
for (String matchedFilter : matchedQueries) {
out.writeString(matchedFilter);
}
}
if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.STREAM) {
if (shard == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
shard.writeTo(out);
}
} else if (context.streamShardTarget() == InternalSearchHits.StreamContext.ShardTargetType.LOOKUP) {
if (shard == null) {
out.writeVInt(0);
} else {
out.writeVInt(context.shardHandleLookup().get(shard));
}
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_internal_InternalSearchHit.java
|
43 |
public interface ONavigableSet<E> extends SortedSet<E> {
/**
* Returns the greatest element in this set strictly less than the given element, or {@code null} if there is no such element.
*
* @param e
* the value to match
* @return the greatest element less than {@code e}, or {@code null} if there is no such element
* @throws ClassCastException
* if the specified element cannot be compared with the elements currently in the set
* @throws NullPointerException
* if the specified element is null and this set does not permit null elements
*/
E lower(E e);
/**
* Returns the greatest element in this set less than or equal to the given element, or {@code null} if there is no such element.
*
* @param e
* the value to match
* @return the greatest element less than or equal to {@code e}, or {@code null} if there is no such element
* @throws ClassCastException
* if the specified element cannot be compared with the elements currently in the set
* @throws NullPointerException
* if the specified element is null and this set does not permit null elements
*/
E floor(E e);
/**
* Returns the least element in this set greater than or equal to the given element, or {@code null} if there is no such element.
*
* @param e
* the value to match
* @return the least element greater than or equal to {@code e}, or {@code null} if there is no such element
* @throws ClassCastException
* if the specified element cannot be compared with the elements currently in the set
* @throws NullPointerException
* if the specified element is null and this set does not permit null elements
*/
E ceiling(E e);
/**
* Returns the least element in this set strictly greater than the given element, or {@code null} if there is no such element.
*
* @param e
* the value to match
* @return the least element greater than {@code e}, or {@code null} if there is no such element
* @throws ClassCastException
* if the specified element cannot be compared with the elements currently in the set
* @throws NullPointerException
* if the specified element is null and this set does not permit null elements
*/
E higher(E e);
/**
* Retrieves and removes the first (lowest) element, or returns {@code null} if this set is empty.
*
* @return the first element, or {@code null} if this set is empty
*/
E pollFirst();
/**
* Retrieves and removes the last (highest) element, or returns {@code null} if this set is empty.
*
* @return the last element, or {@code null} if this set is empty
*/
E pollLast();
/**
* Returns an iterator over the elements in this set, in ascending order.
*
* @return an iterator over the elements in this set, in ascending order
*/
OLazyIterator<E> iterator();
/**
* Returns a reverse order view of the elements contained in this set. The descending set is backed by this set, so changes to the
* set are reflected in the descending set, and vice-versa. If either set is modified while an iteration over either set is in
* progress (except through the iterator's own {@code remove} operation), the results of the iteration are undefined.
*
* <p>
* The returned set has an ordering equivalent to
* <tt>{@link Collections#reverseOrder(Comparator) Collections.reverseOrder}(comparator())</tt>. The expression
* {@code s.descendingSet().descendingSet()} returns a view of {@code s} essentially equivalent to {@code s}.
*
* @return a reverse order view of this set
*/
ONavigableSet<E> descendingSet();
/**
* Returns an iterator over the elements in this set, in descending order. Equivalent in effect to
* {@code descendingSet().iterator()}.
*
* @return an iterator over the elements in this set, in descending order
*/
Iterator<E> descendingIterator();
/**
* Returns a view of the portion of this set whose elements range from {@code fromElement} to {@code toElement}. If
* {@code fromElement} and {@code toElement} are equal, the returned set is empty unless {@code fromExclusive} and
* {@code toExclusive} are both true. The returned set is backed by this set, so changes in the returned set are reflected in this
* set, and vice-versa. The returned set supports all optional set operations that this set supports.
*
* <p>
* The returned set will throw an {@code IllegalArgumentException} on an attempt to insert an element outside its range.
*
* @param fromElement
* low endpoint of the returned set
* @param fromInclusive
* {@code true} if the low endpoint is to be included in the returned view
* @param toElement
* high endpoint of the returned set
* @param toInclusive
* {@code true} if the high endpoint is to be included in the returned view
* @return a view of the portion of this set whose elements range from {@code fromElement}, inclusive, to {@code toElement},
* exclusive
* @throws ClassCastException
* if {@code fromElement} and {@code toElement} cannot be compared to one another using this set's comparator (or, if
* the set has no comparator, using natural ordering). Implementations may, but are not required to, throw this
* exception if {@code fromElement} or {@code toElement} cannot be compared to elements currently in the set.
* @throws NullPointerException
* if {@code fromElement} or {@code toElement} is null and this set does not permit null elements
* @throws IllegalArgumentException
* if {@code fromElement} is greater than {@code toElement}; or if this set itself has a restricted range, and
* {@code fromElement} or {@code toElement} lies outside the bounds of the range.
*/
ONavigableSet<E> subSet(E fromElement, boolean fromInclusive, E toElement, boolean toInclusive);
/**
* Returns a view of the portion of this set whose elements are less than (or equal to, if {@code inclusive} is true)
* {@code toElement}. The returned set is backed by this set, so changes in the returned set are reflected in this set, and
* vice-versa. The returned set supports all optional set operations that this set supports.
*
* <p>
* The returned set will throw an {@code IllegalArgumentException} on an attempt to insert an element outside its range.
*
* @param toElement
* high endpoint of the returned set
* @param inclusive
* {@code true} if the high endpoint is to be included in the returned view
* @return a view of the portion of this set whose elements are less than (or equal to, if {@code inclusive} is true)
* {@code toElement}
* @throws ClassCastException
* if {@code toElement} is not compatible with this set's comparator (or, if the set has no comparator, if
* {@code toElement} does not implement {@link Comparable}). Implementations may, but are not required to, throw this
* exception if {@code toElement} cannot be compared to elements currently in the set.
* @throws NullPointerException
* if {@code toElement} is null and this set does not permit null elements
* @throws IllegalArgumentException
* if this set itself has a restricted range, and {@code toElement} lies outside the bounds of the range
*/
ONavigableSet<E> headSet(E toElement, boolean inclusive);
/**
* Returns a view of the portion of this set whose elements are greater than (or equal to, if {@code inclusive} is true)
* {@code fromElement}. The returned set is backed by this set, so changes in the returned set are reflected in this set, and
* vice-versa. The returned set supports all optional set operations that this set supports.
*
* <p>
* The returned set will throw an {@code IllegalArgumentException} on an attempt to insert an element outside its range.
*
* @param fromElement
* low endpoint of the returned set
* @param inclusive
* {@code true} if the low endpoint is to be included in the returned view
* @return a view of the portion of this set whose elements are greater than or equal to {@code fromElement}
* @throws ClassCastException
* if {@code fromElement} is not compatible with this set's comparator (or, if the set has no comparator, if
* {@code fromElement} does not implement {@link Comparable}). Implementations may, but are not required to, throw this
* exception if {@code fromElement} cannot be compared to elements currently in the set.
* @throws NullPointerException
* if {@code fromElement} is null and this set does not permit null elements
* @throws IllegalArgumentException
* if this set itself has a restricted range, and {@code fromElement} lies outside the bounds of the range
*/
ONavigableSet<E> tailSet(E fromElement, boolean inclusive);
/**
* {@inheritDoc}
*
* <p>
* Equivalent to {@code subSet(fromElement, true, toElement, false)}.
*
* @throws ClassCastException
* {@inheritDoc}
* @throws NullPointerException
* {@inheritDoc}
* @throws IllegalArgumentException
* {@inheritDoc}
*/
SortedSet<E> subSet(E fromElement, E toElement);
/**
* {@inheritDoc}
*
* <p>
* Equivalent to {@code headSet(toElement, false)}.
*
* @throws ClassCastException
* {@inheritDoc}
* @throws NullPointerException
* {@inheritDoc}
* @throws IllegalArgumentException
* {@inheritDoc} na
*/
SortedSet<E> headSet(E toElement);
/**
* {@inheritDoc}
*
* <p>
* Equivalent to {@code tailSet(fromElement, true)}.
*
* @throws ClassCastException
* {@inheritDoc}
* @throws NullPointerException
* {@inheritDoc}
* @throws IllegalArgumentException
* {@inheritDoc}
*/
SortedSet<E> tailSet(E fromElement);
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_ONavigableSet.java
|
1,955 |
public class MapRemoveRequest extends KeyBasedClientRequest implements Portable, SecureRequest {
protected String name;
protected Data key;
protected long threadId;
protected transient long startTime;
public MapRemoveRequest() {
}
public MapRemoveRequest(String name, Data key, long threadId) {
this.name = name;
this.key = key;
this.threadId = threadId;
}
public int getFactoryId() {
return MapPortableHook.F_ID;
}
public int getClassId() {
return MapPortableHook.REMOVE;
}
public Object getKey() {
return key;
}
@Override
protected void beforeProcess() {
startTime = System.currentTimeMillis();
}
@Override
protected void afterResponse() {
final long latency = System.currentTimeMillis() - startTime;
final MapService mapService = getService();
MapContainer mapContainer = mapService.getMapContainer(name);
if (mapContainer.getMapConfig().isStatisticsEnabled()) {
mapService.getLocalMapStatsImpl(name).incrementRemoves(latency);
}
}
protected Operation prepareOperation() {
RemoveOperation op = new RemoveOperation(name, key);
op.setThreadId(threadId);
return op;
}
public String getServiceName() {
return MapService.SERVICE_NAME;
}
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
writer.writeLong("t", threadId);
final ObjectDataOutput out = writer.getRawDataOutput();
key.writeData(out);
}
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
threadId = reader.readLong("t");
final ObjectDataInput in = reader.getRawDataInput();
key = new Data();
key.readData(in);
}
public Permission getRequiredPermission() {
return new MapPermission(name, ActionConstants.ACTION_REMOVE);
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_client_MapRemoveRequest.java
|
1,211 |
/**
 * Extensible type-safe enumeration of "additional field" keys that can be
 * attached to a payment info record (cardholder name, expiration, gift-card
 * number, gateway response ids, ...). Every constructed instance registers
 * itself in the static {@link #TYPES} registry, so values declared in other
 * modules are resolvable via {@link #getInstance(String)} exactly like the
 * built-in constants.
 */
public class PaymentInfoAdditionalFieldType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
// Registry of every constructed type, keyed by type code; LinkedHashMap
// keeps declaration order stable for display purposes.
private static final Map<String, PaymentInfoAdditionalFieldType> TYPES = new LinkedHashMap<String, PaymentInfoAdditionalFieldType>();
// NOTE(review): "Cardholders Name" may be missing an apostrophe; left as-is
// because the display text may be referenced elsewhere — confirm before changing.
public static final PaymentInfoAdditionalFieldType NAME_ON_CARD = new PaymentInfoAdditionalFieldType("NAME_ON_CARD", "Cardholders Name");
public static final PaymentInfoAdditionalFieldType CARD_TYPE = new PaymentInfoAdditionalFieldType("CARD_TYPE", "Card Type");
public static final PaymentInfoAdditionalFieldType EXP_MONTH = new PaymentInfoAdditionalFieldType("EXP_MONTH", "Expiration Month");
public static final PaymentInfoAdditionalFieldType EXP_YEAR = new PaymentInfoAdditionalFieldType("EXP_YEAR", "Expiration Year");
// Generic Fields that can be used for multiple payment types
public static final PaymentInfoAdditionalFieldType PAYMENT_TYPE = new PaymentInfoAdditionalFieldType("PAYMENT_TYPE", "Type of Payment");
public static final PaymentInfoAdditionalFieldType NAME_ON_ACCOUNT = new PaymentInfoAdditionalFieldType("NAME_ON_ACCOUNT", "Name on Account");
public static final PaymentInfoAdditionalFieldType ACCOUNT_TYPE = new PaymentInfoAdditionalFieldType("ACCOUNT_TYPE", "Account Type");
// Fixed display-text typo: "Digits ofAccount" -> "Digits of Account".
public static final PaymentInfoAdditionalFieldType LAST_FOUR = new PaymentInfoAdditionalFieldType("LAST_FOUR", "Last Four Digits of Account or CC");
public static final PaymentInfoAdditionalFieldType GIFT_CARD_NUM = new PaymentInfoAdditionalFieldType("GIFT_CARD_NUM", "Gift Card Number");
public static final PaymentInfoAdditionalFieldType EMAIL = new PaymentInfoAdditionalFieldType("EMAIL", "Email");
public static final PaymentInfoAdditionalFieldType ACCOUNT_CREDIT_NUM = new PaymentInfoAdditionalFieldType("ACCOUNT_CREDIT_NUM", "Account Credit Number");
public static final PaymentInfoAdditionalFieldType AUTH_CODE = new PaymentInfoAdditionalFieldType("AUTH_CODE", "Authorization Code");
public static final PaymentInfoAdditionalFieldType REQUEST_ID = new PaymentInfoAdditionalFieldType("REQUEST_ID", "Request Id");
public static final PaymentInfoAdditionalFieldType SUBSCRIPTION_ID = new PaymentInfoAdditionalFieldType("SUBSCRIPTION_ID", "Subscription Id");
public static final PaymentInfoAdditionalFieldType SUBSCRIPTION_TITLE = new PaymentInfoAdditionalFieldType("SUBSCRIPTION_TITLE", "Subscription Title");
/**
 * Looks up a previously registered type by its code.
 *
 * @param type the type code, e.g. {@code "EMAIL"}
 * @return the registered instance, or {@code null} if the code is unknown
 */
public static PaymentInfoAdditionalFieldType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
/** No-arg constructor required by serialization frameworks. */
public PaymentInfoAdditionalFieldType() {
//do nothing
}
public PaymentInfoAdditionalFieldType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
// First registration wins: an existing mapping for this code is never
// replaced, so the built-in constants cannot be clobbered by later instances.
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
// Equality is based solely on the type code, consistent with hashCode().
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PaymentInfoAdditionalFieldType other = (PaymentInfoAdditionalFieldType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_type_PaymentInfoAdditionalFieldType.java
|
3,424 |
// Run the (potentially long) gateway recovery on the generic thread pool so
// the calling thread is not blocked.
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
// Fresh progress tracker for this recovery attempt; starts in INIT stage.
recoveryStatus = new RecoveryStatus();
recoveryStatus.updateStage(RecoveryStatus.Stage.INIT);
try {
// Restore from a snapshot repository when the routing entry carries a
// restore source; otherwise recover from the shard gateway.
if (indexShard.routingEntry().restoreSource() != null) {
logger.debug("restoring from {} ...", indexShard.routingEntry().restoreSource());
snapshotService.restore(recoveryStatus);
} else {
logger.debug("starting recovery from {} ...", shardGateway);
shardGateway.recover(indexShouldExists, recoveryStatus);
}
// Record recovered index/translog positions for subsequent snapshots.
lastIndexVersion = recoveryStatus.index().version();
lastTranslogId = -1;
lastTranslogLength = 0;
lastTotalTranslogOperations = recoveryStatus.translog().currentTranslogOperations();
// start the shard if the gateway has not started it already. Note that if the gateway
// moved shard to POST_RECOVERY, it may have been started as well if:
// 1) master sent a new cluster state indicating shard is initializing
// 2) IndicesClusterStateService#applyInitializingShard will send a shard started event
// 3) Master will mark shard as started and this will be processed locally.
IndexShardState shardState = indexShard.state();
if (shardState != IndexShardState.POST_RECOVERY && shardState != IndexShardState.STARTED) {
indexShard.postRecovery("post recovery from gateway");
}
// refresh the shard
indexShard.refresh(new Engine.Refresh("post_gateway").force(true));
recoveryStatus.time(System.currentTimeMillis() - recoveryStatus.startTime());
recoveryStatus.updateStage(RecoveryStatus.Stage.DONE);
// FIX: check trace before debug. Trace level implies debug is also
// enabled, so the original debug-first ordering made the detailed
// trace branch unreachable.
if (logger.isTraceEnabled()) {
StringBuilder sb = new StringBuilder();
sb.append("recovery completed from ").append(shardGateway).append(", took [").append(timeValueMillis(recoveryStatus.time())).append("]\n");
sb.append("    index    : files           [").append(recoveryStatus.index().numberOfFiles()).append("] with total_size [").append(new ByteSizeValue(recoveryStatus.index().totalSize())).append("], took[").append(TimeValue.timeValueMillis(recoveryStatus.index().time())).append("]\n");
sb.append("             : recovered_files [").append(recoveryStatus.index().numberOfRecoveredFiles()).append("] with total_size [").append(new ByteSizeValue(recoveryStatus.index().recoveredTotalSize())).append("]\n");
sb.append("             : reusing_files   [").append(recoveryStatus.index().numberOfReusedFiles()).append("] with total_size [").append(new ByteSizeValue(recoveryStatus.index().reusedTotalSize())).append("]\n");
sb.append("    start    : took [").append(TimeValue.timeValueMillis(recoveryStatus.start().time())).append("], check_index [").append(timeValueMillis(recoveryStatus.start().checkIndexTime())).append("]\n");
sb.append("    translog : number_of_operations [").append(recoveryStatus.translog().currentTranslogOperations()).append("], took [").append(TimeValue.timeValueMillis(recoveryStatus.translog().time())).append("]");
logger.trace(sb.toString());
} else if (logger.isDebugEnabled()) {
logger.debug("recovery completed from [{}], took [{}]", shardGateway, timeValueMillis(recoveryStatus.time()));
}
listener.onRecoveryDone();
scheduleSnapshotIfNeeded();
} catch (IndexShardGatewayRecoveryException e) {
// Recovery raced with a shard close: treat as ignorable, not a failure.
if (indexShard.state() == IndexShardState.CLOSED) {
// got closed on us, just ignore this recovery
listener.onIgnoreRecovery("shard closed");
return;
}
if ((e.getCause() instanceof IndexShardClosedException) || (e.getCause() instanceof IndexShardNotStartedException)) {
// got closed on us, just ignore this recovery
listener.onIgnoreRecovery("shard closed");
return;
}
listener.onRecoveryFailed(e);
} catch (IndexShardClosedException e) {
listener.onIgnoreRecovery("shard closed");
} catch (IndexShardNotStartedException e) {
listener.onIgnoreRecovery("shard closed");
} catch (Exception e) {
if (indexShard.state() == IndexShardState.CLOSED) {
// got closed on us, just ignore this recovery
listener.onIgnoreRecovery("shard closed");
return;
}
listener.onRecoveryFailed(new IndexShardGatewayRecoveryException(shardId, "failed recovery", e));
}
}
});
| 1no label
|
src_main_java_org_elasticsearch_index_gateway_IndexShardGatewayService.java
|
1,289 |
/**
 * Static helpers used by the Ceylon launch-configuration UI: discovering
 * runnable top-level declarations in a project, resolving modules by their
 * "name/version" full name, and prompting the user to choose among candidates.
 */
public class LaunchHelper {
// Recursively collects all .ceylon source files under the given resource.
static void addFiles(List<IFile> files, IResource resource) {
switch (resource.getType()) {
case IResource.FILE:
IFile file = (IFile) resource;
IPath path = file.getFullPath(); //getProjectRelativePath();
if (path!=null && "ceylon".equals(path.getFileExtension()) ) {
files.add(file);
}
break;
case IResource.FOLDER:
case IResource.PROJECT:
IContainer folder = (IContainer) resource;
try {
for (IResource child: folder.members()) {
addFiles(files, child);
}
}
catch (CoreException e) {
e.printStackTrace();
}
break;
}
}
// Scans the given files for runnable top-level declarations; returns the
// chosen {declaration, file} pair (elements may be null if nothing runnable
// was found or the user cancelled the chooser).
static Object[] findDeclarationFromFiles(List<IFile> files) {
List<Declaration> topLevelDeclarations = new LinkedList<Declaration>();
List<IFile> correspondingfiles = new LinkedList<IFile>();
for (IFile file : files) {
IProject project = file.getProject();
TypeChecker typeChecker = getProjectTypeChecker(project);
if (typeChecker != null) {
PhasedUnit phasedUnit = typeChecker.getPhasedUnits()
.getPhasedUnit(createResourceVirtualFile(file));
if (phasedUnit!=null) {
List<Declaration> declarations = phasedUnit.getDeclarations();
for (Declaration d : declarations) {
if (isRunnable(d)) {
topLevelDeclarations.add(d);
correspondingfiles.add(file);
}
}
}
}
}
Declaration declarationToRun = null;
IFile fileToRun = null;
if (topLevelDeclarations.size() == 0) {
MessageDialog.openError(EditorUtil.getShell(), "Ceylon Launcher",
"No ceylon runnable element");
}
else if (topLevelDeclarations.size() > 1) {
// Ambiguous: ask the user which declaration to run.
declarationToRun = chooseDeclaration(topLevelDeclarations);
if (declarationToRun!=null) {
fileToRun = correspondingfiles.get(topLevelDeclarations.indexOf(declarationToRun));
}
}
else {
declarationToRun = topLevelDeclarations.get(0);
fileToRun = correspondingfiles.get(0);
}
return new Object[] {declarationToRun, fileToRun};
}
// Runnable = shared top-level method or class with no (required) parameters.
private static boolean isRunnable(Declaration d) {
boolean candidateDeclaration = true;
if (!d.isToplevel() || !d.isShared()) {
candidateDeclaration = false;
}
if (d instanceof Method) {
Method methodDecl = (Method) d;
if (!methodDecl.getParameterLists().isEmpty() &&
!methodDecl.getParameterLists().get(0).getParameters().isEmpty()) {
candidateDeclaration = false;
}
}
else if (d instanceof Class) {
Class classDecl = (Class) d;
if (classDecl.isAbstract() ||
classDecl.getParameterList()==null ||
!classDecl.getParameterList().getParameters().isEmpty()) {
candidateDeclaration = false;
}
}
else {
candidateDeclaration = false;
}
return candidateDeclaration;
}
// Resolves a module by its "name/version" full name; falls back to the
// default module when present.
static Module getModule(IProject project, String fullModuleName) {
fullModuleName = normalizeFullModuleName(fullModuleName);
if (fullModuleName != null) {
String[] parts = fullModuleName.split("/");
// FIX: String.split never returns null, so the former null-check was dead.
if (parts.length != 2) {
return null;
}
for (Module module: getProjectDeclaredSourceModules(project)) {
if (module.getNameAsString().equals(parts[0]) &&
module.getVersion().equals(parts[1])) {
return module;
}
}
if (isDefaultModulePresent(project)) {
return getDefaultModule(project);
}
}
return null;
}
private static String normalizeFullModuleName(String fullModuleName) {
if (Module.DEFAULT_MODULE_NAME.equals(fullModuleName)) {
return getFullModuleName(getEmptyDefaultModule());
} else {
return fullModuleName;
}
}
private static Module getDefaultModule(IProject project) {
Module defaultModule = getProjectModules(project).getDefaultModule();
if (defaultModule == null) {
defaultModule = getEmptyDefaultModule();
}
return defaultModule;
}
// Synthesizes a placeholder default module ("default/unversioned").
private static Module getEmptyDefaultModule() {
Module defaultModule = new Module();
defaultModule.setName(Arrays.asList(new String[]{Module.DEFAULT_MODULE_NAME}));
defaultModule.setVersion("unversioned");
defaultModule.setDefault(true);
return defaultModule;
}
static Module getModule(Declaration decl) {
if (decl.getUnit().getPackage() != null) {
if (decl.getUnit().getPackage().getModule() != null) {
return decl.getUnit().getPackage().getModule();
}
}
return getEmptyDefaultModule();
}
static String getModuleFullName(Declaration decl) {
Module module = getModule(decl);
if (module.isDefault()) {
return Module.DEFAULT_MODULE_NAME;
} else {
return getFullModuleName(module);
}
}
// Collects the project's available non-Java, non-language modules, always
// including a default module when the project has (or needs) one.
static Set<Module> getModules(IProject project, boolean includeDefault) {
Set<Module> modules = new HashSet<Module>();
for(Module module: getProjectDeclaredSourceModules(project)) {
if (module.isAvailable()
&& !module.getNameAsString().startsWith(Module.LANGUAGE_MODULE_NAME) &&
!module.isJava() ) {
if ((module.isDefault() && includeDefault) // TODO : this is *never* true : the default module is not in the requested list
|| (!module.isDefault() &&
module.getPackage(module.getNameAsString()) != null)){
modules.add(module);
}
}
}
if (modules.isEmpty() || isDefaultModulePresent(project)) {
modules.add(getDefaultModule(project));
}
return modules;
}
// True when the project's default module actually contains runnable code.
private static boolean isDefaultModulePresent(IProject project) {
Module defaultModule =
getProjectModules(project).getDefaultModule();
if (defaultModule != null) {
List<Declaration> decls =
getDeclarationsForModule(project, defaultModule);
if (!decls.isEmpty()) {
return true;
}
}
return false;
}
static boolean isModuleInProject(IProject project, String fullModuleName) {
if (fullModuleName.equals(Module.DEFAULT_MODULE_NAME) &&
isDefaultModulePresent(project)) {
return true;
}
for (Module module : getModules(project, false)) {
if (fullModuleName != null
&& fullModuleName.equals(getFullModuleName(module))) {
return true;
}
}
return false;
}
static String getFullModuleName(Module module) {
return module.getNameAsString()+"/"+module.getVersion();
}
// All runnable declarations in the given module's project-local packages.
static List<Declaration> getDeclarationsForModule(IProject project, Module module) {
List<Declaration> modDecls = new LinkedList<Declaration>();
if (module != null) {
List<Package> pkgs = module.getPackages(); // avoid concurrent exception
for (Package pkg : pkgs) {
if (pkg.getModule() != null && isPackageInProject(project, pkg))
for (Declaration decl : pkg.getMembers()) {
if (isRunnable(decl)) {
modDecls.add(decl);
}
}
}
}
return modDecls;
}
private static boolean isPackageInProject(IProject project, Package pkg) {
TypeChecker typeChecker = getProjectTypeChecker(project);
List<PhasedUnit> pus = typeChecker.getPhasedUnits().getPhasedUnits();
for (PhasedUnit phasedUnit : pus) {
if (pkg.equals(phasedUnit.getPackage())) {
return true;
}
}
return false;
}
static List<Declaration> getDeclarationsForModule(String projectName,
String fullModuleName) {
IProject project = getProjectFromName(projectName);
Module module = getModule(project, fullModuleName);
return getDeclarationsForModule(project, module);
}
/**
 * Does not attempt to get all declarations before it returns true
 * @param project
 * @param fullModuleName
 * @param topLevelName
 * @return boolean if a top-level is contained in a module
 */
static boolean isModuleContainsTopLevel(IProject project,
String fullModuleName, String topLevelName) {
if (!isModuleInProject(project, fullModuleName)) {
return false;
}
if (Module.DEFAULT_MODULE_NAME.equals(fullModuleName)) {
fullModuleName = getFullModuleName(getDefaultModule(project));
}
Module mod = getModule(project, fullModuleName);
if (mod == null) {
return false;
}
for (Package pkg : mod.getPackages()) {
for (Declaration decl : pkg.getMembers()) {
if (getRunnableName(decl).equals(topLevelName)) {
return true;
}
}
}
return false;
}
static String getRunnableName(Declaration d) {
return d.getQualifiedNameString().replace("::", ".");
}
// Opens a selection dialog; returns null when the user cancels.
static Declaration chooseDeclaration(final List<Declaration> decls) {
FilteredItemsSelectionDialog sd =
new CeylonTopLevelSelectionDialog(EditorUtil.getShell(),
false, decls);
if (sd.open() == Window.OK) {
return (Declaration)sd.getFirstResult();
}
return null;
}
static Module chooseModule(String projectName, boolean includeDefault) {
return chooseModule(getProjectFromName(projectName), includeDefault);
}
static Module chooseModule(IProject project, boolean includeDefault) {
// FIX: compute the unambiguous candidate once (the original evaluated
// getDefaultOrOnlyModule twice back-to-back).
Module defaultOrOnlyModule = getDefaultOrOnlyModule(project, includeDefault);
if (defaultOrOnlyModule != null) {
return defaultOrOnlyModule;
}
Set<Module> modules = getModules(project, true);
FilteredItemsSelectionDialog cmsd =
new CeylonModuleSelectionDialog(EditorUtil.getShell(),
modules, "Choose Ceylon Module");
if (cmsd.open() == Window.OK) {
return (Module)cmsd.getFirstResult();
}
return null;
}
static IProject getProjectFromName(String projectName) {
if (projectName != null && projectName.length() > 0) {
IWorkspace workspace = getWorkspace();
IStatus status = workspace.validateName(projectName, IResource.PROJECT);
if (status.isOK()) {
return workspace.getRoot().getProject(projectName);
}
}
return null;
}
// Maps the "run" display marker back to the module's real "<name>.run".
static String getTopLevelNormalName(String moduleFullName, String displayName) {
if (displayName.contains(DEFAULT_RUN_MARKER) &&
moduleFullName.indexOf('/') != -1) {
return moduleFullName.substring(0,
moduleFullName.indexOf('/')) + ".run";
}
return displayName;
}
static String getTopLevelDisplayName(Declaration decl) {
String topLevelName = getRunnableName(decl);
if (getModule(decl) != null &&
decl.equals(getDefaultRunnableForModule(getModule(decl)))) {
topLevelName = "run" + DEFAULT_RUN_MARKER;
}
return topLevelName;
}
// Returns a module only when the choice is unambiguous: exactly one module,
// or exactly one real module next to the (excluded) default module.
static Module getDefaultOrOnlyModule(IProject project,
boolean includeDefault) {
Set<Module> modules = getModules(project, true);
//if only one real module or just one default module, just send it back
if (modules.size() == 1) {
return modules.iterator().next();
}
if (modules.size() ==2 && !includeDefault) {
Iterator<Module> modIterator = modules.iterator();
while (modIterator.hasNext()) {
Module realMod = modIterator.next();
if (!realMod.isDefault()) {
return realMod;
}
}
}
return null;
}
static Declaration getDefaultRunnableForModule(Module mod) {
Declaration decl = null;
if (mod.getRootPackage() != null) {
decl = mod.getRootPackage()
.getDirectMember("run", null, false);
}
return decl;
}
static Module getModule(IFolder folder) {
Package pkg = getPackage(folder);
if (pkg != null) {
return pkg.getModule();
}
return null;
}
// Maps a launch-shortcut property to the project's configured backend.
static boolean isBuilderEnabled(IProject project, String property) {
if (CAN_LAUNCH_AS_CEYLON_JAVA_MODULE.equals(property)) {
return CeylonBuilder.compileToJava(project);
} else if (CAN_LAUNCH_AS_CEYLON_JAVASCIPT_MODULE.equals(property)) {
return CeylonBuilder.compileToJs(project);
}
return false;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_launch_LaunchHelper.java
|
498 |
/**
 * Client-side invocation service: serializes client requests and writes them
 * over a client connection, returning a future for the eventual response.
 * Also tracks listener registrations (so they can be re-registered under a new
 * alias after reconnect) and keeps futures whose sends failed so they can be
 * resent via {@link #triggerFailedListeners()}.
 */
public final class ClientInvocationServiceImpl implements ClientInvocationService {
private final HazelcastClient client;
private final ClientConnectionManager connectionManager;
// listener uuid/alias -> call id of the registration request
private final ConcurrentMap<String, Integer> registrationMap = new ConcurrentHashMap<String, Integer>();
// listener uuid -> current alias (updated on re-registration)
private final ConcurrentMap<String, String> registrationAliasMap = new ConcurrentHashMap<String, String>();
// futures registered for retry; triggerFailedListeners() resends each once
private final Set<ClientCallFuture> failedListeners =
Collections.newSetFromMap(new ConcurrentHashMap<ClientCallFuture, Boolean>());
public ClientInvocationServiceImpl(HazelcastClient client) {
this.client = client;
this.connectionManager = client.getConnectionManager();
}
public <T> ICompletableFuture<T> invokeOnRandomTarget(ClientRequest request) throws Exception {
return send(request);
}
public <T> ICompletableFuture<T> invokeOnTarget(ClientRequest request, Address target) throws Exception {
return send(request, target);
}
// Routes to the member owning the key's partition; falls back to a random
// member when the owner is not yet known.
public <T> ICompletableFuture<T> invokeOnKeyOwner(ClientRequest request, Object key) throws Exception {
ClientPartitionServiceImpl partitionService = (ClientPartitionServiceImpl) client.getClientPartitionService();
final Address owner = partitionService.getPartitionOwner(partitionService.getPartitionId(key));
if (owner != null) {
return invokeOnTarget(request, owner);
}
return invokeOnRandomTarget(request);
}
public <T> ICompletableFuture<T> invokeOnRandomTarget(ClientRequest request, EventHandler handler) throws Exception {
return sendAndHandle(request, handler);
}
public <T> ICompletableFuture<T> invokeOnTarget(ClientRequest request, Address target, EventHandler handler)
throws Exception {
return sendAndHandle(request, target, handler);
}
public <T> ICompletableFuture<T> invokeOnKeyOwner(ClientRequest request, Object key, EventHandler handler)
throws Exception {
ClientPartitionServiceImpl partitionService = (ClientPartitionServiceImpl) client.getClientPartitionService();
final Address owner = partitionService.getPartitionOwner(partitionService.getPartitionId(key));
if (owner != null) {
return invokeOnTarget(request, owner, handler);
}
return invokeOnRandomTarget(request, handler);
}
// NIO public
// Sends on an explicit connection; marks the request single-connection so it
// is not retried elsewhere.
public ICompletableFuture send(ClientRequest request, ClientConnection connection) {
request.setSingleConnection();
return doSend(request, connection, null);
}
// Resends a previously failed call on a freshly obtained connection.
public Future reSend(ClientCallFuture future) throws Exception {
final ClientConnection connection = connectionManager.tryToConnect(null);
sendInternal(future, connection);
return future;
}
public void registerFailedListener(ClientCallFuture future) {
failedListeners.add(future);
}
// Drains the failed set, resending each future exactly once.
public void triggerFailedListeners() {
final Iterator<ClientCallFuture> iterator = failedListeners.iterator();
while (iterator.hasNext()) {
final ClientCallFuture failedListener = iterator.next();
iterator.remove();
failedListener.resend();
}
}
public void registerListener(String uuid, Integer callId) {
registrationAliasMap.put(uuid, uuid);
registrationMap.put(uuid, callId);
}
// Swaps the listener's alias after re-registration, dropping the old mapping.
public void reRegisterListener(String uuid, String alias, Integer callId) {
final String oldAlias = registrationAliasMap.put(uuid, alias);
if (oldAlias != null) {
registrationMap.remove(oldAlias);
registrationMap.put(alias, callId);
}
}
public boolean isRedoOperation() {
return client.getClientConfig().isRedoOperation();
}
// Removes a listener registration and detaches its event handler.
public String deRegisterListener(String alias) {
final String uuid = registrationAliasMap.remove(alias);
if (uuid != null) {
final Integer callId = registrationMap.remove(alias);
connectionManager.removeEventHandler(callId);
}
return uuid;
}
//NIO private
private ICompletableFuture send(ClientRequest request) throws Exception {
final ClientConnection connection = connectionManager.tryToConnect(null);
return doSend(request, connection, null);
}
private ICompletableFuture send(ClientRequest request, Address target) throws Exception {
final ClientConnection connection = connectionManager.tryToConnect(target);
return doSend(request, connection, null);
}
private ICompletableFuture sendAndHandle(ClientRequest request, EventHandler handler) throws Exception {
final ClientConnection connection = connectionManager.tryToConnect(null);
return doSend(request, connection, handler);
}
private ICompletableFuture sendAndHandle(ClientRequest request, Address target, EventHandler handler) throws Exception {
final ClientConnection connection = connectionManager.tryToConnect(target);
return doSend(request, connection, handler);
}
private ICompletableFuture doSend(ClientRequest request, ClientConnection connection, EventHandler handler) {
final ClientCallFuture future = new ClientCallFuture(client, request, handler);
sendInternal(future, connection);
return future;
}
// Registers the call id, serializes the request, and writes it; on write
// failure the registrations are rolled back and the future is completed
// exceptionally with TargetNotMemberException.
private void sendInternal(ClientCallFuture future, ClientConnection connection) {
connection.registerCallId(future);
future.setConnection(connection);
final SerializationService ss = client.getSerializationService();
final Data data = ss.toData(future.getRequest());
if (!connection.write(new DataAdapter(data))) {
final int callId = future.getRequest().getCallId();
connection.deRegisterCallId(callId);
connection.deRegisterEventHandler(callId);
future.notify(new TargetNotMemberException("Address : " + connection.getRemoteEndpoint()));
}
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientInvocationServiceImpl.java
|
665 |
/**
 * A request to validate a query (optionally with a detailed explanation)
 * against one or more indices. Serialized over the transport layer, so
 * {@link #readFrom} and {@link #writeTo} must keep their field order in sync.
 */
public class ValidateQueryRequest extends BroadcastOperationRequest<ValidateQueryRequest> {
private static final XContentType contentType = Requests.CONTENT_TYPE;
private BytesReference source;
// true while `source` may alias a shared/reused buffer; copied in beforeStart()
private boolean sourceUnsafe;
private boolean explain;
private String[] types = Strings.EMPTY_ARRAY;
// NOTE(review): package-visible request timestamp — appears to be populated
// by the transport action; confirm before relying on it here.
long nowInMillis;
ValidateQueryRequest() {
this(Strings.EMPTY_ARRAY);
}
/**
 * Constructs a new validate request against the provided indices. No indices provided means it will
 * run against all indices.
 */
public ValidateQueryRequest(String... indices) {
super(indices);
indicesOptions(IndicesOptions.fromOptions(false, false, true, false));
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
return validationException;
}
// Snapshot an unsafe (possibly shared) source buffer before dispatch.
@Override
protected void beforeStart() {
if (sourceUnsafe) {
source = source.copyBytesArray();
sourceUnsafe = false;
}
}
/**
 * The source to execute.
 */
BytesReference source() {
return source;
}
public ValidateQueryRequest source(QuerySourceBuilder sourceBuilder) {
this.source = sourceBuilder.buildAsBytes(contentType);
this.sourceUnsafe = false;
return this;
}
/**
 * The source to execute in the form of a map.
 */
public ValidateQueryRequest source(Map source) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(contentType);
builder.map(source);
return source(builder);
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
}
}
public ValidateQueryRequest source(XContentBuilder builder) {
this.source = builder.bytes();
this.sourceUnsafe = false;
return this;
}
/**
 * The query source to validate. It is preferable to use either {@link #source(byte[])}
 * or {@link #source(QuerySourceBuilder)}.
 */
public ValidateQueryRequest source(String source) {
this.source = new BytesArray(source);
this.sourceUnsafe = false;
return this;
}
/**
 * The source to validate.
 */
public ValidateQueryRequest source(byte[] source) {
return source(source, 0, source.length, false);
}
/**
 * The source to validate.
 */
public ValidateQueryRequest source(byte[] source, int offset, int length, boolean unsafe) {
return source(new BytesArray(source, offset, length), unsafe);
}
/**
 * The source to validate.
 */
public ValidateQueryRequest source(BytesReference source, boolean unsafe) {
this.source = source;
this.sourceUnsafe = unsafe;
return this;
}
/**
 * The types of documents the query will run against. Defaults to all types.
 */
public String[] types() {
return this.types;
}
/**
 * The types of documents the query will run against. Defaults to all types.
 */
public ValidateQueryRequest types(String... types) {
this.types = types;
return this;
}
/**
 * Indicate if detailed information about query is requested
 */
public void explain(boolean explain) {
this.explain = explain;
}
/**
 * Indicates if detailed information about query is requested
 */
public boolean explain() {
return explain;
}
// Wire format: source bytes, type count + types, explain flag (see writeTo).
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
sourceUnsafe = false;
source = in.readBytesReference();
int typesSize = in.readVInt();
if (typesSize > 0) {
types = new String[typesSize];
for (int i = 0; i < typesSize; i++) {
types[i] = in.readString();
}
}
explain = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBytesReference(source);
out.writeVInt(types.length);
for (String type : types) {
out.writeString(type);
}
out.writeBoolean(explain);
}
// Best-effort debug rendering; a non-JSON source is shown as "_na_".
@Override
public String toString() {
String sSource = "_na_";
try {
sSource = XContentHelper.convertToJson(source, false);
} catch (Exception e) {
// ignore
}
return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", source[" + sSource + "], explain:" + explain;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_validate_query_ValidateQueryRequest.java
|
823 |
// JPA entity describing an offer's item-matching criterion: how many items
// (quantity) must satisfy the MVEL match rule for the offer to apply.
@Entity
@Table(name = "BLC_OFFER_ITEM_CRITERIA")
@Inheritance(strategy=InheritanceType.JOINED)
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationClass(friendlyName = "OfferItemCriteriaImpl_baseOfferItemCriteria")
public class OfferItemCriteriaImpl implements OfferItemCriteria {
public static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator= "OfferItemCriteriaId")
@GenericGenerator(
name="OfferItemCriteriaId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="OfferItemCriteriaImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.offer.domain.OfferItemCriteriaImpl")
}
)
@Column(name = "OFFER_ITEM_CRITERIA_ID")
@AdminPresentation(friendlyName = "OfferItemCriteriaImpl_Item_Criteria_Id", group = "OfferItemCriteriaImpl_Description", visibility = VisibilityEnum.HIDDEN_ALL)
protected Long id;
// Number of order items that must match the rule below.
@Column(name = "QUANTITY", nullable=false)
@AdminPresentation(friendlyName = "OfferItemCriteriaImpl_Quantity", group = "OfferItemCriteriaImpl_Description", visibility =VisibilityEnum.HIDDEN_ALL)
protected Integer quantity;
// Serialized match rule evaluated against order items; stored as a CLOB.
@Lob
@Type(type = "org.hibernate.type.StringClobType")
@Column(name = "ORDER_ITEM_MATCH_RULE", length = Integer.MAX_VALUE - 1)
@AdminPresentation(friendlyName = "OfferItemCriteriaImpl_Order_Item_Match_Rule", group = "OfferItemCriteriaImpl_Description", visibility = VisibilityEnum.HIDDEN_ALL)
protected String orderItemMatchRule;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Integer getQuantity() {
return quantity;
}
@Override
public void setQuantity(Integer receiveQuantity) {
this.quantity = receiveQuantity;
}
@Override
public String getMatchRule() {
return orderItemMatchRule;
}
@Override
public void setMatchRule(String matchRule) {
this.orderItemMatchRule = matchRule;
}
// NOTE(review): hashCode() mixes all fields while equals() short-circuits on
// id equality alone, so two "equal" persisted entities with different rule or
// quantity values can hash differently — verify hash-based collections do not
// rely on this pairing.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((orderItemMatchRule == null) ? 0 : orderItemMatchRule.hashCode());
result = prime * result + ((quantity == null) ? 0 : quantity.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OfferItemCriteriaImpl other = (OfferItemCriteriaImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (orderItemMatchRule == null) {
if (other.orderItemMatchRule != null)
return false;
} else if (!orderItemMatchRule.equals(other.orderItemMatchRule))
return false;
if (quantity == null) {
if (other.quantity != null)
return false;
} else if (!quantity.equals(other.quantity))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferItemCriteriaImpl.java
|
5,092 |
// Execute a combined query+fetch search phase on the target node and relay
// the result (or the transport failure) to the provided listener.
transportService.sendRequest(node, SearchQueryFetchTransportHandler.ACTION, request, new BaseTransportResponseHandler<QueryFetchSearchResult>() {
@Override
public QueryFetchSearchResult newInstance() {
return new QueryFetchSearchResult();
}
@Override
public void handleResponse(QueryFetchSearchResult response) {
listener.onResult(response);
}
@Override
public void handleException(TransportException exp) {
listener.onFailure(exp);
}
@Override
public String executor() {
// Handlers only delegate to the listener, so run on the transport thread.
return ThreadPool.Names.SAME;
}
});
| 1no label
|
src_main_java_org_elasticsearch_search_action_SearchServiceTransportAction.java
|
290 |
// JPA entity for an admin-defined ("data-driven") enumeration: a keyed set of
// enumeration values that may optionally be extended at runtime (modifiable).
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name="BLC_DATA_DRVN_ENUM")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "DataDrivenEnumerationImpl_friendyName")
public class DataDrivenEnumerationImpl implements DataDrivenEnumeration {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "DataDrivenEnumerationId")
@GenericGenerator(
name="DataDrivenEnumerationId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="DataDrivenEnumerationImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.common.enumeration.domain.DataDrivenEnumerationImpl")
}
)
@Column(name = "ENUM_ID")
protected Long id;
// NOTE(review): the index references columnNames {"KEY"} but the mapped
// column is "ENUM_KEY" — likely a stale column name; confirm against the DDL.
@Column(name = "ENUM_KEY")
@Index(name = "ENUM_KEY_INDEX", columnNames = {"KEY"})
@AdminPresentation(friendlyName = "DataDrivenEnumerationImpl_Key", order = 1, gridOrder = 1, prominent = true)
protected String key;
@Column(name = "MODIFIABLE")
@AdminPresentation(friendlyName = "DataDrivenEnumerationImpl_Modifiable", order = 2, gridOrder = 2, prominent = true)
protected Boolean modifiable = false;
@OneToMany(mappedBy = "type", targetEntity = DataDrivenEnumerationValueImpl.class, cascade = {CascadeType.ALL})
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
@AdminPresentationCollection(addType = AddMethodType.PERSIST, friendlyName = "DataDrivenEnumerationImpl_Enum_Values", order = 3)
protected List<DataDrivenEnumerationValue> enumValues = new ArrayList<DataDrivenEnumerationValue>();
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getKey() {
return key;
}
@Override
public void setKey(String key) {
this.key = key;
}
// Null-safe accessor: a null column value is reported as FALSE.
@Override
public Boolean getModifiable() {
if (modifiable == null) {
return Boolean.FALSE;
} else {
return modifiable;
}
}
@Override
public void setModifiable(Boolean modifiable) {
this.modifiable = modifiable;
}
@Override
public List<DataDrivenEnumerationValue> getEnumValues() {
return enumValues;
}
@Override
public void setEnumValues(List<DataDrivenEnumerationValue> enumValues) {
this.enumValues = enumValues;
}
// Deprecated aliases kept for backwards compatibility; they delegate to the
// enumValues collection.
@Override
@Deprecated
public List<DataDrivenEnumerationValue> getOrderItems() {
return enumValues;
}
@Override
@Deprecated
public void setOrderItems(List<DataDrivenEnumerationValue> orderItems) {
this.enumValues = orderItems;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_enumeration_domain_DataDrivenEnumerationImpl.java
|
25 |
/**
 * Completion proposal that wraps the expression at the caret in an
 * invocation of the selected function, e.g. turning {@code arg} into
 * {@code f(arg)}, adding a trailing {@code ;} for void functions and
 * importing the target declaration if necessary. The import edits and the
 * call edit are applied atomically through a single DocumentChange.
 */
final class FunctionCompletionProposal extends
        CompletionProposal {

    private final CeylonParseController cpc;
    private final Declaration dec;

    private FunctionCompletionProposal(int offset, String prefix,
            String desc, String text, Declaration dec,
            CeylonParseController cpc) {
        super(offset, prefix,
                getDecoratedImage(dec.isShared() ?
                        CEYLON_FUN : CEYLON_LOCAL_FUN,
                getDecorationAttributes(dec), false),
                desc, text);
        this.cpc = cpc;
        this.dec = dec;
    }

    /**
     * Builds the combined change: any missing import for {@code dec},
     * followed by the text edit that inserts the invocation.
     */
    private DocumentChange createChange(IDocument document)
            throws BadLocationException {
        DocumentChange change =
                new DocumentChange("Complete Invocation", document);
        change.setEdit(new MultiTextEdit());
        HashSet<Declaration> decs = new HashSet<Declaration>();
        Tree.CompilationUnit cu = cpc.getRootNode();
        importDeclaration(decs, dec, cu);
        // applyImports returns the length of inserted import text; the
        // completion offset must be shifted by that amount so the call
        // edit still targets the (now displaced) original expression.
        int il=applyImports(change, decs, cu, document);
        change.addEdit(createEdit(document));
        offset+=il;
        return change;
    }

    @Override
    public boolean isAutoInsertable() {
        // Requires an explicit user selection; never applied automatically.
        return false;
    }

    @Override
    public void apply(IDocument document) {
        try {
            createChange(document).perform(new NullProgressMonitor());
        }
        catch (Exception e) {
            // Best-effort: a failed completion should not break the editor.
            e.printStackTrace();
        }
    }

    /**
     * Offers a "wrap in invocation" proposal for the term under
     * {@code primary}, using the document text between the term's start
     * and the caret as the replacement prefix.
     */
    protected static void addFunctionProposal(int offset,
            final CeylonParseController cpc,
            Tree.Primary primary,
            List<ICompletionProposal> result,
            final Declaration dec,
            IDocument doc) {
        // Unwrap parenthesized/nested expressions to the underlying term.
        Tree.Term arg = primary;
        while (arg instanceof Tree.Expression) {
            arg = ((Tree.Expression) arg).getTerm();
        }
        final int start = arg.getStartIndex();
        final int stop = arg.getStopIndex();
        int origin = primary.getStartIndex();
        String argText;
        String prefix;
        try {
            //the argument
            argText = doc.get(start, stop-start+1);
            //the text to replace
            prefix = doc.get(origin, offset-origin);
        }
        catch (BadLocationException e) {
            // Stale offsets (document changed underneath us): skip proposal.
            return;
        }
        String text = dec.getName(arg.getUnit())
                + "(" + argText + ")";
        if (((Functional)dec).isDeclaredVoid()) {
            text += ";";
        }
        Unit unit = cpc.getRootNode().getUnit();
        result.add(new FunctionCompletionProposal(offset, prefix,
                getDescriptionFor(dec, unit) + "(...)", text, dec, cpc));
    }
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_FunctionCompletionProposal.java
|
149 |
public class ReadMostlyVector<E>
implements List<E>, RandomAccess, Cloneable, java.io.Serializable {
    private static final long serialVersionUID = 8673264195747942595L;

    /*
     * This class exists mainly as a vehicle to exercise various
     * constructions using SequenceLocks. Read-only methods
     * take one of a few forms:
     *
     * Short methods,including get(index), continually retry obtaining
     * a snapshot of array, count, and element, using sequence number
     * to validate.
     *
     * Methods that are potentially O(n) (or worse) try once in
     * read-only mode, and then lock. When in read-only mode, they
     * validate only at the end of an array scan unless the element is
     * actually used (for example, as an argument of method equals).
     *
     * We rely on some invariants that are always true, even for field
     * reads in read-only mode that have not yet been validated:
     * - array != null
     * - count >= 0
     */

    /**
     * The maximum size of array to allocate.
     * See CopyOnWriteArrayList for explanation.
     */
    private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

    // fields are non-private to simplify nested class access
    Object[] array;            // backing store; grown lazily
    final StampedLock lock;    // guards array/count for all mutation
    int count;                 // number of live elements (<= array.length)
    final int capacityIncrement; // growth step; 0 means double on resize
    /**
     * Creates an empty vector with the given initial capacity and
     * capacity increment.
     *
     * @param initialCapacity the initial capacity of the underlying array
     * @param capacityIncrement if non-zero, the number to
     * add when resizing to accommodate additional elements.
     * If zero, the array size is doubled when resized.
     *
     * @throws IllegalArgumentException if initial capacity is negative
     */
    public ReadMostlyVector(int initialCapacity, int capacityIncrement) {
        super();
        if (initialCapacity < 0)
            throw new IllegalArgumentException("Illegal Capacity: "+
                                               initialCapacity);
        this.array = new Object[initialCapacity];
        this.capacityIncrement = capacityIncrement;
        this.lock = new StampedLock();
    }

    /**
     * Creates an empty vector with the given initial capacity.
     *
     * @param initialCapacity the initial capacity of the underlying array
     * @throws IllegalArgumentException if initial capacity is negative
     */
    public ReadMostlyVector(int initialCapacity) {
        this(initialCapacity, 0);
    }

    /**
     * Creates an empty vector.
     */
    // NOTE: leaves array null; it is allocated on first add via grow().
    public ReadMostlyVector() {
        this.capacityIncrement = 0;
        this.lock = new StampedLock();
    }

    /**
     * Creates a vector containing the elements of the specified
     * collection, in the order they are returned by the collection's
     * iterator.
     *
     * @param c the collection of initially held elements
     * @throws NullPointerException if the specified collection is null
     */
    public ReadMostlyVector(Collection<? extends E> c) {
        Object[] elements = c.toArray();
        // c.toArray might (incorrectly) not return Object[] (see 6260652)
        if (elements.getClass() != Object[].class)
            elements = Arrays.copyOf(elements, elements.length, Object[].class);
        this.array = elements;
        this.count = elements.length;
        this.capacityIncrement = 0;
        this.lock = new StampedLock();
    }

    // internal constructor for clone; takes ownership of the given array
    ReadMostlyVector(Object[] array, int count, int capacityIncrement) {
        this.array = array;
        this.count = count;
        this.capacityIncrement = capacityIncrement;
        this.lock = new StampedLock();
    }

    // default capacity used when the array is first allocated lazily
    static final int INITIAL_CAP = 16;
    // For explanation, see CopyOnWriteArrayList
    /**
     * Grows the backing array to hold at least {@code minCapacity}
     * elements and returns it. Caller must hold the write lock.
     * Growth is by capacityIncrement if set, else doubling, clamped to
     * MAX_ARRAY_SIZE (with Integer.MAX_VALUE as last resort); a negative
     * minCapacity signals int overflow and raises OutOfMemoryError.
     */
    final Object[] grow(int minCapacity) {
        Object[] items;
        int newCapacity;
        if ((items = array) == null)
            newCapacity = INITIAL_CAP;
        else {
            int oldCapacity = array.length;
            newCapacity = oldCapacity + ((capacityIncrement > 0) ?
                                         capacityIncrement : oldCapacity);
        }
        if (newCapacity - minCapacity < 0)
            newCapacity = minCapacity;
        if (newCapacity - MAX_ARRAY_SIZE > 0) {
            if (minCapacity < 0) // overflow
                throw new OutOfMemoryError();
            else if (minCapacity > MAX_ARRAY_SIZE)
                newCapacity = Integer.MAX_VALUE;
            else
                newCapacity = MAX_ARRAY_SIZE;
        }
        return array = ((items == null) ?
                        new Object[newCapacity] :
                        Arrays.copyOf(items, newCapacity));
    }
/*
* Internal versions of most base functionality, wrapped
* in different ways from public methods from this class
* as well as sublist and iterator classes.
*/
static int findFirstIndex(Object[] items, Object x, int index, int fence) {
int len;
if (items != null && (len = items.length) > 0) {
int start = (index < 0) ? 0 : index;
int bound = (fence < len) ? fence : len;
for (int i = start; i < bound; ++i) {
Object e = items[i];
if ((x == null) ? e == null : x.equals(e))
return i;
}
}
return -1;
}
static int findLastIndex(Object[] items, Object x, int index, int origin) {
int len;
if (items != null && (len = items.length) > 0) {
int last = (index < len) ? index : len - 1;
int start = (origin < 0) ? 0 : origin;
for (int i = last; i >= start; --i) {
Object e = items[i];
if ((x == null) ? e == null : x.equals(e))
return i;
}
}
return -1;
}
final void rawAdd(E e) {
int n = count;
Object[] items = array;
if (items == null || n >= items.length)
items = grow(n + 1);
items[n] = e;
count = n + 1;
}
    /**
     * Inserts {@code e} at {@code index}, shifting later elements right.
     * Caller must hold the write lock.
     * @throws ArrayIndexOutOfBoundsException if index > count
     */
    final void rawAddAt(int index, E e) {
        int n = count;
        Object[] items = array;
        if (index > n)
            throw new ArrayIndexOutOfBoundsException(index);
        if (items == null || n >= items.length)
            items = grow(n + 1);
        if (index < n)
            System.arraycopy(items, index, items, index + 1, n - index);
        items[index] = e;
        count = n + 1;
    }

    /**
     * Inserts all of {@code elements} at {@code index}, shifting later
     * elements right. Caller must hold the write lock.
     * @return true if any element was added (i.e. elements was non-empty)
     * @throws ArrayIndexOutOfBoundsException if index is out of range
     */
    final boolean rawAddAllAt(int index, Object[] elements) {
        int n = count;
        Object[] items = array;
        if (index < 0 || index > n)
            throw new ArrayIndexOutOfBoundsException(index);
        int len = elements.length;
        if (len == 0)
            return false;
        int newCount = n + len;
        if (items == null || newCount >= items.length)
            items = grow(newCount);
        int mv = n - index;
        if (mv > 0)
            System.arraycopy(items, index, items, index + len, mv);
        System.arraycopy(elements, 0, items, index, len);
        count = newCount;
        return true;
    }

    /**
     * Removes the element at {@code index}, shifting later elements left
     * and nulling the vacated slot. Caller must hold the write lock.
     * @return false (rather than throwing) if index is out of range,
     *         which also lets callers pass -1 from a failed search
     */
    final boolean rawRemoveAt(int index) {
        int n = count - 1;
        Object[] items = array;
        if (items == null || index < 0 || index > n)
            return false;
        int mv = n - index;
        if (mv > 0)
            System.arraycopy(items, index + 1, items, index, mv);
        items[n] = null;
        count = n;
        return true;
    }
    /**
     * Internal version of removeAll for lists and sublists. In this
     * and other similar methods below, the bound argument is, if
     * non-negative, the purported upper bound of a list/sublist, or
     * is left negative if the bound should be determined via count
     * field under lock.
     */
    final boolean lockedRemoveAll(Collection<?> c, int origin, int bound) {
        boolean removed = false;
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            int n = count;
            int fence = bound < 0 || bound > n ? n : bound;
            if (origin >= 0 && origin < fence) {
                for (Object x : c) {
                    // repeat until no occurrence of x remains in range;
                    // rawRemoveAt(-1) is a harmless no-op when not found
                    while (rawRemoveAt(findFirstIndex(array, x, origin, fence)))
                        removed = true;
                }
            }
        } finally {
            lock.unlockWrite(stamp);
        }
        return removed;
    }
final boolean lockedRetainAll(Collection<?> c, int origin, int bound) {
final StampedLock lock = this.lock;
boolean removed = false;
if (c != this) {
long stamp = lock.writeLock();
try {
Object[] items;
int i, n;
if ((items = array) != null && (n = count) > 0 &&
n < items.length && (i = origin) >= 0) {
int fence = bound < 0 || bound > n ? n : bound;
while (i < fence) {
if (c.contains(items[i]))
++i;
else {
--fence;
int mv = --n - i;
if (mv > 0)
System.arraycopy(items, i + 1, items, i, mv);
}
}
if (count != n) {
count = n;
removed = true;
}
}
} finally {
lock.unlockWrite(stamp);
}
}
return removed;
}
final void internalClear(int origin, int bound) {
Object[] items;
int n, len;
if ((items = array) != null && (len = items.length) > 0) {
if (origin < 0)
origin = 0;
if ((n = count) > len)
n = len;
int fence = bound < 0 || bound > n ? n : bound;
int removed = fence - origin;
int newCount = n - removed;
int mv = n - (origin + removed);
if (mv > 0)
System.arraycopy(items, origin + removed, items, origin, mv);
for (int i = n; i < newCount; ++i)
items[i] = null;
count = newCount;
}
}
    /**
     * Returns true if every element of {@code c} occurs in
     * [origin, bound). Caller must hold at least a read lock.
     */
    final boolean internalContainsAll(Collection<?> c, int origin, int bound) {
        Object[] items;
        int n, len;
        if ((items = array) != null && (len = items.length) > 0) {
            if (origin < 0)
                origin = 0;
            if ((n = count) > len)
                n = len;
            int fence = bound < 0 || bound > n ? n : bound;
            for (Object e : c) {
                if (findFirstIndex(items, e, origin, fence) < 0)
                    return false;
            }
        }
        else if (!c.isEmpty())
            return false;
        return true;
    }

    /**
     * List.equals over the range [origin, bound): element-wise comparison
     * against {@code list}'s iteration order, including length equality.
     * Caller must hold at least a read lock.
     */
    final boolean internalEquals(List<?> list, int origin, int bound) {
        Object[] items;
        int n, len;
        if ((items = array) != null && (len = items.length) > 0) {
            if (origin < 0)
                origin = 0;
            if ((n = count) > len)
                n = len;
            int fence = bound < 0 || bound > n ? n : bound;
            Iterator<?> it = list.iterator();
            for (int i = origin; i < fence; ++i) {
                if (!it.hasNext())
                    return false;
                Object y = it.next();
                Object x = items[i];
                if (x != y && (x == null || !x.equals(y)))
                    return false;
            }
            if (it.hasNext())
                return false;
        }
        else if (!list.isEmpty())
            return false;
        return true;
    }

    /**
     * List.hashCode over [origin, bound), matching the contract
     * specified by java.util.List. Caller must hold at least a read lock.
     */
    final int internalHashCode(int origin, int bound) {
        int hash = 1;
        Object[] items;
        int n, len;
        if ((items = array) != null && (len = items.length) > 0) {
            if (origin < 0)
                origin = 0;
            if ((n = count) > len)
                n = len;
            int fence = bound < 0 || bound > n ? n : bound;
            for (int i = origin; i < fence; ++i) {
                Object e = items[i];
                hash = 31*hash + (e == null ? 0 : e.hashCode());
            }
        }
        return hash;
    }

    /**
     * AbstractCollection-style rendering of [origin, bound), with the
     * usual "(this Collection)" guard against self-containment.
     * Caller must hold at least a read lock.
     */
    final String internalToString(int origin, int bound) {
        Object[] items;
        int n, len;
        if ((items = array) != null && (len = items.length) > 0) {
            if ((n = count) > len)
                n = len;
            int fence = bound < 0 || bound > n ? n : bound;
            int i = (origin < 0) ? 0 : origin;
            if (i != fence) {
                StringBuilder sb = new StringBuilder();
                sb.append('[');
                for (;;) {
                    Object e = items[i];
                    sb.append((e == this) ? "(this Collection)" : e.toString());
                    if (++i < fence)
                        sb.append(',').append(' ');
                    else
                        return sb.append(']').toString();
                }
            }
        }
        return "[]";
    }
    /**
     * Returns a fresh Object[] copy of [origin, bound), or an empty array
     * when the range is empty. Caller must hold at least a read lock.
     */
    final Object[] internalToArray(int origin, int bound) {
        Object[] items;
        int n, len;
        if ((items = array) != null && (len = items.length) > 0) {
            if (origin < 0)
                origin = 0;
            if ((n = count) > len)
                n = len;
            int fence = bound < 0 || bound > n ? n : bound;
            int i = (origin < 0) ? 0 : origin;  // redundant after clamp above
            if (i != fence)
                return Arrays.copyOfRange(items, i, fence, Object[].class);
        }
        return new Object[0];
    }
@SuppressWarnings("unchecked")
final <T> T[] internalToArray(T[] a, int origin, int bound) {
int alen = a.length;
Object[] items;
int n, len;
if ((items = array) != null && (len = items.length) > 0) {
if (origin < 0)
origin = 0;
if ((n = count) > len)
n = len;
int fence = bound < 0 || bound > n ? n : bound;
int i = (origin < 0) ? 0 : origin;
int rlen = fence - origin;
if (rlen > 0) {
if (alen >= rlen) {
System.arraycopy(items, 0, a, origin, rlen);
if (alen > rlen)
a[rlen] = null;
return a;
}
return (T[]) Arrays.copyOfRange(items, i, fence, a.getClass());
}
}
if (alen > 0)
a[0] = null;
return a;
}
    // public List methods

    /** Appends e under the write lock; always returns true. */
    public boolean add(E e) {
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            rawAdd(e);
        } finally {
            lock.unlockWrite(stamp);
        }
        return true;
    }

    /** Inserts element at index under the write lock. */
    public void add(int index, E element) {
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            rawAddAt(index, element);
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    /**
     * Appends all elements of c. The snapshot (c.toArray()) is taken
     * OUTSIDE the lock to keep the critical section short.
     */
    public boolean addAll(Collection<? extends E> c) {
        Object[] elements = c.toArray();
        int len = elements.length;
        if (len == 0)
            return false;
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            Object[] items = array;
            int n = count;
            int newCount = n + len;
            if (items == null || newCount >= items.length)
                items = grow(newCount);
            System.arraycopy(elements, 0, items, n, len);
            count = newCount;
        } finally {
            lock.unlockWrite(stamp);
        }
        return true;
    }

    /** Inserts all elements of c at index under the write lock. */
    public boolean addAll(int index, Collection<? extends E> c) {
        Object[] elements = c.toArray();
        boolean ret;
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            ret = rawAddAllAt(index, elements);
        } finally {
            lock.unlockWrite(stamp);
        }
        return ret;
    }

    /** Nulls all live slots and resets count, under the write lock. */
    public void clear() {
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            int n = count;
            Object[] items = array;
            if (items != null) {
                for (int i = 0; i < n; i++)
                    items[i] = null;
            }
            count = 0;
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    public boolean contains(Object o) {
        return indexOf(o, 0) >= 0;
    }

    public boolean containsAll(Collection<?> c) {
        boolean ret;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            ret = internalContainsAll(c, 0, -1);
        } finally {
            lock.unlockRead(stamp);
        }
        return ret;
    }

    /** List.equals semantics, evaluated under the read lock. */
    public boolean equals(Object o) {
        if (o == this)
            return true;
        if (!(o instanceof List))
            return false;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            return internalEquals((List<?>)o, 0, -1);
        } finally {
            lock.unlockRead(stamp);
        }
    }
    /**
     * Optimistic fast path: read the element with no lock, then validate
     * the stamp; on any contention fall back to lockedGet.
     */
    public E get(int index) {
        final StampedLock lock = this.lock;
        long stamp = lock.tryOptimisticRead();
        Object[] items;
        if (index >= 0 && (items = array) != null &&
            index < count && index < items.length) {
            @SuppressWarnings("unchecked") E e = (E)items[index];
            if (lock.validate(stamp))
                return e;
        }
        return lockedGet(index);
    }

    /** Slow path for get: full read lock; throws AIOOBE outside the lock. */
    @SuppressWarnings("unchecked") private E lockedGet(int index) {
        boolean oobe = false;
        E e = null;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            Object[] items;
            if ((items = array) != null && index < items.length &&
                index < count && index >= 0)
                e = (E)items[index];
            else
                oobe = true;
        } finally {
            lock.unlockRead(stamp);
        }
        if (oobe)
            throw new ArrayIndexOutOfBoundsException(index);
        return e;
    }

    public int hashCode() {
        int h;
        final StampedLock lock = this.lock;
        long s = lock.readLock();
        try {
            h = internalHashCode(0, -1);
        } finally {
            lock.unlockRead(s);
        }
        return h;
    }

    public int indexOf(Object o) {
        int idx;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            idx = findFirstIndex(array, o, 0, count);
        } finally {
            lock.unlockRead(stamp);
        }
        return idx;
    }

    public boolean isEmpty() {
        final StampedLock lock = this.lock;
        // NOTE(review): the stamp is acquired but never validated/used; a
        // plain read of count is relied upon here (count >= 0 invariant).
        long stamp = lock.tryOptimisticRead();
        return count == 0; // no need for validation
    }

    public Iterator<E> iterator() {
        return new Itr<E>(this, 0);
    }

    public int lastIndexOf(Object o) {
        int idx;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            idx = findLastIndex(array, o, count - 1, 0);
        } finally {
            lock.unlockRead(stamp);
        }
        return idx;
    }

    public ListIterator<E> listIterator() {
        return new Itr<E>(this, 0);
    }

    public ListIterator<E> listIterator(int index) {
        return new Itr<E>(this, index);
    }
    /**
     * Removes and returns the element at index. The out-of-bounds flag is
     * recorded under the lock and the exception thrown after release.
     */
    @SuppressWarnings("unchecked") public E remove(int index) {
        E oldValue = null;
        boolean oobe = false;
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            if (index < 0 || index >= count)
                oobe = true;
            else {
                oldValue = (E) array[index];
                rawRemoveAt(index);
            }
        } finally {
            lock.unlockWrite(stamp);
        }
        if (oobe)
            throw new ArrayIndexOutOfBoundsException(index);
        return oldValue;
    }

    /** Removes the first occurrence of o; false if absent. */
    public boolean remove(Object o) {
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            return rawRemoveAt(findFirstIndex(array, o, 0, count));
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    public boolean removeAll(Collection<?> c) {
        return lockedRemoveAll(c, 0, -1);
    }

    public boolean retainAll(Collection<?> c) {
        return lockedRetainAll(c, 0, -1);
    }

    /** Replaces the element at index and returns the previous value. */
    @SuppressWarnings("unchecked") public E set(int index, E element) {
        E oldValue = null;
        boolean oobe = false;
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            Object[] items = array;
            if (items == null || index < 0 || index >= count)
                oobe = true;
            else {
                oldValue = (E) items[index];
                items[index] = element;
            }
        } finally {
            lock.unlockWrite(stamp);
        }
        if (oobe)
            throw new ArrayIndexOutOfBoundsException(index);
        return oldValue;
    }

    public int size() {
        final StampedLock lock = this.lock;
        // NOTE(review): stamp acquired but unused; relies on a plain read
        // of count, which is always >= 0 by invariant.
        long stamp = lock.tryOptimisticRead();
        return count; // no need for validation
    }

    /** Read-locked size, used where a consistent value is required. */
    private int lockedSize() {
        int n;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            n = count;
        } finally {
            lock.unlockRead(stamp);
        }
        return n;
    }

    /**
     * Returns a live sublist view [fromIndex, toIndex). Bounds are checked
     * against count under the read lock; the AIOOBE is thrown outside it.
     */
    public List<E> subList(int fromIndex, int toIndex) {
        int ssize = toIndex - fromIndex;
        if (ssize >= 0 && fromIndex >= 0) {
            ReadMostlyVectorSublist<E> ret = null;
            final StampedLock lock = this.lock;
            long stamp = lock.readLock();
            try {
                if (toIndex <= count)
                    ret = new ReadMostlyVectorSublist<E>(this, fromIndex, ssize);
            } finally {
                lock.unlockRead(stamp);
            }
            if (ret != null)
                return ret;
        }
        throw new ArrayIndexOutOfBoundsException(fromIndex < 0 ? fromIndex : toIndex);
    }
    /** Snapshot copy of the elements, taken under the read lock. */
    public Object[] toArray() {
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            return internalToArray(0, -1);
        } finally {
            lock.unlockRead(stamp);
        }
    }

    /** Typed snapshot per the Collection.toArray(T[]) contract. */
    public <T> T[] toArray(T[] a) {
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            return internalToArray(a, 0, -1);
        } finally {
            lock.unlockRead(stamp);
        }
    }

    public String toString() {
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            return internalToString(0, -1);
        } finally {
            lock.unlockRead(stamp);
        }
    }
    // ReadMostlyVector-only methods

    /**
     * Appends the element, if not present.
     *
     * @param e element to be added to this list, if absent
     * @return {@code true} if the element was added
     */
    public boolean addIfAbsent(E e) {
        boolean ret;
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            // check-then-act is atomic because both happen under the
            // same write lock
            if (findFirstIndex(array, e, 0, count) < 0) {
                rawAdd(e);
                ret = true;
            }
            else
                ret = false;
        } finally {
            lock.unlockWrite(stamp);
        }
        return ret;
    }

    /**
     * Appends all of the elements in the specified collection that
     * are not already contained in this list, to the end of
     * this list, in the order that they are returned by the
     * specified collection's iterator.
     *
     * @param c collection containing elements to be added to this list
     * @return the number of elements added
     * @throws NullPointerException if the specified collection is null
     * @see #addIfAbsent(Object)
     */
    public int addAllAbsent(Collection<? extends E> c) {
        int added = 0;
        Object[] cs = c.toArray();
        int clen = cs.length;
        if (clen != 0) {
            long stamp = lock.writeLock();
            try {
                for (int i = 0; i < clen; ++i) {
                    @SuppressWarnings("unchecked")
                    E e = (E) cs[i];
                    if (findFirstIndex(array, e, 0, count) < 0) {
                        rawAdd(e);
                        ++added;
                    }
                }
            } finally {
                lock.unlockWrite(stamp);
            }
        }
        return added;
    }

    /**
     * Returns an iterator operating over a snapshot copy of the
     * elements of this collection created upon construction of the
     * iterator. The iterator does <em>NOT</em> support the
     * {@code remove} method.
     *
     * @return an iterator over the elements in this list in proper sequence
     */
    public Iterator<E> snapshotIterator() {
        return new SnapshotIterator<E>(this);
    }
static final class SnapshotIterator<E> implements Iterator<E> {
private final Object[] items;
private int cursor;
SnapshotIterator(ReadMostlyVector<E> v) { items = v.toArray(); }
public boolean hasNext() { return cursor < items.length; }
@SuppressWarnings("unchecked") public E next() {
if (cursor < items.length)
return (E) items[cursor++];
throw new NoSuchElementException();
}
public void remove() { throw new UnsupportedOperationException() ; }
}
    /** Interface describing a void action of one argument */
    public interface Action<A> { void apply(A a); }

    /**
     * Applies {@code action} to each element while holding the read lock
     * for the whole traversal; the action must not attempt to mutate this
     * vector (it would deadlock trying to acquire the write lock).
     */
    public void forEachReadOnly(Action<E> action) {
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            Object[] items;
            int len, n;
            if ((items = array) != null && (len = items.length) > 0 &&
                (n = count) <= len) {
                for (int i = 0; i < n; ++i) {
                    @SuppressWarnings("unchecked") E e = (E)items[i];
                    action.apply(e);
                }
            }
        } finally {
            lock.unlockRead(stamp);
        }
    }
    // Vector-only methods

    /** See {@link Vector#firstElement} */
    // Optimistic fast path; falls back to the read-locked variant.
    public E firstElement() {
        final StampedLock lock = this.lock;
        long stamp = lock.tryOptimisticRead();
        Object[] items;
        if ((items = array) != null && count > 0 && items.length > 0) {
            @SuppressWarnings("unchecked") E e = (E)items[0];
            if (lock.validate(stamp))
                return e;
        }
        return lockedFirstElement();
    }

    /** Slow path for firstElement; throws NoSuchElementException if empty. */
    @SuppressWarnings("unchecked") private E lockedFirstElement() {
        Object e = null;
        boolean oobe = false;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            Object[] items = array;
            if (items != null && count > 0 && items.length > 0)
                e = items[0];
            else
                oobe = true;
        } finally {
            lock.unlockRead(stamp);
        }
        if (oobe)
            throw new NoSuchElementException();
        return (E) e;
    }

    /** See {@link Vector#lastElement} */
    public E lastElement() {
        final StampedLock lock = this.lock;
        long stamp = lock.tryOptimisticRead();
        Object[] items;
        int i;
        if ((items = array) != null && (i = count - 1) >= 0 &&
            i < items.length) {
            @SuppressWarnings("unchecked") E e = (E)items[i];
            if (lock.validate(stamp))
                return e;
        }
        return lockedLastElement();
    }

    /** Slow path for lastElement; throws NoSuchElementException if empty. */
    @SuppressWarnings("unchecked") private E lockedLastElement() {
        Object e = null;
        boolean oobe = false;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            Object[] items = array;
            int i = count - 1;
            if (items != null && i >= 0 && i < items.length)
                e = items[i];
            else
                oobe = true;
        } finally {
            lock.unlockRead(stamp);
        }
        if (oobe)
            throw new NoSuchElementException();
        return (E) e;
    }

    /** See {@link Vector#indexOf(Object, int)} */
    public int indexOf(Object o, int index) {
        if (index < 0)
            throw new ArrayIndexOutOfBoundsException(index);
        int idx;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            idx = findFirstIndex(array, o, index, count);
        } finally {
            lock.unlockRead(stamp);
        }
        return idx;
    }

    /** See {@link Vector#lastIndexOf(Object, int)} */
    public int lastIndexOf(Object o, int index) {
        boolean oobe = false;
        int idx = -1;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            if (index < count)
                idx = findLastIndex(array, o, index, 0);
            else
                oobe = true;
        } finally {
            lock.unlockRead(stamp);
        }
        if (oobe)
            throw new ArrayIndexOutOfBoundsException(index);
        return idx;
    }
    /** See {@link Vector#setSize} */
    // Grows with nulls or truncates (nulling trimmed slots) to newSize.
    public void setSize(int newSize) {
        if (newSize < 0)
            throw new ArrayIndexOutOfBoundsException(newSize);
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            Object[] items;
            int n = count;
            if (newSize > n)
                grow(newSize);
            else if ((items = array) != null) {
                for (int i = newSize ; i < n ; i++)
                    items[i] = null;
            }
            count = newSize;
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    /** See {@link Vector#copyInto} */
    // NOTE(review): acquires the WRITE lock although it only reads;
    // presumably to guarantee a fully consistent copy — confirm intent.
    public void copyInto(Object[] anArray) {
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            Object[] items;
            if ((items = array) != null)
                System.arraycopy(items, 0, anArray, 0, count);
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    /** See {@link Vector#trimToSize} */
    public void trimToSize() {
        final StampedLock lock = this.lock;
        long stamp = lock.writeLock();
        try {
            Object[] items = array;
            int n = count;
            if (items != null && n < items.length)
                array = Arrays.copyOf(items, n);
        } finally {
            lock.unlockWrite(stamp);
        }
    }

    /** See {@link Vector#ensureCapacity} */
    public void ensureCapacity(int minCapacity) {
        if (minCapacity > 0) {
            final StampedLock lock = this.lock;
            long stamp = lock.writeLock();
            try {
                Object[] items = array;
                int cap = (items == null) ? 0 : items.length;
                if (minCapacity - cap > 0)
                    grow(minCapacity);
            } finally {
                lock.unlockWrite(stamp);
            }
        }
    }
/** See {@link Vector#elements} */
public Enumeration<E> elements() {
return new Itr<E>(this, 0);
}
/** See {@link Vector#capacity} */
public int capacity() {
return array.length;
}
    // Vector-compatibility aliases; each delegates to the List-style method.

    /** See {@link Vector#elementAt} */
    public E elementAt(int index) {
        return get(index);
    }

    /** See {@link Vector#setElementAt} */
    public void setElementAt(E obj, int index) {
        set(index, obj);
    }

    /** See {@link Vector#removeElementAt} */
    public void removeElementAt(int index) {
        remove(index);
    }

    /** See {@link Vector#insertElementAt} */
    public void insertElementAt(E obj, int index) {
        add(index, obj);
    }

    /** See {@link Vector#addElement} */
    public void addElement(E obj) {
        add(obj);
    }

    /** See {@link Vector#removeElement} */
    public boolean removeElement(Object obj) {
        return remove(obj);
    }

    /** See {@link Vector#removeAllElements} */
    public void removeAllElements() {
        clear();
    }
    // other methods

    /**
     * Returns a copy sharing no storage with this vector; the snapshot of
     * the element array is taken under the read lock.
     */
    public ReadMostlyVector<E> clone() {
        Object[] a = null;
        int n;
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            Object[] items = array;
            if (items == null)
                n = 0;
            else {
                int len = items.length;
                if ((n = count) > len)
                    n = len;
                a = Arrays.copyOf(items, n);
            }
        } finally {
            lock.unlockRead(stamp);
        }
        return new ReadMostlyVector<E>(a, n, capacityIncrement);
    }

    /** Serializes under the read lock for a consistent snapshot. */
    private void writeObject(java.io.ObjectOutputStream s)
            throws java.io.IOException {
        final StampedLock lock = this.lock;
        long stamp = lock.readLock();
        try {
            s.defaultWriteObject();
        } finally {
            lock.unlockRead(stamp);
        }
    }
    /**
     * List iterator/enumeration over the live vector. It caches the array
     * and count at construction, then runs in OPTIMISTIC-read mode: each
     * next()/previous() validates the stamp after reading and throws
     * ConcurrentModificationException on interference. Mutating operations
     * upgrade the optimistic stamp to a write lock and downgrade back.
     */
    static final class Itr<E> implements ListIterator<E>, Enumeration<E> {
        final StampedLock lock;
        final ReadMostlyVector<E> list;
        Object[] items;   // cached backing array
        long seq;         // current optimistic/lock stamp
        int cursor;       // index of next element to return
        int fence;        // cached count; one past the last index
        int lastRet;      // index of last returned element, or -1

        Itr(ReadMostlyVector<E> list, int index) {
            final StampedLock lock = list.lock;
            long stamp = lock.readLock();
            try {
                this.list = list;
                this.lock = lock;
                this.items = list.array;
                this.fence = list.count;
                this.cursor = index;
                this.lastRet = -1;
            } finally {
                // downgrade: keep an optimistic stamp for later validation
                this.seq = lock.tryConvertToOptimisticRead(stamp);
            }
            if (index < 0 || index > fence)
                throw new ArrayIndexOutOfBoundsException(index);
        }

        public boolean hasPrevious() {
            return cursor > 0;
        }

        public int nextIndex() {
            return cursor;
        }

        public int previousIndex() {
            return cursor - 1;
        }

        public boolean hasNext() {
            return cursor < fence;
        }

        public E next() {
            int i = cursor;
            Object[] es = items;
            if (es == null || i < 0 || i >= fence || i >= es.length)
                throw new NoSuchElementException();
            @SuppressWarnings("unchecked") E e = (E)es[i];
            lastRet = i;
            cursor = i + 1;
            // validate AFTER the read: detect concurrent modification
            if (!lock.validate(seq))
                throw new ConcurrentModificationException();
            return e;
        }

        public E previous() {
            int i = cursor - 1;
            Object[] es = items;
            if (es == null || i < 0 || i >= fence || i >= es.length)
                throw new NoSuchElementException();
            @SuppressWarnings("unchecked") E e = (E)es[i];
            lastRet = i;
            cursor = i;
            if (!lock.validate(seq))
                throw new ConcurrentModificationException();
            return e;
        }

        public void remove() {
            int i = lastRet;
            if (i < 0)
                throw new IllegalStateException();
            // upgrade to write lock; fails (stamp 0) if interfered with
            if ((seq = lock.tryConvertToWriteLock(seq)) == 0)
                throw new ConcurrentModificationException();
            try {
                list.rawRemoveAt(i);
                fence = list.count;
                cursor = i;
                lastRet = -1;
            } finally {
                seq = lock.tryConvertToOptimisticRead(seq);
            }
        }

        public void set(E e) {
            int i = lastRet;
            Object[] es = items;
            // non-short-circuit '|' is intentional here; both operands
            // are cheap and side-effect free
            if (es == null || i < 0 | i >= fence)
                throw new IllegalStateException();
            if ((seq = lock.tryConvertToWriteLock(seq)) == 0)
                throw new ConcurrentModificationException();
            try {
                es[i] = e;
            } finally {
                seq = lock.tryConvertToOptimisticRead(seq);
            }
        }

        public void add(E e) {
            int i = cursor;
            if (i < 0)
                throw new IllegalStateException();
            if ((seq = lock.tryConvertToWriteLock(seq)) == 0)
                throw new ConcurrentModificationException();
            try {
                list.rawAddAt(i, e);
                items = list.array;   // array may have been replaced by grow
                fence = list.count;
                cursor = i + 1;
                lastRet = -1;
            } finally {
                seq = lock.tryConvertToOptimisticRead(seq);
            }
        }

        // Enumeration compatibility
        public boolean hasMoreElements() { return hasNext(); }
        public E nextElement() { return next(); }
    }
static final class ReadMostlyVectorSublist<E>
implements List<E>, RandomAccess, java.io.Serializable {
private static final long serialVersionUID = 3041673470172026059L;
final ReadMostlyVector<E> list;
final int offset;
volatile int size;
ReadMostlyVectorSublist(ReadMostlyVector<E> list,
int offset, int size) {
this.list = list;
this.offset = offset;
this.size = size;
}
private void rangeCheck(int index) {
if (index < 0 || index >= size)
throw new ArrayIndexOutOfBoundsException(index);
}
public boolean add(E element) {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
int c = size;
list.rawAddAt(c + offset, element);
size = c + 1;
} finally {
lock.unlockWrite(stamp);
}
return true;
}
public void add(int index, E element) {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
if (index < 0 || index > size)
throw new ArrayIndexOutOfBoundsException(index);
list.rawAddAt(index + offset, element);
++size;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean addAll(Collection<? extends E> c) {
Object[] elements = c.toArray();
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
int s = size;
int pc = list.count;
list.rawAddAllAt(offset + s, elements);
int added = list.count - pc;
size = s + added;
return added != 0;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean addAll(int index, Collection<? extends E> c) {
Object[] elements = c.toArray();
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
int s = size;
if (index < 0 || index > s)
throw new ArrayIndexOutOfBoundsException(index);
int pc = list.count;
list.rawAddAllAt(index + offset, elements);
int added = list.count - pc;
size = s + added;
return added != 0;
} finally {
lock.unlockWrite(stamp);
}
}
public void clear() {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
list.internalClear(offset, offset + size);
size = 0;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean contains(Object o) {
return indexOf(o) >= 0;
}
public boolean containsAll(Collection<?> c) {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalContainsAll(c, offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof List))
return false;
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalEquals((List<?>)(o), offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public E get(int index) {
if (index < 0 || index >= size)
throw new ArrayIndexOutOfBoundsException(index);
return list.get(index + offset);
}
public int hashCode() {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalHashCode(offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
        /**
         * First occurrence of o, as a sublist-relative index, or -1. The parent
         * array is scanned between the absolute bounds under the read lock.
         */
        public int indexOf(Object o) {
            final StampedLock lock = list.lock;
            long stamp = lock.readLock();
            try {
                int idx = findFirstIndex(list.array, o, offset, offset + size);
                return idx < 0 ? -1 : idx - offset;
            } finally {
                lock.unlockRead(stamp);
            }
        }
public boolean isEmpty() {
return size() == 0;
}
        /** Iterator starting at the sublist's first element (absolute index = offset). */
        public Iterator<E> iterator() {
            return new SubItr<E>(this, offset);
        }
        /** Last occurrence of o, as a sublist-relative index, or -1 (backward scan). */
        public int lastIndexOf(Object o) {
            final StampedLock lock = list.lock;
            long stamp = lock.readLock();
            try {
                int idx = findLastIndex(list.array, o, offset + size - 1, offset);
                return idx < 0 ? -1 : idx - offset;
            } finally {
                lock.unlockRead(stamp);
            }
        }
        /** ListIterator positioned at the start of the sublist. */
        public ListIterator<E> listIterator() {
            return new SubItr<E>(this, offset);
        }
        /** ListIterator positioned at a sublist-relative index (converted to absolute). */
        public ListIterator<E> listIterator(int index) {
            return new SubItr<E>(this, index + offset);
        }
        /**
         * Removes and returns the element at the sublist-relative index, under
         * the parent's write lock. Also guards against a stale/absent backing
         * array before reading the element.
         */
        public E remove(int index) {
            final StampedLock lock = list.lock;
            long stamp = lock.writeLock();
            try {
                Object[] items = list.array;
                int i = index + offset;
                if (items == null || index < 0 || index >= size || i >= items.length)
                    throw new ArrayIndexOutOfBoundsException(index);
                @SuppressWarnings("unchecked") E result = (E)items[i];
                list.rawRemoveAt(i);
                size--;
                return result;
            } finally {
                lock.unlockWrite(stamp);
            }
        }
        /**
         * Removes the first occurrence of o from this sublist, if present.
         * rawRemoveAt is assumed to reject a negative (not-found) index and
         * return false, leaving size untouched.
         */
        public boolean remove(Object o) {
            final StampedLock lock = list.lock;
            long stamp = lock.writeLock();
            try {
                if (list.rawRemoveAt(findFirstIndex(list.array, o, offset,
                                                    offset + size))) {
                    --size;
                    return true;
                }
                else
                    return false;
            } finally {
                lock.unlockWrite(stamp);
            }
        }
        /**
         * Bulk remove over this sublist's range, delegated to the parent's locked
         * implementation. NOTE(review): this.size is not adjusted here — assumed
         * to be handled inside lockedRemoveAll; verify against the parent class.
         */
        public boolean removeAll(Collection<?> c) {
            return list.lockedRemoveAll(c, offset, offset + size);
        }
        /**
         * Bulk retain over this sublist's range, delegated to the parent's locked
         * implementation. NOTE(review): this.size is not adjusted here — assumed
         * to be handled inside lockedRetainAll; verify against the parent class.
         */
        public boolean retainAll(Collection<?> c) {
            return list.lockedRetainAll(c, offset, offset + size);
        }
        /**
         * Replaces the element at the sublist-relative index; returns the previous
         * value. Bound check is against the cached size; list.set locks internally.
         */
        public E set(int index, E element) {
            if (index < 0 || index >= size)
                throw new ArrayIndexOutOfBoundsException(index);
            return list.set(index+offset, element);
        }
        /** Current element count of this sublist (plain read; no locking). */
        public int size() {
            return size;
        }
        /**
         * Returns a nested sublist view; offsets compose so the new view still
         * addresses the original parent list directly.
         */
        public List<E> subList(int fromIndex, int toIndex) {
            int c = size;
            int ssize = toIndex - fromIndex;
            if (fromIndex < 0)
                throw new ArrayIndexOutOfBoundsException(fromIndex);
            // ssize < 0 also catches fromIndex > toIndex
            if (toIndex > c || ssize < 0)
                throw new ArrayIndexOutOfBoundsException(toIndex);
            return new ReadMostlyVectorSublist<E>(list, offset+fromIndex, ssize);
        }
        /** Snapshot of this sublist's elements, taken under the parent's read lock. */
        public Object[] toArray() {
            final StampedLock lock = list.lock;
            long stamp = lock.readLock();
            try {
                return list.internalToArray(offset, offset + size);
            } finally {
                lock.unlockRead(stamp);
            }
        }
        /** Typed snapshot of this sublist's elements (List.toArray(T[]) contract). */
        public <T> T[] toArray(T[] a) {
            final StampedLock lock = list.lock;
            long stamp = lock.readLock();
            try {
                return list.internalToArray(a, offset, offset + size);
            } finally {
                lock.unlockRead(stamp);
            }
        }
        /** String form of this sublist's range, rendered under the parent's read lock. */
        public String toString() {
            final StampedLock lock = list.lock;
            long stamp = lock.readLock();
            try {
                return list.internalToString(offset, offset + size);
            } finally {
                lock.unlockRead(stamp);
            }
        }
}
    /**
     * ListIterator over a ReadMostlyVectorSublist. Reads run optimistically
     * against the parent list's StampedLock (seq holds the optimistic stamp,
     * validated after each element read); mutators upgrade in place via
     * tryConvertToWriteLock. cursor/origin/fence/lastRet are ABSOLUTE indices
     * into the parent array, not sublist-relative ones.
     */
    static final class SubItr<E> implements ListIterator<E> {
        final ReadMostlyVectorSublist<E> sublist;
        final ReadMostlyVector<E> list;
        final StampedLock lock;
        // NOTE(review): items is never assigned in the constructor, only in add();
        // next()/previous() throw NoSuchElementException while it is null. This
        // looks like a dropped "items = list.array" — confirm against upstream.
        Object[] items;
        long seq;       // optimistic-read stamp; 0 means a lock conversion failed
        int cursor;     // absolute index of the next element to return
        int origin;     // absolute start of the sublist (inclusive)
        int fence;      // absolute end of the sublist (exclusive)
        int lastRet;    // absolute index of the last returned element, or -1
        SubItr(ReadMostlyVectorSublist<E> sublist, int index) {
            final StampedLock lock = sublist.list.lock;
            long stamp = lock.readLock();
            try {
                this.sublist = sublist;
                this.list = sublist.list;
                this.lock = lock;
                this.cursor = index;
                this.origin = sublist.offset;
                this.fence = origin + sublist.size;
                this.lastRet = -1;
            } finally {
                // downgrade the read lock to an optimistic stamp for later validation
                this.seq = lock.tryConvertToOptimisticRead(stamp);
            }
            if (index < 0 || cursor > fence)
                throw new ArrayIndexOutOfBoundsException(index);
        }
        /** Next position, relative to the sublist. */
        public int nextIndex() {
            return cursor - origin;
        }
        /** Previous position, relative to the sublist. */
        public int previousIndex() {
            return cursor - origin - 1;
        }
        public boolean hasNext() {
            return cursor < fence;
        }
        public boolean hasPrevious() {
            return cursor > origin;
        }
        public E next() {
            int i = cursor;
            Object[] es = items;
            if (es == null || i < origin || i >= fence || i >= es.length)
                throw new NoSuchElementException();
            @SuppressWarnings("unchecked") E e = (E)es[i];
            lastRet = i;
            cursor = i + 1;
            // a write since the stamp was taken invalidates the unsynchronized read
            if (!lock.validate(seq))
                throw new ConcurrentModificationException();
            return e;
        }
        public E previous() {
            int i = cursor - 1;
            Object[] es = items;
            // NOTE(review): lower bound is checked against 0, not origin; normal
            // use is guarded by hasPrevious(), but direct calls could step below
            // the sublist start — confirm intended behavior.
            if (es == null || i < 0 || i >= fence || i >= es.length)
                throw new NoSuchElementException();
            @SuppressWarnings("unchecked") E e = (E)es[i];
            lastRet = i;
            cursor = i;
            if (!lock.validate(seq))
                throw new ConcurrentModificationException();
            return e;
        }
        public void remove() {
            int i = lastRet;
            if (i < 0)
                throw new IllegalStateException();
            // upgrade the optimistic stamp to a write lock; failure means a
            // conflicting write happened in between
            if ((seq = lock.tryConvertToWriteLock(seq)) == 0)
                throw new ConcurrentModificationException();
            try {
                list.rawRemoveAt(i);
                fence = origin + sublist.size;
                cursor = i;
                lastRet = -1;
            } finally {
                seq = lock.tryConvertToOptimisticRead(seq);
            }
        }
        public void set(E e) {
            int i = lastRet;
            if (i < origin || i >= fence)
                throw new IllegalStateException();
            if ((seq = lock.tryConvertToWriteLock(seq)) == 0)
                throw new ConcurrentModificationException();
            try {
                list.set(i, e);
            } finally {
                seq = lock.tryConvertToOptimisticRead(seq);
            }
        }
        public void add(E e) {
            int i = cursor;
            if (i < origin || i >= fence)
                throw new IllegalStateException();
            if ((seq = lock.tryConvertToWriteLock(seq)) == 0)
                throw new ConcurrentModificationException();
            try {
                list.rawAddAt(i, e);
                // refresh the cached array; rawAddAt may have grown/replaced it
                items = list.array;
                fence = origin + sublist.size;
                cursor = i + 1;
                lastRet = -1;
            } finally {
                seq = lock.tryConvertToOptimisticRead(seq);
            }
        }
    }
}
| 0true
|
src_main_java_jsr166e_extra_ReadMostlyVector.java
|
784 |
/**
 * SchemaCache decorator that records cache metrics around a StandardSchemaCache.
 * Every lookup increments a RETRIEVAL counter; the wrapped retriever is only
 * invoked on a cache miss, where a MISS counter is incremented first. Counters
 * are emitted only for transactions configured with a metrics group name.
 */
public class MetricInstrumentedSchemaCache implements SchemaCache {

    public static final String METRICS_NAME = "schemacache";
    public static final String METRICS_TYPENAME = "name";
    public static final String METRICS_RELATIONS = "relations";

    private final SchemaCache cache;

    public MetricInstrumentedSchemaCache(final StoreRetrieval retriever) {
        // Wrap the retriever so misses are counted exactly when the backing
        // cache has to fall through to storage.
        cache = new StandardSchemaCache(new StoreRetrieval() {
            @Override
            public Long retrieveSchemaByName(String typeName, StandardTitanTx tx) {
                incAction(METRICS_TYPENAME,CacheMetricsAction.MISS,tx);
                return retriever.retrieveSchemaByName(typeName, tx);
            }

            @Override
            public EntryList retrieveSchemaRelations(long schemaId, BaseRelationType type, Direction dir, StandardTitanTx tx) {
                incAction(METRICS_RELATIONS,CacheMetricsAction.MISS,tx);
                return retriever.retrieveSchemaRelations(schemaId, type, dir, tx);
            }
        });
    }

    /** Increments the counter for the given metric type/action, if metrics are enabled for tx. */
    private void incAction(String type, CacheMetricsAction action, StandardTitanTx tx) {
        if (tx.getConfiguration().getGroupName()!=null) {
            MetricManager.INSTANCE.getCounter(tx.getConfiguration().getGroupName(), METRICS_NAME, type, action.getName()).inc();
        }
    }

    @Override
    public Long getSchemaId(String schemaName, StandardTitanTx tx) {
        incAction(METRICS_TYPENAME,CacheMetricsAction.RETRIEVAL,tx);
        return cache.getSchemaId(schemaName, tx);
    }

    @Override
    public EntryList getSchemaRelations(long schemaId, BaseRelationType type, Direction dir, StandardTitanTx tx) {
        incAction(METRICS_RELATIONS,CacheMetricsAction.RETRIEVAL,tx);
        return cache.getSchemaRelations(schemaId, type, dir, tx);
    }

    @Override
    public void expireSchemaElement(long schemaId) {
        // Pure delegation; expiration is not counted.
        cache.expireSchemaElement(schemaId);
    }
}
| 1no label
|
titan-core_src_main_java_com_thinkaurelius_titan_graphdb_database_cache_MetricInstrumentedSchemaCache.java
|
2,038 |
/**
 * Base class for map put operations. Subclasses perform the actual store;
 * this class handles the shared post-processing (events, near-cache
 * invalidation, WAN replication) and backup wiring.
 */
public abstract class BasePutOperation extends LockAwareOperation implements BackupAwareOperation {

    // Previous value at the key, captured by the subclass during run(); null means the entry is new.
    protected transient Data dataOldValue;
    // Event to publish; when a subclass leaves it null, ADDED/UPDATED is derived from dataOldValue.
    protected transient EntryEventType eventType;

    public BasePutOperation(String name, Data dataKey, Data value) {
        // -1 ttl = no explicit time-to-live
        super(name, dataKey, value, -1);
    }

    public BasePutOperation(String name, Data dataKey, Data value, long ttl) {
        super(name, dataKey, value, ttl);
    }

    public BasePutOperation() {
    }

    public void afterRun() {
        // Order matters: interceptors first, then event publication, then
        // near-cache invalidation, and finally WAN replication.
        mapService.interceptAfterPut(name, dataValue);
        if (eventType == null)
            eventType = dataOldValue == null ? EntryEventType.ADDED : EntryEventType.UPDATED;
        mapService.publishEvent(getCallerAddress(), name, eventType, dataKey, dataOldValue, dataValue);
        invalidateNearCaches();
        if (mapContainer.getWanReplicationPublisher() != null && mapContainer.getWanMergePolicy() != null) {
            Record record = recordStore.getRecord(dataKey);
            if (record == null) {
                // record may already be gone (e.g. evicted); nothing to replicate
                return;
            }
            final SimpleEntryView entryView = mapService.createSimpleEntryView(dataKey, mapService.toData(dataValue), record);
            mapService.publishWanReplicationUpdate(name, entryView);
        }
    }

    /** Puts always need a backup. */
    public boolean shouldBackup() {
        return true;
    }

    public Operation getBackupOperation() {
        // NOTE(review): assumes the record is still present after run(); verify
        // eviction between run() and backup creation cannot occur here.
        Record record = recordStore.getRecord(dataKey);
        RecordInfo replicationInfo = mapService.createRecordInfo(record);
        return new PutBackupOperation(name, dataKey, dataValue, replicationInfo);
    }

    public final int getAsyncBackupCount() {
        return mapContainer.getAsyncBackupCount();
    }

    public final int getSyncBackupCount() {
        return mapContainer.getBackupCount();
    }

    /** Lock wait timed out: answer the caller with null instead of running. */
    public void onWaitExpire() {
        final ResponseHandler responseHandler = getResponseHandler();
        responseHandler.sendResponse(null);
    }

    @Override
    public String toString() {
        return "BasePutOperation{" + name + "}";
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_operation_BasePutOperation.java
|
176 |
/**
 * Client-side tests for IAtomicLong: basic arithmetic/CAS operations plus the
 * apply/alter/alterAndGet/getAndAlter function variants, including null-argument
 * and exception-propagation behavior. A single server + client pair is shared
 * across the whole class.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientAtomicLongTest {

    static final String name = "test1";

    static HazelcastInstance client;
    static HazelcastInstance server;
    static IAtomicLong l;

    @BeforeClass
    public static void init(){
        server = Hazelcast.newHazelcastInstance();
        client = HazelcastClient.newHazelcastClient();
        l = client.getAtomicLong(name);
    }

    @AfterClass
    public static void destroy() {
        client.shutdown();
        Hazelcast.shutdownAll();
    }

    // Intentionally both @Before and @After: the shared counter is reset to 0
    // around every test so ordering between tests cannot matter.
    @Before
    @After
    public void clear() throws IOException {
        l.set(0);
    }

    @Test
    public void test() throws Exception {
        assertEquals(0, l.getAndAdd(2));
        assertEquals(2, l.get());
        l.set(5);
        assertEquals(5, l.get());
        assertEquals(8, l.addAndGet(3));
        assertFalse(l.compareAndSet(7, 4));
        assertEquals(8, l.get());
        assertTrue(l.compareAndSet(8, 4));
        assertEquals(4, l.get());
        assertEquals(3, l.decrementAndGet());
        assertEquals(3, l.getAndIncrement());
        assertEquals(4, l.getAndSet(9));
        assertEquals(10, l.incrementAndGet());
    }

    @Test(expected = IllegalArgumentException.class)
    public void apply_whenCalledWithNullFunction() {
        IAtomicLong ref = client.getAtomicLong("apply_whenCalledWithNullFunction");
        ref.apply(null);
    }

    @Test
    public void apply() {
        IAtomicLong ref = client.getAtomicLong("apply");
        // apply must return the function result without mutating the stored value
        assertEquals(new Long(1), ref.apply(new AddOneFunction()));
        assertEquals(0, ref.get());
    }

    @Test
    public void apply_whenException() {
        IAtomicLong ref = client.getAtomicLong("apply_whenException");
        ref.set(1);
        try {
            ref.apply(new FailingFunction());
            fail();
        } catch (WoohaaException expected) {
        }
        // a failing function must leave the value untouched
        assertEquals(1, ref.get());
    }

    @Test(expected = IllegalArgumentException.class)
    public void alter_whenCalledWithNullFunction() {
        IAtomicLong ref = client.getAtomicLong("alter_whenCalledWithNullFunction");
        ref.alter(null);
    }

    @Test
    public void alter_whenException() {
        IAtomicLong ref = client.getAtomicLong("alter_whenException");
        ref.set(10);
        try {
            ref.alter(new FailingFunction());
            fail();
        } catch (WoohaaException expected) {
        }
        assertEquals(10, ref.get());
    }

    @Test
    public void alter() {
        IAtomicLong ref = client.getAtomicLong("alter");
        ref.set(10);
        ref.alter(new AddOneFunction());
        assertEquals(11, ref.get());
    }

    @Test(expected = IllegalArgumentException.class)
    public void alterAndGet_whenCalledWithNullFunction() {
        IAtomicLong ref = client.getAtomicLong("alterAndGet_whenCalledWithNullFunction");
        ref.alterAndGet(null);
    }

    @Test
    public void alterAndGet_whenException() {
        IAtomicLong ref = client.getAtomicLong("alterAndGet_whenException");
        ref.set(10);
        try {
            ref.alterAndGet(new FailingFunction());
            fail();
        } catch (WoohaaException expected) {
        }
        assertEquals(10, ref.get());
    }

    @Test
    public void alterAndGet() {
        IAtomicLong ref = client.getAtomicLong("alterAndGet");
        ref.set(10);
        assertEquals(11, ref.alterAndGet(new AddOneFunction()));
        assertEquals(11, ref.get());
    }

    @Test(expected = IllegalArgumentException.class)
    public void getAndAlter_whenCalledWithNullFunction() {
        IAtomicLong ref = client.getAtomicLong("getAndAlter_whenCalledWithNullFunction");
        ref.getAndAlter(null);
    }

    @Test
    public void getAndAlter_whenException() {
        IAtomicLong ref = client.getAtomicLong("getAndAlter_whenException");
        ref.set(10);
        try {
            ref.getAndAlter(new FailingFunction());
            fail();
        } catch (WoohaaException expected) {
        }
        assertEquals(10, ref.get());
    }

    @Test
    public void getAndAlter() {
        IAtomicLong ref = client.getAtomicLong("getAndAlter");
        ref.set(10);
        // getAndAlter returns the OLD value but stores the altered one
        assertEquals(10, ref.getAndAlter(new AddOneFunction()));
        assertEquals(11, ref.get());
    }

    /** Serializable function incrementing its input by one. */
    private static class AddOneFunction implements IFunction<Long, Long> {
        @Override
        public Long apply(Long input) {
            return input+1;
        }
    }

    /** Function that always throws, to test failure propagation and rollback. */
    private static class FailingFunction implements IFunction<Long, Long> {
        @Override
        public Long apply(Long input) {
            throw new WoohaaException();
        }
    }

    /** Marker exception used by FailingFunction. */
    private static class WoohaaException extends RuntimeException {
    }
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_atomiclong_ClientAtomicLongTest.java
|
3,743 |
/**
 * Creates the HazelcastInstance used for web-session replication, driven by
 * filter init properties: either a full client connecting to an external
 * cluster ({@code use-client=true}) or an embedded member, each optionally
 * configured from an XML location resolved against the servlet context,
 * the classpath, or the filesystem.
 */
final class HazelcastInstanceLoader {

    public static final String INSTANCE_NAME = "instance-name";
    public static final String CONFIG_LOCATION = "config-location";
    public static final String USE_CLIENT = "use-client";
    public static final String CLIENT_CONFIG_LOCATION = "client-config-location";

    private static final ILogger LOGGER = Logger.getLogger(HazelcastInstanceLoader.class);
    private static final int DEFAULT_CONNECTION_ATTEMPT_LIMIT = 3;

    // Utility class; never instantiated.
    private HazelcastInstanceLoader() {
    }

    /**
     * Builds a client or member instance from the given properties.
     * Config resolution order: client config location (client mode only),
     * then the member config location, then library defaults.
     *
     * @throws ServletException if a configured XML location cannot be loaded
     */
    public static HazelcastInstance createInstance(final FilterConfig filterConfig, final Properties properties)
            throws ServletException {

        final String instanceName = properties.getProperty(INSTANCE_NAME);
        final String configLocation = properties.getProperty(CONFIG_LOCATION);
        final String useClientProp = properties.getProperty(USE_CLIENT);
        final String clientConfigLocation = properties.getProperty(CLIENT_CONFIG_LOCATION);
        final boolean useClient = !isEmpty(useClientProp) && Boolean.parseBoolean(useClientProp);
        URL configUrl = null;
        if (useClient && !isEmpty(clientConfigLocation)) {
            configUrl = getConfigURL(filterConfig, clientConfigLocation);
        } else if (!isEmpty(configLocation)) {
            configUrl = getConfigURL(filterConfig, configLocation);
        }
        if (useClient) {
            return createClientInstance(configUrl);
        }
        Config config;
        if (configUrl == null) {
            // no explicit location: fall back to hazelcast.xml discovery
            config = new XmlConfigBuilder().build();
        } else {
            try {
                config = new UrlXmlConfig(configUrl);
            } catch (IOException e) {
                throw new ServletException(e);
            }
        }
        return createHazelcastInstance(instanceName, config);
    }

    /**
     * Named instances are shared via getOrCreateHazelcastInstance so several
     * deployments can reuse one member; unnamed requests always create a new one.
     */
    private static HazelcastInstance createHazelcastInstance(String instanceName, Config config) {
        if (!isEmpty(instanceName)) {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info(format("getOrCreateHazelcastInstance for session replication, using name '%s'", instanceName));
            }
            config.setInstanceName(instanceName);
            return Hazelcast.getOrCreateHazelcastInstance(config);
        } else {
            LOGGER.info("Creating a new HazelcastInstance for session replication");
            return Hazelcast.newHazelcastInstance(config);
        }
    }

    private static HazelcastInstance createClientInstance(URL configUrl) throws ServletException {
        LOGGER.warning("Creating HazelcastClient for session replication...");
        LOGGER.warning("make sure this client has access to an already running cluster...");
        ClientConfig clientConfig;
        if (configUrl == null) {
            clientConfig = new ClientConfig();
            // bounded retries so a missing cluster fails fast instead of hanging
            clientConfig.getNetworkConfig().setConnectionAttemptLimit(DEFAULT_CONNECTION_ATTEMPT_LIMIT);
        } else {
            try {
                clientConfig = new XmlClientConfigBuilder(configUrl).build();
            } catch (IOException e) {
                throw new ServletException(e);
            }
        }
        return HazelcastClient.newHazelcastClient(clientConfig);
    }

    /**
     * Resolves a config location first as a servlet-context resource, then via
     * ConfigLoader (classpath/filesystem/URL). Fails if neither finds it.
     */
    private static URL getConfigURL(final FilterConfig filterConfig, final String configLocation) throws ServletException {
        URL configUrl = null;
        try {
            configUrl = filterConfig.getServletContext().getResource(configLocation);
        } catch (MalformedURLException ignore) {
            LOGGER.info("ignored MalformedURLException");
        }
        if (configUrl == null) {
            configUrl = ConfigLoader.locateConfig(configLocation);
        }
        if (configUrl == null) {
            throw new ServletException("Could not load configuration '" + configLocation + "'");
        }
        return configUrl;
    }

    /** True when s is null or only whitespace. */
    private static boolean isEmpty(String s) {
        return s == null || s.trim().length() == 0;
    }
}
| 1no label
|
hazelcast-wm_src_main_java_com_hazelcast_web_HazelcastInstanceLoader.java
|
274 |
/**
 * Contract for command requests that can run asynchronously, streaming result
 * records to a registered listener instead of returning them in one batch.
 */
public interface OCommandRequestAsynch {

  /** Returns the listener currently receiving results, or null if none is set. */
  public OCommandResultListener getResultListener();

  /** Registers the listener that will receive each result as it is produced. */
  public void setResultListener(OCommandResultListener iListener);

  /** True when this request executes asynchronously. */
  public boolean isAsynchronous();
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandRequestAsynch.java
|
193 |
/**
 * A token filter that truncates every token's term text to a fixed maximum
 * number of characters; shorter tokens pass through unchanged.
 */
public class TruncateTokenFilter extends TokenFilter {

    private final CharTermAttribute termAttribute = addAttribute(CharTermAttribute.class);

    private final int size;

    /**
     * @param in   the upstream token stream
     * @param size maximum token length to emit; must be &gt;= 1
     * @throws IllegalArgumentException if {@code size} is not positive
     *         (a non-positive limit would silently emit empty tokens)
     */
    public TruncateTokenFilter(TokenStream in, int size) {
        super(in);
        if (size < 1) {
            throw new IllegalArgumentException("size parameter must be a positive number: " + size);
        }
        this.size = size;
    }

    /**
     * Advances to the next upstream token and clips its term text to
     * {@code size} characters when longer.
     */
    @Override
    public final boolean incrementToken() throws IOException {
        if (input.incrementToken()) {
            final int length = termAttribute.length();
            if (length > size) {
                // setLength only shrinks the term buffer; no copying occurs
                termAttribute.setLength(size);
            }
            return true;
        }
        return false;
    }
}
| 0true
|
src_main_java_org_apache_lucene_analysis_miscellaneous_TruncateTokenFilter.java
|
582 |
/**
 * Transport action for the indices optimize (force-merge) API. Broadcasts a
 * per-shard optimize request to every active shard copy and aggregates the
 * per-shard outcomes into a single OptimizeResponse.
 */
public class TransportOptimizeAction extends TransportBroadcastOperationAction<OptimizeRequest, OptimizeResponse, ShardOptimizeRequest, ShardOptimizeResponse> {

    private final IndicesService indicesService;

    @Inject
    public TransportOptimizeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                   TransportService transportService, IndicesService indicesService) {
        super(settings, threadPool, clusterService, transportService);
        this.indicesService = indicesService;
    }

    @Override
    protected String executor() {
        // optimize runs on its own dedicated thread pool
        return ThreadPool.Names.OPTIMIZE;
    }

    @Override
    protected String transportAction() {
        return OptimizeAction.NAME;
    }

    @Override
    protected OptimizeRequest newRequest() {
        return new OptimizeRequest();
    }

    /**
     * Tallies per-shard results: null entries are shards that were not active,
     * failure exceptions are collected, everything else counts as success.
     */
    @Override
    protected OptimizeResponse newResponse(OptimizeRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
        int successfulShards = 0;
        int failedShards = 0;
        List<ShardOperationFailedException> shardFailures = null;
        for (int i = 0; i < shardsResponses.length(); i++) {
            Object shardResponse = shardsResponses.get(i);
            if (shardResponse == null) {
                // a non active shard, ignore...
            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                failedShards++;
                if (shardFailures == null) {
                    shardFailures = newArrayList();
                }
                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
            } else {
                successfulShards++;
            }
        }
        return new OptimizeResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures);
    }

    @Override
    protected ShardOptimizeRequest newShardRequest() {
        return new ShardOptimizeRequest();
    }

    @Override
    protected ShardOptimizeRequest newShardRequest(ShardRouting shard, OptimizeRequest request) {
        return new ShardOptimizeRequest(shard.index(), shard.id(), request);
    }

    @Override
    protected ShardOptimizeResponse newShardResponse() {
        return new ShardOptimizeResponse();
    }

    /** Runs the optimize on one local shard, forwarding the request's merge settings. */
    @Override
    protected ShardOptimizeResponse shardOperation(ShardOptimizeRequest request) throws ElasticsearchException {
        IndexShard indexShard = indicesService.indexServiceSafe(request.index()).shardSafe(request.shardId());
        indexShard.optimize(new Engine.Optimize()
                .waitForMerge(request.waitForMerge())
                .maxNumSegments(request.maxNumSegments())
                .onlyExpungeDeletes(request.onlyExpungeDeletes())
                .flush(request.flush())
        );
        return new ShardOptimizeResponse(request.index(), request.shardId());
    }

    /**
     * The optimize request works against *all* active shard copies (primaries
     * and replicas) of the targeted indices.
     */
    @Override
    protected GroupShardsIterator shards(ClusterState clusterState, OptimizeRequest request, String[] concreteIndices) {
        return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true);
    }

    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, OptimizeRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, OptimizeRequest request, String[] concreteIndices) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, concreteIndices);
    }
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_indices_optimize_TransportOptimizeAction.java
|
300 |
/**
 * Unchecked exception raised when a request cannot be resolved to a known site.
 * Provides the four conventional constructor variants.
 */
public class SiteNotFoundException extends RuntimeException {

    /** Creates an exception with no detail message and no cause. */
    public SiteNotFoundException() {
        super();
    }

    /** Creates an exception wrapping the given root cause. */
    public SiteNotFoundException(Throwable cause) {
        super(cause);
    }

    /** Creates an exception carrying the given detail message. */
    public SiteNotFoundException(String message) {
        super(message);
    }

    /** Creates an exception with both a detail message and a root cause. */
    public SiteNotFoundException(String message, Throwable cause) {
        super(message, cause);
    }
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_exception_SiteNotFoundException.java
|
4,034 |
/**
 * Builds a match query over multiple fields. Each field is parsed with the
 * inherited single-field MatchQuery logic, then the per-field queries are
 * combined either with a dismax (default) or a boolean SHOULD query.
 */
public class MultiMatchQuery extends MatchQuery {

    /** true = combine per-field queries with a dismax; false = boolean SHOULD. */
    private boolean useDisMax = true;

    /** Tie breaker passed to the dismax combination; unused in boolean mode. */
    private float tieBreaker;

    public void setUseDisMax(boolean useDisMax) {
        this.useDisMax = useDisMax;
    }

    public void setTieBreaker(float tieBreaker) {
        this.tieBreaker = tieBreaker;
    }

    public MultiMatchQuery(QueryParseContext parseContext) {
        super(parseContext);
    }

    /**
     * Parses one field's match query, applies minimum-should-match (boolean
     * queries only) and an optional per-field boost. May return null when the
     * field produces no query.
     */
    private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException {
        Query query = parse(type, fieldName, value);
        if (query instanceof BooleanQuery) {
            Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
        }
        if (boostValue != null && query != null) {
            query.setBoost(boostValue);
        }
        return query;
    }

    /**
     * Builds the combined query for {@code value} against every entry of
     * {@code fieldNames} (field name -> optional boost, null = no boost).
     * A single field short-circuits to a plain per-field query. Returns null
     * when no field yields a query.
     */
    public Query parse(Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
        if (fieldNames.size() == 1) {
            Map.Entry<String, Float> fieldBoost = fieldNames.entrySet().iterator().next();
            return parseAndApply(type, fieldBoost.getKey(), value, minimumShouldMatch, fieldBoost.getValue());
        }
        if (useDisMax) {
            DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(tieBreaker);
            boolean clauseAdded = false;
            // Iterate entries once instead of keySet() plus a get() per key.
            for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
                Query query = parseAndApply(type, entry.getKey(), value, minimumShouldMatch, entry.getValue());
                if (query != null) {
                    clauseAdded = true;
                    disMaxQuery.add(query);
                }
            }
            return clauseAdded ? disMaxQuery : null;
        } else {
            BooleanQuery booleanQuery = new BooleanQuery();
            for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
                Query query = parseAndApply(type, entry.getKey(), value, minimumShouldMatch, entry.getValue());
                if (query != null) {
                    booleanQuery.add(query, BooleanClause.Occur.SHOULD);
                }
            }
            return !booleanQuery.clauses().isEmpty() ? booleanQuery : null;
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_search_MultiMatchQuery.java
|
162 |
/**
 * Decorates an {@link OperationFactory} so that every operation it produces is
 * stamped with the originating caller's UUID. Serialized form: the UUID first,
 * then the wrapped factory.
 */
public final class OperationFactoryWrapper implements OperationFactory {

    private OperationFactory opFactory;
    private String uuid;

    // Required for deserialization.
    public OperationFactoryWrapper() {
    }

    public OperationFactoryWrapper(OperationFactory opFactory, String uuid) {
        this.opFactory = opFactory;
        this.uuid = uuid;
    }

    @Override
    public Operation createOperation() {
        final Operation operation = opFactory.createOperation();
        operation.setCallerUuid(uuid);
        return operation;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        // Field order must mirror readData exactly.
        out.writeUTF(uuid);
        out.writeObject(opFactory);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        uuid = in.readUTF();
        opFactory = in.readObject();
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_OperationFactoryWrapper.java
|
2,594 |
/**
 * Decodes bytes arriving on a TCP member connection into Packet objects and
 * hands each completed packet to the IOService. Chooses between a plain
 * reader and a symmetric-cipher reader at construction time based on the
 * node's encryption configuration.
 */
class SocketPacketReader implements SocketReader {

    private static final int CONST_BUFFER_NO = 4;

    // Packet currently being assembled; null when the last one completed.
    Packet packet;

    final PacketReader packetReader;
    final TcpIpConnection connection;
    final IOService ioService;
    final ILogger logger;

    public SocketPacketReader(TcpIpConnection connection) {
        this.connection = connection;
        this.ioService = connection.getConnectionManager().ioService;
        this.logger = ioService.getLogger(getClass().getName());
        boolean symmetricEncryptionEnabled = CipherHelper.isSymmetricEncryptionEnabled(ioService);
        if (symmetricEncryptionEnabled) {
            packetReader = new SymmetricCipherPacketReader();
            logger.info("Reader started with SymmetricEncryption");
        } else {
            packetReader = new DefaultPacketReader();
        }
    }

    public void read(ByteBuffer inBuffer) throws Exception {
        packetReader.readPacket(inBuffer);
    }

    /** Tags the packet with its connection and dispatches it for processing. */
    private void enqueueFullPacket(final Packet p) {
        p.setConn(connection);
        ioService.handleMemberPacket(p);
    }

    private interface PacketReader {
        void readPacket(ByteBuffer inBuffer) throws Exception;
    }

    /** Plain (unencrypted) reader: feed bytes straight into packets. */
    private class DefaultPacketReader implements PacketReader {
        public void readPacket(ByteBuffer inBuffer) {
            while (inBuffer.hasRemaining()) {
                if (packet == null) {
                    packet = obtainReadable();
                }
                // readFrom returns true once the packet has all its bytes
                boolean complete = packet.readFrom(inBuffer);
                if (complete) {
                    enqueueFullPacket(packet);
                    packet = null;
                } else {
                    break;
                }
            }
        }
    }

    /**
     * Reader for symmetric-encrypted streams. The wire format is a sequence of
     * chunks, each a 4-byte ciphertext length followed by that many encrypted
     * bytes; decrypted output is accumulated in cipherBuffer and then parsed
     * into packets.
     */
    private final class SymmetricCipherPacketReader implements PacketReader {
        // Remaining ciphertext bytes of the current chunk; -1 = expecting a length prefix.
        int size = -1;
        final Cipher cipher;
        ByteBuffer cipherBuffer = ByteBuffer.allocate(ioService.getSocketReceiveBufferSize() * IOService.KILO_BYTE);

        private SymmetricCipherPacketReader() {
            cipher = init();
        }

        Cipher init() {
            Cipher c;
            try {
                c = CipherHelper.createSymmetricReaderCipher(ioService.getSymmetricEncryptionConfig());
            } catch (Exception e) {
                logger.severe("Symmetric Cipher for ReadHandler cannot be initialized.", e);
                CipherHelper.handleCipherException(e, connection);
                throw ExceptionUtil.rethrow(e);
            }
            return c;
        }

        public void readPacket(ByteBuffer inBuffer) throws Exception {
            while (inBuffer.hasRemaining()) {
                try {
                    if (size == -1) {
                        // need the whole 4-byte length prefix before proceeding
                        if (inBuffer.remaining() < CONST_BUFFER_NO) {
                            return;
                        }
                        size = inBuffer.getInt();
                        if (cipherBuffer.capacity() < size) {
                            cipherBuffer = ByteBuffer.allocate(size);
                        }
                    }
                    int remaining = inBuffer.remaining();
                    if (remaining < size) {
                        // partial chunk: decrypt what we have, wait for the rest
                        cipher.update(inBuffer, cipherBuffer);
                        size -= remaining;
                    } else if (remaining == size) {
                        cipher.doFinal(inBuffer, cipherBuffer);
                        size = -1;
                    } else {
                        // buffer holds more than this chunk: cap the limit so
                        // doFinal consumes exactly the chunk, then restore it
                        int oldLimit = inBuffer.limit();
                        int newLimit = inBuffer.position() + size;
                        inBuffer.limit(newLimit);
                        cipher.doFinal(inBuffer, cipherBuffer);
                        inBuffer.limit(oldLimit);
                        size = -1;
                    }
                } catch (ShortBufferException e) {
                    // NOTE(review): logged and swallowed — the chunk's output is
                    // lost if cipherBuffer was too small; confirm this is acceptable.
                    logger.warning(e);
                }
                // drain all fully-decrypted bytes into packets
                cipherBuffer.flip();
                while (cipherBuffer.hasRemaining()) {
                    if (packet == null) {
                        packet = obtainReadable();
                    }
                    boolean complete = packet.readFrom(cipherBuffer);
                    if (complete) {
                        enqueueFullPacket(packet);
                        packet = null;
                    }
                }
                cipherBuffer.clear();
            }
        }
    }

    public Packet obtainReadable() {
        return new Packet(ioService.getSerializationContext());
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_SocketPacketReader.java
|
1,408 |
public class CeylonPlugin extends AbstractUIPlugin implements CeylonResources {
public static final String PLUGIN_ID = "com.redhat.ceylon.eclipse.ui";
public static final String DIST_PLUGIN_ID = "com.redhat.ceylon.dist";
public static final String EMBEDDED_REPO_PLUGIN_ID = "com.redhat.ceylon.dist.repo";
public static final String LANGUAGE_ID = "ceylon";
public static final String EDITOR_ID = PLUGIN_ID + ".editor";
private static final String[] RUNTIME_LIBRARIES = new String[]{
"com.redhat.ceylon.compiler.java-"+Versions.CEYLON_VERSION_NUMBER+".jar",
"com.redhat.ceylon.typechecker-"+Versions.CEYLON_VERSION_NUMBER+".jar",
"com.redhat.ceylon.module-resolver-"+Versions.CEYLON_VERSION_NUMBER+".jar",
"com.redhat.ceylon.common-"+Versions.CEYLON_VERSION_NUMBER+".jar",
"org.jboss.modules-1.3.3.Final.jar",
};
private static final String[] COMPILETIME_LIBRARIES = new String[]{
"com.redhat.ceylon.typechecker-"+Versions.CEYLON_VERSION_NUMBER+".jar",
};
private FontRegistry fontRegistry;
/**
* The unique instance of this plugin class
*/
protected static CeylonPlugin pluginInstance;
private File ceylonRepository = null;
private BundleContext bundleContext;
/**
* - If the 'ceylon.repo' property exist, returns the corresponding file
* <br>
* - Else return the internal repo folder
*
* @return
*/
public File getCeylonRepository() {
return ceylonRepository;
}
public static CeylonPlugin getInstance() {
if (pluginInstance==null) new CeylonPlugin();
return pluginInstance;
}
public CeylonPlugin() {
final String version = System.getProperty("java.version");
if (!version.startsWith("1.7") && !version.startsWith("1.8")) {
Display.getDefault().asyncExec(new Runnable() {
@Override
public void run() {
ErrorDialog.openError(getWorkbench().getActiveWorkbenchWindow().getShell(),
"Ceylon IDE does not support this JVM",
"Ceylon IDE requires Java 1.7 or 1.8.",
new Status(IStatus.ERROR, PLUGIN_ID,
"Eclipse is running on a Java " + version + " VM.",
null));
}});
}
pluginInstance = this;
}
@Override
public void start(BundleContext context) throws Exception {
String ceylonRepositoryProperty = System.getProperty("ceylon.repo", "");
ceylonRepository = getCeylonPluginRepository(ceylonRepositoryProperty);
super.start(context);
this.bundleContext = context;
addResourceFilterPreference();
registerProjectOpenCloseListener();
CeylonEncodingSynchronizer.getInstance().install();
Job registerCeylonModules = new Job("Load the Ceylon Metamodel for plugin dependencies") {
protected IStatus run(IProgressMonitor monitor) {
Activator.loadBundleAsModule(bundleContext.getBundle());
return Status.OK_STATUS;
};
};
registerCeylonModules.setRule(ResourcesPlugin.getWorkspace().getRoot());
registerCeylonModules.schedule();
}
@Override
public void stop(BundleContext context) throws Exception {
super.stop(context);
unregisterProjectOpenCloseListener();
CeylonEncodingSynchronizer.getInstance().uninstall();
}
private void addResourceFilterPreference() throws BackingStoreException {
new Job("Add Resource Filter for Ceylon projects") {
@Override
protected IStatus run(IProgressMonitor monitor) {
IEclipsePreferences instancePreferences = InstanceScope.INSTANCE
.getNode(JavaCore.PLUGIN_ID);
/*IEclipsePreferences defaultPreferences = DefaultScope.INSTANCE
.getNode(JavaCore.PLUGIN_ID);*/
String filter = instancePreferences.get(CORE_JAVA_BUILD_RESOURCE_COPY_FILTER, "");
if (filter.isEmpty()) {
filter = "*.launch, *.ceylon";
}
else if (!filter.contains("*.ceylon")) {
filter += ", *.ceylon";
}
instancePreferences.put(CORE_JAVA_BUILD_RESOURCE_COPY_FILTER, filter);
try {
instancePreferences.flush();
}
catch (BackingStoreException e) {
e.printStackTrace();
}
return Status.OK_STATUS;
}
}.schedule();
}
/**
* - If the property is not empty, return the corresponding file
* <br>
* - Else return the internal repo folder
*
* @param ceylonRepositoryProperty
* @return
*
*/
public static File getCeylonPluginRepository(String ceylonRepositoryProperty) {
File ceylonRepository=null;
if (!"".equals(ceylonRepositoryProperty)) {
File ceylonRepositoryPath = new File(ceylonRepositoryProperty);
if (ceylonRepositoryPath.exists()) {
ceylonRepository = ceylonRepositoryPath;
}
}
if (ceylonRepository == null) {
try {
Bundle bundle = Platform.getBundle(EMBEDDED_REPO_PLUGIN_ID);
IPath path = new Path("repo");
if (bundle == null) {
bundle = Platform.getBundle(DIST_PLUGIN_ID);
path = new Path("embeddedRepository").append(path);
}
URL eclipseUrl = FileLocator.find(bundle, path, null);
URL fileURL = FileLocator.resolve(eclipseUrl);
String urlPath = fileURL.getPath();
URI fileURI = new URI("file", null, urlPath, null);
ceylonRepository = new File(fileURI);
}
catch (Exception e) {
e.printStackTrace();
}
}
return ceylonRepository;
}
/**
* Returns the list of jars in the bundled system repo that are required by the ceylon.language module at runtime
*/
public static List<String> getRuntimeRequiredJars(){
return getRequiredJars(RUNTIME_LIBRARIES);
}
/**
* Returns the list of jars in the bundled system repo that are required by the ceylon.language module at compiletime
*/
public static List<String> getCompiletimeRequiredJars(){
return getRequiredJars(COMPILETIME_LIBRARIES);
}
/**
 * Returns the absolute paths of the jars in the bundled {@code lib} folder
 * that are required to launch a module (currently just ceylon-bootstrap.jar).
 *
 * @return the launcher jar paths, or an empty list when the lib folder
 *         could not be resolved (the exception is printed)
 */
public static List<String> getModuleLauncherJars(){
    try {
        Bundle bundle = Platform.getBundle(DIST_PLUGIN_ID);
        Path path = new Path("lib");
        URL eclipseUrl = FileLocator.find(bundle, path, null);
        URL fileURL = FileLocator.resolve(eclipseUrl);
        // Bug fix: previously used new File(fileURL.getPath()), which keeps
        // URL escape sequences (e.g. %20) and produces a broken path when the
        // install location contains spaces. Decode through URI instead,
        // consistently with getCeylonPluginRepository().
        File libDir = new File(new URI("file", null, fileURL.getPath(), null));
        List<String> jars = new ArrayList<String>();
        jars.add(new File(libDir, "ceylon-bootstrap.jar").getAbsolutePath());
        return jars;
    } catch (Exception x) {
        // Widened from IOException: URI construction may throw
        // URISyntaxException; behavior (print + empty list) is unchanged.
        x.printStackTrace();
        return Collections.emptyList();
    }
}
/**
 * Resolves each of the given jar file names against the plugin's Ceylon
 * repository and returns their absolute paths. Missing module directories
 * are reported on stdout but the jar path is still returned.
 */
private static List<String> getRequiredJars(String[] libraries){
    File repoDir = getCeylonPluginRepository(System.getProperty("ceylon.repo", ""));
    try {
        List<String> jarPaths = new ArrayList<String>(libraries.length);
        for (String jarName : libraries) {
            File moduleDir = new File(repoDir, getRepoFolder(jarName));
            if (!moduleDir.exists()) {
                System.out.println("WARNING directory doesn't exist: " + moduleDir);
            }
            jarPaths.add(new File(moduleDir, jarName).getAbsolutePath());
        }
        return jarPaths;
    }
    catch (Exception x) {
        x.printStackTrace();
        return Collections.emptyList();
    }
}
/**
 * Computes the repository folder for a jar named
 * {@code module.name-version.jar}: the module name with dots replaced by
 * slashes, followed by a slash and the version
 * (e.g. "ceylon.language-1.0.0.jar" -> "ceylon/language/1.0.0").
 */
private static String getRepoFolder(String jarName) {
    int versionStart = jarName.lastIndexOf('-') + 1;
    int extensionStart = jarName.lastIndexOf('.');
    String modulePath = jarName.substring(0, versionStart - 1).replace('.', '/');
    return modulePath + "/" + jarName.substring(versionStart, extensionStart);
}
/** Returns this plugin's identifier ({@code PLUGIN_ID}). */
public String getID() {
    return PLUGIN_ID;
}
/** Returns the identifier of the Ceylon language ({@code LANGUAGE_ID}). */
public String getLanguageID() {
    return LANGUAGE_ID;
}
// Root folder (inside this bundle) under which all icon files live.
private static IPath iconsPath = new Path("icons/");

/**
 * Builds an image descriptor for the named icon file under the bundle's
 * icons folder, or returns null when the icon cannot be located.
 */
public ImageDescriptor image(String file) {
    URL url = FileLocator.find(getBundle(),
            iconsPath.append(file), null);
    return url==null ? null : ImageDescriptor.createFromURL(url);
}
/**
 * Populates the plugin's image registry with every icon used by the Ceylon
 * IDE UI, keyed by the image-name constants. Purely declarative: each line
 * maps one key to one icon file under icons/.
 */
@Override
protected void initializeImageRegistry(ImageRegistry reg) {
    // File and project kinds
    reg.put(JAVA_FILE, image("jcu_obj.gif"));
    reg.put(GENERIC_FILE, image("file_obj.gif"));
    reg.put(CEYLON_PROJECT, image("prj_obj.gif"));
    reg.put(CEYLON_FILE, image("unit.gif"));
    reg.put(CEYLON_MODULE_DESC, image("m_desc.gif"));
    reg.put(CEYLON_PACKAGE_DESC, image("p_desc.gif"));
    reg.put(CEYLON_FOLDER, image("fldr_obj.gif"));
    reg.put(CEYLON_SOURCE_FOLDER, image("packagefolder_obj.gif"));
    reg.put(CEYLON_MODULE, image("jar_l_obj.gif"));
    reg.put(CEYLON_BINARY_ARCHIVE, image("jar_obj.gif"));
    reg.put(CEYLON_SOURCE_ARCHIVE, image("jar_src_obj.gif"));
    reg.put(CEYLON_PACKAGE, image("package_obj.gif"));
    reg.put(CEYLON_IMPORT_LIST, image("impc_obj.gif"));
    reg.put(CEYLON_IMPORT, image("imp_obj.gif"));
    // Declaration kinds (outline / completion)
    reg.put(CEYLON_ALIAS, image("types.gif"));
    reg.put(CEYLON_CLASS, image("class_obj.gif"));
    reg.put(CEYLON_INTERFACE, image("int_obj.gif"));
    reg.put(CEYLON_LOCAL_CLASS, image("innerclass_private_obj.gif"));
    reg.put(CEYLON_LOCAL_INTERFACE, image("innerinterface_private_obj.gif"));
    reg.put(CEYLON_METHOD, image("public_co.gif"));
    reg.put(CEYLON_ATTRIBUTE, image("field_public_obj.gif"));
    reg.put(CEYLON_LOCAL_METHOD, image("private_co.gif"));
    reg.put(CEYLON_LOCAL_ATTRIBUTE, image("field_private_obj.gif"));
    reg.put(CEYLON_PARAMETER_METHOD, image("methpro_obj.gif"));
    reg.put(CEYLON_PARAMETER, image("field_protected_obj.gif"));
    reg.put(CEYLON_TYPE_PARAMETER, image("typevariable_obj.gif"));
    reg.put(CEYLON_ARGUMENT, image("arg_co.gif"));
    reg.put(CEYLON_DEFAULT_REFINEMENT, image("over_co.gif"));
    reg.put(CEYLON_FORMAL_REFINEMENT, image("implm_co.gif"));
    reg.put(CEYLON_OPEN_DECLARATION, image("opentype.gif"));
    // Search, quick fixes and refactoring
    reg.put(CEYLON_SEARCH_RESULTS, image("search_ref_obj.gif"));
    reg.put(CEYLON_CORRECTION, image("correction_change.gif"));
    reg.put(CEYLON_DELETE_IMPORT, image("correction_delete_import.gif"));
    reg.put(CEYLON_CHANGE, image("change.png"));
    reg.put(CEYLON_COMPOSITE_CHANGE, image("composite_change.png"));
    reg.put(CEYLON_RENAME, image("correction_rename.png"));
    reg.put(CEYLON_DELETE, image("delete_edit.gif"));
    reg.put(CEYLON_MOVE, image("file_change.png"));
    reg.put(CEYLON_ADD, image("add_obj.gif"));
    reg.put(CEYLON_REORDER, image("order_obj.gif"));
    reg.put(CEYLON_REVEAL, image("reveal.gif"));
    reg.put(CEYLON_ADD_CORRECTION, image("add_correction.gif"));
    reg.put(CEYLON_REMOVE_CORRECTION, image("remove_correction.gif"));
    // Wizards
    reg.put(CEYLON_NEW_PROJECT, image("newprj_wiz.png"));
    reg.put(CEYLON_NEW_FILE, image("newfile_wiz.png"));
    reg.put(CEYLON_NEW_MODULE, image("addlibrary_wiz.png"));
    reg.put(CEYLON_NEW_PACKAGE, image("newpack_wiz.png"));
    reg.put(CEYLON_NEW_FOLDER, image("newfolder_wiz.gif"));
    reg.put(CEYLON_EXPORT_CAR, image("jar_pack_wiz.png"));
    // Hierarchy and navigation views
    reg.put(CEYLON_REFS, image("search_ref_obj.png"));
    reg.put(CEYLON_DECS, image("search_decl_obj.png"));
    reg.put(CEYLON_INHERITED, image("inher_co.gif"));
    reg.put(CEYLON_HIER, image("hierarchy_co.gif"));
    reg.put(CEYLON_SUP, image("super_co.gif"));
    reg.put(CEYLON_SUB, image("sub_co.gif"));
    reg.put(CEYLON_OUTLINE, image("outline_co.gif"));
    reg.put(CEYLON_HIERARCHY, image("class_hi.gif"));
    reg.put(CEYLON_SOURCE, image("source.gif"));
    reg.put(ELE32, image("ceylon_icon_32px.png"));
    reg.put(CEYLON_ERR, image("error_co.gif"));
    reg.put(CEYLON_WARN, image("warning_co.gif"));
    reg.put(GOTO, image("goto_obj.gif"));
    reg.put(HIERARCHY, image("class_hi_view.gif"));
    // Editor actions
    reg.put(SHIFT_LEFT, image("shift_l_edit.gif"));
    reg.put(SHIFT_RIGHT, image("shift_r_edit.gif"));
    reg.put(QUICK_ASSIST, image("quickassist_obj.gif"));
    reg.put(BUILDER, image("builder.gif"));
    reg.put(CONFIG_ANN, image("configure_annotations.gif"));
    reg.put(CONFIG_ANN_DIS, image("configure_annotations_disabled.gif"));
    reg.put(MODULE_VERSION, image("module_version.gif"));
    reg.put(HIDE_PRIVATE, image("hideprivate.gif"));
    reg.put(EXPAND_ALL, image("expandall.gif"));
    reg.put(PAGING, image("paging.gif"));
    reg.put(SHOW_DOC, image("show_doc.gif"));
    reg.put(REPOSITORIES, image("repositories.gif"));
    reg.put(RUNTIME_OBJ, image("runtime_obj.gif"));
    reg.put(CEYLON_LOCAL_NAME, image("localvariable_obj.gif"));
    reg.put(MULTIPLE_TYPES, image("types.gif"));
    reg.put(CEYLON_ERROR, image("error_obj.gif"));
    reg.put(CEYLON_WARNING, image("warning_obj.gif"));
    reg.put(CEYLON_FUN, image("public_fun.gif"));
    reg.put(CEYLON_LOCAL_FUN, image("private_fun.gif"));
    // Decorators: the constant doubles as the icon file name below.
    reg.put(WARNING_IMAGE, image(WARNING_IMAGE));
    reg.put(ERROR_IMAGE, image(ERROR_IMAGE));
    reg.put(REFINES_IMAGE, image(REFINES_IMAGE));
    reg.put(IMPLEMENTS_IMAGE, image(IMPLEMENTS_IMAGE));
    reg.put(FINAL_IMAGE, image(FINAL_IMAGE));
    reg.put(ABSTRACT_IMAGE, image(ABSTRACT_IMAGE));
    reg.put(VARIABLE_IMAGE, image(VARIABLE_IMAGE));
    reg.put(ANNOTATION_IMAGE, image(ANNOTATION_IMAGE));
    reg.put(ENUM_IMAGE, image(ENUM_IMAGE));
    reg.put(ALIAS_IMAGE, image(ALIAS_IMAGE));
    reg.put(DEPRECATED_IMAGE, image(DEPRECATED_IMAGE));
    // Presentation modes
    reg.put(PROJECT_MODE, image("prj_mode.gif"));
    reg.put(PACKAGE_MODE, image("package_mode.gif"));
    reg.put(MODULE_MODE, image("module_mode.gif"));
    reg.put(FOLDER_MODE, image("folder_mode.gif"));
    reg.put(UNIT_MODE, image("unit_mode.gif"));
    reg.put(TYPE_MODE, image("type_mode.gif"));
    reg.put(FLAT_MODE, image("flatLayout.gif"));
    reg.put(TREE_MODE, image("hierarchicalLayout.gif"));
    // Source manipulation
    reg.put(TERMINATE_STATEMENT, image("correction_cast.gif"));
    reg.put(FORMAT_BLOCK, image("format_block.gif"));
    reg.put(REMOVE_COMMENT, image("remove_comment_edit.gif"));
    reg.put(ADD_COMMENT, image("comment_edit.gif"));
    reg.put(TOGGLE_COMMENT, image("url.gif"));
    reg.put(CORRECT_INDENT, image("correctindent.gif"));
    reg.put(LAST_EDIT, image("last_edit_pos.gif"));
    reg.put(NEXT_ANN, image("next_nav.gif"));
    reg.put(PREV_ANN, image("prev_nav.gif"));
    reg.put(SORT_ALPHA, image("alphab_sort_co.gif"));
    reg.put(CEYLON_LITERAL, image("correction_change.gif"));
}
/**
 * Subscribes {@code projectOpenCloseListener} to workspace POST_CHANGE
 * resource events so project open/close transitions are observed.
 */
private void registerProjectOpenCloseListener() {
    getWorkspace().addResourceChangeListener(projectOpenCloseListener,
            IResourceChangeEvent.POST_CHANGE);
}
/** Unsubscribes {@code projectOpenCloseListener} from workspace resource events. */
private void unregisterProjectOpenCloseListener() {
    getWorkspace().removeResourceChangeListener(projectOpenCloseListener);
}
// Receives workspace resource-change events; registered/unregistered by the
// register/unregisterProjectOpenCloseListener methods above.
IResourceChangeListener projectOpenCloseListener = new ProjectChangeListener();
/** Returns the OSGi bundle context held by this plugin. */
public BundleContext getBundleContext() {
    return this.bundleContext;
}
/**
 * Tries to adapt a non-null object to the specified type: returns the
 * object itself when it already is an instance, otherwise asks the object's
 * own IAdaptable implementation, and finally the platform adapter manager.
 *
 * @param object the object to adapt
 * @param type   the class to adapt to
 * @return the adapted object, or null when no adaptation is possible
 */
public static Object adapt(Object object, Class<?> type) {
    if (type.isInstance(object)) {
        return object;
    }
    if (object instanceof IAdaptable) {
        return ((IAdaptable) object).getAdapter(type);
    }
    return Platform.getAdapterManager().getAdapter(object, type);
}
/**
 * Lazily creates and returns the shared font registry.
 * NOTE(review): the lazy initialization is unsynchronized — this appears to
 * assume all callers run on the UI thread (a Display must exist on the
 * current thread per the FontRegistry constructor); confirm before calling
 * from background jobs.
 */
public FontRegistry getFontRegistry() {
    // Hopefully this gets called late enough, i.e., after a Display has been
    // created on the current thread (see FontRegistry constructor).
    if (fontRegistry == null) {
        fontRegistry = new FontRegistry();
    }
    return fontRegistry;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_ui_CeylonPlugin.java
|
1,607 |
/**
 * A {@link MapStructure} whose map values are simple (single-property)
 * values rather than full entities; adds the value property's name and its
 * display-friendly name to the inherited key/value metadata.
 */
public class SimpleValueMapStructure extends MapStructure {

    private static final long serialVersionUID = 1L;

    // Name of the property on the value side of the map entry.
    private String valuePropertyName;
    // Display-friendly label for that property.
    private String valuePropertyFriendlyName;

    public SimpleValueMapStructure() {
        super();
    }

    /**
     * @param keyClassName            class name of the map key
     * @param keyPropertyName         property holding the key
     * @param keyPropertyFriendlyName display label for the key property
     * @param valueClassName          class name of the map value
     * @param valuePropertyName       property holding the simple value
     * @param valuePropertyFriendlyName display label for the value property
     * @param mapProperty             property on the owning entity holding the map
     * @param mapKeyValueProperty     property used as the map key's value
     */
    public SimpleValueMapStructure(String keyClassName, String keyPropertyName, String keyPropertyFriendlyName, String valueClassName, String valuePropertyName, String valuePropertyFriendlyName, String mapProperty, String mapKeyValueProperty) {
        super(keyClassName, keyPropertyName, keyPropertyFriendlyName, valueClassName, mapProperty, false, mapKeyValueProperty);
        this.valuePropertyFriendlyName = valuePropertyFriendlyName;
        this.valuePropertyName = valuePropertyName;
    }

    public String getValuePropertyName() {
        return valuePropertyName;
    }

    public void setValuePropertyName(String valuePropertyName) {
        this.valuePropertyName = valuePropertyName;
    }

    public String getValuePropertyFriendlyName() {
        return valuePropertyFriendlyName;
    }

    public void setValuePropertyFriendlyName(String valuePropertyFriendlyName) {
        this.valuePropertyFriendlyName = valuePropertyFriendlyName;
    }

    public void accept(PersistencePerspectiveItemVisitor visitor) {
        visitor.visit(this);
    }

    @Override
    public PersistencePerspectiveItem clonePersistencePerspectiveItem() {
        SimpleValueMapStructure mapStructure = new SimpleValueMapStructure();
        mapStructure.setKeyClassName(getKeyClassName());
        mapStructure.setKeyPropertyName(getKeyPropertyName());
        // Bug fix: previously the KEY friendly name was written into the
        // VALUE friendly name slot (and then overwritten below), so the
        // clone silently lost keyPropertyFriendlyName.
        mapStructure.setKeyPropertyFriendlyName(getKeyPropertyFriendlyName());
        mapStructure.setValueClassName(getValueClassName());
        mapStructure.setMapProperty(getMapProperty());
        mapStructure.setDeleteValueEntity(getDeleteValueEntity());
        mapStructure.valuePropertyName = valuePropertyName;
        mapStructure.valuePropertyFriendlyName = valuePropertyFriendlyName;
        return mapStructure;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof SimpleValueMapStructure)) return false;
        if (!super.equals(o)) return false;

        SimpleValueMapStructure that = (SimpleValueMapStructure) o;

        if (valuePropertyFriendlyName != null ? !valuePropertyFriendlyName.equals(that.valuePropertyFriendlyName) : that.valuePropertyFriendlyName != null)
            return false;
        if (valuePropertyName != null ? !valuePropertyName.equals(that.valuePropertyName) : that.valuePropertyName != null)
            return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (valuePropertyName != null ? valuePropertyName.hashCode() : 0);
        result = 31 * result + (valuePropertyFriendlyName != null ? valuePropertyFriendlyName.hashCode() : 0);
        return result;
    }
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_SimpleValueMapStructure.java
|
3,845 |
/**
 * Parses the {@code geo_polygon} filter: a list of points defining a closed
 * polygon against which indexed geo_point values are tested.
 * <p>
 * Expected syntax:
 * <pre>
 * { "geo_polygon" : { "field.name" : { "points" : [ ... ] },
 *                     "_cache" : ..., "_name" : ..., "normalize" : ... } }
 * </pre>
 */
public class GeoPolygonFilterParser implements FilterParser {

    public static final String NAME = "geo_polygon";
    public static final String POINTS = "points";

    @Inject
    public GeoPolygonFilterParser() {
    }

    @Override
    public String[] names() {
        return new String[]{NAME, "geoPolygon"};
    }

    @Override
    public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        boolean cache = false;
        CacheKeyFilter.Key cacheKey = null;
        String fieldName = null;
        List<GeoPoint> shell = Lists.newArrayList();

        boolean normalizeLon = true;
        boolean normalizeLat = true;

        String filterName = null;
        String currentFieldName = null;
        XContentParser.Token token;

        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                // The object's field name is the geo_point field to filter on.
                fieldName = currentFieldName;

                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        if (POINTS.equals(currentFieldName)) {
                            while ((token = parser.nextToken()) != Token.END_ARRAY) {
                                shell.add(GeoPoint.parse(parser));
                            }
                        } else {
                            throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]");
                        }
                    }
                }
            } else if (token.isValue()) {
                if ("_name".equals(currentFieldName)) {
                    filterName = parser.text();
                } else if ("_cache".equals(currentFieldName)) {
                    cache = parser.booleanValue();
                } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
                    cacheKey = new CacheKeyFilter.Key(parser.text());
                } else if ("normalize".equals(currentFieldName)) {
                    // A single "normalize" flag controls both lat and lon.
                    normalizeLat = parser.booleanValue();
                    normalizeLon = parser.booleanValue();
                } else {
                    throw new QueryParsingException(parseContext.index(), "[geo_polygon] filter does not support [" + currentFieldName + "]");
                }
            }
        }

        if (shell.isEmpty()) {
            throw new QueryParsingException(parseContext.index(), "no points defined for geo_polygon filter");
        } else {
            // Bug fix: error messages previously read "to few points".
            if (shell.size() < 3) {
                throw new QueryParsingException(parseContext.index(), "too few points defined for geo_polygon filter");
            }
            // Close the ring if the caller did not repeat the first point.
            GeoPoint start = shell.get(0);
            if (!start.equals(shell.get(shell.size() - 1))) {
                shell.add(start);
            }
            // A closed ring needs at least 3 distinct points (4 with closure).
            if (shell.size() < 4) {
                throw new QueryParsingException(parseContext.index(), "too few points defined for geo_polygon filter");
            }
        }

        if (normalizeLat || normalizeLon) {
            for (GeoPoint point : shell) {
                GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
            }
        }

        MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName);
        if (smartMappers == null || !smartMappers.hasMapper()) {
            throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
        }
        FieldMapper<?> mapper = smartMappers.mapper();
        if (!(mapper instanceof GeoPointFieldMapper)) {
            throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
        }

        IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
        Filter filter = new GeoPolygonFilter(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
        if (cache) {
            filter = parseContext.cacheFilter(filter, cacheKey);
        }
        filter = wrapSmartNameFilter(filter, smartMappers, parseContext);
        if (filterName != null) {
            parseContext.addNamedFilter(filterName, filter);
        }
        return filter;
    }
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_GeoPolygonFilterParser.java
|
385 |
/**
 * Action descriptor for the cluster-wide settings update API: binds the
 * transport action name to its request, response and builder types.
 */
public class ClusterUpdateSettingsAction extends ClusterAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse, ClusterUpdateSettingsRequestBuilder> {

    // Singleton: the action itself is stateless.
    public static final ClusterUpdateSettingsAction INSTANCE = new ClusterUpdateSettingsAction();
    // Name used to register and route the transport action.
    public static final String NAME = "cluster/settings/update";

    private ClusterUpdateSettingsAction() {
        super(NAME);
    }

    @Override
    public ClusterUpdateSettingsResponse newResponse() {
        return new ClusterUpdateSettingsResponse();
    }

    @Override
    public ClusterUpdateSettingsRequestBuilder newRequestBuilder(ClusterAdminClient client) {
        return new ClusterUpdateSettingsRequestBuilder(client);
    }
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_settings_ClusterUpdateSettingsAction.java
|
410 |
/**
 * Client-side view of the cluster, delegating to the client's cluster
 * service. There is no local member on a client, so getLocalMember()
 * always fails.
 */
public class ClientClusterProxy implements Cluster {

    private final ClientClusterServiceImpl clusterService;

    public ClientClusterProxy(ClientClusterServiceImpl clusterService) {
        this.clusterService = clusterService;
    }

    @Override
    public String addMembershipListener(MembershipListener listener) {
        return clusterService.addMembershipListenerWithInit(listener);
    }

    @Override
    public boolean removeMembershipListener(String registrationId) {
        return clusterService.removeMembershipListener(registrationId);
    }

    @Override
    public Set<Member> getMembers() {
        // Snapshot the current member list, preserving cluster order.
        final Collection<MemberImpl> memberList = clusterService.getMemberList();
        if (memberList == null) {
            return Collections.<Member>emptySet();
        }
        return new LinkedHashSet<Member>(memberList);
    }

    @Override
    public Member getLocalMember() {
        throw new UnsupportedOperationException("Client has no local member!");
    }

    @Override
    public long getClusterTime() {
        return clusterService.getClusterTime();
    }
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientClusterProxy.java
|
507 |
/**
 * Extensible type-safe enumeration of the days of a month (1-31). Each
 * instance registers itself in a static lookup map keyed by its type string,
 * so {@link #getInstance(String)} resolves "1".."31" to the shared constants.
 */
public class DayOfMonthType implements Serializable, BroadleafEnumerationType {

    private static final long serialVersionUID = 1L;

    // Lookup of type string ("1".."31") to the registered instance.
    private static final Map<String, DayOfMonthType> TYPES = new LinkedHashMap<String, DayOfMonthType>();

    public static final DayOfMonthType ONE = new DayOfMonthType("1", "01");
    public static final DayOfMonthType TWO = new DayOfMonthType("2", "02");
    public static final DayOfMonthType THREE = new DayOfMonthType("3", "03");
    public static final DayOfMonthType FOUR = new DayOfMonthType("4", "04");
    public static final DayOfMonthType FIVE = new DayOfMonthType("5", "05");
    public static final DayOfMonthType SIX = new DayOfMonthType("6", "06");
    public static final DayOfMonthType SEVEN = new DayOfMonthType("7", "07");
    public static final DayOfMonthType EIGHT = new DayOfMonthType("8", "08");
    public static final DayOfMonthType NINE = new DayOfMonthType("9", "09");
    public static final DayOfMonthType TEN = new DayOfMonthType("10", "10");
    public static final DayOfMonthType ELEVEN = new DayOfMonthType("11", "11");
    public static final DayOfMonthType TWELVE = new DayOfMonthType("12", "12");
    public static final DayOfMonthType THIRTEEN = new DayOfMonthType("13", "13");
    public static final DayOfMonthType FOURTEEN = new DayOfMonthType("14", "14");
    public static final DayOfMonthType FIFTEEN = new DayOfMonthType("15", "15");
    public static final DayOfMonthType SIXTEEN = new DayOfMonthType("16", "16");
    public static final DayOfMonthType SEVENTEEN = new DayOfMonthType("17", "17");
    public static final DayOfMonthType EIGHTEEN = new DayOfMonthType("18", "18");
    public static final DayOfMonthType NINETEEN = new DayOfMonthType("19", "19");
    public static final DayOfMonthType TWENTY = new DayOfMonthType("20", "20");
    public static final DayOfMonthType TWENTYONE = new DayOfMonthType("21", "21");
    // Misspelled name retained for backward compatibility with existing callers.
    public static final DayOfMonthType TWNETYTWO = new DayOfMonthType("22", "22");
    /** Correctly-spelled alias for the misspelled {@link #TWNETYTWO} constant (day 22). */
    public static final DayOfMonthType TWENTYTWO = TWNETYTWO;
    public static final DayOfMonthType TWENTYTHREE = new DayOfMonthType("23", "23");
    public static final DayOfMonthType TWENTYFOUR = new DayOfMonthType("24", "24");
    public static final DayOfMonthType TWENTYFIVE = new DayOfMonthType("25", "25");
    public static final DayOfMonthType TWENTYSIX = new DayOfMonthType("26", "26");
    public static final DayOfMonthType TWENTYSEVEN = new DayOfMonthType("27", "27");
    public static final DayOfMonthType TWENTYEIGHT = new DayOfMonthType("28", "28");
    public static final DayOfMonthType TWENTYNINE = new DayOfMonthType("29", "29");
    public static final DayOfMonthType THIRTY = new DayOfMonthType("30", "30");
    public static final DayOfMonthType THIRTYONE = new DayOfMonthType("31", "31");

    /** Returns the registered instance for the given type string, or null. */
    public static DayOfMonthType getInstance(final String type) {
        return TYPES.get(type);
    }

    private String type;
    private String friendlyType;

    public DayOfMonthType() {
        //do nothing
    }

    public DayOfMonthType(final String type, final String friendlyType) {
        this.friendlyType = friendlyType;
        setType(type);
    }

    public String getType() {
        return type;
    }

    public String getFriendlyType() {
        return friendlyType;
    }

    // Registers the instance in TYPES; duplicates are a programming error.
    private void setType(final String type) {
        this.type = type;
        if (!TYPES.containsKey(type)) {
            TYPES.put(type, this);
        } else {
            throw new RuntimeException("Cannot add the type: (" + type + "). It already exists as a type via " + getInstance(type).getClass().getName());
        }
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((type == null) ? 0 : type.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        DayOfMonthType other = (DayOfMonthType) obj;
        if (type == null) {
            if (other.type != null)
                return false;
        } else if (!type.equals(other.type))
            return false;
        return true;
    }
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_time_DayOfMonthType.java
|
262 |
/**
 * Strategy applied when a command exceeds its timeout: RETURN or EXCEPTION.
 * NOTE(review): exact semantics (return partial results vs. raise) are
 * enforced by the command executor, not visible here — confirm at call sites.
 */
public enum TIMEOUT_STRATEGY {
    RETURN, EXCEPTION
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandContext.java
|
1,249 |
/**
 * Fallback schema provider: fabricates permissive default definitions
 * (multi-edges, single-cardinality Object properties, plain vertex labels)
 * for any requested name. Relation types are never defaulted (null).
 */
public class DefaultSchemaProvider implements SchemaProvider {

    public static final DefaultSchemaProvider INSTANCE = new DefaultSchemaProvider();

    private DefaultSchemaProvider() {}

    @Override
    public EdgeLabelDefinition getEdgeLabel(String name) {
        return new EdgeLabelDefinition(name, FaunusElement.NO_ID, Multiplicity.MULTI, false);
    }

    @Override
    public PropertyKeyDefinition getPropertyKey(String name) {
        return new PropertyKeyDefinition(name, FaunusElement.NO_ID, Cardinality.SINGLE, Object.class);
    }

    @Override
    public RelationTypeDefinition getRelationType(String name) {
        return null;
    }

    @Override
    public VertexLabelDefinition getVertexLabel(String name) {
        return new VertexLabelDefinition(name, FaunusElement.NO_ID, false, false);
    }

    /** Wraps {@code provider} so lookups fall back to the default definitions. */
    public static SchemaProvider asBackupProvider(final SchemaProvider provider) {
        return asBackupProvider(provider, INSTANCE);
    }

    /**
     * Wraps {@code provider} so that any null edge-label, property-key or
     * vertex-label lookup falls back to {@code backup}; relation-type lookups
     * consult the primary provider only.
     */
    public static SchemaProvider asBackupProvider(final SchemaProvider provider, final SchemaProvider backup) {
        return new SchemaProvider() {
            @Override
            public EdgeLabelDefinition getEdgeLabel(String name) {
                EdgeLabelDefinition primary = provider.getEdgeLabel(name);
                return primary != null ? primary : backup.getEdgeLabel(name);
            }

            @Override
            public PropertyKeyDefinition getPropertyKey(String name) {
                PropertyKeyDefinition primary = provider.getPropertyKey(name);
                return primary != null ? primary : backup.getPropertyKey(name);
            }

            @Override
            public RelationTypeDefinition getRelationType(String name) {
                return provider.getRelationType(name);
            }

            @Override
            public VertexLabelDefinition getVertexLabel(String name) {
                VertexLabelDefinition primary = provider.getVertexLabel(name);
                return primary != null ? primary : backup.getVertexLabel(name);
            }
        };
    }
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_DefaultSchemaProvider.java
|
2,612 |
/**
 * Encodes and decodes strings to/from a custom wire format: a null flag,
 * the character count, then one length-prefixed modified-UTF-8 chunk per
 * {@link #STRING_CHUNK_SIZE} characters. A pluggable {@link StringCreator}
 * builds the decoded String, using a reflective fast path into String's
 * package-private constructor when available.
 */
public final class UTFEncoderDecoder {

    // Strings are split into 16K-char chunks so each chunk's encoded byte
    // length fits the unsigned short prefix written by writeShortUTF().
    private static final int STRING_CHUNK_SIZE = 16 * 1024;

    private static final UTFEncoderDecoder INSTANCE;

    static {
        INSTANCE = buildUTFUtil();
    }

    private final StringCreator stringCreator;
    private final boolean hazelcastEnterpriseActive;

    private UTFEncoderDecoder(boolean fastStringCreator) {
        this(fastStringCreator ? buildFastStringCreator() : new DefaultStringCreator(), false);
    }

    private UTFEncoderDecoder(StringCreator stringCreator, boolean hazelcastEnterpriseActive) {
        this.stringCreator = stringCreator;
        this.hazelcastEnterpriseActive = hazelcastEnterpriseActive;
    }

    public StringCreator getStringCreator() {
        return stringCreator;
    }

    /** Writes {@code str} (may be null) to {@code out}; {@code buffer} is scratch space. */
    public static void writeUTF(final DataOutput out, final String str, byte[] buffer) throws IOException {
        INSTANCE.writeUTF0(out, str, buffer);
    }

    /** Reads a string previously written by {@link #writeUTF}; may return null. */
    public static String readUTF(final DataInput in, byte[] buffer) throws IOException {
        return INSTANCE.readUTF0(in, buffer);
    }

    public boolean isHazelcastEnterpriseActive() {
        return hazelcastEnterpriseActive;
    }

    /** Writes the null flag, char count, and per-chunk UTF data for {@code str}. */
    public void writeUTF0(final DataOutput out, final String str, byte[] buffer) throws IOException {
        boolean isNull = str == null;
        out.writeBoolean(isNull);
        if (isNull) {
            return;
        }

        int length = str.length();
        out.writeInt(length);
        if (length > 0) {
            int chunkSize = (length / STRING_CHUNK_SIZE) + 1;
            for (int i = 0; i < chunkSize; i++) {
                // NOTE(review): chunks after the first start one char early
                // (i * STRING_CHUNK_SIZE - 1). readUTF0 uses the exact same
                // boundary arithmetic, so writer and reader stay in sync —
                // confirm before changing either side.
                int beginIndex = Math.max(0, i * STRING_CHUNK_SIZE - 1);
                int endIndex = Math.min((i + 1) * STRING_CHUNK_SIZE - 1, length);
                writeShortUTF(out, str, beginIndex, endIndex, buffer);
            }
        }
    }

    /**
     * Encodes str[beginIndex, endIndex) as modified UTF-8: an unsigned-short
     * byte length followed by the bytes, staged through {@code buffer}.
     */
    private void writeShortUTF(final DataOutput out,
                               final String str,
                               final int beginIndex,
                               final int endIndex,
                               byte[] buffer) throws IOException {
        int utfLength = 0;
        int c = 0;
        int count = 0;
        /* use charAt instead of copying String to char array */
        // First pass: compute the encoded byte length (1, 2 or 3 bytes per char).
        for (int i = beginIndex; i < endIndex; i++) {
            c = str.charAt(i);
            if ((c >= 0x0001) && (c <= 0x007F)) {
                utfLength++;
            } else if (c > 0x07FF) {
                utfLength += 3;
            } else {
                utfLength += 2;
            }
        }
        if (utfLength > 65535) {
            throw new UTFDataFormatException("encoded string too long:"
                    + utfLength + " bytes");
        }
        out.writeShort(utfLength);
        int i;
        // Fast path: emit leading ASCII run with single-byte encoding.
        for (i = beginIndex; i < endIndex; i++) {
            c = str.charAt(i);
            if (!((c >= 0x0001) && (c <= 0x007F))) {
                break;
            }
            buffering(buffer, count++, (byte) c, out);
        }
        // Slow path: mixed 1/2/3-byte encoding for the remainder.
        for (; i < endIndex; i++) {
            c = str.charAt(i);
            if ((c >= 0x0001) && (c <= 0x007F)) {
                buffering(buffer, count++, (byte) c, out);
            } else if (c > 0x07FF) {
                buffering(buffer, count++, (byte) (0xE0 | ((c >> 12) & 0x0F)), out);
                buffering(buffer, count++, (byte) (0x80 | ((c >> 6) & 0x3F)), out);
                buffering(buffer, count++, (byte) (0x80 | ((c) & 0x3F)), out);
            } else {
                buffering(buffer, count++, (byte) (0xC0 | ((c >> 6) & 0x1F)), out);
                buffering(buffer, count++, (byte) (0x80 | ((c) & 0x3F)), out);
            }
        }
        // Flush the partially filled tail of the staging buffer.
        int length = count % buffer.length;
        out.write(buffer, 0, length == 0 ? buffer.length : length);
    }

    /** Reads the null flag, char count, and per-chunk UTF data; mirrors writeUTF0. */
    public String readUTF0(final DataInput in, byte[] buffer) throws IOException {
        boolean isNull = in.readBoolean();
        if (isNull) {
            return null;
        }
        int length = in.readInt();
        final char[] data = new char[length];
        if (length > 0) {
            int chunkSize = length / STRING_CHUNK_SIZE + 1;
            for (int i = 0; i < chunkSize; i++) {
                // Same chunk-boundary arithmetic as writeUTF0 (see note there).
                int beginIndex = Math.max(0, i * STRING_CHUNK_SIZE - 1);
                int endIndex = Math.min((i + 1) * STRING_CHUNK_SIZE - 1, length);
                readShortUTF(in, data, beginIndex, endIndex, buffer);
            }
        }
        return stringCreator.buildString(data);
    }

    /**
     * Decodes one length-prefixed modified-UTF-8 chunk into
     * data[beginIndex, ...), staging reads through {@code buffer}.
     */
    private void readShortUTF(final DataInput in, final char[] data,
                              final int beginIndex, final int endIndex,
                              byte[] buffer) throws IOException {
        final int utflen = in.readShort();
        int c = 0;
        int char2 = 0;
        int char3 = 0;
        int count = 0;
        int charArrCount = beginIndex;
        int lastCount = -1;
        // Fast path: consume the leading single-byte (ASCII) run.
        while (count < utflen) {
            c = buffered(buffer, count, utflen, in) & 0xff;
            if (c > 127) {
                break;
            }
            lastCount = count;
            count++;
            data[charArrCount++] = (char) c;
        }
        // Slow path: decode remaining 1/2/3-byte sequences.
        while (count < utflen) {
            if (lastCount > -1 && lastCount < count) {
                c = buffered(buffer, count, utflen, in) & 0xff;
            }
            switch (c >> 4) {
                case 0:
                case 1:
                case 2:
                case 3:
                case 4:
                case 5:
                case 6:
                case 7:
                    /* 0xxxxxxx */
                    lastCount = count;
                    count++;
                    data[charArrCount++] = (char) c;
                    break;
                case 12:
                case 13:
                    /* 110x xxxx   10xx xxxx */
                    lastCount = count++;
                    if (count + 1 > utflen) {
                        throw new UTFDataFormatException("malformed input: partial character at end");
                    }
                    char2 = buffered(buffer, count++, utflen, in);
                    if ((char2 & 0xC0) != 0x80) {
                        throw new UTFDataFormatException("malformed input around byte " + count);
                    }
                    data[charArrCount++] = (char) (((c & 0x1F) << 6) | (char2 & 0x3F));
                    break;
                case 14:
                    /* 1110 xxxx  10xx xxxx  10xx xxxx */
                    lastCount = count++;
                    if (count + 2 > utflen) {
                        throw new UTFDataFormatException("malformed input: partial character at end");
                    }
                    char2 = buffered(buffer, count++, utflen, in);
                    char3 = buffered(buffer, count++, utflen, in);
                    if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) {
                        throw new UTFDataFormatException("malformed input around byte " + (count - 1));
                    }
                    data[charArrCount++] = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | ((char3 & 0x3F) << 0));
                    break;
                default:
                    /* 10xx xxxx,  1111 xxxx */
                    throw new UTFDataFormatException("malformed input around byte " + count);
            }
        }
    }

    // Stages one byte into the circular buffer, flushing it when it wraps.
    private void buffering(byte[] buffer, int pos, byte value, DataOutput out) throws IOException {
        int innerPos = pos % buffer.length;
        if (pos > 0 && innerPos == 0) {
            out.write(buffer, 0, buffer.length);
        }
        buffer[innerPos] = value;
    }

    // Returns the byte at logical position pos, refilling the buffer on wrap.
    private byte buffered(byte[] buffer, int pos, int utfLenght, DataInput in) throws IOException {
        int innerPos = pos % buffer.length;
        if (innerPos == 0) {
            int length = Math.min(buffer.length, utfLenght - pos);
            in.readFully(buffer, 0, length);
        }
        return buffer[innerPos];
    }

    /** True when String still has the pre-JDK7u6 (offset, count, char[]) constructor. */
    public static boolean useOldStringConstructor() {
        try {
            Class<String> clazz = String.class;
            clazz.getDeclaredConstructor(int.class, int.class, char[].class);
            return true;
        } catch (Throwable ignore) {
            // Constructor absent on newer JDKs; fall through to false.
        }
        return false;
    }

    // Selects the enterprise string creator when available, else the
    // (optionally disabled via -Dhazelcast.nio.faststring) reflective one.
    private static UTFEncoderDecoder buildUTFUtil() {
        try {
            Class<?> clazz = Class.forName("com.hazelcast.nio.utf8.EnterpriseStringCreator");
            Method method = clazz.getDeclaredMethod("findBestStringCreator");
            return new UTFEncoderDecoder((StringCreator) method.invoke(clazz), true);
        } catch (Throwable t) {
            // Enterprise classes not on the classpath; use the open-source path.
        }
        boolean faststringEnabled = Boolean.parseBoolean(System.getProperty("hazelcast.nio.faststring", "true"));
        return new UTFEncoderDecoder(faststringEnabled ? buildFastStringCreator() : new DefaultStringCreator(), false);
    }

    // Reflectively grabs a package-private String constructor that adopts the
    // char[] without copying; returns null when reflection is not permitted.
    private static StringCreator buildFastStringCreator() {
        try {
            // Give access to the package private String constructor
            Constructor<String> constructor = null;
            if (UTFEncoderDecoder.useOldStringConstructor()) {
                constructor = String.class.getDeclaredConstructor(int.class, int.class, char[].class);
            } else {
                constructor = String.class.getDeclaredConstructor(char[].class, boolean.class);
            }
            if (constructor != null) {
                constructor.setAccessible(true);
                return new FastStringCreator(constructor);
            }
        } catch (Throwable ignore) {
            // Reflection denied (e.g. security manager); caller falls back.
        }
        return null;
    }

    // Copies the char[] via the public String constructor.
    private static class DefaultStringCreator implements UTFEncoderDecoder.StringCreator {
        @Override
        public String buildString(char[] chars) {
            return new String(chars);
        }
    }

    // Builds Strings via a cached package-private constructor, avoiding a copy.
    private static class FastStringCreator implements UTFEncoderDecoder.StringCreator {

        private final Constructor<String> constructor;
        private final boolean useOldStringConstructor;

        public FastStringCreator(Constructor<String> constructor) {
            this.constructor = constructor;
            this.useOldStringConstructor = constructor.getParameterTypes().length == 3;
        }

        @Override
        public String buildString(char[] chars) {
            try {
                if (useOldStringConstructor) {
                    return constructor.newInstance(0, chars.length, chars);
                } else {
                    return constructor.newInstance(chars, Boolean.TRUE);
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /** Strategy for turning a decoded char[] into a String. */
    public interface StringCreator {
        String buildString(char[] chars);
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_UTFEncoderDecoder.java
|
910 |
public class TransportSuggestAction extends TransportBroadcastOperationAction<SuggestRequest, SuggestResponse, ShardSuggestRequest, ShardSuggestResponse> {
private final IndicesService indicesService;
private final SuggestPhase suggestPhase;
@Inject
public TransportSuggestAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, SuggestPhase suggestPhase) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
this.suggestPhase = suggestPhase;
}
@Override
protected String executor() {
return ThreadPool.Names.SUGGEST;
}
@Override
protected String transportAction() {
return SuggestAction.NAME;
}
@Override
protected SuggestRequest newRequest() {
return new SuggestRequest();
}
@Override
protected ShardSuggestRequest newShardRequest() {
return new ShardSuggestRequest();
}
@Override
protected ShardSuggestRequest newShardRequest(ShardRouting shard, SuggestRequest request) {
return new ShardSuggestRequest(shard.index(), shard.id(), request);
}
@Override
protected ShardSuggestResponse newShardResponse() {
return new ShardSuggestResponse();
}
@Override
protected GroupShardsIterator shards(ClusterState clusterState, SuggestRequest request, String[] concreteIndices) {
Map<String, Set<String>> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices());
return clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference());
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, SuggestRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, SuggestRequest countRequest, String[] concreteIndices) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices);
}
@Override
protected SuggestResponse newResponse(SuggestRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
int successfulShards = 0;
int failedShards = 0;
final Map<String, List<Suggest.Suggestion>> groupedSuggestions = new HashMap<String, List<Suggest.Suggestion>>();
List<ShardOperationFailedException> shardFailures = null;
for (int i = 0; i < shardsResponses.length(); i++) {
Object shardResponse = shardsResponses.get(i);
if (shardResponse == null) {
// simply ignore non active shards
} else if (shardResponse instanceof BroadcastShardOperationFailedException) {
failedShards++;
if (shardFailures == null) {
shardFailures = newArrayList();
}
shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
} else {
Suggest suggest = ((ShardSuggestResponse) shardResponse).getSuggest();
Suggest.group(groupedSuggestions, suggest);
successfulShards++;
}
}
return new SuggestResponse(new Suggest(Suggest.reduce(groupedSuggestions)), shardsResponses.length(), successfulShards, failedShards, shardFailures);
}
@Override
protected ShardSuggestResponse shardOperation(ShardSuggestRequest request) throws ElasticsearchException {
IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.shardSafe(request.shardId());
final Engine.Searcher searcher = indexShard.acquireSearcher("suggest");
XContentParser parser = null;
try {
BytesReference suggest = request.suggest();
if (suggest != null && suggest.length() > 0) {
parser = XContentFactory.xContent(suggest).createParser(suggest);
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchIllegalArgumentException("suggest content missing");
}
final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), request.index(), request.shardId());
final Suggest result = suggestPhase.execute(context, searcher.reader());
return new ShardSuggestResponse(request.index(), request.shardId(), result);
}
return new ShardSuggestResponse(request.index(), request.shardId(), new Suggest());
} catch (Throwable ex) {
throw new ElasticsearchException("failed to execute suggest", ex);
} finally {
searcher.release();
if (parser != null) {
parser.close();
}
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_suggest_TransportSuggestAction.java
|
1,150 |
public class OSQLMethodNormalize extends OAbstractSQLMethod {
public static final String NAME = "normalize";
public OSQLMethodNormalize() {
super(NAME, 0, 2);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
if (ioResult != null) {
final Normalizer.Form form = iMethodParams != null && iMethodParams.length > 0 ? Normalizer.Form
.valueOf(OStringSerializerHelper.getStringContent(iMethodParams[0].toString())) : Normalizer.Form.NFD;
String normalized = Normalizer.normalize(ioResult.toString(), form);
if (iMethodParams != null && iMethodParams.length > 1) {
normalized = normalized.replaceAll(OStringSerializerHelper.getStringContent(iMethodParams[0].toString()), "");
} else {
normalized = normalized.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
}
ioResult = normalized;
}
return ioResult;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodNormalize.java
|
1,053 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_PERSONAL_MESSAGE")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
public class PersonalMessageImpl implements PersonalMessage {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "PersonalMessageId")
@GenericGenerator(
name="PersonalMessageId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="PersonalMessageImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.PersonalMessageImpl")
}
)
@Column(name = "PERSONAL_MESSAGE_ID")
protected Long id;
@Column(name = "MESSAGE_TO")
@AdminPresentation(friendlyName = "PersonalMessageImpl_Message_To", order=1, group = "PersonalMessageImpl_Personal_Message")
protected String messageTo;
@Column(name = "MESSAGE_FROM")
@AdminPresentation(friendlyName = "PersonalMessageImpl_Message_From", order=2, group = "PersonalMessageImpl_Personal_Message")
protected String messageFrom;
@Column(name = "MESSAGE")
@AdminPresentation(friendlyName = "PersonalMessageImpl_Message", order=3, group = "PersonalMessageImpl_Personal_Message")
protected String message;
@Column(name = "OCCASION")
@AdminPresentation(friendlyName = "PersonalMessageImpl_Occasion", order=4, group = "PersonalMessageImpl_Personal_Message")
protected String occasion;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getMessageTo() {
return messageTo;
}
@Override
public void setMessageTo(String messageTo) {
this.messageTo = messageTo;
}
@Override
public String getMessageFrom() {
return messageFrom;
}
@Override
public void setMessageFrom(String messageFrom) {
this.messageFrom = messageFrom;
}
@Override
public String getMessage() {
return message;
}
@Override
public void setMessage(String message) {
this.message = message;
}
@Override
public String getOccasion() {
return occasion;
}
@Override
public void setOccasion(String occasion) {
this.occasion = occasion;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((message == null) ? 0 : message.hashCode());
result = prime * result + ((messageFrom == null) ? 0 : messageFrom.hashCode());
result = prime * result + ((messageTo == null) ? 0 : messageTo.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PersonalMessageImpl other = (PersonalMessageImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (message == null) {
if (other.message != null)
return false;
} else if (!message.equals(other.message))
return false;
if (messageFrom == null) {
if (other.messageFrom != null)
return false;
} else if (!messageFrom.equals(other.messageFrom))
return false;
if (messageTo == null) {
if (other.messageTo != null)
return false;
} else if (!messageTo.equals(other.messageTo))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_PersonalMessageImpl.java
|
327 |
EntryListener listener1 = new EntryAdapter() {
public void entryAdded(EntryEvent event) {
latch1Add.countDown();
}
public void entryRemoved(EntryEvent event) {
latch1Remove.countDown();
}
};
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTest.java
|
116 |
public static interface ForkJoinWorkerThreadFactory {
/**
* Returns a new worker thread operating in the given pool.
*
* @param pool the pool this thread works in
* @return the new worker thread
* @throws NullPointerException if the pool is null
*/
public ForkJoinWorkerThread newThread(ForkJoinPool pool);
}
| 0true
|
src_main_java_jsr166e_ForkJoinPool.java
|
266 |
public class OCommandExecutorNotFoundException extends OCommandExecutionException {
private static final long serialVersionUID = -7430575036316163711L;
public OCommandExecutorNotFoundException(String message, Throwable cause) {
super(message, cause);
}
public OCommandExecutorNotFoundException(String message) {
super(message);
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandExecutorNotFoundException.java
|
192 |
public interface AdminMainEntity {
public static final String MAIN_ENTITY_NAME_PROPERTY = "__adminMainEntity";
/**
* @return the display name of this entity for the admin screen
*/
public String getMainEntityName();
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_admin_domain_AdminMainEntity.java
|
133 |
public interface StructuredContentField extends Serializable {
/**
* Gets the primary key.
*
* @return the primary key
*/
@Nullable
public Long getId();
/**
* Sets the primary key.
*
* @param id the new primary key
*/
public void setId(@Nullable Long id);
/**
* Returns the fieldKey associated with this field. The key used for a
* <code>StructuredContentField</code> is determined by the associated
* {@link org.broadleafcommerce.cms.field.domain.FieldDefinition} that was used by the
* Content Management System to create this instance.
*
* As an example, a <code>StructuredContentType</code> might be configured to contain a
* field definition with a key of "targetUrl".
*
* @return the key associated with this item
* @see org.broadleafcommerce.cms.field.domain.FieldDefinition
*/
@Nonnull
public String getFieldKey();
/**
* Sets the fieldKey.
* @param fieldKey
* @see org.broadleafcommerce.cms.field.domain.FieldDefinition
*/
public void setFieldKey(@Nonnull String fieldKey);
/**
* Returns the parent <code>StructuredContent</code> item to which this
* field belongs.
*
* @return
*/
@Nonnull
public StructuredContent getStructuredContent();
/**
* Sets the parent <code>StructuredContent</code> item.
* @param structuredContent
*/
public void setStructuredContent(@Nonnull StructuredContent structuredContent);
/**
* Builds a copy of this item. Used by the content management system when an
* item is edited.
*
* @return a copy of this item
*/
@Nonnull
public StructuredContentField cloneEntity();
/**
* Returns the value for this custom field.
*
* @param value
*/
public void setValue(@Nonnull String value);
/**
* Sets the value of this custom field.
* @return
*/
@Nonnull
public String getValue();
/**
* Returns audit information for this content item.
*
* @return
*/
@Nullable
public AdminAuditable getAuditable();
/**
* Sets audit information for this content item. Default implementations automatically
* populate this data during persistence.
*
* @param auditable
*/
public void setAuditable(@Nullable AdminAuditable auditable);
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_StructuredContentField.java
|
1,268 |
return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<Response>>() {
@Override
public ActionFuture<Response> doWithNode(DiscoveryNode node) throws ElasticsearchException {
return proxy.execute(node, request);
}
});
| 1no label
|
src_main_java_org_elasticsearch_client_transport_support_InternalTransportClient.java
|
1,180 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_PAYMENT_LOG")
public class PaymentLogImpl implements PaymentLog {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "PaymentLogId")
@GenericGenerator(
name="PaymentLogId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="PaymentLogImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.payment.domain.PaymentLogImpl")
}
)
@Column(name = "PAYMENT_LOG_ID")
protected Long id;
@Column(name = "USER_NAME", nullable=false)
@Index(name="PAYMENTLOG_USER_INDEX", columnNames={"USER_NAME"})
@AdminPresentation(friendlyName = "PaymentLogImpl_User_Name", order = 1, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected String userName;
@Column(name = "TRANSACTION_TIMESTAMP", nullable=false)
@Temporal(TemporalType.TIMESTAMP)
@AdminPresentation(friendlyName = "PaymentLogImpl_Transaction_Time", order = 3, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected Date transactionTimestamp;
@Column(name = "ORDER_PAYMENT_ID")
@Index(name="PAYMENTLOG_ORDERPAYMENT_INDEX", columnNames={"ORDER_PAYMENT_ID"})
@AdminPresentation(excluded = true, readOnly = true)
protected Long paymentInfoId;
@ManyToOne(targetEntity = CustomerImpl.class)
@JoinColumn(name = "CUSTOMER_ID")
@Index(name="PAYMENTLOG_CUSTOMER_INDEX", columnNames={"CUSTOMER_ID"})
protected Customer customer;
@Column(name = "PAYMENT_INFO_REFERENCE_NUMBER")
@Index(name="PAYMENTLOG_REFERENCE_INDEX", columnNames={"PAYMENT_INFO_REFERENCE_NUMBER"})
@AdminPresentation(friendlyName = "PaymentLogImpl_Payment_Ref_Number", order = 4, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected String paymentInfoReferenceNumber;
@Column(name = "TRANSACTION_TYPE", nullable=false)
@Index(name="PAYMENTLOG_TRANTYPE_INDEX", columnNames={"TRANSACTION_TYPE"})
@AdminPresentation(friendlyName = "PaymentLogImpl_Transaction_Type", order = 5, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected String transactionType;
@Column(name = "TRANSACTION_SUCCESS")
@AdminPresentation(friendlyName = "PaymentLogImpl_Transaction_Successfule", order = 6, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected Boolean transactionSuccess = false;
@Column(name = "EXCEPTION_MESSAGE")
@AdminPresentation(friendlyName = "PaymentLogImpl_Exception_Message", order = 7, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected String exceptionMessage;
@Column(name = "LOG_TYPE", nullable=false)
@Index(name="PAYMENTLOG_LOGTYPE_INDEX", columnNames={"LOG_TYPE"})
@AdminPresentation(friendlyName = "PaymentLogImpl_Type", order = 8, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected String logType;
@Column(name = "AMOUNT_PAID", precision=19, scale=5)
@AdminPresentation(friendlyName = "PaymentLogImpl_Amount", order = 2, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected BigDecimal amountPaid;
@ManyToOne(targetEntity = BroadleafCurrencyImpl.class)
@JoinColumn(name = "CURRENCY_CODE")
@AdminPresentation(friendlyName = "PaymentLogImpl_currency", order = 2, group = "PaymentLogImpl_Payment_Log", readOnly = true)
protected BroadleafCurrency currency;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public String getUserName() {
return userName;
}
@Override
public void setUserName(String userName) {
this.userName = userName;
}
@Override
public Date getTransactionTimestamp() {
return transactionTimestamp;
}
@Override
public void setTransactionTimestamp(Date transactionTimestamp) {
this.transactionTimestamp = transactionTimestamp;
}
@Override
public Long getPaymentInfoId() {
return paymentInfoId;
}
@Override
public void setPaymentInfoId(Long paymentInfoId) {
this.paymentInfoId = paymentInfoId;
}
@Override
public Customer getCustomer() {
return customer;
}
@Override
public void setCustomer(Customer customer) {
this.customer = customer;
}
@Override
public String getPaymentInfoReferenceNumber() {
return paymentInfoReferenceNumber;
}
@Override
public void setPaymentInfoReferenceNumber(String paymentInfoReferenceNumber) {
this.paymentInfoReferenceNumber = paymentInfoReferenceNumber;
}
@Override
public TransactionType getTransactionType() {
return TransactionType.getInstance(transactionType);
}
@Override
public void setTransactionType(TransactionType transactionType) {
this.transactionType = transactionType.getType();
}
@Override
public PaymentLogEventType getLogType() {
return PaymentLogEventType.getInstance(logType);
}
@Override
public void setLogType(PaymentLogEventType logType) {
this.logType = logType.getType();
}
@Override
public Boolean getTransactionSuccess() {
if (transactionSuccess == null) {
return Boolean.FALSE;
} else {
return transactionSuccess;
}
}
@Override
public void setTransactionSuccess(Boolean transactionSuccess) {
this.transactionSuccess = transactionSuccess;
}
@Override
public String getExceptionMessage() {
return exceptionMessage;
}
@Override
public void setExceptionMessage(String exceptionMessage) {
this.exceptionMessage = exceptionMessage;
}
@Override
public Money getAmountPaid() {
return BroadleafCurrencyUtils.getMoney(amountPaid, currency);
}
@Override
public void setAmountPaid(Money amountPaid) {
this.amountPaid = Money.toAmount(amountPaid);
}
@Override
public BroadleafCurrency getCurrency() {
return currency;
}
@Override
public void setCurrency(BroadleafCurrency currency) {
this.currency = currency;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((customer == null) ? 0 : customer.hashCode());
result = prime * result + ((paymentInfoId == null) ? 0 : paymentInfoId.hashCode());
result = prime * result + ((paymentInfoReferenceNumber == null) ? 0 : paymentInfoReferenceNumber.hashCode());
result = prime * result + ((transactionTimestamp == null) ? 0 : transactionTimestamp.hashCode());
result = prime * result + ((userName == null) ? 0 : userName.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PaymentLogImpl other = (PaymentLogImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (customer == null) {
if (other.customer != null) {
return false;
}
} else if (!customer.equals(other.customer)) {
return false;
}
if (paymentInfoId == null) {
if (other.paymentInfoId != null) {
return false;
}
} else if (!paymentInfoId.equals(other.paymentInfoId)) {
return false;
}
if (paymentInfoReferenceNumber == null) {
if (other.paymentInfoReferenceNumber != null) {
return false;
}
} else if (!paymentInfoReferenceNumber.equals(other.paymentInfoReferenceNumber)) {
return false;
}
if (transactionTimestamp == null) {
if (other.transactionTimestamp != null) {
return false;
}
} else if (!transactionTimestamp.equals(other.transactionTimestamp)) {
return false;
}
if (userName == null) {
if (other.userName != null) {
return false;
}
} else if (!userName.equals(other.userName)) {
return false;
}
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_domain_PaymentLogImpl.java
|
152 |
static final class ReadMostlyVectorSublist<E>
implements List<E>, RandomAccess, java.io.Serializable {
private static final long serialVersionUID = 3041673470172026059L;
final ReadMostlyVector<E> list;
final int offset;
volatile int size;
ReadMostlyVectorSublist(ReadMostlyVector<E> list,
int offset, int size) {
this.list = list;
this.offset = offset;
this.size = size;
}
private void rangeCheck(int index) {
if (index < 0 || index >= size)
throw new ArrayIndexOutOfBoundsException(index);
}
public boolean add(E element) {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
int c = size;
list.rawAddAt(c + offset, element);
size = c + 1;
} finally {
lock.unlockWrite(stamp);
}
return true;
}
public void add(int index, E element) {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
if (index < 0 || index > size)
throw new ArrayIndexOutOfBoundsException(index);
list.rawAddAt(index + offset, element);
++size;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean addAll(Collection<? extends E> c) {
Object[] elements = c.toArray();
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
int s = size;
int pc = list.count;
list.rawAddAllAt(offset + s, elements);
int added = list.count - pc;
size = s + added;
return added != 0;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean addAll(int index, Collection<? extends E> c) {
Object[] elements = c.toArray();
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
int s = size;
if (index < 0 || index > s)
throw new ArrayIndexOutOfBoundsException(index);
int pc = list.count;
list.rawAddAllAt(index + offset, elements);
int added = list.count - pc;
size = s + added;
return added != 0;
} finally {
lock.unlockWrite(stamp);
}
}
public void clear() {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
list.internalClear(offset, offset + size);
size = 0;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean contains(Object o) {
return indexOf(o) >= 0;
}
public boolean containsAll(Collection<?> c) {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalContainsAll(c, offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public boolean equals(Object o) {
if (o == this)
return true;
if (!(o instanceof List))
return false;
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalEquals((List<?>)(o), offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public E get(int index) {
if (index < 0 || index >= size)
throw new ArrayIndexOutOfBoundsException(index);
return list.get(index + offset);
}
public int hashCode() {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalHashCode(offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public int indexOf(Object o) {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
int idx = findFirstIndex(list.array, o, offset, offset + size);
return idx < 0 ? -1 : idx - offset;
} finally {
lock.unlockRead(stamp);
}
}
public boolean isEmpty() {
return size() == 0;
}
public Iterator<E> iterator() {
return new SubItr<E>(this, offset);
}
public int lastIndexOf(Object o) {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
int idx = findLastIndex(list.array, o, offset + size - 1, offset);
return idx < 0 ? -1 : idx - offset;
} finally {
lock.unlockRead(stamp);
}
}
public ListIterator<E> listIterator() {
return new SubItr<E>(this, offset);
}
public ListIterator<E> listIterator(int index) {
return new SubItr<E>(this, index + offset);
}
public E remove(int index) {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
Object[] items = list.array;
int i = index + offset;
if (items == null || index < 0 || index >= size || i >= items.length)
throw new ArrayIndexOutOfBoundsException(index);
@SuppressWarnings("unchecked") E result = (E)items[i];
list.rawRemoveAt(i);
size--;
return result;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean remove(Object o) {
final StampedLock lock = list.lock;
long stamp = lock.writeLock();
try {
if (list.rawRemoveAt(findFirstIndex(list.array, o, offset,
offset + size))) {
--size;
return true;
}
else
return false;
} finally {
lock.unlockWrite(stamp);
}
}
public boolean removeAll(Collection<?> c) {
return list.lockedRemoveAll(c, offset, offset + size);
}
public boolean retainAll(Collection<?> c) {
return list.lockedRetainAll(c, offset, offset + size);
}
public E set(int index, E element) {
if (index < 0 || index >= size)
throw new ArrayIndexOutOfBoundsException(index);
return list.set(index+offset, element);
}
public int size() {
return size;
}
public List<E> subList(int fromIndex, int toIndex) {
int c = size;
int ssize = toIndex - fromIndex;
if (fromIndex < 0)
throw new ArrayIndexOutOfBoundsException(fromIndex);
if (toIndex > c || ssize < 0)
throw new ArrayIndexOutOfBoundsException(toIndex);
return new ReadMostlyVectorSublist<E>(list, offset+fromIndex, ssize);
}
public Object[] toArray() {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalToArray(offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public <T> T[] toArray(T[] a) {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalToArray(a, offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
public String toString() {
final StampedLock lock = list.lock;
long stamp = lock.readLock();
try {
return list.internalToString(offset, offset + size);
} finally {
lock.unlockRead(stamp);
}
}
}
| 0true
|
src_main_java_jsr166e_extra_ReadMostlyVector.java
|
459 |
@Service("blResourceBundlingService")
public class ResourceBundlingServiceImpl implements ResourceBundlingService {
protected static final Log LOG = LogFactory.getLog(ResourceBundlingServiceImpl.class);
protected static final String DEFAULT_STORAGE_DIRECTORY = System.getProperty("java.io.tmpdir");
// Map of known versioned bundle names ==> the resources that are part of that bundle
// ex: "global12345.js" ==> [Resource("/js/BLC.js"), Resource("/js/blc-admin.js")]
protected Map<String, Collection<Resource>> bundles = new HashMap<String, Collection<Resource>>();
// Map of known bundle names ==> bundle version
// ex: "global.js" ==> "global12345.js"
protected Cache bundleVersionsCache;
// Map of known unversioned bundle names ==> additional files that should be included
// Configured via XML
// ex: "global.js" ==> ["classpath:/file1.js", "/js/file2.js"]
protected Map<String, List<String>> additionalBundleFiles = new HashMap<String, List<String>>();
@Value("${asset.server.file.system.path}")
protected String assetFileSystemPath;
@javax.annotation.Resource(name = "blResourceMinificationService")
protected ResourceMinificationService minifyService;
@Override
public Resource getBundle(String versionedBundleName) {
// If we can find this bundle on the file system, we've already generated it
// and we don't need to do so again.
Resource r = readBundle(versionedBundleName);
if (r != null && r.exists()) {
return r;
}
// Otherwise, we'll create the bundle, write it to the file system, and return
r = createBundle(versionedBundleName);
saveBundle(r);
return r;
}
protected Resource readBundle(String versionedBundleName) {
return new FileSystemResource(getFilePath(versionedBundleName));
}
protected String getFilePath(String name) {
String base = StringUtils.isBlank(assetFileSystemPath) ? DEFAULT_STORAGE_DIRECTORY : assetFileSystemPath;
base = StringUtils.removeEnd(base, "/");
return base + "/bundles/" + name;
}
protected Resource createBundle(String versionedBundleName) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] bytes = null;
// Join all of the resources for this bundle together into a byte[]
try {
for (Resource r : bundles.get(versionedBundleName)) {
InputStream is = null;
try {
is = r.getInputStream();
StreamUtils.copy(r.getInputStream(), baos);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
is.close();
} catch (IOException e2) {
throw new RuntimeException(e2);
}
}
// If we're creating a JavaScript bundle, we'll put a semicolon between each
// file to ensure it won't fail to compile.
if (versionedBundleName.endsWith(".js")) {
baos.write(";\r\n".getBytes());
} else {
baos.write("\r\n".getBytes());
}
}
bytes = baos.toByteArray();
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
try {
baos.close();
} catch (IOException e2) {
throw new RuntimeException(e2);
}
}
// Minify the resource
byte[] minifiedBytes = minifyService.minify(versionedBundleName, bytes);
// Create our GenerateResource that holds our combined and (potentially) minified bundle
GeneratedResource r = new GeneratedResource(minifiedBytes, versionedBundleName);
return r;
}
protected void saveBundle(Resource resource) {
File file = new File(getFilePath(resource.getDescription()));
if (!file.getParentFile().exists()) {
if (!file.getParentFile().mkdirs()) {
throw new RuntimeException("Unable to create middle directories for file: " + file.getAbsolutePath());
}
}
try {
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(file));
StreamUtils.copy(resource.getInputStream(), out);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public String getVersionedBundleName(String unversionedBundleName) {
Element e = getBundleVersionsCache().get(unversionedBundleName);
return e == null ? null : (String) e.getValue();
}
@Override
public boolean hasBundle(String versionedBundle) {
return bundles.containsKey(versionedBundle);
}
@Override
public synchronized String registerBundle(String bundleName, List<String> files,
BroadleafResourceHttpRequestHandler handler) throws IOException {
LinkedHashMap<String, Resource> foundResources = new LinkedHashMap<String, Resource>();
if (additionalBundleFiles.get(bundleName) != null) {
files.addAll(additionalBundleFiles.get(bundleName));
}
for (String file : files) {
boolean match = false;
// Check to see if there is any registered handler that understands how to generate
// this file.
if (handler.getHandlers() != null) {
for (AbstractGeneratedResourceHandler h : handler.getHandlers()) {
if (h.canHandle(file)) {
foundResources.put(file, h.getResource(file, handler.getLocations()));
match = true;
break;
}
}
}
// If we didn't find a generator that could handle this file, let's see if we can
// look it up from our known locations
if (!match) {
for (Resource location : handler.getLocations()) {
try {
Resource resource = location.createRelative(file);
if (resource.exists() && resource.isReadable()) {
foundResources.put(file, resource);
match = true;
break;
}
}
catch (IOException ex) {
LOG.debug("Failed to create relative resource - trying next resource location", ex);
}
}
}
}
String version = getBundleVersion(foundResources);
String versionedName = getBundleName(bundleName, version);
bundles.put(versionedName, foundResources.values());
getBundleVersionsCache().put(new Element(bundleName, versionedName));
return versionedName;
}
protected String getBundleName(String bundleName, String version) {
String bundleWithoutExtension = bundleName.substring(0, bundleName.lastIndexOf('.'));
String bundleExtension = bundleName.substring(bundleName.lastIndexOf('.'));
String versionedName = bundleWithoutExtension + version + bundleExtension;
return versionedName;
}
protected String getBundleVersion(LinkedHashMap<String, Resource> foundResources) throws IOException {
StringBuilder sb = new StringBuilder();
for (Entry<String, Resource> entry : foundResources.entrySet()) {
sb.append(entry.getKey());
if (entry.getValue() instanceof GeneratedResource) {
sb.append(((GeneratedResource) entry.getValue()).getHashRepresentation());
} else {
sb.append(entry.getValue().lastModified());
}
sb.append("\r\n");
}
String version = String.valueOf(sb.toString().hashCode());
return version;
}
@Override
public List<String> getAdditionalBundleFiles(String bundleName) {
return additionalBundleFiles.get(bundleName);
}
public Map<String, List<String>> getAdditionalBundleFiles() {
return additionalBundleFiles;
}
public void setAdditionalBundleFiles(Map<String, List<String>> additionalBundleFiles) {
this.additionalBundleFiles = additionalBundleFiles;
}
protected Cache getBundleVersionsCache() {
if (bundleVersionsCache == null) {
bundleVersionsCache = CacheManager.getInstance().getCache("blBundleElements");
}
return bundleVersionsCache;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_resource_service_ResourceBundlingServiceImpl.java
|
588 |
public final class ConfigCheck implements IdentifiedDataSerializable {
private String groupName;
private String groupPassword;
private String joinerType;
private boolean partitionGroupEnabled;
private PartitionGroupConfig.MemberGroupType memberGroupType;
public ConfigCheck() {
}
public boolean isCompatible(ConfigCheck other) {
if (!groupName.equals(other.groupName)) {
return false;
}
if (!groupPassword.equals(other.groupPassword)) {
throw new HazelcastException("Incompatible group password!");
}
if (!joinerType.equals(other.joinerType)) {
throw new HazelcastException("Incompatible joiners! " + joinerType + " -vs- " + other.joinerType);
}
if (!partitionGroupEnabled && other.partitionGroupEnabled
|| partitionGroupEnabled && !other.partitionGroupEnabled) {
throw new HazelcastException("Incompatible partition groups! "
+ "this: " + (partitionGroupEnabled ? "enabled" : "disabled") + " / " + memberGroupType
+ ", other: " + (other.partitionGroupEnabled ? "enabled" : "disabled")
+ " / " + other.memberGroupType);
}
if (partitionGroupEnabled && memberGroupType != other.memberGroupType) {
throw new HazelcastException("Incompatible partition groups! this: " + memberGroupType + ", other: "
+ other.memberGroupType);
}
return true;
}
public ConfigCheck setGroupName(String groupName) {
this.groupName = groupName;
return this;
}
public ConfigCheck setGroupPassword(String groupPassword) {
this.groupPassword = groupPassword;
return this;
}
public ConfigCheck setJoinerType(String joinerType) {
this.joinerType = joinerType;
return this;
}
public ConfigCheck setPartitionGroupEnabled(boolean partitionGroupEnabled) {
this.partitionGroupEnabled = partitionGroupEnabled;
return this;
}
public ConfigCheck setMemberGroupType(PartitionGroupConfig.MemberGroupType memberGroupType) {
this.memberGroupType = memberGroupType;
return this;
}
@Override
public int getFactoryId() {
return ClusterDataSerializerHook.F_ID;
}
@Override
public int getId() {
return ClusterDataSerializerHook.CONFIG_CHECK;
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(groupName);
out.writeUTF(groupPassword);
out.writeUTF(joinerType);
out.writeBoolean(partitionGroupEnabled);
if (partitionGroupEnabled) {
out.writeUTF(memberGroupType.toString());
}
}
@Override
public void readData(ObjectDataInput in) throws IOException {
groupName = in.readUTF();
groupPassword = in.readUTF();
joinerType = in.readUTF();
partitionGroupEnabled = in.readBoolean();
if (partitionGroupEnabled) {
String s = in.readUTF();
try {
memberGroupType = PartitionGroupConfig.MemberGroupType.valueOf(s);
} catch (IllegalArgumentException ignored) {
}
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_cluster_ConfigCheck.java
|
117 |
public static final class ClientHelper {
private ClientHelper() {
}
public static void cleanResources(HazelcastClient client) {
closeSockets(client);
tryStopThreads(client);
tryShutdown(client);
}
private static void closeSockets(HazelcastClient client) {
final ClientConnectionManager connectionManager = client.getConnectionManager();
if (connectionManager != null) {
try {
connectionManager.shutdown();
} catch (Throwable ignored) {
}
}
}
private static void tryShutdown(HazelcastClient client) {
if (client == null) {
return;
}
try {
client.doShutdown();
} catch (Throwable ignored) {
}
}
public static void tryStopThreads(HazelcastClient client) {
if (client == null) {
return;
}
try {
client.getThreadGroup().interrupt();
} catch (Throwable ignored) {
}
}
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_ClientOutOfMemoryHandler.java
|
109 |
public static class Order {
public static final int Rules = 1000;
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageImpl.java
|
2,896 |
public static class InPredicate extends AbstractPredicate {
private Comparable[] values;
private volatile Set<Comparable> convertedInValues;
public InPredicate() {
}
public InPredicate(String attribute, Comparable... values) {
super(attribute);
this.values = values;
}
@Override
public boolean apply(Map.Entry entry) {
Comparable entryValue = readAttribute(entry);
Set<Comparable> set = convertedInValues;
if (set == null) {
set = new HashSet<Comparable>(values.length);
for (Comparable value : values) {
set.add(convert(entry, entryValue, value));
}
convertedInValues = set;
}
return entryValue != null && set.contains(entryValue);
}
@Override
public Set<QueryableEntry> filter(QueryContext queryContext) {
Index index = getIndex(queryContext);
if (index != null) {
return index.getRecords(values);
} else {
return null;
}
}
@Override
public void writeData(ObjectDataOutput out) throws IOException {
super.writeData(out);
out.writeInt(values.length);
for (Object value : values) {
out.writeObject(value);
}
}
@Override
public void readData(ObjectDataInput in) throws IOException {
super.readData(in);
int len = in.readInt();
values = new Comparable[len];
for (int i = 0; i < len; i++) {
values[i] = in.readObject();
}
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append(attribute);
sb.append(" IN (");
for (int i = 0; i < values.length; i++) {
if (i > 0) {
sb.append(",");
}
sb.append(values[i]);
}
sb.append(")");
return sb.toString();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_query_Predicates.java
|
535 |
class ShardGatewaySnapshotRequest extends BroadcastShardOperationRequest {
ShardGatewaySnapshotRequest() {
}
public ShardGatewaySnapshotRequest(String index, int shardId, GatewaySnapshotRequest request) {
super(index, shardId, request);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_gateway_snapshot_ShardGatewaySnapshotRequest.java
|
328 |
public class PluginInfo implements Streamable, Serializable, ToXContent {
public static final String DESCRIPTION_NOT_AVAILABLE = "No description found.";
public static final String VERSION_NOT_AVAILABLE = "NA";
static final class Fields {
static final XContentBuilderString NAME = new XContentBuilderString("name");
static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
static final XContentBuilderString URL = new XContentBuilderString("url");
static final XContentBuilderString JVM = new XContentBuilderString("jvm");
static final XContentBuilderString SITE = new XContentBuilderString("site");
static final XContentBuilderString VERSION = new XContentBuilderString("version");
}
private String name;
private String description;
private boolean site;
private boolean jvm;
private String version;
public PluginInfo() {
}
/**
* Information about plugins
*
* @param name Its name
* @param description Its description
* @param site true if it's a site plugin
* @param jvm true if it's a jvm plugin
* @param version Version number is applicable (NA otherwise)
*/
public PluginInfo(String name, String description, boolean site, boolean jvm, String version) {
this.name = name;
this.description = description;
this.site = site;
this.jvm = jvm;
if (Strings.hasText(version)) {
this.version = version;
} else {
this.version = VERSION_NOT_AVAILABLE;
}
}
/**
* @return Plugin's name
*/
public String getName() {
return name;
}
/**
* @return Plugin's description if any
*/
public String getDescription() {
return description;
}
/**
* @return true if it's a site plugin
*/
public boolean isSite() {
return site;
}
/**
* @return true if it's a plugin running in the jvm
*/
public boolean isJvm() {
return jvm;
}
/**
* We compute the URL for sites: "/_plugin/" + name + "/"
*
* @return relative URL for site plugin
*/
public String getUrl() {
if (site) {
return ("/_plugin/" + name + "/");
} else {
return null;
}
}
/**
* @return Version number for the plugin
*/
public String getVersion() {
return version;
}
public static PluginInfo readPluginInfo(StreamInput in) throws IOException {
PluginInfo info = new PluginInfo();
info.readFrom(in);
return info;
}
@Override
public void readFrom(StreamInput in) throws IOException {
this.name = in.readString();
this.description = in.readString();
this.site = in.readBoolean();
this.jvm = in.readBoolean();
if (in.getVersion().onOrAfter(Version.V_1_0_0_RC2)) {
this.version = in.readString();
} else {
this.version = VERSION_NOT_AVAILABLE;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(name);
out.writeString(description);
out.writeBoolean(site);
out.writeBoolean(jvm);
if (out.getVersion().onOrAfter(Version.V_1_0_0_RC2)) {
out.writeString(version);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(Fields.NAME, name);
builder.field(Fields.VERSION, version);
builder.field(Fields.DESCRIPTION, description);
if (site) {
builder.field(Fields.URL, getUrl());
}
builder.field(Fields.JVM, jvm);
builder.field(Fields.SITE, site);
builder.endObject();
return builder;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PluginInfo that = (PluginInfo) o;
if (!name.equals(that.name)) return false;
if (version != null ? !version.equals(that.version) : that.version != null) return false;
return true;
}
@Override
public int hashCode() {
return name.hashCode();
}
@Override
public String toString() {
final StringBuffer sb = new StringBuffer("PluginInfo{");
sb.append("name='").append(name).append('\'');
sb.append(", description='").append(description).append('\'');
sb.append(", site=").append(site);
sb.append(", jvm=").append(jvm);
sb.append(", version='").append(version).append('\'');
sb.append('}');
return sb.toString();
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_info_PluginInfo.java
|
375 |
public static class TestCombinerFactory
implements CombinerFactory<String, Integer, Integer> {
public TestCombinerFactory() {
}
@Override
public Combiner<String, Integer, Integer> newCombiner(String key) {
return new TestCombiner();
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java
|
3,729 |
public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
public static final String CONTENT_TYPE = "object";
public static final String NESTED_CONTENT_TYPE = "nested";
public static class Defaults {
public static final boolean ENABLED = true;
public static final Nested NESTED = Nested.NO;
public static final Dynamic DYNAMIC = null; // not set, inherited from root
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}
public static enum Dynamic {
TRUE,
FALSE,
STRICT
}
public static class Nested {
public static final Nested NO = new Nested(false, false, false);
public static Nested newNested(boolean includeInParent, boolean includeInRoot) {
return new Nested(true, includeInParent, includeInRoot);
}
private final boolean nested;
private final boolean includeInParent;
private final boolean includeInRoot;
private Nested(boolean nested, boolean includeInParent, boolean includeInRoot) {
this.nested = nested;
this.includeInParent = includeInParent;
this.includeInRoot = includeInRoot;
}
public boolean isNested() {
return nested;
}
public boolean isIncludeInParent() {
return includeInParent;
}
public boolean isIncludeInRoot() {
return includeInRoot;
}
}
public static class Builder<T extends Builder, Y extends ObjectMapper> extends Mapper.Builder<T, Y> {
protected boolean enabled = Defaults.ENABLED;
protected Nested nested = Defaults.NESTED;
protected Dynamic dynamic = Defaults.DYNAMIC;
protected ContentPath.Type pathType = Defaults.PATH_TYPE;
protected Boolean includeInAll;
protected final List<Mapper.Builder> mappersBuilders = newArrayList();
public Builder(String name) {
super(name);
this.builder = (T) this;
}
public T enabled(boolean enabled) {
this.enabled = enabled;
return builder;
}
public T dynamic(Dynamic dynamic) {
this.dynamic = dynamic;
return builder;
}
public T nested(Nested nested) {
this.nested = nested;
return builder;
}
public T pathType(ContentPath.Type pathType) {
this.pathType = pathType;
return builder;
}
public T includeInAll(boolean includeInAll) {
this.includeInAll = includeInAll;
return builder;
}
public T add(Mapper.Builder builder) {
mappersBuilders.add(builder);
return this.builder;
}
@Override
public Y build(BuilderContext context) {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(name);
Map<String, Mapper> mappers = new HashMap<String, Mapper>();
for (Mapper.Builder builder : mappersBuilders) {
Mapper mapper = builder.build(context);
mappers.put(mapper.name(), mapper);
}
context.path().pathType(origPathType);
context.path().remove();
ObjectMapper objectMapper = createMapper(name, context.path().fullPathAsText(name), enabled, nested, dynamic, pathType, mappers);
objectMapper.includeInAllIfNotSet(includeInAll);
return (Y) objectMapper;
}
protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
return new ObjectMapper(name, fullPath, enabled, nested, dynamic, pathType, mappers);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Map<String, Object> objectNode = node;
ObjectMapper.Builder builder = createBuilder(name);
boolean nested = false;
boolean nestedIncludeInParent = false;
boolean nestedIncludeInRoot = false;
for (Map.Entry<String, Object> entry : objectNode.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("dynamic")) {
String value = fieldNode.toString();
if (value.equalsIgnoreCase("strict")) {
builder.dynamic(Dynamic.STRICT);
} else {
builder.dynamic(nodeBooleanValue(fieldNode) ? Dynamic.TRUE : Dynamic.FALSE);
}
} else if (fieldName.equals("type")) {
String type = fieldNode.toString();
if (type.equals(CONTENT_TYPE)) {
builder.nested = Nested.NO;
} else if (type.equals(NESTED_CONTENT_TYPE)) {
nested = true;
} else {
throw new MapperParsingException("Trying to parse an object but has a different type [" + type + "] for [" + name + "]");
}
} else if (fieldName.equals("include_in_parent")) {
nestedIncludeInParent = nodeBooleanValue(fieldNode);
} else if (fieldName.equals("include_in_root")) {
nestedIncludeInRoot = nodeBooleanValue(fieldNode);
} else if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("path")) {
builder.pathType(parsePathType(name, fieldNode.toString()));
} else if (fieldName.equals("properties")) {
parseProperties(builder, (Map<String, Object>) fieldNode, parserContext);
} else if (fieldName.equals("include_in_all")) {
builder.includeInAll(nodeBooleanValue(fieldNode));
} else {
processField(builder, fieldName, fieldNode);
}
}
if (nested) {
builder.nested = Nested.newNested(nestedIncludeInParent, nestedIncludeInRoot);
}
return builder;
}
private void parseProperties(ObjectMapper.Builder objBuilder, Map<String, Object> propsNode, ParserContext parserContext) {
for (Map.Entry<String, Object> entry : propsNode.entrySet()) {
String propName = entry.getKey();
Map<String, Object> propNode = (Map<String, Object>) entry.getValue();
String type;
Object typeNode = propNode.get("type");
if (typeNode != null) {
type = typeNode.toString();
} else {
// lets see if we can derive this...
if (propNode.get("properties") != null) {
type = ObjectMapper.CONTENT_TYPE;
} else if (propNode.size() == 1 && propNode.get("enabled") != null) {
// if there is a single property with the enabled flag on it, make it an object
// (usually, setting enabled to false to not index any type, including core values, which
// non enabled object type supports).
type = ObjectMapper.CONTENT_TYPE;
} else {
throw new MapperParsingException("No type specified for property [" + propName + "]");
}
}
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + propName + "]");
}
objBuilder.add(typeParser.parse(propName, propNode, parserContext));
}
}
protected Builder createBuilder(String name) {
return object(name);
}
protected void processField(Builder builder, String fieldName, Object fieldNode) {
}
}
private final String name;
private final String fullPath;
private final boolean enabled;
private final Nested nested;
private final String nestedTypePathAsString;
private final BytesRef nestedTypePathAsBytes;
private final Filter nestedTypeFilter;
private volatile Dynamic dynamic;
private final ContentPath.Type pathType;
private Boolean includeInAll;
private volatile ImmutableOpenMap<String, Mapper> mappers = ImmutableOpenMap.of();
private final Object mutex = new Object();
ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
this.name = name;
this.fullPath = fullPath;
this.enabled = enabled;
this.nested = nested;
this.dynamic = dynamic;
this.pathType = pathType;
if (mappers != null) {
this.mappers = ImmutableOpenMap.builder(this.mappers).putAll(mappers).build();
}
this.nestedTypePathAsString = "__" + fullPath;
this.nestedTypePathAsBytes = new BytesRef(nestedTypePathAsString);
this.nestedTypeFilter = new TermFilter(new Term(TypeFieldMapper.NAME, nestedTypePathAsBytes));
}
@Override
public String name() {
return this.name;
}
@Override
public void includeInAll(Boolean includeInAll) {
if (includeInAll == null) {
return;
}
this.includeInAll = includeInAll;
// when called from outside, apply this on all the inner mappers
for (ObjectObjectCursor<String, Mapper> cursor : mappers) {
if (cursor.value instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) cursor.value).includeInAll(includeInAll);
}
}
}
@Override
public void includeInAllIfNotSet(Boolean includeInAll) {
if (this.includeInAll == null) {
this.includeInAll = includeInAll;
}
// when called from outside, apply this on all the inner mappers
for (ObjectObjectCursor<String, Mapper> cursor : mappers) {
if (cursor.value instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) cursor.value).includeInAllIfNotSet(includeInAll);
}
}
}
@Override
public void unsetIncludeInAll() {
includeInAll = null;
// when called from outside, apply this on all the inner mappers
for (ObjectObjectCursor<String, Mapper> cursor : mappers) {
if (cursor.value instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) cursor.value).unsetIncludeInAll();
}
}
}
public Nested nested() {
return this.nested;
}
public Filter nestedTypeFilter() {
return this.nestedTypeFilter;
}
public ObjectMapper putMapper(Mapper mapper) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
}
synchronized (mutex) {
this.mappers = ImmutableOpenMap.builder(this.mappers).fPut(mapper.name(), mapper).build();
}
return this;
}
@Override
public void traverse(FieldMapperListener fieldMapperListener) {
for (ObjectObjectCursor<String, Mapper> cursor : mappers) {
cursor.value.traverse(fieldMapperListener);
}
}
@Override
public void traverse(ObjectMapperListener objectMapperListener) {
objectMapperListener.objectMapper(this);
for (ObjectObjectCursor<String, Mapper> cursor : mappers) {
cursor.value.traverse(objectMapperListener);
}
}
public String fullPath() {
return this.fullPath;
}
public BytesRef nestedTypePathAsBytes() {
return nestedTypePathAsBytes;
}
public String nestedTypePathAsString() {
return nestedTypePathAsString;
}
public final Dynamic dynamic() {
return this.dynamic == null ? Dynamic.TRUE : this.dynamic;
}
protected boolean allowValue() {
return true;
}
public void parse(ParseContext context) throws IOException {
if (!enabled) {
context.parser().skipChildren();
return;
}
XContentParser parser = context.parser();
String currentFieldName = parser.currentName();
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
// the object is null ("obj1" : null), simply bail
return;
}
if (token.isValue() && !allowValue()) {
// if we are parsing an object but it is just a value, its only allowed on root level parsers with there
// is a field name with the same name as the type
throw new MapperParsingException("object mapping for [" + name + "] tried to parse as object, but found a concrete value");
}
Document restoreDoc = null;
if (nested.isNested()) {
Document nestedDoc = new Document();
// pre add the uid field if possible (id was already provided)
IndexableField uidField = context.doc().getField(UidFieldMapper.NAME);
if (uidField != null) {
// we don't need to add it as a full uid field in nested docs, since we don't need versioning
// we also rely on this for UidField#loadVersion
// this is a deeply nested field
nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
}
// the type of the nested doc starts with __, so we can identify that its a nested one in filters
// note, we don't prefix it with the type of the doc since it allows us to execute a nested query
// across types (for example, with similar nested objects)
nestedDoc.add(new Field(TypeFieldMapper.NAME, nestedTypePathAsString, TypeFieldMapper.Defaults.FIELD_TYPE));
restoreDoc = context.switchDoc(nestedDoc);
context.addDoc(nestedDoc);
}
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
// if we are at the end of the previous object, advance
if (token == XContentParser.Token.END_OBJECT) {
token = parser.nextToken();
}
if (token == XContentParser.Token.START_OBJECT) {
// if we are just starting an OBJECT, advance, this is the object we are parsing, we need the name first
token = parser.nextToken();
}
while (token != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.START_OBJECT) {
serializeObject(context, currentFieldName);
} else if (token == XContentParser.Token.START_ARRAY) {
serializeArray(context, currentFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) {
serializeNullValue(context, currentFieldName);
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + name + "] tried to parse as object, but got EOF, has a concrete value been provided to it?");
} else if (token.isValue()) {
serializeValue(context, currentFieldName, token);
}
token = parser.nextToken();
}
// restore the enable path flag
context.path().pathType(origPathType);
if (nested.isNested()) {
Document nestedDoc = context.switchDoc(restoreDoc);
if (nested.isIncludeInParent()) {
for (IndexableField field : nestedDoc.getFields()) {
if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
continue;
} else {
context.doc().add(field);
}
}
}
if (nested.isIncludeInRoot()) {
// don't add it twice, if its included in parent, and we are handling the master doc...
if (!(nested.isIncludeInParent() && context.doc() == context.rootDoc())) {
for (IndexableField field : nestedDoc.getFields()) {
if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
continue;
} else {
context.rootDoc().add(field);
}
}
}
}
}
}
private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException {
// we can only handle null values if we have mappings for them
Mapper mapper = mappers.get(lastFieldName);
if (mapper != null) {
mapper.parse(context);
}
}
private void serializeObject(final ParseContext context, String currentFieldName) throws IOException {
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
context.path().add(currentFieldName);
Mapper objectMapper = mappers.get(currentFieldName);
if (objectMapper != null) {
objectMapper.parse(context);
} else {
Dynamic dynamic = this.dynamic;
if (dynamic == null) {
dynamic = context.root().dynamic();
}
if (dynamic == Dynamic.STRICT) {
throw new StrictDynamicMappingException(fullPath, currentFieldName);
} else if (dynamic == Dynamic.TRUE) {
// we sync here just so we won't add it twice. Its not the end of the world
// to sync here since next operations will get it before
synchronized (mutex) {
objectMapper = mappers.get(currentFieldName);
if (objectMapper == null) {
// remove the current field name from path, since template search and the object builder add it as well...
context.path().remove();
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
if (builder == null) {
builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(pathType);
// if this is a non root object, then explicitly set the dynamic behavior if set
if (!(this instanceof RootObjectMapper) && this.dynamic != Defaults.DYNAMIC) {
((Builder) builder).dynamic(this.dynamic);
}
}
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
objectMapper = builder.build(builderContext);
// ...now re add it
context.path().add(currentFieldName);
context.setMappingsModified();
if (context.isWithinNewMapper()) {
// within a new mapper, no need to traverse, just parse
objectMapper.parse(context);
} else {
// create a context of new mapper, so we batch aggregate all the changes within
// this object mapper once, and traverse all of them to add them in a single go
context.setWithinNewMapper();
try {
objectMapper.parse(context);
FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator();
ObjectMapperListener.Aggregator newObjects = new ObjectMapperListener.Aggregator();
objectMapper.traverse(newFields);
objectMapper.traverse(newObjects);
// callback on adding those fields!
context.docMapper().addFieldMappers(newFields.mappers);
context.docMapper().addObjectMappers(newObjects.mappers);
} finally {
context.clearWithinNewMapper();
}
}
// only put after we traversed and did the callbacks, so other parsing won't see it only after we
// properly traversed it and adding the mappers
putMapper(objectMapper);
} else {
objectMapper.parse(context);
}
}
} else {
// not dynamic, read everything up to end object
context.parser().skipChildren();
}
}
context.path().remove();
}
private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
String arrayFieldName = lastFieldName;
Mapper mapper = mappers.get(lastFieldName);
if (mapper != null && mapper instanceof ArrayValueMapperParser) {
mapper.parse(context);
} else {
XContentParser parser = context.parser();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
serializeObject(context, lastFieldName);
} else if (token == XContentParser.Token.START_ARRAY) {
serializeArray(context, lastFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) {
lastFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) {
serializeNullValue(context, lastFieldName);
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
} else {
serializeValue(context, lastFieldName, token);
}
}
}
}
private void serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
Mapper mapper = mappers.get(currentFieldName);
if (mapper != null) {
mapper.parse(context);
} else {
parseDynamicValue(context, currentFieldName, token);
}
}
public void parseDynamicValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
Dynamic dynamic = this.dynamic;
if (dynamic == null) {
dynamic = context.root().dynamic();
}
if (dynamic == Dynamic.STRICT) {
throw new StrictDynamicMappingException(fullPath, currentFieldName);
}
if (dynamic == Dynamic.FALSE) {
return;
}
// we sync here since we don't want to add this field twice to the document mapper
// its not the end of the world, since we add it to the mappers once we create it
// so next time we won't even get here for this field
synchronized (mutex) {
Mapper mapper = mappers.get(currentFieldName);
if (mapper == null) {
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
if (token == XContentParser.Token.VALUE_STRING) {
boolean resolved = false;
// do a quick test to see if its fits a dynamic template, if so, use it.
// we need to do it here so we can handle things like attachment templates, where calling
// text (to see if its a date) causes the binary value to be cleared
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
if (builder != null) {
mapper = builder.build(builderContext);
resolved = true;
}
}
if (!resolved && context.parser().textLength() == 0) {
// empty string with no mapping, treat it like null value
return;
}
if (!resolved && context.root().dateDetection()) {
String text = context.parser().text();
// a safe check since "1" gets parsed as well
if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) {
for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
dateTimeFormatter.parser().parseMillis(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {
builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
}
mapper = builder.build(builderContext);
resolved = true;
break;
} catch (Exception e) {
// failure to parse this, continue
}
}
}
}
if (!resolved && context.root().numericDetection()) {
String text = context.parser().text();
try {
Long.parseLong(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a long number
}
if (!resolved) {
try {
Double.parseDouble(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a long number
}
}
}
// DON'T do automatic ip detection logic, since it messes up with docs that have hosts and ips
// check if its an ip
// if (!resolved && text.indexOf('.') != -1) {
// try {
// IpFieldMapper.ipToLong(text);
// XContentMapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "ip");
// if (builder == null) {
// builder = ipField(currentFieldName);
// }
// mapper = builder.build(builderContext);
// resolved = true;
// } catch (Exception e) {
// // failure to parse, not ip...
// }
// }
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = stringField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
XContentParser.NumberType numberType = context.parser().numberType();
if (numberType == XContentParser.NumberType.INT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = integerField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.LONG) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (numberType == XContentParser.NumberType.FLOAT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = floatField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.DOUBLE) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
if (builder == null) {
builder = booleanField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
if (builder == null) {
builder = binaryField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
if (builder != null) {
mapper = builder.build(builderContext);
} else {
// TODO how do we identify dynamically that its a binary value?
throw new ElasticsearchIllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
}
}
if (context.isWithinNewMapper()) {
mapper.parse(context);
} else {
context.setWithinNewMapper();
try {
mapper.parse(context);
FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator();
mapper.traverse(newFields);
context.docMapper().addFieldMappers(newFields.mappers);
} finally {
context.clearWithinNewMapper();
}
}
// only put after we traversed and did the callbacks, so other parsing won't see it only after we
// properly traversed it and adding the mappers
putMapper(mapper);
context.setMappingsModified();
} else {
mapper.parse(context);
}
}
}
@Override
public void merge(final Mapper mergeWith, final MergeContext mergeContext) throws MergeMappingException {
    // An object mapping can only be merged with another object mapping.
    if (!(mergeWith instanceof ObjectMapper)) {
        mergeContext.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
        return;
    }
    ObjectMapper mergeWithObject = (ObjectMapper) mergeWith;
    // The nested flag is immutable: flipping it either way is a conflict.
    if (nested().isNested()) {
        if (!mergeWithObject.nested().isNested()) {
            mergeContext.addConflict("object mapping [" + name() + "] can't be changed from nested to non-nested");
            return;
        }
    } else {
        if (mergeWithObject.nested().isNested()) {
            mergeContext.addConflict("object mapping [" + name() + "] can't be changed from non-nested to nested");
            return;
        }
    }
    // Only mutate state when this is a real merge, not a simulation.
    if (!mergeContext.mergeFlags().simulate()) {
        if (mergeWithObject.dynamic != null) {
            this.dynamic = mergeWithObject.dynamic;
        }
    }
    // Subclass hook for merging additional state (e.g. root-level settings).
    doMerge(mergeWithObject, mergeContext);
    List<Mapper> mappersToPut = new ArrayList<Mapper>();
    FieldMapperListener.Aggregator newFieldMappers = new FieldMapperListener.Aggregator();
    ObjectMapperListener.Aggregator newObjectMappers = new ObjectMapperListener.Aggregator();
    synchronized (mutex) {
        for (ObjectObjectCursor<String, Mapper> cursor : mergeWithObject.mappers) {
            Mapper mergeWithMapper = cursor.value;
            Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
            if (mergeIntoMapper == null) {
                // no mapping, simply add it if not simulating
                if (!mergeContext.mergeFlags().simulate()) {
                    mappersToPut.add(mergeWithMapper);
                    // Collect the new field/object mappers so the doc mapper can be
                    // notified before the mapper becomes visible to readers.
                    mergeWithMapper.traverse(newFieldMappers);
                    mergeWithMapper.traverse(newObjectMappers);
                }
            } else {
                // Existing mapper for the same name: merge recursively.
                mergeIntoMapper.merge(mergeWithMapper, mergeContext);
            }
        }
        if (!newFieldMappers.mappers.isEmpty()) {
            mergeContext.docMapper().addFieldMappers(newFieldMappers.mappers);
        }
        if (!newObjectMappers.mappers.isEmpty()) {
            mergeContext.docMapper().addObjectMappers(newObjectMappers.mappers);
        }
        // and the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock)
        for (Mapper mapper : mappersToPut) {
            putMapper(mapper);
        }
    }
}
/** Hook for subclasses to merge additional, subclass-specific state; no-op by default. */
protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {
}
@Override
public void close() {
    // Propagate close to every child mapper so they can release their resources.
    for (ObjectObjectCursor<String, Mapper> child : mappers) {
        child.value.close();
    }
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    // Delegate to the extended overload with no custom section and no extra mappers.
    toXContent(builder, params, null, Mapper.EMPTY_ARRAY);
    return builder;
}
/**
 * Serializes this object mapping. Output order: type/nested flags, dynamic,
 * enabled, path, include_in_all, the optional custom section, subclass fields,
 * internal mappers, {@code additionalMappers}, and finally "properties".
 * Mappers are sorted by name so the serialized form is deterministic.
 */
public void toXContent(XContentBuilder builder, Params params, ToXContent custom, Mapper... additionalMappers) throws IOException {
    builder.startObject(name);
    if (nested.isNested()) {
        builder.field("type", NESTED_CONTENT_TYPE);
        if (nested.isIncludeInParent()) {
            builder.field("include_in_parent", true);
        }
        if (nested.isIncludeInRoot()) {
            builder.field("include_in_root", true);
        }
    } else if (mappers.isEmpty()) { // only write the object content type if there are no properties, otherwise, it is automatically detected
        builder.field("type", CONTENT_TYPE);
    }
    if (dynamic != null) {
        builder.field("dynamic", dynamic.name().toLowerCase(Locale.ROOT));
    }
    if (enabled != Defaults.ENABLED) {
        builder.field("enabled", enabled);
    }
    if (pathType != Defaults.PATH_TYPE) {
        builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
    }
    if (includeInAll != null) {
        builder.field("include_in_all", includeInAll);
    }
    if (custom != null) {
        custom.toXContent(builder, params);
    }
    // Subclass hook for extra fields.
    doXContent(builder, params);
    // sort the mappers so we get consistent serialization format
    TreeMap<String, Mapper> sortedMappers = new TreeMap<String, Mapper>();
    for (ObjectObjectCursor<String, Mapper> cursor : mappers) {
        sortedMappers.put(cursor.key, cursor.value);
    }
    // check internal mappers first (this is only relevant for root object)
    for (Mapper mapper : sortedMappers.values()) {
        if (mapper instanceof InternalMapper) {
            mapper.toXContent(builder, params);
        }
    }
    if (additionalMappers != null && additionalMappers.length > 0) {
        // Additional mappers are also emitted in name order for determinism.
        TreeMap<String, Mapper> additionalSortedMappers = new TreeMap<String, Mapper>();
        for (Mapper mapper : additionalMappers) {
            additionalSortedMappers.put(mapper.name(), mapper);
        }
        for (Mapper mapper : additionalSortedMappers.values()) {
            mapper.toXContent(builder, params);
        }
    }
    if (!mappers.isEmpty()) {
        builder.startObject("properties");
        // Internal mappers were already written above, so skip them here.
        for (Mapper mapper : sortedMappers.values()) {
            if (!(mapper instanceof InternalMapper)) {
                mapper.toXContent(builder, params);
            }
        }
        builder.endObject();
    }
    builder.endObject();
}
/** Hook for subclasses to emit additional fields during serialization; no-op by default. */
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_mapper_object_ObjectMapper.java
|
5,425 |
/**
 * A bytes source that pipes every value of a delegate source through a script:
 * each raw value is exposed to the script as the variable {@code _value} and the
 * script's string result is returned instead.
 */
public static class WithScript extends Bytes {

    private final BytesValues scriptedValues;

    public WithScript(FieldDataSource delegate, SearchScript script) {
        this.scriptedValues = new BytesValues(delegate, script);
    }

    @Override
    public MetaData metaData() {
        // Script output is opaque, so no metadata can be reported.
        return MetaData.UNKNOWN;
    }

    @Override
    public BytesValues bytesValues() {
        return scriptedValues;
    }

    static class BytesValues extends org.elasticsearch.index.fielddata.BytesValues {

        private final FieldDataSource delegate;
        private final SearchScript transform;
        // Reused buffer for the transformed value; callers must copy if they
        // need to retain it across nextValue() calls.
        private final BytesRef spare;

        public BytesValues(FieldDataSource delegate, SearchScript transform) {
            super(true);
            this.delegate = delegate;
            this.transform = transform;
            this.spare = new BytesRef();
        }

        @Override
        public int setDocument(int docId) {
            return delegate.bytesValues().setDocument(docId);
        }

        @Override
        public BytesRef nextValue() {
            final BytesRef raw = delegate.bytesValues().nextValue();
            transform.setNextVar("_value", raw.utf8ToString());
            spare.copyChars(transform.run().toString());
            return spare;
        }
    }
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_support_FieldDataSource.java
|
2,315 |
/**
 * Node-side service behind the map/reduce API: owns the per-name
 * {@link NodeJobTracker}s (created lazily as distributed objects) and the
 * per-(name, jobId) {@link JobSupervisor}s, and routes processing operations
 * and notifications between cluster members.
 */
public class MapReduceService
        implements ManagedService, RemoteService {

    /**
     * The service name to retrieve an instance of the MapReduceService
     */
    public static final String SERVICE_NAME = "hz:impl:mapReduceService";

    private static final ILogger LOGGER = Logger.getLogger(MapReduceService.class);

    private static final int DEFAULT_RETRY_SLEEP_MILLIS = 100;

    // Lazily builds a NodeJobTracker from its configured (read-only) JobTrackerConfig.
    private final ConstructorFunction<String, NodeJobTracker> constructor = new ConstructorFunction<String, NodeJobTracker>() {
        @Override
        public NodeJobTracker createNew(String arg) {
            JobTrackerConfig jobTrackerConfig = config.findJobTrackerConfig(arg);
            return new NodeJobTracker(arg, jobTrackerConfig.getAsReadOnly(), nodeEngine, MapReduceService.this);
        }
    };

    private final ConcurrentMap<String, NodeJobTracker> jobTrackers;
    private final ConcurrentMap<JobSupervisorKey, JobSupervisor> jobSupervisors;
    private final InternalPartitionService partitionService;
    private final ClusterService clusterService;
    private final NodeEngineImpl nodeEngine;
    private final Config config;

    public MapReduceService(NodeEngine nodeEngine) {
        this.config = nodeEngine.getConfig();
        this.nodeEngine = (NodeEngineImpl) nodeEngine;
        this.clusterService = nodeEngine.getClusterService();
        this.partitionService = nodeEngine.getPartitionService();
        this.jobTrackers = new ConcurrentHashMap<String, NodeJobTracker>();
        this.jobSupervisors = new ConcurrentHashMap<JobSupervisorKey, JobSupervisor>();
    }

    /** Returns (creating if necessary) the JobTracker with the given name. */
    public JobTracker getJobTracker(String name) {
        return (JobTracker) createDistributedObject(name);
    }

    /** Returns the supervisor for the given tracker name and jobId, or null if none. */
    public JobSupervisor getJobSupervisor(String name, String jobId) {
        JobSupervisorKey key = new JobSupervisorKey(name, jobId);
        return jobSupervisors.get(key);
    }

    /**
     * Registers a cancellation for the given job and, when this member owns the
     * job, fans the cancellation out to all other cluster members.
     *
     * @return true if the cancellation was newly registered on the job owner
     */
    public boolean registerJobSupervisorCancellation(String name, String jobId, Address jobOwner) {
        NodeJobTracker jobTracker = (NodeJobTracker) createDistributedObject(name);
        if (jobTracker.registerJobSupervisorCancellation(jobId) && getLocalAddress().equals(jobOwner)) {
            for (MemberImpl member : clusterService.getMemberList()) {
                if (!member.getAddress().equals(jobOwner)) {
                    try {
                        ProcessingOperation operation = new CancelJobSupervisorOperation(name, jobId);
                        processRequest(member.getAddress(), operation, name);
                    } catch (Exception ignore) {
                        // Best-effort fan-out: a member may have left concurrently.
                        LOGGER.finest("Member might be already unavailable", ignore);
                    }
                }
            }
            return true;
        }
        return false;
    }

    /** Removes a previously registered cancellation; returns true if one was present. */
    public boolean unregisterJobSupervisorCancellation(String name, String jobId) {
        NodeJobTracker jobTracker = (NodeJobTracker) createDistributedObject(name);
        return jobTracker.unregisterJobSupervisorCancellation(jobId);
    }

    /**
     * Creates (or returns the already registered) supervisor for the job described
     * by the configuration. Returns null when the job was cancelled before the
     * supervisor could be created (async cancellation race).
     */
    public JobSupervisor createJobSupervisor(JobTaskConfiguration configuration) {
        // Job might already be cancelled (due to async processing)
        NodeJobTracker jobTracker = (NodeJobTracker) createDistributedObject(configuration.getName());
        if (jobTracker.unregisterJobSupervisorCancellation(configuration.getJobId())) {
            return null;
        }
        JobSupervisorKey key = new JobSupervisorKey(configuration.getName(), configuration.getJobId());
        boolean ownerNode = nodeEngine.getThisAddress().equals(configuration.getJobOwner());
        JobSupervisor jobSupervisor = new JobSupervisor(configuration, jobTracker, ownerNode, this);
        JobSupervisor oldSupervisor = jobSupervisors.putIfAbsent(key, jobSupervisor);
        return oldSupervisor != null ? oldSupervisor : jobSupervisor;
    }

    /** Removes the supervisor's registration; returns true if it was the registered one. */
    public boolean destroyJobSupervisor(JobSupervisor supervisor) {
        String name = supervisor.getConfiguration().getName();
        String jobId = supervisor.getConfiguration().getJobId();
        NodeJobTracker jobTracker = (NodeJobTracker) createDistributedObject(name);
        if (jobTracker != null) {
            jobTracker.unregisterJobSupervisorCancellation(jobId);
        }
        JobSupervisorKey key = new JobSupervisorKey(supervisor);
        return jobSupervisors.remove(key) == supervisor;
    }

    /** Returns the executor dedicated to the job tracker with the given (raw) name. */
    public ExecutorService getExecutorService(String name) {
        return nodeEngine.getExecutionService().getExecutor(MapReduceUtil.buildExecutorName(name));
    }

    @Override
    public void init(NodeEngine nodeEngine, Properties properties) {
    }

    @Override
    public void reset() {
    }

    @Override
    public void shutdown(boolean terminate) {
        for (JobTracker jobTracker : jobTrackers.values()) {
            jobTracker.destroy();
        }
        jobTrackers.clear();
    }

    @Override
    public DistributedObject createDistributedObject(String objectName) {
        return ConcurrencyUtil.getOrPutSynchronized(jobTrackers, objectName, jobTrackers, constructor);
    }

    @Override
    public void destroyDistributedObject(String objectName) {
        JobTracker jobTracker = jobTrackers.remove(objectName);
        if (jobTracker != null) {
            jobTracker.destroy();
        }
    }

    /**
     * Resolves the member owning the partition of the given key, retrying while
     * partition ownership is not yet assigned.
     */
    public Address getKeyMember(Object key) {
        int partitionId = partitionService.getPartitionId(key);
        Address owner;
        while ((owner = partitionService.getPartitionOwner(partitionId)) == null) {
            try {
                Thread.sleep(DEFAULT_RETRY_SLEEP_MILLIS);
            } catch (Exception ignore) {
                // Partitions might not assigned yet so we need to retry
                LOGGER.finest("Partitions not yet assigned, retry", ignore);
            }
        }
        return owner;
    }

    /** Returns true only if every assigned member is still part of the cluster. */
    public boolean checkAssignedMembersAvailable(Collection<Address> assignedMembers) {
        Collection<MemberImpl> members = clusterService.getMemberList();
        List<Address> addresses = new ArrayList<Address>(members.size());
        for (MemberImpl member : members) {
            addresses.add(member.getAddress());
        }
        for (Address address : assignedMembers) {
            if (!addresses.contains(address)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Synchronously invokes the operation on the given member, on the executor of
     * the job tracker with the given raw name.
     *
     * @param name the raw JobTracker name; the executor name is derived here
     */
    public <R> R processRequest(Address address, ProcessingOperation processingOperation, String name)
            throws ExecutionException, InterruptedException {
        String executorName = MapReduceUtil.buildExecutorName(name);
        InvocationBuilder invocation = nodeEngine.getOperationService()
                .createInvocationBuilder(SERVICE_NAME, processingOperation, address);
        Future<R> future = invocation.setExecutorName(executorName).invoke();
        return future.get();
    }

    /** Delivers a notification to the given member, rethrowing failures unchecked. */
    public void sendNotification(Address address, MapReduceNotification notification) {
        try {
            // Pass the raw tracker name: processRequest builds the executor name
            // itself (previously buildExecutorName was applied twice here, which
            // produced a doubly-prefixed, non-existent executor name).
            ProcessingOperation operation = new FireNotificationOperation(notification);
            processRequest(address, operation, notification.getName());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    public final Address getLocalAddress() {
        return nodeEngine.getThisAddress();
    }

    public NodeEngine getNodeEngine() {
        return nodeEngine;
    }

    /** Routes an incoming notification to its supervisor; fails fast if none exists. */
    public void dispatchEvent(MapReduceNotification notification) {
        String name = notification.getName();
        String jobId = notification.getJobId();
        JobSupervisor supervisor = getJobSupervisor(name, jobId);
        if (supervisor == null) {
            throw new NullPointerException("JobSupervisor name=" + name + ", jobId=" + jobId + " not found");
        }
        supervisor.onNotification(notification);
    }

    /**
     * This key type is used for assigning {@link com.hazelcast.mapreduce.impl.task.JobSupervisor}s to their
     * corresponding job trackers by JobTracker name and the unique jobId.
     */
    private static final class JobSupervisorKey {
        private final String name;
        private final String jobId;

        private JobSupervisorKey(String name, String jobId) {
            this.name = name;
            this.jobId = jobId;
        }

        private JobSupervisorKey(JobSupervisor supervisor) {
            this.name = supervisor.getConfiguration().getName();
            this.jobId = supervisor.getConfiguration().getJobId();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            JobSupervisorKey that = (JobSupervisorKey) o;
            if (!jobId.equals(that.jobId)) {
                return false;
            }
            return name.equals(that.name);
        }

        @Override
        public int hashCode() {
            int result = name != null ? name.hashCode() : 0;
            result = 31 * result + (jobId != null ? jobId.hashCode() : 0);
            return result;
        }
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_MapReduceService.java
|
202 |
/**
 * Marks a field or getter whose value is produced ("hydrated") by a factory method
 * rather than persisted directly.
 * <p>
 * NOTE(review): semantics inferred from the annotation and attribute names;
 * confirm against the framework code that consumes this annotation.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.FIELD,ElementType.METHOD})
public @interface Hydrated {
    /** Name of the method invoked to produce the hydrated value. */
    String factoryMethod();
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_cache_Hydrated.java
|
1,140 |
/**
 * SQL method {@code asSet}: coerces its receiver into a {@link Set}.
 * Sets pass through unchanged, null becomes an empty set, collections and
 * iterables/iterators are copied element by element, and any other value is
 * wrapped in a singleton set.
 */
public class OSQLMethodAsSet extends OAbstractSQLMethod {

    public static final String NAME = "asSet";

    public OSQLMethodAsSet() {
        super(NAME);
    }

    @SuppressWarnings("unchecked")
    @Override
    public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
        if (ioResult instanceof Set) {
            // Already the requested type; nothing to convert.
            return ioResult;
        }
        if (ioResult == null) {
            // Null maps to an empty set.
            return new HashSet<Object>();
        }
        if (ioResult instanceof Collection<?>) {
            return new HashSet<Object>((Collection<Object>) ioResult);
        }
        if (ioResult instanceof Iterable<?>) {
            // Reduce the iterable case to the iterator case below.
            ioResult = ((Iterable<?>) ioResult).iterator();
        }
        if (ioResult instanceof Iterator<?>) {
            // Pre-size the set when the iterator advertises its size.
            final Set<Object> collected = ioResult instanceof OSizeable
                    ? new HashSet<Object>(((OSizeable) ioResult).size())
                    : new HashSet<Object>();
            final Iterator<Object> source = (Iterator<Object>) ioResult;
            while (source.hasNext()) {
                collected.add(source.next());
            }
            return collected;
        }
        // Scalar value: wrap it as a single-element set.
        final Set<Object> single = new HashSet<Object>();
        single.add(ioResult);
        return single;
    }
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsSet.java
|
1,704 |
/**
 * Base class for HTTP server commands: disables response caching before
 * execution and provides URL-splitting/decoding helpers for subclasses.
 */
public abstract class OServerCommandAbstract implements OServerCommand {

    protected OServer server;

    /**
     * Default constructor. Disable cache of content at HTTP level
     */
    public OServerCommandAbstract() {
    }

    @Override
    public boolean beforeExecute(final OHttpRequest iRequest, OHttpResponse iResponse) throws IOException {
        // Always mark the response as non-cacheable before the command runs.
        setNoCache(iResponse);
        return true;
    }

    @Override
    public boolean afterExecute(final OHttpRequest iRequest, OHttpResponse iResponse) throws IOException {
        return true;
    }

    /**
     * Splits the URL into parts and verifies the minimum argument count,
     * throwing an {@link OHttpRequestException} carrying the expected syntax
     * when too few parts are present. Parts are URL-decoded before return.
     */
    protected String[] checkSyntax(final String iURL, final int iArgumentCount, final String iSyntax) {
        final List<String> parts = OStringSerializerHelper.smartSplit(iURL, OHttpResponse.URL_SEPARATOR, 1, -1, true, true, false);
        if (parts.size() < iArgumentCount)
            throw new OHttpRequestException(iSyntax);
        final String[] array = new String[parts.size()];
        return decodeParts(parts.toArray(array));
    }

    /**
     * urldecode each request part return the same array instance
     *
     * @param parts the raw URL parts; null entries are skipped
     * @return the same array, with each non-null entry URL-decoded in place
     */
    private String[] decodeParts(final String[] parts) {
        try {
            if (parts == null)
                return null;
            for (int i = 0; i < parts.length; i++) {
                String part = parts[i];
                if (part == null)
                    continue;
                // NEEDS DECODING
                part = java.net.URLDecoder.decode(part, "UTF-8");
                parts[i] = part;
            }
            return parts;
        } catch (Exception ex) {
            // Wrap decoding failures in the framework exception type.
            throw new OException(ex);
        }
    }

    public OServer getServer() {
        return server;
    }

    public void configure(final OServer server) {
        this.server = server;
    }

    protected void setNoCache(final OHttpResponse iResponse) {
        // DEFAULT = DON'T CACHE
        iResponse.setHeader("Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache");
    }
}
| 1no label
|
server_src_main_java_com_orientechnologies_orient_server_network_protocol_http_command_OServerCommandAbstract.java
|
114 |
/**
 * Singleton facade over java.util.logging: tracks per-severity enablement
 * flags (debug/info/warn/error) for cheap pre-checks, formats messages with
 * String.format, and can optionally raise an OException after logging.
 * Reflection failures while building such exceptions are deliberately
 * swallowed so logging never fails the caller.
 */
public class OLogManager {
    private boolean debug = true;
    private boolean info = true;
    private boolean warn = true;
    private boolean error = true;
    // Lowest level currently routed anywhere; lowered by setLevel().
    private Level minimumLevel = Level.SEVERE;

    private static final String DEFAULT_LOG = "com.orientechnologies";
    private static final OLogManager instance = new OLogManager();

    protected OLogManager() {
    }

    /** Sets the level of the console handler attached to the default logger. */
    public void setConsoleLevel(final String iLevel) {
        setLevel(iLevel, ConsoleHandler.class);
    }

    /** Sets the level of the file handler attached to the default logger. */
    public void setFileLevel(final String iLevel) {
        setLevel(iLevel, FileHandler.class);
    }

    /**
     * Core logging entry point: formats iMessage with iAdditionalArgs and logs
     * it at iLevel under the requester's class logger (or the default logger).
     * Falls back to stderr when no logger can be resolved.
     */
    public void log(final Object iRequester, final Level iLevel, String iMessage, final Throwable iException,
            final Object... iAdditionalArgs) {
        if (iMessage != null) {
            final Logger log = iRequester != null ? Logger.getLogger(iRequester.getClass().getName()) : Logger.getLogger(DEFAULT_LOG);
            if (log == null) {
                // USE SYSERR
                try {
                    System.err.println(String.format(iMessage, iAdditionalArgs));
                } catch (Exception e) {
                    OLogManager.instance().warn(this, "Error on formatting message", e);
                }
            } else if (log.isLoggable(iLevel)) {
                // USE THE LOG
                try {
                    final String msg = String.format(iMessage, iAdditionalArgs);
                    if (iException != null)
                        log.log(iLevel, msg, iException);
                    else
                        log.log(iLevel, msg);
                } catch (Exception e) {
                    // A malformed format string must not propagate to the caller.
                    OLogManager.instance().warn(this, "Error on formatting message", e);
                }
            }
        }
    }

    public void debug(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
        if (isDebugEnabled())
            log(iRequester, Level.FINE, iMessage, null, iAdditionalArgs);
    }

    public void debug(final Object iRequester, final String iMessage, final Throwable iException, final Object... iAdditionalArgs) {
        if (isDebugEnabled())
            log(iRequester, Level.FINE, iMessage, iException, iAdditionalArgs);
    }

    /**
     * Logs at debug level and then throws a new instance of iExceptionClass
     * (when it has a (String, Throwable) constructor; reflection failures are
     * silently ignored, in which case nothing is thrown).
     */
    public void debug(final Object iRequester, final String iMessage, final Throwable iException,
            final Class<? extends OException> iExceptionClass, final Object... iAdditionalArgs) {
        debug(iRequester, iMessage, iException, iAdditionalArgs);

        if (iExceptionClass != null)
            try {
                throw iExceptionClass.getConstructor(String.class, Throwable.class).newInstance(iMessage, iException);
            } catch (NoSuchMethodException e) {
            } catch (IllegalArgumentException e) {
            } catch (SecurityException e) {
            } catch (InstantiationException e) {
            } catch (IllegalAccessException e) {
            } catch (InvocationTargetException e) {
            }
    }

    public void info(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
        if (isInfoEnabled())
            log(iRequester, Level.INFO, iMessage, null, iAdditionalArgs);
    }

    public void info(final Object iRequester, final String iMessage, final Throwable iException, final Object... iAdditionalArgs) {
        if (isInfoEnabled())
            log(iRequester, Level.INFO, iMessage, iException, iAdditionalArgs);
    }

    public void warn(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
        if (isWarnEnabled())
            log(iRequester, Level.WARNING, iMessage, null, iAdditionalArgs);
    }

    public void warn(final Object iRequester, final String iMessage, final Throwable iException, final Object... iAdditionalArgs) {
        if (isWarnEnabled())
            log(iRequester, Level.WARNING, iMessage, iException, iAdditionalArgs);
    }

    public void config(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
        log(iRequester, Level.CONFIG, iMessage, null, iAdditionalArgs);
    }

    public void error(final Object iRequester, final String iMessage, final Object... iAdditionalArgs) {
        log(iRequester, Level.SEVERE, iMessage, null, iAdditionalArgs);
    }

    public void error(final Object iRequester, final String iMessage, final Throwable iException, final Object... iAdditionalArgs) {
        if (isErrorEnabled())
            log(iRequester, Level.SEVERE, iMessage, iException, iAdditionalArgs);
    }

    /**
     * Logs at error level and then throws a new iExceptionClass built from the
     * formatted message and cause; reflection failures are silently ignored.
     */
    public void error(final Object iRequester, final String iMessage, final Throwable iException,
            final Class<? extends OException> iExceptionClass, final Object... iAdditionalArgs) {
        error(iRequester, iMessage, iException, iAdditionalArgs);

        final String msg = String.format(iMessage, iAdditionalArgs);

        if (iExceptionClass != null)
            try {
                throw iExceptionClass.getConstructor(String.class, Throwable.class).newInstance(msg, iException);
            } catch (NoSuchMethodException e) {
            } catch (IllegalArgumentException e) {
            } catch (SecurityException e) {
            } catch (InstantiationException e) {
            } catch (IllegalAccessException e) {
            } catch (InvocationTargetException e) {
            }
    }

    /** Logs at error level and throws a new iExceptionClass built from the message. */
    public void error(final Object iRequester, final String iMessage, final Class<? extends OException> iExceptionClass) {
        error(iRequester, iMessage, (Throwable) null);

        try {
            throw iExceptionClass.getConstructor(String.class).newInstance(iMessage);
        } catch (IllegalArgumentException e) {
        } catch (SecurityException e) {
        } catch (InstantiationException e) {
        } catch (IllegalAccessException e) {
        } catch (InvocationTargetException e) {
        } catch (NoSuchMethodException e) {
        }
    }

    /**
     * Formats the message and throws an instance of iExceptionClass, preferring
     * the (String, Throwable) constructor when a nested exception is provided.
     * Falls back to IllegalArgumentException if no usable constructor exists.
     */
    @SuppressWarnings("unchecked")
    public void exception(final String iMessage, final Exception iNestedException, final Class<? extends OException> iExceptionClass,
            final Object... iAdditionalArgs) throws OException {
        if (iMessage == null)
            return;

        // FORMAT THE MESSAGE
        String msg = String.format(iMessage, iAdditionalArgs);

        Constructor<OException> c;
        OException exceptionToThrow = null;
        try {
            if (iNestedException != null) {
                c = (Constructor<OException>) iExceptionClass.getConstructor(String.class, Throwable.class);
                exceptionToThrow = c.newInstance(msg, iNestedException);
            }
        } catch (Exception e) {
        }

        if (exceptionToThrow == null)
            try {
                c = (Constructor<OException>) iExceptionClass.getConstructor(String.class);
                exceptionToThrow = c.newInstance(msg);
            } catch (SecurityException e1) {
            } catch (NoSuchMethodException e1) {
            } catch (IllegalArgumentException e1) {
            } catch (InstantiationException e1) {
            } catch (IllegalAccessException e1) {
            } catch (InvocationTargetException e1) {
            }

        if (exceptionToThrow != null)
            throw exceptionToThrow;
        else
            throw new IllegalArgumentException("Cannot create the exception of type: " + iExceptionClass);
    }

    public boolean isWarn() {
        return warn;
    }

    public void setWarnEnabled(boolean warn) {
        this.warn = warn;
    }

    public void setInfoEnabled(boolean info) {
        this.info = info;
    }

    public void setDebugEnabled(boolean debug) {
        this.debug = debug;
    }

    public void setErrorEnabled(boolean error) {
        this.error = error;
    }

    /** Maps a JUL level to the corresponding enablement flag. */
    public boolean isLevelEnabled(final Level level) {
        if (level.equals(Level.FINER) || level.equals(Level.FINE) || level.equals(Level.FINEST))
            return debug;
        else if (level.equals(Level.INFO))
            return info;
        else if (level.equals(Level.WARNING))
            return warn;
        else if (level.equals(Level.SEVERE))
            return error;
        return false;
    }

    public boolean isDebugEnabled() {
        return debug;
    }

    public boolean isInfoEnabled() {
        return info;
    }

    public boolean isWarnEnabled() {
        return warn;
    }

    public boolean isErrorEnabled() {
        return error;
    }

    public static OLogManager instance() {
        return instance;
    }

    /**
     * Parses iLevel (defaulting to INFO), lowers the tracked minimum level and
     * the enablement flags accordingly, and applies the level to the first
     * matching handler of the default logger.
     */
    public Level setLevel(final String iLevel, final Class<? extends Handler> iHandler) {
        final Level level = iLevel != null ? Level.parse(iLevel.toUpperCase(Locale.ENGLISH)) : Level.INFO;

        if (level.intValue() < minimumLevel.intValue()) {
            // UPDATE MINIMUM LEVEL
            minimumLevel = level;

            if (level.equals(Level.FINER) || level.equals(Level.FINE) || level.equals(Level.FINEST))
                debug = info = warn = error = true;
            else if (level.equals(Level.INFO)) {
                info = warn = error = true;
                debug = false;
            } else if (level.equals(Level.WARNING)) {
                warn = error = true;
                debug = info = false;
            } else if (level.equals(Level.SEVERE)) {
                error = true;
                debug = info = warn = false;
            }
        }

        Logger log = Logger.getLogger(DEFAULT_LOG);
        for (Handler h : log.getHandlers()) {
            // NOTE(review): h.getClass().isAssignableFrom(iHandler) matches when
            // iHandler is the same class or a subclass of the handler's class;
            // handlers that SUBCLASS iHandler are not matched. Confirm this
            // orientation is intended.
            if (h.getClass().isAssignableFrom(iHandler)) {
                h.setLevel(level);
                break;
            }
        }

        return level;
    }

    /** Installs OLogFormatter on the root logger's console handler(s). */
    public static void installCustomFormatter() {
        try {
            // ASSURE TO HAVE THE ORIENT LOG FORMATTER TO THE CONSOLE EVEN IF NO CONFIGURATION FILE IS TAKEN
            final Logger log = Logger.getLogger("");
            if (log.getHandlers().length == 0) {
                // SET DEFAULT LOG FORMATTER
                final Handler h = new ConsoleHandler();
                h.setFormatter(new OLogFormatter());
                log.addHandler(h);
            } else {
                for (Handler h : log.getHandlers()) {
                    if (h instanceof ConsoleHandler && !h.getFormatter().getClass().equals(OLogFormatter.class))
                        h.setFormatter(new OLogFormatter());
                }
            }
        } catch (Exception e) {
            System.err.println("Error while installing custom formatter. Logging could be disabled. Cause: " + e.toString());
        }
    }
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_log_OLogManager.java
|
25 |
/**
 * Processor for malformed/unrecognized text-protocol commands: it simply sends
 * the error command itself back to the client as the response.
 */
public class ErrorCommandProcessor extends AbstractTextCommandProcessor<ErrorCommand> {

    public ErrorCommandProcessor(TextCommandService textCommandService) {
        super(textCommandService);
    }

    public void handle(ErrorCommand command) {
        // Echo the error back to the originating connection.
        textCommandService.sendResponse(command);
    }

    public void handleRejection(ErrorCommand command) {
        // Rejection is treated exactly like normal handling.
        handle(command);
    }
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_memcache_ErrorCommandProcessor.java
|
89 |
/**
 * Listens for connection lifecycle events and cleans up client endpoints when
 * their TCP connection goes away, broadcasting the disconnect to the cluster
 * when this member owns the client.
 */
private final class ConnectionListenerImpl implements ConnectionListener {

    @Override
    public void connectionAdded(Connection conn) {
        //no-op
        //unfortunately we can't do the endpoint creation here, because this event is only called when the
        //connection is bound, but we need to use the endpoint connection before that.
    }

    @Override
    public void connectionRemoved(Connection connection) {
        // Only react to client TCP connections while the node is still active.
        if (connection.isClient() && connection instanceof TcpIpConnection && nodeEngine.isActive()) {
            ClientEndpoint endpoint = endpoints.get(connection);
            if (endpoint == null) {
                return;
            }

            // Only the member that owns this client performs the cleanup,
            // so the disconnect is processed exactly once cluster-wide.
            String localMemberUuid = node.getLocalMember().getUuid();
            String ownerUuid = endpoint.getPrincipal().getOwnerUuid();
            if (localMemberUuid.equals(ownerUuid)) {
                doRemoveEndpoint(connection, endpoint);
            }
        }
    }

    // Removes the endpoint and, for the client's first (owner) connection,
    // notifies every member so client-held resources can be released.
    private void doRemoveEndpoint(Connection connection, ClientEndpoint endpoint) {
        removeEndpoint(connection, true);

        if (!endpoint.isFirstConnection()) {
            return;
        }

        NodeEngine nodeEngine = node.nodeEngine;
        Collection<MemberImpl> memberList = nodeEngine.getClusterService().getMemberList();
        OperationService operationService = nodeEngine.getOperationService();
        for (MemberImpl member : memberList) {
            ClientDisconnectionOperation op = new ClientDisconnectionOperation(endpoint.getUuid());
            op.setNodeEngine(nodeEngine)
                    .setServiceName(SERVICE_NAME)
                    .setService(ClientEngineImpl.this)
                    .setResponseHandler(createEmptyResponseHandler());

            // Run locally for this member, send remotely for the rest.
            if (member.localMember()) {
                operationService.runOperationOnCallingThread(op);
            } else {
                operationService.send(op, member.getAddress());
            }
        }
    }
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_client_ClientEngineImpl.java
|
1,778 |
/**
 * Pairs an entity with its position for descending-order sorting.
 * Identity is defined by the owning FieldManager, the entity's class and the
 * position (the bean and containing property name are carried but not compared).
 */
private class SortableValue implements Comparable<SortableValue> {

    private Integer pos;
    private Serializable entity;
    private Class<?> entityClass;
    private String containingPropertyName;
    private Object bean;

    public SortableValue(Object bean, Serializable entity, Integer pos, String containingPropertyName) {
        this.bean = bean;
        this.entity = entity;
        this.pos = pos;
        this.entityClass = entity.getClass();
        this.containingPropertyName = containingPropertyName;
    }

    // Reversed natural order of the position (descending sort).
    public int compareTo(SortableValue other) {
        return -pos.compareTo(other.pos);
    }

    public String getContainingPropertyName() {
        return containingPropertyName;
    }

    private Object getBean() {
        return bean;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = prime + getOuterType().hashCode();
        result = prime * result + (entityClass == null ? 0 : entityClass.hashCode());
        result = prime * result + (pos == null ? 0 : pos.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        SortableValue that = (SortableValue) obj;
        if (!getOuterType().equals(that.getOuterType())) {
            return false;
        }
        if (entityClass == null ? that.entityClass != null : !entityClass.equals(that.entityClass)) {
            return false;
        }
        return pos == null ? that.pos == null : pos.equals(that.pos);
    }

    private FieldManager getOuterType() {
        return FieldManager.this;
    }
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_FieldManager.java
|
262 |
// After a one-second delay, shut down the member at index 1 of hzs
// (runs asynchronously on the scheduler thread).
ex.schedule(new Runnable() {
    @Override
    public void run() {
        hzs.get(1).shutdown();
    }
}, 1000, TimeUnit.MILLISECONDS);
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_executor_ExecutionDelayTest.java
|
734 |
public class DeleteByQueryResponse extends ActionResponse implements Iterable<IndexDeleteByQueryResponse> {
private Map<String, IndexDeleteByQueryResponse> indices = newHashMap();
DeleteByQueryResponse() {
}
@Override
public Iterator<IndexDeleteByQueryResponse> iterator() {
return indices.values().iterator();
}
/**
* The responses from all the different indices.
*/
public Map<String, IndexDeleteByQueryResponse> getIndices() {
return indices;
}
/**
* The response of a specific index.
*/
public IndexDeleteByQueryResponse getIndex(String index) {
return indices.get(index);
}
public RestStatus status() {
RestStatus status = RestStatus.OK;
for (IndexDeleteByQueryResponse indexResponse : indices.values()) {
if (indexResponse.getFailedShards() > 0) {
RestStatus indexStatus = indexResponse.getFailures()[0].status();
if (indexResponse.getFailures().length > 1) {
for (int i = 1; i < indexResponse.getFailures().length; i++) {
if (indexResponse.getFailures()[i].status().getStatus() >= 500) {
indexStatus = indexResponse.getFailures()[i].status();
}
}
}
if (status.getStatus() < indexStatus.getStatus()) {
status = indexStatus;
}
}
}
return status;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
for (int i = 0; i < size; i++) {
IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse();
response.readFrom(in);
indices.put(response.getIndex(), response);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(indices.size());
for (IndexDeleteByQueryResponse indexResponse : indices.values()) {
indexResponse.writeTo(out);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_deletebyquery_DeleteByQueryResponse.java
|
10 |
private static class StreamLogger extends Thread {
private final BufferedReader reader;
private static final Logger log =
LoggerFactory.getLogger(StreamLogger.class);
private StreamLogger(InputStream is) {
this.reader = new BufferedReader(new InputStreamReader(is));
}
@Override
public void run() {
String line;
try {
while (null != (line = reader.readLine())) {
log.info("> {}", line);
if (Thread.currentThread().isInterrupted()) {
break;
}
}
log.info("End of stream.");
} catch (IOException e) {
log.error("Unexpected IOException while reading stream {}", reader, e);
}
}
}
| 0true
|
titan-hbase-parent_titan-hbase-core_src_test_java_com_thinkaurelius_titan_HBaseStorageSetup.java
|
1,019 |
public class SearchResultsLabelProvider extends CeylonLabelProvider {
@Override
public Image getImage(Object element) {
if (element instanceof WithSourceFolder) {
element = ((WithSourceFolder) element).element;
}
String key;
int decorations;
if (element instanceof ArchiveMatches) {
key = RUNTIME_OBJ;
decorations = 0;
}
else if (element instanceof CeylonElement) {
key = ((CeylonElement) element).getImageKey();
decorations = ((CeylonElement) element).getDecorations();
}
else if (element instanceof IType ||
element instanceof IField ||
element instanceof IMethod) {
key = getImageKeyForDeclaration((IJavaElement) element);
decorations = 0;
}
else {
key = super.getImageKey(element);
decorations = super.getDecorationAttributes(element);
}
return getDecoratedImage(key, decorations, false);
}
@Override
public StyledString getStyledText(Object element) {
if (element instanceof WithSourceFolder) {
element = ((WithSourceFolder) element).element;
}
if (element instanceof ArchiveMatches) {
return new StyledString("Source Archive Matches");
}
else if (element instanceof CeylonElement) {
return getStyledLabelForSearchResult((CeylonElement) element);
}
else if (element instanceof IType ||
element instanceof IField||
element instanceof IMethod) {
return getStyledLabelForSearchResult((IJavaElement) element);
}
else {
return super.getStyledText(element);
}
}
private StyledString getStyledLabelForSearchResult(CeylonElement ce) {
StyledString styledString = new StyledString();
IFile file = ce.getFile();
String path = file==null ?
ce.getVirtualFile().getPath() :
file.getFullPath().toString();
styledString.append(ce.getLabel())
.append(" - " + ce.getPackageLabel(), PACKAGE_STYLER)
.append(" - " + path, COUNTER_STYLER);
return styledString;
}
private StyledString getStyledLabelForSearchResult(IJavaElement je) {
StyledString styledString = new StyledString();
String name = je.getElementName();
if (je instanceof IMethod) {
try {
String returnType = ((IMethod) je).getReturnType();
if (returnType.equals("V")) {
styledString.append("void", Highlights.KW_STYLER);
}
else {
styledString.append("method", KW_STYLER);
/*styleJavaType(styledString,
getSignatureSimpleName(returnType));*/
}
}
catch (Exception e) {
e.printStackTrace();
}
styledString.append(' ').append(name, ID_STYLER);
try {
styledString.append('(');
String[] parameterTypes = ((IMethod) je).getParameterTypes();
String[] parameterNames = ((IMethod) je).getParameterNames();
boolean first = true;
for (int i=0; i<parameterTypes.length && i<parameterNames.length; i++) {
if (first) {
first = false;
}
else {
styledString.append(", ");
}
styleJavaType(styledString,
getSignatureSimpleName(parameterTypes[i]));
styledString.append(' ')
.append(parameterNames[i], ID_STYLER);
}
styledString.append(')');
}
catch (Exception e) {
e.printStackTrace();
}
if (EditorsUI.getPreferenceStore().getBoolean(DISPLAY_RETURN_TYPES)) {
try {
String returnType = ((IMethod) je).getReturnType();
styledString.append(" ∊ ");
styleJavaType(styledString,
getSignatureSimpleName(returnType),
ARROW_STYLER);
}
catch (Exception e) {
e.printStackTrace();
}
}
}
else if (je instanceof IField) {
styledString.append("field", KW_STYLER);
/*try {
String type = ((IField) je).getTypeSignature();
styleJavaType(styledString,
getSignatureSimpleName(type));
}
catch (Exception e) {
e.printStackTrace();
}*/
styledString.append(' ').append(name, ID_STYLER);
if (EditorsUI.getPreferenceStore().getBoolean(DISPLAY_RETURN_TYPES)) {
try {
String type = ((IField) je).getTypeSignature();
styledString.append(" ∊ ");
styleJavaType(styledString,
getSignatureSimpleName(type),
ARROW_STYLER);
}
catch (Exception e) {
e.printStackTrace();
}
}
}
else if (je instanceof IType) {
IType type = (IType) je;
try {
if (type.isAnnotation()) {
styledString.append('@').append("interface ", KW_STYLER);
}
else if (type.isInterface()) {
styledString.append("interface ", KW_STYLER);
}
else if (type.isClass()) {
styledString.append("class ", KW_STYLER);
}
else if (type.isEnum()) {
styledString.append("enum ", KW_STYLER);
}
}
catch (Exception e) {
e.printStackTrace();
}
styledString.append(name, TYPE_ID_STYLER);
}
IJavaElement pkg = ((IJavaElement) je.getOpenable()).getParent();
styledString.append(" - ", PACKAGE_STYLER)
.append(pkg.getElementName(), PACKAGE_STYLER);
IFile file = (IFile) je.getResource();
if (file!=null) {
styledString.append(" - " + file.getFullPath().toString(), COUNTER_STYLER);
}
return styledString;
}
private static String getImageKeyForDeclaration(IJavaElement e) {
if (e==null) return null;
boolean shared = false;
if (e instanceof IMember) {
try {
shared = Flags.isPublic(((IMember) e).getFlags());
}
catch (JavaModelException jme) {
jme.printStackTrace();
}
}
switch(e.getElementType()) {
case IJavaElement.METHOD:
if (shared) {
return CEYLON_METHOD;
}
else {
return CEYLON_LOCAL_METHOD;
}
case IJavaElement.FIELD:
if (shared) {
return CEYLON_ATTRIBUTE;
}
else {
return CEYLON_LOCAL_ATTRIBUTE;
}
case IJavaElement.TYPE:
if (shared) {
return CEYLON_CLASS;
}
else {
return CEYLON_LOCAL_CLASS;
}
default:
return null;
}
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_search_SearchResultsLabelProvider.java
|
3,421 |
public class ProxyServiceImpl
implements ProxyService, PostJoinAwareService, EventPublishingService<DistributedObjectEventPacket, Object> {
static final String SERVICE_NAME = "hz:core:proxyService";
private final NodeEngineImpl nodeEngine;
private final ConcurrentMap<String, ProxyRegistry> registries = new ConcurrentHashMap<String, ProxyRegistry>();
private final ConcurrentMap<String, DistributedObjectListener> listeners = new ConcurrentHashMap<String, DistributedObjectListener>();
private final ILogger logger;
ProxyServiceImpl(NodeEngineImpl nodeEngine) {
this.nodeEngine = nodeEngine;
this.logger = nodeEngine.getLogger(ProxyService.class.getName());
}
void init() {
nodeEngine.getEventService().registerListener(SERVICE_NAME, SERVICE_NAME, new Object());
}
private final ConstructorFunction<String, ProxyRegistry> registryConstructor = new ConstructorFunction<String, ProxyRegistry>() {
public ProxyRegistry createNew(String serviceName) {
return new ProxyRegistry(serviceName);
}
};
@Override
public int getProxyCount() {
int count = 0;
for (ProxyRegistry registry : registries.values()) {
count += registry.getProxyCount();
}
return count;
}
@Override
public void initializeDistributedObject(String serviceName, String name) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
if (name == null) {
throw new NullPointerException("Object name is required!");
}
ProxyRegistry registry = getOrPutIfAbsent(registries, serviceName, registryConstructor);
registry.createProxy(name, true, true);
}
@Override
public DistributedObject getDistributedObject(String serviceName, String name) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
if (name == null) {
throw new NullPointerException("Object name is required!");
}
ProxyRegistry registry = getOrPutIfAbsent(registries, serviceName, registryConstructor);
return registry.getOrCreateProxy(name, true, true);
}
@Override
public void destroyDistributedObject(String serviceName, String name) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
if (name == null) {
throw new NullPointerException("Object name is required!");
}
Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList();
Collection<Future> calls = new ArrayList<Future>(members.size());
for (MemberImpl member : members) {
if (member.localMember()) {
continue;
}
Future f = nodeEngine.getOperationService()
.createInvocationBuilder(SERVICE_NAME, new DistributedObjectDestroyOperation(serviceName, name),
member.getAddress()).setTryCount(10).invoke();
calls.add(f);
}
destroyLocalDistributedObject(serviceName, name, true);
for (Future f : calls) {
try {
f.get(3, TimeUnit.SECONDS);
} catch (Exception e) {
logger.finest(e);
}
}
}
private void destroyLocalDistributedObject(String serviceName, String name, boolean fireEvent) {
ProxyRegistry registry = registries.get(serviceName);
if (registry != null) {
registry.destroyProxy(name, fireEvent);
}
final RemoteService service = nodeEngine.getService(serviceName);
if (service != null) {
service.destroyDistributedObject(name);
}
Throwable cause = new DistributedObjectDestroyedException(serviceName, name);
nodeEngine.waitNotifyService.cancelWaitingOps(serviceName, name, cause);
}
@Override
public Collection<DistributedObject> getDistributedObjects(String serviceName) {
if (serviceName == null) {
throw new NullPointerException("Service name is required!");
}
Collection<DistributedObject> objects = new LinkedList<DistributedObject>();
ProxyRegistry registry = registries.get(serviceName);
if (registry != null) {
Collection<DistributedObjectFuture> futures = registry.proxies.values();
for (DistributedObjectFuture future : futures) {
objects.add(future.get());
}
}
return objects;
}
@Override
public Collection<DistributedObject> getAllDistributedObjects() {
Collection<DistributedObject> objects = new LinkedList<DistributedObject>();
for (ProxyRegistry registry : registries.values()) {
Collection<DistributedObjectFuture> futures = registry.proxies.values();
for (DistributedObjectFuture future : futures) {
objects.add(future.get());
}
}
return objects;
}
@Override
public String addProxyListener(DistributedObjectListener distributedObjectListener) {
final String id = UuidUtil.buildRandomUuidString();
listeners.put(id, distributedObjectListener);
return id;
}
@Override
public boolean removeProxyListener(String registrationId) {
return listeners.remove(registrationId) != null;
}
@Override
public void dispatchEvent(final DistributedObjectEventPacket eventPacket, Object ignore) {
final String serviceName = eventPacket.getServiceName();
if (eventPacket.getEventType() == CREATED) {
try {
final ProxyRegistry registry = getOrPutIfAbsent(registries, serviceName, registryConstructor);
if (!registry.contains(eventPacket.getName())) {
registry.createProxy(eventPacket.getName(), false,
true); // listeners will be called if proxy is created here.
}
} catch (HazelcastInstanceNotActiveException ignored) {
}
} else {
final ProxyRegistry registry = registries.get(serviceName);
if (registry != null) {
registry.destroyProxy(eventPacket.getName(), false);
}
}
}
@Override
public Operation getPostJoinOperation() {
Collection<ProxyInfo> proxies = new LinkedList<ProxyInfo>();
for (ProxyRegistry registry : registries.values()) {
for (DistributedObjectFuture future : registry.proxies.values()) {
DistributedObject distributedObject = future.get();
if (distributedObject instanceof InitializingObject) {
proxies.add(new ProxyInfo(registry.serviceName, distributedObject.getName()));
}
}
}
return proxies.isEmpty() ? null : new PostJoinProxyOperation(proxies);
}
private class ProxyRegistry {
final String serviceName;
final RemoteService service;
final ConcurrentMap<String, DistributedObjectFuture> proxies = new ConcurrentHashMap<String, DistributedObjectFuture>();
private ProxyRegistry(String serviceName) {
this.serviceName = serviceName;
this.service = nodeEngine.getService(serviceName);
if (service == null) {
if (nodeEngine.isActive()) {
throw new IllegalArgumentException("Unknown service: " + serviceName);
} else {
throw new HazelcastInstanceNotActiveException();
}
}
}
/**
* Retrieves a DistributedObject proxy or creates it if it's not available
*
* @param name name of the proxy object
* @param publishEvent true if a DistributedObjectEvent should be fired
* @param initialize true if proxy object should be initialized
* @return a DistributedObject instance
*/
DistributedObject getOrCreateProxy(final String name, boolean publishEvent, boolean initialize) {
DistributedObjectFuture proxyFuture = proxies.get(name);
if (proxyFuture == null) {
if (!nodeEngine.isActive()) {
throw new HazelcastInstanceNotActiveException();
}
proxyFuture = createProxy(name, publishEvent, initialize);
if (proxyFuture == null) {
// warning; recursive call! I (@mdogan) do not think this will ever cause a stack overflow..
return getOrCreateProxy(name, publishEvent, initialize);
}
}
return proxyFuture.get();
}
/**
* Creates a DistributedObject proxy if it's not created yet
*
* @param name name of the proxy object
* @param publishEvent true if a DistributedObjectEvent should be fired
* @param initialize true if proxy object should be initialized
* @return a DistributedObject instance if it's created by this method, null otherwise
*/
DistributedObjectFuture createProxy(final String name, boolean publishEvent, boolean initialize) {
if (!proxies.containsKey(name)) {
if (!nodeEngine.isActive()) {
throw new HazelcastInstanceNotActiveException();
}
DistributedObjectFuture proxyFuture = new DistributedObjectFuture();
if (proxies.putIfAbsent(name, proxyFuture) == null) {
DistributedObject proxy = service.createDistributedObject(name);
if (initialize && proxy instanceof InitializingObject) {
try {
((InitializingObject) proxy).initialize();
} catch (Exception e) {
logger.warning("Error while initializing proxy: " + proxy, e);
}
}
nodeEngine.eventService.executeEvent(new ProxyEventProcessor(CREATED, serviceName, proxy));
if (publishEvent) {
publish(new DistributedObjectEventPacket(CREATED, serviceName, name));
}
proxyFuture.set(proxy);
return proxyFuture;
}
}
return null;
}
void destroyProxy(String name, boolean publishEvent) {
final DistributedObjectFuture proxyFuture = proxies.remove(name);
if (proxyFuture != null) {
DistributedObject proxy = proxyFuture.get();
nodeEngine.eventService.executeEvent(new ProxyEventProcessor(DESTROYED, serviceName, proxy));
if (publishEvent) {
publish(new DistributedObjectEventPacket(DESTROYED, serviceName, name));
}
}
}
private void publish(DistributedObjectEventPacket event) {
final EventService eventService = nodeEngine.getEventService();
final Collection<EventRegistration> registrations = eventService.getRegistrations(SERVICE_NAME, SERVICE_NAME);
eventService.publishEvent(SERVICE_NAME, registrations, event, event.getName().hashCode());
}
private boolean contains(String name) {
return proxies.containsKey(name);
}
void destroy() {
for (DistributedObjectFuture future : proxies.values()) {
DistributedObject distributedObject = future.get();
if (distributedObject instanceof AbstractDistributedObject) {
((AbstractDistributedObject) distributedObject).invalidate();
}
}
proxies.clear();
}
public int getProxyCount() {
return proxies.size();
}
}
private static class DistributedObjectFuture {
volatile DistributedObject proxy;
DistributedObject get() {
if (proxy == null) {
boolean interrupted = false;
synchronized (this) {
while (proxy == null) {
try {
wait();
} catch (InterruptedException e) {
interrupted = true;
}
}
}
if (interrupted) {
Thread.currentThread().interrupt();
}
}
return proxy;
}
void set(DistributedObject o) {
if (o == null) {
throw new IllegalArgumentException();
}
synchronized (this) {
proxy = o;
notifyAll();
}
}
}
private class ProxyEventProcessor
implements StripedRunnable {
final EventType type;
final String serviceName;
final DistributedObject object;
private ProxyEventProcessor(EventType eventType, String serviceName, DistributedObject object) {
this.type = eventType;
this.serviceName = serviceName;
this.object = object;
}
@Override
public void run() {
DistributedObjectEvent event = new DistributedObjectEvent(type, serviceName, object);
for (DistributedObjectListener listener : listeners.values()) {
if (EventType.CREATED.equals(type)) {
listener.distributedObjectCreated(event);
} else if (EventType.DESTROYED.equals(type)) {
listener.distributedObjectDestroyed(event);
}
}
}
@Override
public int getKey() {
return object.getId().hashCode();
}
}
public static class DistributedObjectDestroyOperation
extends AbstractOperation {
private String serviceName;
private String name;
public DistributedObjectDestroyOperation() {
}
public DistributedObjectDestroyOperation(String serviceName, String name) {
this.serviceName = serviceName;
this.name = name;
}
@Override
public void run()
throws Exception {
ProxyServiceImpl proxyService = getService();
proxyService.destroyLocalDistributedObject(serviceName, name, false);
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
public Object getResponse() {
return Boolean.TRUE;
}
@Override
protected void writeInternal(ObjectDataOutput out)
throws IOException {
super.writeInternal(out);
out.writeUTF(serviceName);
out.writeObject(name); // writing as object for backward-compatibility
}
@Override
protected void readInternal(ObjectDataInput in)
throws IOException {
super.readInternal(in);
serviceName = in.readUTF();
name = in.readObject();
}
}
public static class PostJoinProxyOperation
extends AbstractOperation {
private Collection<ProxyInfo> proxies;
public PostJoinProxyOperation() {
}
public PostJoinProxyOperation(Collection<ProxyInfo> proxies) {
this.proxies = proxies;
}
@Override
public void run()
throws Exception {
if (proxies != null && proxies.size() > 0) {
NodeEngine nodeEngine = getNodeEngine();
ProxyServiceImpl proxyService = getService();
for (ProxyInfo proxy : proxies) {
final ProxyRegistry registry = getOrPutIfAbsent(proxyService.registries, proxy.serviceName,
proxyService.registryConstructor);
DistributedObjectFuture future = registry.createProxy(proxy.objectName, false, false);
if (future != null) {
final DistributedObject object = future.get();
if (object instanceof InitializingObject) {
nodeEngine.getExecutionService().execute(ExecutionService.SYSTEM_EXECUTOR, new Runnable() {
public void run() {
try {
((InitializingObject) object).initialize();
} catch (Exception e) {
getLogger().warning("Error while initializing proxy: " + object, e);
}
}
});
}
}
}
}
}
@Override
public String getServiceName() {
return ProxyServiceImpl.SERVICE_NAME;
}
@Override
public boolean returnsResponse() {
return false;
}
@Override
protected void writeInternal(ObjectDataOutput out)
throws IOException {
super.writeInternal(out);
int len = proxies != null ? proxies.size() : 0;
out.writeInt(len);
if (len > 0) {
for (ProxyInfo proxy : proxies) {
out.writeUTF(proxy.serviceName);
out.writeObject(proxy.objectName); // writing as object for backward-compatibility
}
}
}
@Override
protected void readInternal(ObjectDataInput in)
throws IOException {
super.readInternal(in);
int len = in.readInt();
if (len > 0) {
proxies = new ArrayList<ProxyInfo>(len);
for (int i = 0; i < len; i++) {
ProxyInfo proxy = new ProxyInfo(in.readUTF(), (String) in.readObject());
proxies.add(proxy);
}
}
}
}
private static class ProxyInfo {
final String serviceName;
final String objectName;
private ProxyInfo(String serviceName, String objectName) {
this.serviceName = serviceName;
this.objectName = objectName;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ProxyInfo{");
sb.append("serviceName='").append(serviceName).append('\'');
sb.append(", objectName='").append(objectName).append('\'');
sb.append('}');
return sb.toString();
}
}
void shutdown() {
for (ProxyRegistry registry : registries.values()) {
registry.destroy();
}
registries.clear();
listeners.clear();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_spi_impl_ProxyServiceImpl.java
|
5,406 |
public static class SortedAndUnique extends Bytes implements ReaderContextAware {
private final FieldDataSource delegate;
private final MetaData metaData;
private BytesValues bytesValues;
public SortedAndUnique(FieldDataSource delegate) {
this.delegate = delegate;
this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
}
@Override
public MetaData metaData() {
return metaData;
}
@Override
public void setNextReader(AtomicReaderContext reader) {
bytesValues = null; // order may change per-segment -> reset
}
@Override
public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
if (bytesValues == null) {
bytesValues = delegate.bytesValues();
if (bytesValues.isMultiValued() &&
(!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
bytesValues = new SortedUniqueBytesValues(bytesValues);
}
}
return bytesValues;
}
static class SortedUniqueBytesValues extends FilterBytesValues {
final BytesRef spare;
int[] sortedIds;
final BytesRefHash bytes;
int numUniqueValues;
int pos = Integer.MAX_VALUE;
public SortedUniqueBytesValues(BytesValues delegate) {
super(delegate);
bytes = new BytesRefHash();
spare = new BytesRef();
}
@Override
public int setDocument(int docId) {
final int numValues = super.setDocument(docId);
if (numValues == 0) {
sortedIds = null;
return 0;
}
bytes.clear();
bytes.reinit();
for (int i = 0; i < numValues; ++i) {
bytes.add(super.nextValue(), super.currentValueHash());
}
numUniqueValues = bytes.size();
sortedIds = bytes.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
pos = 0;
return numUniqueValues;
}
@Override
public BytesRef nextValue() {
bytes.get(sortedIds[pos++], spare);
return spare;
}
@Override
public int currentValueHash() {
return spare.hashCode();
}
@Override
public Order getOrder() {
return Order.BYTES;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_aggregations_support_FieldDataSource.java
|
110 |
private class TxHook implements javax.transaction.Synchronization
{
boolean gotBefore = false;
boolean gotAfter = false;
int statusBefore = -1;
int statusAfter = -1;
Transaction txBefore = null;
Transaction txAfter = null;
public void beforeCompletion()
{
try
{
statusBefore = tm.getStatus();
txBefore = tm.getTransaction();
gotBefore = true;
}
catch ( Exception e )
{
throw new RuntimeException( "" + e );
}
}
public void afterCompletion( int status )
{
try
{
statusAfter = status;
txAfter = tm.getTransaction();
assertTrue( status == tm.getStatus() );
gotAfter = true;
}
catch ( Exception e )
{
throw new RuntimeException( "" + e );
}
}
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestJtaCompliance.java
|
113 |
{
@Override
public Object doWork( Void state )
{
try
{
tm.begin();
tm.getTransaction().registerSynchronization( hook );
return null;
}
catch ( Exception e )
{
throw new RuntimeException( e );
}
}
};
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestJtaCompliance.java
|
201 |
public abstract class ClientAbstractIOSelector extends Thread implements IOSelector {
private static final int TIMEOUT = 3;
protected final ILogger logger;
protected final Queue<Runnable> selectorQueue = new ConcurrentLinkedQueue<Runnable>();
protected final int waitTime;
protected final Selector selector;
protected boolean live = true;
private final CountDownLatch shutdownLatch = new CountDownLatch(1);
protected ClientAbstractIOSelector(ThreadGroup threadGroup, String threadName) {
super(threadGroup, threadName);
this.logger = Logger.getLogger(getClass().getName());
this.waitTime = 5000;
Selector selectorTemp = null;
try {
selectorTemp = Selector.open();
} catch (final IOException e) {
handleSelectorException(e);
}
this.selector = selectorTemp;
}
public Selector getSelector() {
return selector;
}
public void addTask(Runnable runnable) {
selectorQueue.add(runnable);
}
public void wakeup() {
selector.wakeup();
}
public void shutdown() {
selectorQueue.clear();
try {
addTask(new Runnable() {
public void run() {
live = false;
shutdownLatch.countDown();
}
});
interrupt();
} catch (Throwable ignored) {
}
}
public void awaitShutdown() {
try {
shutdownLatch.await(TIMEOUT, TimeUnit.SECONDS);
} catch (InterruptedException ignored) {
}
}
private void processSelectionQueue() {
while (live) {
final Runnable runnable = selectorQueue.poll();
if (runnable == null) {
return;
}
runnable.run();
}
}
public final void run() {
try {
//noinspection WhileLoopSpinsOnField
while (live) {
processSelectionQueue();
if (!live || isInterrupted()) {
if (logger.isFinestEnabled()) {
logger.finest(getName() + " is interrupted!");
}
live = false;
return;
}
int selectedKeyCount;
try {
selectedKeyCount = selector.select(waitTime);
} catch (Throwable e) {
logger.warning(e.toString());
continue;
}
if (selectedKeyCount == 0) {
continue;
}
final Set<SelectionKey> setSelectedKeys = selector.selectedKeys();
final Iterator<SelectionKey> it = setSelectedKeys.iterator();
while (it.hasNext()) {
final SelectionKey sk = it.next();
try {
it.remove();
handleSelectionKey(sk);
} catch (Throwable e) {
handleSelectorException(e);
}
}
}
} catch (Throwable e) {
logger.warning("Unhandled exception in " + getName(), e);
} finally {
try {
if (logger.isFinestEnabled()) {
logger.finest("Closing selector " + getName());
}
selector.close();
} catch (final Exception ignored) {
}
}
}
protected abstract void handleSelectionKey(SelectionKey sk);
private void handleSelectorException(final Throwable e) {
String msg = "Selector exception at " + getName() + ", cause= " + e.toString();
logger.warning(msg, e);
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientAbstractIOSelector.java
|
329 |
public class AttributePreserveInsert extends BaseHandler {
public Node[] merge(List<Node> nodeList1, List<Node> nodeList2, List<Node> exhaustedNodes) {
if (CollectionUtils.isEmpty(nodeList1) || CollectionUtils.isEmpty(nodeList2)) {
return null;
}
Node node1 = nodeList1.get(0);
Node node2 = nodeList2.get(0);
NamedNodeMap attributes2 = node2.getAttributes();
Comparator<Object> nameCompare = new Comparator<Object>() {
public int compare(Object arg0, Object arg1) {
return ((Node) arg0).getNodeName().compareTo(((Node) arg1).getNodeName());
}
};
Node[] tempNodes = {};
tempNodes = exhaustedNodes.toArray(tempNodes);
Arrays.sort(tempNodes, nameCompare);
int length = attributes2.getLength();
for (int j = 0; j < length; j++) {
Node temp = attributes2.item(j);
int pos = Arrays.binarySearch(tempNodes, temp, nameCompare);
if (pos < 0) {
((Element) node1).setAttributeNode((Attr) node1.getOwnerDocument().importNode(temp.cloneNode(true), true));
}
}
return null;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_handlers_AttributePreserveInsert.java
|
351 |
private class StressThread extends Thread {
private final AtomicInteger counter;
private final AtomicInteger errors;
public StressThread(AtomicInteger counter, AtomicInteger errors) {
this.counter = counter;
this.errors = errors;
}
public void run() {
try {
for(;;){
int index = counter.decrementAndGet();
if(index<=0){
return;
}
IMap<Object, Object> map = client.getMap("juka" + index);
map.set("aaaa", "bbbb");
map.clear();
map.destroy();
if(index % 1000 == 0){
System.out.println("At: "+index);
}
}
} catch (Throwable t) {
errors.incrementAndGet();
t.printStackTrace();
}
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_MapMemoryUsageStressTest.java
|
373 |
public class PutRepositoryAction extends ClusterAction<PutRepositoryRequest, PutRepositoryResponse, PutRepositoryRequestBuilder> {
public static final PutRepositoryAction INSTANCE = new PutRepositoryAction();
public static final String NAME = "cluster/repository/put";
private PutRepositoryAction() {
super(NAME);
}
@Override
public PutRepositoryResponse newResponse() {
return new PutRepositoryResponse();
}
@Override
public PutRepositoryRequestBuilder newRequestBuilder(ClusterAdminClient client) {
return new PutRepositoryRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_repositories_put_PutRepositoryAction.java
|
170 |
es.submit(new Runnable() {
public void run() {
IMap<String, byte[]> map = client.getMap("default");
while (true) {
int key = (int) (Math.random() * ENTRY_COUNT);
int operation = ((int) (Math.random() * 100));
if (operation < GET_PERCENTAGE) {
map.get(String.valueOf(key));
stats.gets.incrementAndGet();
} else if (operation < GET_PERCENTAGE + PUT_PERCENTAGE) {
map.put(String.valueOf(key), new byte[VALUE_SIZE]);
stats.puts.incrementAndGet();
} else {
map.remove(String.valueOf(key));
stats.removes.incrementAndGet();
}
}
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_SimpleMapTestFromClient.java
|
429 |
public class ClusterStateResponse extends ActionResponse {
private ClusterName clusterName;
private ClusterState clusterState;
public ClusterStateResponse() {
}
ClusterStateResponse(ClusterName clusterName, ClusterState clusterState) {
this.clusterName = clusterName;
this.clusterState = clusterState;
}
public ClusterState getState() {
return this.clusterState;
}
public ClusterName getClusterName() {
return this.clusterName;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = ClusterName.readClusterName(in);
clusterState = ClusterState.Builder.readFrom(in, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
clusterName.writeTo(out);
ClusterState.Builder.writeTo(clusterState, out);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_state_ClusterStateResponse.java
|
345 |
public class NodesShutdownResponse extends ActionResponse {
private ClusterName clusterName;
private DiscoveryNode[] nodes;
NodesShutdownResponse() {
}
public NodesShutdownResponse(ClusterName clusterName, DiscoveryNode[] nodes) {
this.clusterName = clusterName;
this.nodes = nodes;
}
public ClusterName getClusterName() {
return this.clusterName;
}
public DiscoveryNode[] getNodes() {
return this.nodes;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = ClusterName.readClusterName(in);
nodes = new DiscoveryNode[in.readVInt()];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = DiscoveryNode.readNode(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
clusterName.writeTo(out);
out.writeVInt(nodes.length);
for (DiscoveryNode node : nodes) {
node.writeTo(out);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_shutdown_NodesShutdownResponse.java
|
2,665 |
final class DataSerializer implements StreamSerializer<DataSerializable> {
private static final String FACTORY_ID = "com.hazelcast.DataSerializerHook";
private final Map<Integer, DataSerializableFactory> factories = new HashMap<Integer, DataSerializableFactory>();
DataSerializer(Map<Integer, ? extends DataSerializableFactory> dataSerializableFactories, ClassLoader classLoader) {
try {
final Iterator<DataSerializerHook> hooks = ServiceLoader.iterator(DataSerializerHook.class, FACTORY_ID, classLoader);
while (hooks.hasNext()) {
DataSerializerHook hook = hooks.next();
final DataSerializableFactory factory = hook.createFactory();
if (factory != null) {
register(hook.getFactoryId(), factory);
}
}
} catch (Exception e) {
throw ExceptionUtil.rethrow(e);
}
if (dataSerializableFactories != null) {
for (Map.Entry<Integer, ? extends DataSerializableFactory> entry : dataSerializableFactories.entrySet()) {
register(entry.getKey(), entry.getValue());
}
}
}
private void register(int factoryId, DataSerializableFactory factory) {
final DataSerializableFactory current = factories.get(factoryId);
if (current != null) {
if (current.equals(factory)) {
Logger.getLogger(getClass()).warning("DataSerializableFactory[" + factoryId + "] is already registered! Skipping "
+ factory);
} else {
throw new IllegalArgumentException("DataSerializableFactory[" + factoryId + "] is already registered! "
+ current + " -> " + factory);
}
} else {
factories.put(factoryId, factory);
}
}
public int getTypeId() {
return CONSTANT_TYPE_DATA;
}
public DataSerializable read(ObjectDataInput in) throws IOException {
final DataSerializable ds;
final boolean identified = in.readBoolean();
int id = 0;
int factoryId = 0;
String className = null;
try {
if (identified) {
factoryId = in.readInt();
final DataSerializableFactory dsf = factories.get(factoryId);
if (dsf == null) {
throw new HazelcastSerializationException("No DataSerializerFactory registered for namespace: " + factoryId);
}
id = in.readInt();
ds = dsf.create(id);
if (ds == null) {
throw new HazelcastSerializationException(dsf
+ " is not be able to create an instance for id: " + id + " on factoryId: " + factoryId);
}
// TODO: @mm - we can check if DS class is final.
} else {
className = in.readUTF();
ds = ClassLoaderUtil.newInstance(in.getClassLoader(), className);
}
ds.readData(in);
return ds;
} catch (Exception e) {
if (e instanceof IOException) {
throw (IOException) e;
}
if (e instanceof HazelcastSerializationException) {
throw (HazelcastSerializationException) e;
}
throw new HazelcastSerializationException("Problem while reading DataSerializable, namespace: "
+ factoryId
+ ", id: " + id
+ ", class: " + className
+ ", exception: " + e.getMessage(), e);
}
}
public void write(ObjectDataOutput out, DataSerializable obj) throws IOException {
final boolean identified = obj instanceof IdentifiedDataSerializable;
out.writeBoolean(identified);
if (identified) {
final IdentifiedDataSerializable ds = (IdentifiedDataSerializable) obj;
out.writeInt(ds.getFactoryId());
out.writeInt(ds.getId());
} else {
out.writeUTF(obj.getClass().getName());
}
obj.writeData(out);
}
public void destroy() {
factories.clear();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_serialization_DataSerializer.java
|
1,495 |
public class SingleSourceUnitPackage extends com.redhat.ceylon.compiler.typechecker.model.Package {
private com.redhat.ceylon.compiler.typechecker.model.Package modelPackage;
private String fullPathOfSourceUnitToTypecheck;
public SingleSourceUnitPackage(com.redhat.ceylon.compiler.typechecker.model.Package delegate,
String fullPathOfSourceUnitToTypecheck) {
this.modelPackage = delegate;
this.fullPathOfSourceUnitToTypecheck = fullPathOfSourceUnitToTypecheck;
setModule(delegate.getModule());
setName(delegate.getName());
setShared(delegate.isShared());
}
private boolean mustSearchInSourceFile(Declaration modelDeclaration) {
if (modelDeclaration == null) {
return true;
}
Unit unit = modelDeclaration.getUnit();
return mustSearchInSourceFile(unit);
}
private boolean mustSearchInSourceFile(Unit modelUnit) {
if (modelUnit instanceof CeylonUnit) {
CeylonUnit ceylonUnit = (CeylonUnit) modelUnit;
String fullPathOfModelSourceUnit = ceylonUnit.getSourceFullPath();
if (fullPathOfModelSourceUnit != null && fullPathOfModelSourceUnit.equals(fullPathOfSourceUnitToTypecheck)) {
return true;
}
}
return false;
}
@Override
public Declaration getDirectMember(String name,
List<ProducedType> signature, boolean ellipsis) {
Declaration modelMember = modelPackage.getDirectMember(name, signature, ellipsis);
return mustSearchInSourceFile(modelMember) ? super.getDirectMember(name, signature, ellipsis) : modelMember;
}
@Override
public Declaration getMember(String name, List<ProducedType> signature,
boolean ellipsis) {
Declaration modelMember = modelPackage.getMember(name, signature, ellipsis);
return mustSearchInSourceFile(modelMember) ? super.getMember(name, signature, ellipsis) : modelMember;
}
@Override
public List<Declaration> getMembers() {
LinkedList<Declaration> ret = new LinkedList<Declaration>();
for (Declaration modelDeclaration : modelPackage.getMembers()) {
if (! mustSearchInSourceFile(modelDeclaration)) {
ret.add(modelDeclaration);
}
}
ret.addAll(super.getMembers());
return ret;
}
@Override
public Iterable<Unit> getUnits() {
LinkedList<Unit> units = new LinkedList<Unit>();
for (Unit modelUnit : modelPackage.getUnits()) {
if (! mustSearchInSourceFile(modelUnit)) {
units.add(modelUnit);
}
}
for (Unit u : super.getUnits()) {
units.add(u);
}
return units;
}
@Override
public List<Annotation> getAnnotations() {
return modelPackage.getAnnotations();
}
@Override
public Scope getContainer() {
return modelPackage.getContainer();
}
@Override
public ProducedType getDeclaringType(Declaration modelDeclaration) {
return mustSearchInSourceFile(modelDeclaration) ? super.getDeclaringType(modelDeclaration) : modelPackage.getDeclaringType(modelDeclaration);
}
@Override
public Map<String, DeclarationWithProximity> getImportableDeclarations(Unit modelUnit,
String startingWith, List<Import> imports, int proximity) {
return modelPackage.getImportableDeclarations(modelUnit, startingWith, imports, proximity);
}
@Override
public TypeDeclaration getInheritingDeclaration(Declaration d) {
return modelPackage.getInheritingDeclaration(d);
}
@Override
public Map<String, DeclarationWithProximity> getMatchingDeclarations(
Unit unit, String startingWith, int proximity) {
return super.getMatchingDeclarations(unit, startingWith, proximity);
}
@Override
public Declaration getMemberOrParameter(Unit modelUnit, String name,
List<ProducedType> signature, boolean ellipsis) {
Declaration modelMember = modelPackage.getMemberOrParameter(modelUnit, name, signature, ellipsis);
return mustSearchInSourceFile(modelMember) ? super.getMemberOrParameter(modelUnit, name, signature, ellipsis) : modelMember;
}
@Override
public Module getModule() {
return modelPackage.getModule();
}
@Override
public List<String> getName() {
return modelPackage.getName();
}
@Override
public String getNameAsString() {
return modelPackage.getNameAsString();
}
@Override
public String getQualifiedNameString() {
return modelPackage.getQualifiedNameString();
}
@Override
public Scope getScope() {
return modelPackage.getScope();
}
@Override
public Unit getUnit() {
Unit modelUnit = modelPackage.getUnit();
return mustSearchInSourceFile(modelUnit) ? super.getUnit() : modelUnit;
}
@Override
public boolean isInherited(Declaration d) {
return modelPackage.isInherited(d);
}
@Override
public boolean isShared() {
return modelPackage.isShared();
}
public com.redhat.ceylon.compiler.typechecker.model.Package getModelPackage() {
return modelPackage;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_util_SingleSourceUnitPackage.java
|
527 |
public class FlushResponse extends BroadcastOperationResponse {
FlushResponse() {
}
FlushResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
super(totalShards, successfulShards, failedShards, shardFailures);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_flush_FlushResponse.java
|
1,071 |
public class MaxSizeConfig {
private MaxSizeConfigReadOnly readOnly;
private int size = MapConfig.DEFAULT_MAX_SIZE;
private MaxSizePolicy maxSizePolicy = MaxSizePolicy.PER_NODE;
public MaxSizeConfig() {
}
public MaxSizeConfig(int size, MaxSizePolicy maxSizePolicy) {
this.size = size;
this.maxSizePolicy = maxSizePolicy;
}
public MaxSizeConfig(MaxSizeConfig config) {
this.size = config.size;
this.maxSizePolicy = config.maxSizePolicy;
}
public enum MaxSizePolicy {
PER_NODE, PER_PARTITION, USED_HEAP_PERCENTAGE, USED_HEAP_SIZE
}
public MaxSizeConfigReadOnly getAsReadOnly() {
if (readOnly == null) {
readOnly = new MaxSizeConfigReadOnly(this);
}
return readOnly;
}
public int getSize() {
return size;
}
public MaxSizeConfig setSize(int size) {
if (size <= 0) {
size = Integer.MAX_VALUE;
}
this.size = size;
return this;
}
public MaxSizePolicy getMaxSizePolicy() {
return maxSizePolicy;
}
public MaxSizeConfig setMaxSizePolicy(MaxSizePolicy maxSizePolicy) {
this.maxSizePolicy = maxSizePolicy;
return this;
}
@Override
public String toString() {
return "MaxSizeConfig{" +
"maxSizePolicy='" + maxSizePolicy + '\'' +
", size=" + size +
'}';
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_config_MaxSizeConfig.java
|
1,288 |
public class ReviewStatusType {
private static final long serialVersionUID = 1L;
private static final Map<String, ReviewStatusType> TYPES = new HashMap<String, ReviewStatusType>();
public static final ReviewStatusType PENDING = new ReviewStatusType("PENDING");
public static final ReviewStatusType APPROVED = new ReviewStatusType("APPROVED");
public static final ReviewStatusType REJECTED = new ReviewStatusType("REJECTED");
public static ReviewStatusType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
public ReviewStatusType() {
}
public ReviewStatusType(final String type) {
setType(type);
}
public String getType() {
return type;
}
private void setType(String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ReviewStatusType other = (ReviewStatusType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_rating_service_type_ReviewStatusType.java
|
1,637 |
public class OHazelcastDistributedMessageService implements ODistributedMessageService {
protected final OHazelcastPlugin manager;
protected Map<String, OHazelcastDistributedDatabase> databases = new ConcurrentHashMap<String, OHazelcastDistributedDatabase>();
protected final static Map<String, IQueue<?>> queues = new HashMap<String, IQueue<?>>();
protected final IQueue<ODistributedResponse> nodeResponseQueue;
protected final ConcurrentHashMap<Long, ODistributedResponseManager> responsesByRequestIds;
protected final TimerTask asynchMessageManager;
public static final String NODE_QUEUE_PREFIX = "orientdb.node.";
public static final String NODE_QUEUE_REQUEST_POSTFIX = ".request";
public static final String NODE_QUEUE_RESPONSE_POSTFIX = ".response";
public static final String NODE_QUEUE_UNDO_POSTFIX = ".undo";
public OHazelcastDistributedMessageService(final OHazelcastPlugin manager) {
this.manager = manager;
this.responsesByRequestIds = new ConcurrentHashMap<Long, ODistributedResponseManager>();
// CREAT THE QUEUE
final String queueName = getResponseQueueName(manager.getLocalNodeName());
nodeResponseQueue = getQueue(queueName);
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(this, getLocalNodeNameAndThread(), null, DIRECTION.NONE,
"listening for incoming responses on queue: %s", queueName);
checkForPendingMessages(nodeResponseQueue, queueName, false);
// CREATE TASK THAT CHECK ASYNCHRONOUS MESSAGE RECEIVED
asynchMessageManager = new TimerTask() {
@Override
public void run() {
purgePendingMessages();
}
};
// CREATE THREAD LISTENER AGAINST orientdb.node.<node>.response, ONE PER NODE, THEN DISPATCH THE MESSAGE INTERNALLY USING THE
// THREAD ID
new Thread(new Runnable() {
@Override
public void run() {
while (!Thread.interrupted()) {
String senderNode = null;
ODistributedResponse message = null;
try {
message = nodeResponseQueue.take();
if (message != null) {
senderNode = message.getSenderNodeName();
dispatchResponseToThread(message);
}
} catch (InterruptedException e) {
// EXIT CURRENT THREAD
Thread.interrupted();
break;
} catch (Throwable e) {
ODistributedServerLog.error(this, manager.getLocalNodeName(), senderNode, DIRECTION.IN,
"error on reading distributed response", e, message != null ? message.getPayload() : "-");
}
}
}
}).start();
}
public OHazelcastDistributedDatabase getDatabase(final String iDatabaseName) {
return databases.get(iDatabaseName);
}
@Override
public ODistributedRequest createRequest() {
return new OHazelcastDistributedRequest();
}
protected void dispatchResponseToThread(final ODistributedResponse response) {
try {
final long reqId = response.getRequestId();
// GET ASYNCHRONOUS MSG MANAGER IF ANY
final ODistributedResponseManager asynchMgr = responsesByRequestIds.get(reqId);
if (asynchMgr == null) {
if (ODistributedServerLog.isDebugEnabled())
ODistributedServerLog.debug(this, manager.getLocalNodeName(), response.getExecutorNodeName(), DIRECTION.IN,
"received response for message %d after the timeout (%dms)", reqId,
OGlobalConfiguration.DISTRIBUTED_ASYNCH_RESPONSES_TIMEOUT.getValueAsLong());
} else if (asynchMgr.addResponse(response))
// ALL RESPONSE RECEIVED, REMOVE THE RESPONSE MANAGER
responsesByRequestIds.remove(reqId);
} finally {
Orient.instance().getProfiler()
.updateCounter("distributed.replication.msgReceived", "Number of replication messages received in current node", +1);
Orient
.instance()
.getProfiler()
.updateCounter("distributed.replication." + response.getExecutorNodeName() + ".msgReceived",
"Number of replication messages received in current node from a node", +1, "distributed.replication.*.msgReceived");
}
}
public void shutdown() {
for (Entry<String, OHazelcastDistributedDatabase> m : databases.entrySet())
m.getValue().shutdown();
asynchMessageManager.cancel();
responsesByRequestIds.clear();
if (nodeResponseQueue != null) {
nodeResponseQueue.clear();
nodeResponseQueue.destroy();
}
}
/**
* Composes the request queue name based on node name and database.
*/
protected static String getRequestQueueName(final String iNodeName, final String iDatabaseName) {
final StringBuilder buffer = new StringBuilder();
buffer.append(NODE_QUEUE_PREFIX);
buffer.append(iNodeName);
if (iDatabaseName != null) {
buffer.append('.');
buffer.append(iDatabaseName);
}
buffer.append(NODE_QUEUE_REQUEST_POSTFIX);
return buffer.toString();
}
/**
* Composes the response queue name based on node name.
*/
protected static String getResponseQueueName(final String iNodeName) {
final StringBuilder buffer = new StringBuilder();
buffer.append(NODE_QUEUE_PREFIX);
buffer.append(iNodeName);
buffer.append(NODE_QUEUE_RESPONSE_POSTFIX);
return buffer.toString();
}
protected String getLocalNodeNameAndThread() {
return manager.getLocalNodeName() + ":" + Thread.currentThread().getId();
}
protected void purgePendingMessages() {
final long now = System.currentTimeMillis();
final long timeout = OGlobalConfiguration.DISTRIBUTED_ASYNCH_RESPONSES_TIMEOUT.getValueAsLong();
for (Iterator<Entry<Long, ODistributedResponseManager>> it = responsesByRequestIds.entrySet().iterator(); it.hasNext();) {
final Entry<Long, ODistributedResponseManager> item = it.next();
final ODistributedResponseManager resp = item.getValue();
final long timeElapsed = now - resp.getSentOn();
if (timeElapsed > timeout) {
// EXPIRED, FREE IT!
final List<String> missingNodes = resp.getMissingNodes();
ODistributedServerLog.warn(this, manager.getLocalNodeName(), missingNodes.toString(), DIRECTION.IN,
"%d missed response(s) for message %d by nodes %s after %dms when timeout is %dms", missingNodes.size(),
resp.getMessageId(), missingNodes, timeElapsed, timeout);
Orient
.instance()
.getProfiler()
.updateCounter("distributed.replication." + resp.getDatabaseName() + ".timeouts",
"Number of timeouts on replication messages responses", +1, "distributed.replication.*.timeouts");
resp.timeout();
it.remove();
}
}
}
protected void checkForPendingMessages(final IQueue<?> iQueue, final String iQueueName, final boolean iUnqueuePendingMessages) {
final int queueSize = iQueue.size();
if (queueSize > 0) {
if (!iUnqueuePendingMessages) {
ODistributedServerLog.warn(this, manager.getLocalNodeName(), null, DIRECTION.NONE,
"found %d previous messages in queue %s, clearing them...", queueSize, iQueueName);
iQueue.clear();
} else
ODistributedServerLog.warn(this, manager.getLocalNodeName(), null, DIRECTION.NONE,
"found %d previous messages in queue %s, aligning the database...", queueSize, iQueueName);
}
}
/**
* Return the queue. If not exists create and register it.
*/
@SuppressWarnings("unchecked")
protected <T> IQueue<T> getQueue(final String iQueueName) {
synchronized (queues) {
IQueue<T> queue = (IQueue<T>) queues.get(iQueueName);
if (queue == null) {
queue = manager.getHazelcastInstance().getQueue(iQueueName);
queues.put(iQueueName, queue);
}
return manager.getHazelcastInstance().getQueue(iQueueName);
}
}
/**
* Remove the queue.
*/
protected void removeQueue(final String iQueueName) {
synchronized (queues) {
queues.remove(iQueueName);
IQueue<?> queue = manager.getHazelcastInstance().getQueue(iQueueName);
queue.clear();
}
}
public void registerRequest(final long id, final ODistributedResponseManager currentResponseMgr) {
responsesByRequestIds.put(id, currentResponseMgr);
}
public OHazelcastDistributedDatabase registerDatabase(final String iDatabaseName) {
final OHazelcastDistributedDatabase db = new OHazelcastDistributedDatabase(manager, this, iDatabaseName);
databases.put(iDatabaseName, db);
return db;
}
public Set<String> getDatabases() {
return databases.keySet();
}
}
| 1no label
|
distributed_src_main_java_com_orientechnologies_orient_server_hazelcast_OHazelcastDistributedMessageService.java
|
4,267 |
public class FsChannelSnapshot implements Translog.Snapshot {
private final long id;
private final int totalOperations;
private final RafReference raf;
private final FileChannel channel;
private final long length;
private Translog.Operation lastOperationRead = null;
private int position = 0;
private ByteBuffer cacheBuffer;
public FsChannelSnapshot(long id, RafReference raf, long length, int totalOperations) throws FileNotFoundException {
this.id = id;
this.raf = raf;
this.channel = raf.raf().getChannel();
this.length = length;
this.totalOperations = totalOperations;
}
@Override
public long translogId() {
return this.id;
}
@Override
public long position() {
return this.position;
}
@Override
public long length() {
return this.length;
}
@Override
public int estimatedTotalOperations() {
return this.totalOperations;
}
@Override
public InputStream stream() throws IOException {
return new FileChannelInputStream(channel, position, lengthInBytes());
}
@Override
public long lengthInBytes() {
return length - position;
}
@Override
public boolean hasNext() {
try {
if (position > length) {
return false;
}
if (cacheBuffer == null) {
cacheBuffer = ByteBuffer.allocate(1024);
}
cacheBuffer.limit(4);
int bytesRead = channel.read(cacheBuffer, position);
if (bytesRead < 4) {
return false;
}
cacheBuffer.flip();
int opSize = cacheBuffer.getInt();
position += 4;
if ((position + opSize) > length) {
// restore the position to before we read the opSize
position -= 4;
return false;
}
if (cacheBuffer.capacity() < opSize) {
cacheBuffer = ByteBuffer.allocate(opSize);
}
cacheBuffer.clear();
cacheBuffer.limit(opSize);
channel.read(cacheBuffer, position);
cacheBuffer.flip();
position += opSize;
lastOperationRead = TranslogStreams.readTranslogOperation(new BytesStreamInput(cacheBuffer.array(), 0, opSize, true));
return true;
} catch (Exception e) {
return false;
}
}
@Override
public Translog.Operation next() {
return this.lastOperationRead;
}
@Override
public void seekForward(long length) {
this.position += length;
}
@Override
public boolean release() throws ElasticsearchException {
raf.decreaseRefCount(true);
return true;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_translog_fs_FsChannelSnapshot.java
|
388 |
new Thread(){
public void run() {
try {
if(mm.tryLock(key, 10, TimeUnit.SECONDS)){
tryLockReturnsTrue.countDown();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_multimap_ClientMultiMapLockTest.java
|
89 |
public class StaticAssetServiceImplTest extends TestCase {
public void testConvertURLProperties() throws Exception {
StaticAssetServiceImpl staticAssetService = new StaticAssetServiceImpl();
staticAssetService.setStaticAssetUrlPrefix("cmsstatic");
staticAssetService.setStaticAssetEnvironmentUrlPrefix("http://images.mysite.com/myapp/cmsstatic");
String url = staticAssetService.convertAssetPath("/cmsstatic/product.jpg","myapp", false);
assertTrue(url.equals("http://images.mysite.com/myapp/cmsstatic/product.jpg"));
staticAssetService.setStaticAssetEnvironmentUrlPrefix("http://images.mysite.com");
url = staticAssetService.convertAssetPath("/cmsstatic/product.jpg","myapp", false);
assertTrue(url.equals("http://images.mysite.com/product.jpg"));
url = staticAssetService.convertAssetPath("/cmsstatic/product.jpg","myapp", true);
assertTrue(url.equals("https://images.mysite.com/product.jpg"));
staticAssetService.setStaticAssetEnvironmentUrlPrefix(null);
url = staticAssetService.convertAssetPath("/cmsstatic/product.jpg","myapp", true);
assertTrue(url.equals("/myapp/cmsstatic/product.jpg"));
url = staticAssetService.convertAssetPath("cmsstatic/product.jpg","myapp", true);
assertTrue(url.equals("/myapp/cmsstatic/product.jpg"));
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_test_java_org_broadleafcommerce_cms_file_service_StaticAssetServiceImplTest.java
|
437 |
public class ClusterStatsNodes implements ToXContent, Streamable {
private Counts counts;
private Set<Version> versions;
private OsStats os;
private ProcessStats process;
private JvmStats jvm;
private FsStats.Info fs;
private Set<PluginInfo> plugins;
private ClusterStatsNodes() {
}
public ClusterStatsNodes(ClusterStatsNodeResponse[] nodeResponses) {
this.counts = new Counts();
this.versions = new HashSet<Version>();
this.os = new OsStats();
this.jvm = new JvmStats();
this.fs = new FsStats.Info();
this.plugins = new HashSet<PluginInfo>();
this.process = new ProcessStats();
Set<InetAddress> seenAddresses = new HashSet<InetAddress>(nodeResponses.length);
for (ClusterStatsNodeResponse nodeResponse : nodeResponses) {
counts.addNodeInfo(nodeResponse.nodeInfo());
versions.add(nodeResponse.nodeInfo().getVersion());
process.addNodeStats(nodeResponse.nodeStats());
jvm.addNodeInfoStats(nodeResponse.nodeInfo(), nodeResponse.nodeStats());
plugins.addAll(nodeResponse.nodeInfo().getPlugins().getInfos());
// now do the stats that should be deduped by hardware (implemented by ip deduping)
TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress();
InetAddress inetAddress = null;
if (publishAddress.uniqueAddressTypeId() == 1) {
inetAddress = ((InetSocketTransportAddress) publishAddress).address().getAddress();
}
if (!seenAddresses.add(inetAddress)) {
continue;
}
os.addNodeInfo(nodeResponse.nodeInfo());
if (nodeResponse.nodeStats().getFs() != null) {
fs.add(nodeResponse.nodeStats().getFs().total());
}
}
}
public Counts getCounts() {
return this.counts;
}
public Set<Version> getVersions() {
return versions;
}
public OsStats getOs() {
return os;
}
public ProcessStats getProcess() {
return process;
}
public JvmStats getJvm() {
return jvm;
}
public FsStats.Info getFs() {
return fs;
}
public Set<PluginInfo> getPlugins() {
return plugins;
}
@Override
public void readFrom(StreamInput in) throws IOException {
counts = Counts.readCounts(in);
int size = in.readVInt();
versions = new HashSet<Version>(size);
for (; size > 0; size--) {
versions.add(Version.readVersion(in));
}
os = OsStats.readOsStats(in);
process = ProcessStats.readStats(in);
jvm = JvmStats.readJvmStats(in);
fs = FsStats.Info.readInfoFrom(in);
size = in.readVInt();
plugins = new HashSet<PluginInfo>(size);
for (; size > 0; size--) {
plugins.add(PluginInfo.readPluginInfo(in));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
counts.writeTo(out);
out.writeVInt(versions.size());
for (Version v : versions) Version.writeVersion(v, out);
os.writeTo(out);
process.writeTo(out);
jvm.writeTo(out);
fs.writeTo(out);
out.writeVInt(plugins.size());
for (PluginInfo p : plugins) {
p.writeTo(out);
}
}
public static ClusterStatsNodes readNodeStats(StreamInput in) throws IOException {
ClusterStatsNodes nodeStats = new ClusterStatsNodes();
nodeStats.readFrom(in);
return nodeStats;
}
static final class Fields {
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString VERSIONS = new XContentBuilderString("versions");
static final XContentBuilderString OS = new XContentBuilderString("os");
static final XContentBuilderString PROCESS = new XContentBuilderString("process");
static final XContentBuilderString JVM = new XContentBuilderString("jvm");
static final XContentBuilderString FS = new XContentBuilderString("fs");
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.COUNT);
counts.toXContent(builder, params);
builder.endObject();
builder.startArray(Fields.VERSIONS);
for (Version v : versions) {
builder.value(v.toString());
}
builder.endArray();
builder.startObject(Fields.OS);
os.toXContent(builder, params);
builder.endObject();
builder.startObject(Fields.PROCESS);
process.toXContent(builder, params);
builder.endObject();
builder.startObject(Fields.JVM);
jvm.toXContent(builder, params);
builder.endObject();
builder.field(Fields.FS);
fs.toXContent(builder, params);
builder.startArray(Fields.PLUGINS);
for (PluginInfo pluginInfo : plugins) {
pluginInfo.toXContent(builder, params);
}
builder.endArray();
return builder;
}
public static class Counts implements Streamable, ToXContent {
int total;
int masterOnly;
int dataOnly;
int masterData;
int client;
public void addNodeInfo(NodeInfo nodeInfo) {
total++;
DiscoveryNode node = nodeInfo.getNode();
if (node.masterNode()) {
if (node.dataNode()) {
masterData++;
} else {
masterOnly++;
}
} else if (node.dataNode()) {
dataOnly++;
} else if (node.clientNode()) {
client++;
}
}
public int getTotal() {
return total;
}
public int getMasterOnly() {
return masterOnly;
}
public int getDataOnly() {
return dataOnly;
}
public int getMasterData() {
return masterData;
}
public int getClient() {
return client;
}
public static Counts readCounts(StreamInput in) throws IOException {
Counts c = new Counts();
c.readFrom(in);
return c;
}
@Override
public void readFrom(StreamInput in) throws IOException {
total = in.readVInt();
masterOnly = in.readVInt();
dataOnly = in.readVInt();
masterData = in.readVInt();
client = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(total);
out.writeVInt(masterOnly);
out.writeVInt(dataOnly);
out.writeVInt(masterData);
out.writeVInt(client);
}
static final class Fields {
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString MASTER_ONLY = new XContentBuilderString("master_only");
static final XContentBuilderString DATA_ONLY = new XContentBuilderString("data_only");
static final XContentBuilderString MASTER_DATA = new XContentBuilderString("master_data");
static final XContentBuilderString CLIENT = new XContentBuilderString("client");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.TOTAL, total);
builder.field(Fields.MASTER_ONLY, masterOnly);
builder.field(Fields.DATA_ONLY, dataOnly);
builder.field(Fields.MASTER_DATA, masterData);
builder.field(Fields.CLIENT, client);
return builder;
}
}
public static class OsStats implements ToXContent, Streamable {
int availableProcessors;
long availableMemory;
ObjectIntOpenHashMap<OsInfo.Cpu> cpus;
public OsStats() {
cpus = new ObjectIntOpenHashMap<org.elasticsearch.monitor.os.OsInfo.Cpu>();
}
public void addNodeInfo(NodeInfo nodeInfo) {
availableProcessors += nodeInfo.getOs().availableProcessors();
if (nodeInfo.getOs() == null) {
return;
}
if (nodeInfo.getOs().cpu() != null) {
cpus.addTo(nodeInfo.getOs().cpu(), 1);
}
if (nodeInfo.getOs().getMem() != null && nodeInfo.getOs().getMem().getTotal().bytes() != -1) {
availableMemory += nodeInfo.getOs().getMem().getTotal().bytes();
}
}
public int getAvailableProcessors() {
return availableProcessors;
}
public ByteSizeValue getAvailableMemory() {
return new ByteSizeValue(availableMemory);
}
public ObjectIntOpenHashMap<OsInfo.Cpu> getCpus() {
return cpus;
}
@Override
public void readFrom(StreamInput in) throws IOException {
availableProcessors = in.readVInt();
availableMemory = in.readLong();
int size = in.readVInt();
cpus = new ObjectIntOpenHashMap<OsInfo.Cpu>(size);
for (; size > 0; size--) {
cpus.addTo(OsInfo.Cpu.readCpu(in), in.readVInt());
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(availableProcessors);
out.writeLong(availableMemory);
out.writeVInt(cpus.size());
for (ObjectIntCursor<OsInfo.Cpu> c : cpus) {
c.key.writeTo(out);
out.writeVInt(c.value);
}
}
public static OsStats readOsStats(StreamInput in) throws IOException {
OsStats os = new OsStats();
os.readFrom(in);
return os;
}
static final class Fields {
static final XContentBuilderString AVAILABLE_PROCESSORS = new XContentBuilderString("available_processors");
static final XContentBuilderString MEM = new XContentBuilderString("mem");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString TOTAL_IN_BYTES = new XContentBuilderString("total_in_bytes");
static final XContentBuilderString CPU = new XContentBuilderString("cpu");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.AVAILABLE_PROCESSORS, availableProcessors);
builder.startObject(Fields.MEM);
builder.byteSizeField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, availableMemory);
builder.endObject();
builder.startArray(Fields.CPU);
for (ObjectIntCursor<OsInfo.Cpu> cpu : cpus) {
builder.startObject();
cpu.key.toXContent(builder, params);
builder.field(Fields.COUNT, cpu.value);
builder.endObject();
}
builder.endArray();
return builder;
}
}
/**
 * Cluster-wide aggregation of per-node process statistics: CPU usage and
 * open file descriptor counts.
 */
public static class ProcessStats implements ToXContent, Streamable {

    int count;
    int cpuPercent;
    long totalOpenFileDescriptors;
    long minOpenFileDescriptors = Long.MAX_VALUE;
    long maxOpenFileDescriptors = Long.MIN_VALUE;

    /**
     * Folds one node's process stats into this aggregate. Nodes that report
     * no process stats are skipped entirely (they do not affect {@code count}).
     */
    public void addNodeStats(NodeStats nodeStats) {
        if (nodeStats.getProcess() == null) {
            return;
        }
        count++;
        // cpu() may be absent when sigar is not available on the node
        if (nodeStats.getProcess().cpu() != null) {
            cpuPercent += nodeStats.getProcess().cpu().getPercent();
        }
        long openFds = nodeStats.getProcess().openFileDescriptors();
        // openFileDescriptors() can report -1 on unsupported platforms;
        // exclude those values from the total ...
        if (openFds > 0) {
            totalOpenFileDescriptors += openFds;
        }
        // ... but keep them in min/max so an unsupported node stays visible.
        minOpenFileDescriptors = Math.min(minOpenFileDescriptors, openFds);
        maxOpenFileDescriptors = Math.max(maxOpenFileDescriptors, openFds);
    }

    /**
     * Cpu usage in percentages - 100 is 1 core. This is the sum over all
     * aggregated nodes.
     */
    public int getCpuPercent() {
        return cpuPercent;
    }

    /** Average open file descriptors per node, or -1 when no node reported. */
    public long getAvgOpenFileDescriptors() {
        if (count == 0) {
            return -1;
        }
        return totalOpenFileDescriptors / count;
    }

    /** Maximum open file descriptors of any node, or -1 when no node reported. */
    public long getMaxOpenFileDescriptors() {
        if (count == 0) {
            return -1;
        }
        return maxOpenFileDescriptors;
    }

    /** Minimum open file descriptors of any node, or -1 when no node reported. */
    public long getMinOpenFileDescriptors() {
        if (count == 0) {
            return -1;
        }
        return minOpenFileDescriptors;
    }

    // Wire format: field order below must stay in sync between readFrom/writeTo.
    // min/max use plain longs (not vlongs) because they may be negative (-1).
    @Override
    public void readFrom(StreamInput in) throws IOException {
        count = in.readVInt();
        cpuPercent = in.readVInt();
        totalOpenFileDescriptors = in.readVLong();
        minOpenFileDescriptors = in.readLong();
        maxOpenFileDescriptors = in.readLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(count);
        out.writeVInt(cpuPercent);
        out.writeVLong(totalOpenFileDescriptors);
        out.writeLong(minOpenFileDescriptors);
        out.writeLong(maxOpenFileDescriptors);
    }

    /** Reads and returns a fully populated {@link ProcessStats} from the stream. */
    public static ProcessStats readStats(StreamInput in) throws IOException {
        ProcessStats stats = new ProcessStats();
        stats.readFrom(in);
        return stats;
    }

    // XContent field names emitted by toXContent below.
    static final class Fields {
        static final XContentBuilderString CPU = new XContentBuilderString("cpu");
        static final XContentBuilderString PERCENT = new XContentBuilderString("percent");
        static final XContentBuilderString OPEN_FILE_DESCRIPTORS = new XContentBuilderString("open_file_descriptors");
        static final XContentBuilderString MIN = new XContentBuilderString("min");
        static final XContentBuilderString MAX = new XContentBuilderString("max");
        static final XContentBuilderString AVG = new XContentBuilderString("avg");
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(Fields.CPU).field(Fields.PERCENT, cpuPercent).endObject();
        // fd stats are only meaningful once at least one node contributed
        if (count > 0) {
            builder.startObject(Fields.OPEN_FILE_DESCRIPTORS);
            builder.field(Fields.MIN, getMinOpenFileDescriptors());
            builder.field(Fields.MAX, getMaxOpenFileDescriptors());
            builder.field(Fields.AVG, getAvgOpenFileDescriptors());
            builder.endObject();
        }
        return builder;
    }
}
public static class JvmStats implements Streamable, ToXContent {
ObjectIntOpenHashMap<JvmVersion> versions;
long threads;
long maxUptime;
long heapUsed;
long heapMax;
JvmStats() {
versions = new ObjectIntOpenHashMap<JvmVersion>();
threads = 0;
maxUptime = 0;
heapMax = 0;
heapUsed = 0;
}
public ObjectIntOpenHashMap<JvmVersion> getVersions() {
return versions;
}
/**
* The total number of threads in the cluster
*/
public long getThreads() {
return threads;
}
/**
* The maximum uptime of a node in the cluster
*/
public TimeValue getMaxUpTime() {
return new TimeValue(maxUptime);
}
/**
* Total heap used in the cluster
*/
public ByteSizeValue getHeapUsed() {
return new ByteSizeValue(heapUsed);
}
/**
* Maximum total heap available to the cluster
*/
public ByteSizeValue getHeapMax() {
return new ByteSizeValue(heapMax);
}
public void addNodeInfoStats(NodeInfo nodeInfo, NodeStats nodeStats) {
versions.addTo(new JvmVersion(nodeInfo.getJvm()), 1);
org.elasticsearch.monitor.jvm.JvmStats js = nodeStats.getJvm();
if (js == null) {
return;
}
if (js.threads() != null) {
threads += js.threads().count();
}
maxUptime = Math.max(maxUptime, js.uptime().millis());
if (js.mem() != null) {
heapUsed += js.mem().getHeapUsed().bytes();
heapMax += js.mem().getHeapMax().bytes();
}
}
@Override
public void readFrom(StreamInput in) throws IOException {
int size = in.readVInt();
versions = new ObjectIntOpenHashMap<JvmVersion>(size);
for (; size > 0; size--) {
versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
}
threads = in.readVLong();
maxUptime = in.readVLong();
heapUsed = in.readVLong();
heapMax = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(versions.size());
for (ObjectIntCursor<JvmVersion> v : versions) {
v.key.writeTo(out);
out.writeVInt(v.value);
}
out.writeVLong(threads);
out.writeVLong(maxUptime);
out.writeVLong(heapUsed);
out.writeVLong(heapMax);
}
public static JvmStats readJvmStats(StreamInput in) throws IOException {
JvmStats jvmStats = new JvmStats();
jvmStats.readFrom(in);
return jvmStats;
}
static final class Fields {
static final XContentBuilderString VERSIONS = new XContentBuilderString("versions");
static final XContentBuilderString VERSION = new XContentBuilderString("version");
static final XContentBuilderString VM_NAME = new XContentBuilderString("vm_name");
static final XContentBuilderString VM_VERSION = new XContentBuilderString("vm_version");
static final XContentBuilderString VM_VENDOR = new XContentBuilderString("vm_vendor");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString THREADS = new XContentBuilderString("threads");
static final XContentBuilderString MAX_UPTIME = new XContentBuilderString("max_uptime");
static final XContentBuilderString MAX_UPTIME_IN_MILLIS = new XContentBuilderString("max_uptime_in_millis");
static final XContentBuilderString MEM = new XContentBuilderString("mem");
static final XContentBuilderString HEAP_USED = new XContentBuilderString("heap_used");
static final XContentBuilderString HEAP_USED_IN_BYTES = new XContentBuilderString("heap_used_in_bytes");
static final XContentBuilderString HEAP_MAX = new XContentBuilderString("heap_max");
static final XContentBuilderString HEAP_MAX_IN_BYTES = new XContentBuilderString("heap_max_in_bytes");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.timeValueField(Fields.MAX_UPTIME_IN_MILLIS, Fields.MAX_UPTIME, maxUptime);
builder.startArray(Fields.VERSIONS);
for (ObjectIntCursor<JvmVersion> v : versions) {
builder.startObject();
builder.field(Fields.VERSION, v.key.version);
builder.field(Fields.VM_NAME, v.key.vmName);
builder.field(Fields.VM_VERSION, v.key.vmVersion);
builder.field(Fields.VM_VENDOR, v.key.vmVendor);
builder.field(Fields.COUNT, v.value);
builder.endObject();
}
builder.endArray();
builder.startObject(Fields.MEM);
builder.byteSizeField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, heapUsed);
builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, heapMax);
builder.endObject();
builder.field(Fields.THREADS, threads);
return builder;
}
}
/**
 * Identifies a JVM flavor for grouping nodes in the cluster stats.
 * Equality is based on {@code vmVersion} and {@code vmVendor} only;
 * NOTE(review): {@code version} and {@code vmName} appear intentionally
 * excluded from equals (entries are grouped by VM build) — confirm this
 * matches the intended grouping semantics.
 */
public static class JvmVersion implements Streamable {
    String version;
    String vmName;
    String vmVersion;
    String vmVendor;

    JvmVersion(JvmInfo jvmInfo) {
        version = jvmInfo.version();
        vmName = jvmInfo.vmName();
        vmVersion = jvmInfo.vmVersion();
        vmVendor = jvmInfo.vmVendor();
    }

    /** No-arg constructor for deserialization via {@link #readFrom}. */
    JvmVersion() {
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        JvmVersion jvm = (JvmVersion) o;
        return vmVersion.equals(jvm.vmVersion) && vmVendor.equals(jvm.vmVendor);
    }

    @Override
    public int hashCode() {
        // Combine exactly the fields compared by equals() (vmVersion, vmVendor)
        // for consistency and better hash distribution than vmVersion alone.
        return 31 * vmVersion.hashCode() + vmVendor.hashCode();
    }

    /** Reads and returns a fully populated {@link JvmVersion} from the stream. */
    public static JvmVersion readJvmVersion(StreamInput in) throws IOException {
        JvmVersion jvm = new JvmVersion();
        jvm.readFrom(in);
        return jvm;
    }

    // Wire format: field order below must stay in sync between readFrom/writeTo.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        version = in.readString();
        vmName = in.readString();
        vmVersion = in.readString();
        vmVendor = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(version);
        out.writeString(vmName);
        out.writeString(vmVersion);
        out.writeString(vmVendor);
    }
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_stats_ClusterStatsNodes.java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.