Dataset columns (name, dtype, and value/length range):

  lang:          stringclasses, 1 value
  license:       stringclasses, 13 values
  stderr:        stringlengths, 0 to 350
  commit:        stringlengths, 40 to 40
  returncode:    int64, 0 to 128
  repos:         stringlengths, 7 to 45.1k
  new_contents:  stringlengths, 0 to 1.87M
  new_file:      stringlengths, 6 to 292
  old_contents:  stringlengths, 0 to 1.87M
  message:       stringlengths, 6 to 9.26k
  old_file:      stringlengths, 6 to 292
  subject:       stringlengths, 0 to 4.45k
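The columns appear to describe one commit-level code change per row: the language and license of the touched file, a commit hash and return code, the repositories the commit belongs to, the file path and contents before and after the change, and the commit message and subject. As a rough sketch only (the record name CommitRow and the per-field comments are my interpretation of the column names above, not documentation from the dataset), a row could be modeled as:

// Rough sketch only: the record name and field comments are interpretations of the
// column names above, not documentation shipped with the dataset.
public record CommitRow(
        String lang,          // programming language of the file, e.g. "Java"
        String license,       // license identifier, e.g. "artistic-2.0"
        String stderr,        // captured stderr text, possibly empty (0 to 350 chars)
        String commit,        // 40-character commit hash
        long returncode,      // integer return code, 0 to 128 in this dataset
        String repos,         // comma-separated "owner/name" repository list
        String newContents,   // file contents after the change (up to ~1.87M chars)
        String newFile,       // file path after the change
        String oldContents,   // file contents before the change
        String message,       // full commit message
        String oldFile,       // file path before the change
        String subject        // short commit subject (up to ~4.45k chars)
) {}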
Sample row (values matched to the columns above):

  lang:       Java
  license:    artistic-2.0
  commit:     4cf89aec7bb40a321308632842a9899dfdd154dd
  returncode: 0
  repos:      greenlaw110/java-http,osglworks/java-http
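The Java source below appears to be this row's file-contents field: the org.osgl.http.H class from the repositories listed above. As a minimal, illustrative usage sketch of the API defined in that file (the wrapper class HUsageExample, the expected-output comments, and the assumption that the library's mime-types.properties resource is on the classpath are mine; only methods visible in the source below are used):

import org.osgl.http.H;

/** Illustrative only: exercises a few helpers defined in the H class shown below. */
public class HUsageExample {
    public static void main(String[] args) {
        // HTTP method helpers: POST counts as "unsafe" because it may change server state.
        H.Method method = H.Method.valueOfIgnoreCase("post");
        System.out.println(method + " safe? " + method.safe());        // POST safe? false

        // Status codes: predefined instances are reused; any code in 100..599 is accepted.
        H.Status ok = H.Status.valueOf(200);
        System.out.println(ok.code() + " success? " + ok.isSuccess()); // 200 success? true

        // Resolve a response format from an "Accept" header value.
        H.Format fmt = H.Format.resolve("application/json");
        System.out.println(fmt.name() + " -> " + fmt.contentType());   // json -> application/json

        // Build a cookie with a one-hour lifetime, marked HTTP-only.
        H.Cookie cookie = new H.Cookie("session", "abc123").maxAge(3600).httpOnly(true);
        System.out.println(cookie.name() + " max-age " + cookie.maxAge());
    }
}

The row's file contents follow as-is.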
package org.osgl.http; import org.apache.commons.codec.Charsets; import org.osgl.$; import org.osgl.cache.CacheService; import org.osgl.exception.NotAppliedException; import org.osgl.exception.UnexpectedIOException; import org.osgl.http.util.Path; import org.osgl.logging.L; import org.osgl.logging.Logger; import org.osgl.util.*; import org.osgl.web.util.UserAgent; import java.io.*; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.charset.Charset; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.osgl.http.H.Header.Names.*; /** * The namespace to access Http features. * Alias of {@link org.osgl.http.Http} */ public class H { protected static final Logger logger = L.get(Http.class); public enum Method { GET, HEAD, POST, DELETE, PUT, PATCH, TRACE, OPTIONS, CONNECT; private static EnumSet<Method> unsafeMethods = EnumSet.of(POST, DELETE, PUT, PATCH); private static EnumSet<Method> actionMethods = EnumSet.of(GET, POST, PUT, DELETE); /** * Returns if this http method is safe, meaning it * won't change the state of the server * * @see #unsafe() */ public boolean safe() { return !unsafe(); } /** * Returns if this http method is unsafe, meaning * it will change the state of the server * * @see #safe() */ public boolean unsafe() { return unsafeMethods.contains(this); } public static Method valueOfIgnoreCase(String method) { return valueOf(method.toUpperCase()); } public static EnumSet<Method> actionMethods() { return actionMethods.clone(); } } // eof Method public static final class Status implements Serializable, Comparable<Status> { private static final Map<Integer, Status> predefinedStatus = new LinkedHashMap<Integer, Status>(); private static final long serialVersionUID = -286619406116817809L; private int code; private Status(int code) { this(code, true); } private Status(int code, boolean predefined) { this.code = code; if (predefined) { predefinedStatus.put(code, this); } } /** * Returns the int value of the status */ public final int code() { return code; } /** * Returns {@code true} if the status is either a {@link #isClientError() client error} * or {@link #isServerError() server error} */ public boolean isError() { return isClientError() || isServerError(); } /** * Returns true if the status is server error (5xx) */ public boolean isServerError() { return code / 100 == 5; } /** * Returns true if the status is client error (4xx) */ public boolean isClientError() { return code / 100 == 4; } /** * Returns true if the status is success series (2xx) */ public boolean isSuccess() { return code / 100 == 2; } /** * Returns true if the status is redirect series (3xx) */ public boolean isRedirect() { return code / 100 == 3; } /** * Returns true if the status is informational series (1xx) */ public boolean isInformational() { return code / 100 == 1; } /** * Return a string representation of this status code. 
*/ @Override public String toString() { return Integer.toString(code); } @Override public int hashCode() { return code; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj instanceof Status) { Status that = (Status) obj; return that.code() == code; } return false; } @Override public int compareTo(Status o) { return code - o.code; } protected final Object clone() throws CloneNotSupportedException { throw new CloneNotSupportedException(); } private Object readResolve() { Status predefined = predefinedStatus.get(code); return null != predefined ? predefined : this; } /** * Alias of {@link #valueOf(int)} * @param n * @return */ public static Status of(int n) { return valueOf(n); } public static Status valueOf(int n) { E.illegalArgumentIf(n < 100 || n > 599, "invalid http status code: %s", n); Status retVal = predefinedStatus.get(n); if (null == retVal) { retVal = new Status(n, false); } return retVal; } public static List<Status> predefined() { return C.list(predefinedStatus.values()); } // 1xx Informational /** * {@code 100 Continue}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.1.1">HTTP/1.1</a> */ public static final Status CONTINUE = new Status(100); /** * {@code 101 Switching Protocols}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.1.2">HTTP/1.1</a> */ public static final Status SWITCHING_PROTOCOLS = new Status(101); /** * {@code 102 Processing}. * * @see <a href="http://tools.ietf.org/html/rfc2518#section-10.1">WebDAV</a> */ public static final Status PROCESSING = new Status(102); /** * {@code 103 Checkpoint}. * * @see <a href="http://code.google.com/p/gears/wiki/ResumableHttpRequestsProposal">A proposal for supporting * resumable POST/PUT HTTP requests in HTTP/1.0</a> */ public static final Status CHECKPOINT = new Status(103); // 2xx Success /** * {@code 200 OK}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.1">HTTP/1.1</a> */ public static final Status OK = new Status(200); /** * {@code 201 Created}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.2">HTTP/1.1</a> */ public static final Status CREATED = new Status(201); /** * {@code 202 Accepted}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.3">HTTP/1.1</a> */ public static final Status ACCEPTED = new Status(202); /** * {@code 203 Non-Authoritative Information}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.4">HTTP/1.1</a> */ public static final Status NON_AUTHORITATIVE_INFORMATION = new Status(203); /** * {@code 204 No Content}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.5">HTTP/1.1</a> */ public static final Status NO_CONTENT = new Status(204); /** * {@code 205 Reset Content}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.6">HTTP/1.1</a> */ public static final Status RESET_CONTENT = new Status(205); /** * {@code 206 Partial Content}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.7">HTTP/1.1</a> */ public static final Status PARTIAL_CONTENT = new Status(206); /** * {@code 207 Multi-Status}. * * @see <a href="http://tools.ietf.org/html/rfc4918#section-13">WebDAV</a> */ public static final Status MULTI_STATUS = new Status(207); /** * {@code 208 Already Reported}. * * @see <a href="http://tools.ietf.org/html/rfc5842#section-7.1">WebDAV Binding Extensions</a> */ public static final Status ALREADY_REPORTED = new Status(208); /** * {@code 226 IM Used}. 
* * @see <a href="http://tools.ietf.org/html/rfc3229#section-10.4.1">Delta encoding in HTTP</a> */ public static final Status IM_USED = new Status(226); /** * {@code 278} Faked http status to handle redirection on ajax case * @see <a href="http://stackoverflow.com/questions/199099/how-to-manage-a-redirect-request-after-a-jquery-ajax-call">this</a> stackoverflow */ public static final Status FOUND_AJAX = new Status(278); // 3xx Redirection /** * {@code 300 Multiple Choices}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.1">HTTP/1.1</a> */ public static final Status MULTIPLE_CHOICES = new Status(300); /** * {@code 301 Moved Permanently}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.2">HTTP/1.1</a> */ public static final Status MOVED_PERMANENTLY = new Status(301); /** * {@code 302 Found}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.3">HTTP/1.1</a> */ public static final Status FOUND = new Status(302); /** * {@code 302 Moved Temporarily}. * * @see <a href="http://tools.ietf.org/html/rfc1945#section-9.3">HTTP/1.0</a> * @deprecated In favor of {@link #FOUND} which will be returned from {@code Status.valueOf(302)} */ @Deprecated public static final Status MOVED_TEMPORARILY = new Status(302); /** * {@code 303 See Other}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.4">HTTP/1.1</a> */ public static final Status SEE_OTHER = new Status(303); /** * {@code 304 Not Modified}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.5">HTTP/1.1</a> */ public static final Status NOT_MODIFIED = new Status(304); /** * {@code 305 Use Proxy}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.6">HTTP/1.1</a> */ public static final Status USE_PROXY = new Status(305); /** * {@code 307 Temporary Redirect}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.8">HTTP/1.1</a> */ public static final Status TEMPORARY_REDIRECT = new Status(307); /** * {@code 308 Resume Incomplete}. * * @see <a href="http://code.google.com/p/gears/wiki/ResumableHttpRequestsProposal">A proposal for supporting * resumable POST/PUT HTTP requests in HTTP/1.0</a> */ public static final Status RESUME_INCOMPLETE = new Status(308); // --- 4xx Client Error --- /** * {@code 400 Bad Request}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.1">HTTP/1.1</a> */ public static final Status BAD_REQUEST = new Status(400); /** * {@code 401 Unauthorized}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.2">HTTP/1.1</a> */ public static final Status UNAUTHORIZED = new Status(401); /** * {@code 402 Payment Required}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.3">HTTP/1.1</a> */ public static final Status PAYMENT_REQUIRED = new Status(402); /** * {@code 403 Forbidden}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.4">HTTP/1.1</a> */ public static final Status FORBIDDEN = new Status(403); /** * {@code 404 Not Found}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.5">HTTP/1.1</a> */ public static final Status NOT_FOUND = new Status(404); /** * {@code 405 Method Not Allowed}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.6">HTTP/1.1</a> */ public static final Status METHOD_NOT_ALLOWED = new Status(405); /** * {@code 406 Not Acceptable}. 
* * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.7">HTTP/1.1</a> */ public static final Status NOT_ACCEPTABLE = new Status(406); /** * {@code 407 Proxy Authentication Required}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.8">HTTP/1.1</a> */ public static final Status PROXY_AUTHENTICATION_REQUIRED = new Status(407); /** * {@code 408 Request Timeout}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.9">HTTP/1.1</a> */ public static final Status REQUEST_TIMEOUT = new Status(408); /** * {@code 409 Conflict}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.10">HTTP/1.1</a> */ public static final Status CONFLICT = new Status(409); /** * {@code 410 Gone}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.11">HTTP/1.1</a> */ public static final Status GONE = new Status(410); /** * {@code 411 Length Required}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.12">HTTP/1.1</a> */ public static final Status LENGTH_REQUIRED = new Status(411); /** * {@code 412 Precondition failed}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.13">HTTP/1.1</a> */ public static final Status PRECONDITION_FAILED = new Status(412); /** * {@code 413 Request Entity Too Large}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.14">HTTP/1.1</a> */ public static final Status REQUEST_ENTITY_TOO_LARGE = new Status(413); /** * {@code 414 Request-URI Too Long}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.15">HTTP/1.1</a> */ public static final Status REQUEST_URI_TOO_LONG = new Status(414); /** * {@code 415 Unsupported Media Type}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.16">HTTP/1.1</a> */ public static final Status UNSUPPORTED_MEDIA_TYPE = new Status(415); /** * {@code 416 Requested Range Not Satisfiable}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.17">HTTP/1.1</a> */ public static final Status REQUESTED_RANGE_NOT_SATISFIABLE = new Status(416); /** * {@code 417 Expectation Failed}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.18">HTTP/1.1</a> */ public static final Status EXPECTATION_FAILED = new Status(417); /** * {@code 418 I'm a teapot}. * * @see <a href="http://tools.ietf.org/html/rfc2324#section-2.3.2">HTCPCP/1.0</a> */ public static final Status I_AM_A_TEAPOT = new Status(418); /** * @deprecated See <a href="http://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt">WebDAV Draft Changes</a> */ @Deprecated public static final Status INSUFFICIENT_SPACE_ON_RESOURCE = new Status(419); /** * @deprecated See <a href="http://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt">WebDAV Draft Changes</a> */ @Deprecated public static final Status METHOD_FAILURE = new Status(420); /** * @deprecated See <a href="http://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt">WebDAV Draft Changes</a> */ @Deprecated public static final Status DESTINATION_LOCKED = new Status(421); /** * {@code 422 Unprocessable Entity}. * * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.2">WebDAV</a> */ public static final Status UNPROCESSABLE_ENTITY = new Status(422); /** * {@code 423 Locked}. * * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.3">WebDAV</a> */ public static final Status LOCKED = new Status(423); /** * {@code 424 Failed Dependency}. 
* * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.4">WebDAV</a> */ public static final Status FAILED_DEPENDENCY = new Status(424); /** * {@code 426 Upgrade Required}. * * @see <a href="http://tools.ietf.org/html/rfc2817#section-6">Upgrading to TLS Within HTTP/1.1</a> */ public static final Status UPGRADE_REQUIRED = new Status(426); /** * {@code 428 Precondition Required}. * * @see <a href="http://tools.ietf.org/html/rfc6585#section-3">Additional HTTP Status Codes</a> */ public static final Status PRECONDITION_REQUIRED = new Status(428); /** * {@code 429 Too Many Requests}. * * @see <a href="http://tools.ietf.org/html/rfc6585#section-4">Additional HTTP Status Codes</a> */ public static final Status TOO_MANY_REQUESTS = new Status(429); /** * {@code 431 Request Header Fields Too Large}. * * @see <a href="http://tools.ietf.org/html/rfc6585#section-5">Additional HTTP Status Codes</a> */ public static final Status REQUEST_HEADER_FIELDS_TOO_LARGE = new Status(431); // --- 5xx Server Error --- /** * {@code 500 Internal Server Error}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.1">HTTP/1.1</a> */ public static final Status INTERNAL_SERVER_ERROR = new Status(500); /** * {@code 501 Not Implemented}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.2">HTTP/1.1</a> */ public static final Status NOT_IMPLEMENTED = new Status(501); /** * {@code 502 Bad Gateway}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.3">HTTP/1.1</a> */ public static final Status BAD_GATEWAY = new Status(502); /** * {@code 503 Service Unavailable}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.4">HTTP/1.1</a> */ public static final Status SERVICE_UNAVAILABLE = new Status(503); /** * {@code 504 Gateway Timeout}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.5">HTTP/1.1</a> */ public static final Status GATEWAY_TIMEOUT = new Status(504); /** * {@code 505 HTTP Version Not Supported}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.6">HTTP/1.1</a> */ public static final Status HTTP_VERSION_NOT_SUPPORTED = new Status(505); /** * {@code 506 Variant Also Negotiates} * * @see <a href="http://tools.ietf.org/html/rfc2295#section-8.1">Transparent Content Negotiation</a> */ public static final Status VARIANT_ALSO_NEGOTIATES = new Status(506); /** * {@code 507 Insufficient Storage} * * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.5">WebDAV</a> */ public static final Status INSUFFICIENT_STORAGE = new Status(507); /** * {@code 508 Loop Detected} * * @see <a href="http://tools.ietf.org/html/rfc5842#section-7.2">WebDAV Binding Extensions</a> */ public static final Status LOOP_DETECTED = new Status(508); /** * {@code 509 Bandwidth Limit Exceeded} */ public static final Status BANDWIDTH_LIMIT_EXCEEDED = new Status(509); /** * {@code 510 Not Extended} * * @see <a href="http://tools.ietf.org/html/rfc2774#section-7">HTTP Extension Framework</a> */ public static final Status NOT_EXTENDED = new Status(510); /** * {@code 511 Network Authentication Required}. 
* * @see <a href="http://tools.ietf.org/html/rfc6585#section-6">Additional HTTP Status Codes</a> */ public static final Status NETWORK_AUTHENTICATION_REQUIRED = new Status(511); } public static Status status(int n) { return Status.valueOf(n); } public static final class Header implements Serializable { private static final long serialVersionUID = -3987421318751857114L; public static final class Names { /** * {@code "Accept"} */ public static final String ACCEPT = "accept"; /** * {@code "Accept-Charset"} */ public static final String ACCEPT_CHARSET = "accept-charset"; /** * {@code "Accept-Encoding"} */ public static final String ACCEPT_ENCODING = "accept-encoding"; /** * {@code "Accept-Language"} */ public static final String ACCEPT_LANGUAGE = "accept-language"; /** * {@code "Accept-Ranges"} */ public static final String ACCEPT_RANGES = "accept-ranges"; /** * {@code "Accept-Patch"} */ public static final String ACCEPT_PATCH = "accept-patch"; /** * {@code "Access-Control-Allow-Origin"} */ public static final String ACCESS_CONTROL_ALLOW_ORIGIN = "access-control-allow-origin"; /** * {@code "Access-Control-Allow-Methods"} */ public static final String ACCESS_CONTROL_ALLOW_METHODS = "access-control-allow-methods"; /** * {@code "Access-Control-Allow-Headers"} */ public static final String ACCESS_CONTROL_ALLOW_HEADERS = "access-control-allow-headers"; /** * {@code "Access-Control-Allow-Credentials"} */ public static final String ACCESS_CONTROL_ALLOW_CREDENTIALS = "access-control-allow-Credentials"; /** * {@code "Access-Control-Expose-Headers"} */ public static final String ACCESS_CONTROL_EXPOSE_HEADERS = "access-control-expose-headers"; /** * {@code "Access-Control-Max-Age"} */ public static final String ACCESS_CONTROL_MAX_AGE = "access-control-max-age"; /** * {@code "Access-Control-Request-Method"} */ public static final String ACCESS_CONTROL_REQUEST_METHOD = "access-control-request-method"; /** * {@code "Access-Control-Request-Headers"} */ public static final String ACCESS_CONTROL_REQUEST_HEADERS = "access-control-request-headers"; /** * {@code "Age"} */ public static final String AGE = "age"; /** * {@code "Allow"} */ public static final String ALLOW = "allow"; /** * {@code "Authorization"} */ public static final String AUTHORIZATION = "authorization"; /** * {@code "Cache-Control"} */ public static final String CACHE_CONTROL = "cache-control"; /** * {@code "Connection"} */ public static final String CONNECTION = "connection"; /** * {@code "Content-Base"} */ public static final String CONTENT_BASE = "content-base"; /** * {@code "Content-Disposition"} */ public static final String CONTENT_DISPOSITION = "content-disposition"; /** * {@code "Content-Encoding"} */ public static final String CONTENT_ENCODING = "content-encoding"; /** * {@code "Content-Language"} */ public static final String CONTENT_LANGUAGE = "content-language"; /** * {@code "Content-Length"} */ public static final String CONTENT_LENGTH = "content-length"; /** * {@code "Content-Location"} */ public static final String CONTENT_LOCATION = "content-location"; /** * {@code "Content-Transfer-Encoding"} */ public static final String CONTENT_TRANSFER_ENCODING = "Content-Transfer-Encoding"; /** * {@code "Content-MD5"} */ public static final String CONTENT_MD5 = "content-md5"; /** * {@code "Content-Range"} */ public static final String CONTENT_RANGE = "content-range"; /** * {@code "Content-Type"} */ public static final String CONTENT_TYPE = "content-type"; /** * {@code "Cookie"} */ public static final String COOKIE = "cookie"; /** * {@code 
"Date"} */ public static final String DATE = "date"; /** * {@code "ETag"} */ public static final String ETAG = "etag"; /** * {@code "Expect"} */ public static final String EXPECT = "expect"; /** * {@code "Expires"} */ public static final String EXPIRES = "expires"; /** * {@code "From"} */ public static final String FROM = "from"; /** * {@code "Front-End-Https"} */ public static final String FRONT_END_HTTPS = "front-end-https"; /** * {@code "Host"} */ public static final String HOST = "host"; /** * {@code "HTTP_CLIENT_IP"} */ public static final String HTTP_CLIENT_IP = "http_client_ip"; /** * {@code "HTTP_X_FORWARDED_FOR"} */ public static final String HTTP_X_FORWARDED_FOR = "http_x_forwarded_for"; /** * {@code "If-Match"} */ public static final String IF_MATCH = "if-match"; /** * {@code "If-Modified-Since"} */ public static final String IF_MODIFIED_SINCE = "if-modified-since"; /** * {@code "If-None-Match"} */ public static final String IF_NONE_MATCH = "if-none-match"; /** * {@code "If-Range"} */ public static final String IF_RANGE = "if-range"; /** * {@code "If-Unmodified-Since"} */ public static final String IF_UNMODIFIED_SINCE = "if-unmodified-since"; /** * {@code "Last-Modified"} */ public static final String LAST_MODIFIED = "last-modified"; /** * {@code "Location"} */ public static final String LOCATION = "location"; /** * {@code "Max-Forwards"} */ public static final String MAX_FORWARDS = "max-forwards"; /** * {@code "Origin"} */ public static final String ORIGIN = "origin"; /** * {@code "Pragma"} */ public static final String PRAGMA = "pragma"; /** * {@code "Proxy-Authenticate"} */ public static final String PROXY_AUTHENTICATE = "proxy-authenticate"; /** * {@code "Proxy-Authorization"} */ public static final String PROXY_AUTHORIZATION = "proxy-authorization"; /** * {@code "Proxy-Client-IP"} */ public static final String PROXY_CLIENT_IP = "proxy-client-ip"; /** * {@code "Proxy-Connection"} */ public static final String PROXY_CONNECTION = "proxy_connection"; /** * {@code "Range"} */ public static final String RANGE = "range"; /** * {@code "Referer"} */ public static final String REFERER = "referer"; /** * {@code "Retry-After"} */ public static final String RETRY_AFTER = "retry-after"; /** * the header used to put the real ip by load balancers like F5 * {@code "rlnclientipaddr"} */ public static final String RLNCLIENTIPADDR = "rlnclientipaddr"; /** * {@code "sec-websocket-Key1"} */ public static final String SEC_WEBSOCKET_KEY1 = "sec-websocket-key1"; /** * {@code "sec-websocket-Key2"} */ public static final String SEC_WEBSOCKET_KEY2 = "sec-websocket-key2"; /** * {@code "sec-websocket-Location"} */ public static final String SEC_WEBSOCKET_LOCATION = "sec-websocket-location"; /** * {@code "sec-websocket-Origin"} */ public static final String SEC_WEBSOCKET_ORIGIN = "sec-websocket-origin"; /** * {@code "sec-websocket-Protocol"} */ public static final String SEC_WEBSOCKET_PROTOCOL = "sec-websocket-protocol"; /** * {@code "sec-websocket-Version"} */ public static final String SEC_WEBSOCKET_VERSION = "sec-websocket-version"; /** * {@code "sec-websocket-Key"} */ public static final String SEC_WEBSOCKET_KEY = "sec-websocket-key"; /** * {@code "sec-websocket-Accept"} */ public static final String SEC_WEBSOCKET_ACCEPT = "sec-websocket-accept"; /** * {@code "Server"} */ public static final String SERVER = "server"; /** * {@code "Set-Cookie"} */ public static final String SET_COOKIE = "set-cookie"; /** * {@code "Set-Cookie2"} */ public static final String SET_COOKIE2 = "set-cookie2"; /** * {@code 
"TE"} */ public static final String TE = "te"; /** * {@code "Trailer"} */ public static final String TRAILER = "trailer"; /** * {@code "Transfer-Encoding"} */ public static final String TRANSFER_ENCODING = "transfer-encoding"; /** * {@code "Upgrade"} */ public static final String UPGRADE = "upgrade"; /** * {@code "User-Agent"} */ public static final String USER_AGENT = "user-agent"; /** * {@code "Vary"} */ public static final String VARY = "vary"; /** * {@code "Via"} */ public static final String VIA = "via"; /** * {@code "Warning"} */ public static final String WARNING = "warning"; /** * {@code "WebSocket-Location"} */ public static final String WEBSOCKET_LOCATION = "websocket-location"; /** * {@code "WebSocket-Origin"} */ public static final String WEBSOCKET_ORIGIN = "webwocket-origin"; /** * {@code "WebSocket-Protocol"} */ public static final String WEBSOCKET_PROTOCOL = "websocket-protocol"; /** * {@code "WL-Proxy-Client-IP"} */ public static final String WL_PROXY_CLIENT_IP = "wl-proxy-client-ip"; /** * {@code "WWW-Authenticate"} */ public static final String WWW_AUTHENTICATE = "www-authenticate"; /** * {@code "X_Requested_With"} */ public static final String X_REQUESTED_WITH = "x-requested-with"; /** * {@code "X-Forwarded-Host"} */ public static final String X_FORWARDED_HOST = "x-forwarded-host"; /** * {@code "X_Forwared_For"} */ public static final String X_FORWARDED_FOR = "x-forwarded-for"; /** * {@code "X_Forwared_Proto"} */ public static final String X_FORWARDED_PROTO = "x-forwarded-proto"; /** * {@code "X-Forwarded-Ssl"} */ public static final String X_FORWARDED_SSL = "x-forwarded-ssl"; /** * {@code "X-Http-Method-Override"} */ public static final String X_HTTP_METHOD_OVERRIDE = "x-http-method-override"; /** * {@code "X-Url-Scheme"} */ public static final String X_URL_SCHEME = "x-url-scheme"; /** * {@code "X-Xsrf-Token"} */ public static final String X_XSRF_TOKEN = "x-xsrf-token"; private Names() { super(); } } private String name; private C.List<String> values; public Header(String name, String value) { E.NPE(name); this.name = name; this.values = C.list(value); } public Header(String name, String... values) { E.NPE(name); this.name = name; this.values = C.listOf(values); } public Header(String name, Iterable<String> values) { E.NPE(name); this.name = name; this.values = C.list(values); } public String name() { return name; } public String value() { return values.get(0); } public C.List<String> values() { return values; } @Override public String toString() { return values.toString(); } } // eof Header /** * Specify the format of the requested content type */ public static class Format implements Serializable { private static final Map<String, Format> predefined = new LinkedHashMap<String, Format>(); private static volatile Properties types; private int ordinal; private String name; private String contentType; private Format(String name, String contentType) { this(name, contentType, true); } private Format(String name, String contentType, boolean predefined) { this.name = name.toLowerCase(); this.contentType = contentType; if (predefined) { Format.predefined.put(name, this); this.ordinal = ordinal(name); } else { this.ordinal = -1; } } public final String name() { return name; } public final int ordinal() { return ordinal; } /** * Returns the content type string * * @return the content type string of this format */ public String contentType() { return contentType; } /** * Deprecated. 
Please use {@link #contentType()} * @return the content type string of the format */ @Deprecated public final String toContentType() { return contentType(); } public final String getName() { return name(); } public final String getContentType() { return contentType(); } /** * Returns the error message * * @param message * @return the message directly */ public String errorMessage(String message) { return message; } @Override public int hashCode() { if (ordinal != -1) { return ordinal; } return $.hc(name, contentType); } @Override public String toString() { return name(); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj instanceof Format) { Format that = (Format) obj; return $.eq(that.name, this.name) && $.eq(that.contentType, this.contentType); } return false; } private Object readResolve() { if (ordinal == -1) { return this; } return predefined.get(name); } /** * Deprecated. please Use {@link #predefined()} * @return an array of predefined Formats */ public static Format[] values() { Format[] retVal = new Format[predefined.size()]; return predefined.values().toArray(retVal); } public static List<Format> predefined() { return C.list(predefined.values()); } public static Format of(String name) { return valueOf(name); } public static Format of(String name, String contentType) { return valueOf(name, contentType); } public static Format valueOf(String name) { name = name.toLowerCase(); if (name.startsWith(".")) { name = S.afterLast(name, "."); } return predefined.get(name.toLowerCase()); } public static Format valueOf(String name, String contentType) { Format retVal = valueOf(name); if (null != retVal) { return retVal; } E.illegalArgumentIf(S.blank(name), "name cannot be blank string"); E.illegalArgumentIf(S.blank(contentType), "content type cannot be blank string"); name = name.toLowerCase(); if (name.startsWith(".")) { name = S.afterLast(name, "."); } return new Format(name, contentType, false); } public static Format resolve(Format def, String accept) { E.NPE(def); return resolve_(def, accept); } public static Format resolve(Iterable<String> accepts) { return resolve(Format.HTML, accepts); } public static Format resolve(Format def, Iterable<String> accepts) { Format retVal; for (String s : accepts) { retVal = resolve_(null, s); if (null != retVal) { return retVal; } } return $.ifNullThen(def, Format.HTML); } public static Format resolve(String... accepts) { return resolve(Format.HTML, accepts); } public static Format resolve(Format def, String... accepts) { Format retVal; for (String s : accepts) { retVal = resolve_(null, s); if (null != retVal) { return retVal; } } return $.ifNullThen(def, Format.HTML); } /** * Resolve {@code Format} instance out of an http "Accept" header. 
* * @param accept the value of http "Accept" header * @return an {@code Format} instance */ public static Format resolve(String accept) { return resolve_(Format.UNKNOWN, accept); } public static String toContentType(String fmt) { Format f = predefined.get(fmt.toLowerCase()); if (null == f) { f = HTML; } return f.contentType(); } private static int ordinal(String s) { int l = s.length(), h = 0; for (int i = 0; i < l; ++i) { char c = s.charAt(i); h = 31 * h + c; } return h; } private static Format resolve_(Format def, String contentType) { Format fmt = def; if (S.blank(contentType)) { fmt = HTML; } else if (contentType.contains("application/xhtml") || contentType.contains("text/html") || contentType.startsWith("*/*")) { fmt = HTML; } else if (contentType.contains("application/xml") || contentType.contains("text/xml")) { fmt = XML; } else if (contentType.contains("application/json") || contentType.contains("text/javascript")) { fmt = JSON; } else if (contentType.contains("application/x-www-form-urlencoded")) { fmt = FORM_URL_ENCODED; } else if (contentType.contains("multipart/form-data") || contentType.contains("multipart/mixed")) { fmt = FORM_MULTIPART_DATA; } else if (contentType.contains("text/plain")) { fmt = TXT; } else if (contentType.contains("csv") || contentType.contains("comma-separated-values")) { fmt = CSV; } else if (contentType.contains("ms-excel")) { fmt = XLS; } else if (contentType.contains("spreadsheetml")) { fmt = XLSX; } else if (contentType.contains("pdf")) { fmt = PDF; } else if (contentType.contains("msword")) { fmt = DOC; } else if (contentType.contains("wordprocessingml")) { fmt = DOCX; } else if (contentType.contains("rtf")) { fmt = RTF; } return fmt; } static { try { InputStream is = H.class.getResourceAsStream("mime-types.properties"); Properties types = new Properties(); types.load(is); for (Object k : types.keySet()) { String fmt = k.toString(); String contentType = types.getProperty(fmt); new Format(fmt, contentType); } } catch (IOException e) { throw E.ioException(e); } } /** * The "text/html" content format */ public static final Format HTML = valueOf("html"); /** * Deprecated, please use {@link #HTML} */ @Deprecated public static final Format html = HTML; /** * The "text/xml" content format */ public static final Format XML = valueOf("xml"); /** * Deprecated, please use {@link #XML} */ @Deprecated public static final Format xml = XML; /** * The "application/json" content format */ public static final Format JSON = new Format("json", "application/json") { @Override public String errorMessage(String message) { return S.fmt("{\"error\": \"%s\"}", message); } }; /** * Deprecated. Please use {@link #JSON} */ @Deprecated public static final Format json = JSON; /** * The "text/css" content format */ public static final Format CSS = new Format("css", "text/css"); /** * The "application/javascript" content format */ public static final Format JAVASCRIPT = new Format("javascript", "application/javascript") { @Override public String errorMessage(String message) { return "alert(" + message + ");"; } }; /** * The "application/vnd.ms-excel" content format */ public static final Format XLS = valueOf("xls"); /** * Deprecated. Please use {@link #XLS} */ public static final Format xls = XLS; /** * The "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" content format */ public static final Format XLSX = valueOf("xlsx"); /** * Deprecated. 
Please use {@link #XLSX} */ public static final Format xlsx = XLSX; /** * The "application/vnd.ms-word" content format */ public static final Format DOC = valueOf("doc"); /** * Deprecated. Please use {@link #DOC} */ public static final Format doc = DOC; /** * The "application/vnd.openxmlformats-officedocument.wordprocessingml.document" content format */ public static final Format DOCX = valueOf("docx"); /** * Deprecated. Please use {@link #DOCX} */ public static final Format docx = DOCX; /** * The "text/csv" content format */ public static final Format CSV = valueOf("csv"); /** * Deprecated, please use {@link #CSV} */ @Deprecated public static final Format csv = CSV; /** * The "text/plain" content format */ public static final Format TXT = valueOf("txt"); /** * Deprecated, please use {@link #TXT} */ @Deprecated public static final Format txt = TXT; /** * The "application/pdf" content format */ public static final Format PDF = valueOf("pdf"); /** * Deprecated, please use {@link #PDF} */ @Deprecated public static final Format pdf = PDF; /** * The "application/rtf" content format */ public static final Format RTF = valueOf("pdf"); /** * Deprecated, please use {@link #RTF} */ @Deprecated public static final Format rtf = RTF; /** * The "application/x-www-form-urlencoded" content format */ public static final Format FORM_URL_ENCODED = new Format("form_url_encoded", "application/x-www-form-urlencoded"); /** * Deprecated, please use {@link #FORM_URL_ENCODED} */ @Deprecated public static final Format form_url_encoded = FORM_URL_ENCODED; /** * The "multipart/form-data" content format */ public static final Format FORM_MULTIPART_DATA = new Format("form_multipart_data", "multipart/form-data"); /** * Deprecated, please use {@link #FORM_MULTIPART_DATA} */ @Deprecated public static final Format form_multipart_data = FORM_MULTIPART_DATA; /** * The "unknown" content format. Use default content type: "text/html" */ public static final Format UNKNOWN = new Format("unknown", "text/html") { @Override public String contentType() { String s = Current.format(); if (!S.blank(s)) { return toContentType(s); } return "text/html"; } @Override public String toString() { String s = Current.format(); return null == s ? 
name() : s; } }; /** * Deprecated, please use {@link #UNKNOWN} */ @Deprecated public static final Format unknown = UNKNOWN; public static final class Ordinal { public static final int HTML = Format.HTML.ordinal; public static final int XML = Format.XML.ordinal; public static final int JSON = Format.JSON.ordinal; public static final int XLS = Format.XLS.ordinal; public static final int XLSX = Format.XLSX.ordinal; public static final int DOC = Format.DOC.ordinal; public static final int DOCX = Format.DOCX.ordinal; public static final int CSV = Format.CSV.ordinal; public static final int TXT = Format.TXT.ordinal; public static final int PDF = Format.PDF.ordinal; public static final int RTF = Format.RTF.ordinal; public static final int FORM_URL_ENCODED = Format.FORM_URL_ENCODED.ordinal; public static final int FORM_MULTIPART_DATA = Format.FORM_MULTIPART_DATA.ordinal; } } public static Format format(String name) { return Format.valueOf(name); } public static Format format(String name, String contentType) { return Format.valueOf(name, contentType); } /** * The HTTP cookie */ public static class Cookie implements Serializable { private static final long serialVersionUID = 5325872881041347558L; private String name; // default is non-persistent cookie private int maxAge = -1; private boolean secure; private String path; private String domain; private String value; private boolean httpOnly; private int version; private Date expires; private String comment; public Cookie(String name) { this(name, ""); } public Cookie(String name, String value) { E.NPE(name); this.name = name; this.value = null == value ? "" : value; } public Cookie(String name, String value, int maxAge, boolean secure, String path, String domain, boolean httpOnly) { this(name, value); this.maxAge = maxAge; this.secure = secure; this.path = path; this.domain = domain; this.httpOnly = httpOnly; } /** * Returns the name of the cookie. Cookie name * cannot be changed after created */ public String name() { return name; } /** * Returns the value of the cookie */ public String value() { return value; } /** * Set a value to a cookie and the return {@code this} cookie * * @param value the value to be set to the cookie * @return this cookie */ public Cookie value(String value) { this.value = value; return this; } /** * Returns the domain of the cookie */ public String domain() { return domain; } /** * Set the domain of the cookie * * @param domain the domain string * @return this cookie */ public Cookie domain(String domain) { this.domain = domain; return this; } /** * Returns the path on the server * to which the browser returns this cookie. The * cookie is visible to all subpaths on the server. * * @see #path(String) */ public String path() { return path; } /** * Specifies a path for the cookie * to which the client should return the cookie. * <p/> * <p>The cookie is visible to all the pages in the directory * you specify, and all the pages in that directory's subdirectories. * <p/> * <p>Consult RFC 2109 (available on the Internet) for more * information on setting path names for cookies. * * @param uri a <code>String</code> specifying a path * @return this cookie after path is set * @see #path */ public Cookie path(String uri) { this.path = uri; return this; } /** * Returns the maximum age of cookie specified in seconds. If * maxAge is set to {@code -1} then the cookie will persist until * browser shutdown */ public int maxAge() { return maxAge; } /** * Set the max age of the cookie in seconds. 
* <p>A positive value indicates that the cookie will expire * after that many seconds have passed. Note that the value is * the <i>maximum</i> age when the cookie will expire, not the cookie's * current age. * <p/> * <p>A negative value means * that the cookie is not stored persistently and will be deleted * when the Web browser exits. A zero value causes the cookie * to be deleted. * * @see #maxAge() */ public Cookie maxAge(int maxAge) { this.maxAge = maxAge; return this; } public Date expires() { if (null != expires) { return expires; } if (maxAge < 0) { return null; } return new Date($.ms() + maxAge * 1000); } public Cookie expires(Date expires) { this.expires = expires; if (null != expires && -1 == maxAge) { maxAge = (int) ((expires.getTime() - $.ms()) / 1000); } return this; } /** * Returns <code>true</code> if the browser is sending cookies * only over a secure protocol, or <code>false</code> if the * browser can send cookies using any protocol. * * @see #secure(boolean) */ public boolean secure() { return secure; } /** * Indicates to the browser whether the cookie should only be sent * using a secure protocol, such as HTTPS or SSL. * <p/> * <p>The default value is <code>false</code>. * * @param secure the cookie secure requirement * @return this cookie instance */ public Cookie secure(boolean secure) { this.secure = secure; return this; } /** * Returns the version of the protocol this cookie complies * with. Version 1 complies with RFC 2109, * and version 0 complies with the original * cookie specification drafted by Netscape. Cookies provided * by a browser use and identify the browser's cookie version. * * @return 0 if the cookie complies with the * original Netscape specification; 1 * if the cookie complies with RFC 2109 * @see #version(int) */ public int version() { return version; } /** * Sets the version of the cookie protocol that this Cookie complies * with. * <p/> * <p>Version 0 complies with the original Netscape cookie * specification. Version 1 complies with RFC 2109. * <p/> * <p>Since RFC 2109 is still somewhat new, consider * version 1 as experimental; do not use it yet on production sites. 
* * @param v 0 if the cookie should comply with the original Netscape * specification; 1 if the cookie should comply with RFC 2109 * @see #version() */ public Cookie version(int v) { this.version = v; return this; } public boolean httpOnly() { return httpOnly; } public Cookie httpOnly(boolean httpOnly) { this.httpOnly = httpOnly; return this; } public String comment() { return comment; } public Cookie comment(String comment) { this.comment = comment; return this; } private static void ensureInit() { if (!Current.cookieMapInitialized()) { Request req = Request.current(); E.illegalStateIf(null == req); req._initCookieMap(); } } /** * Add a cookie to the current context * * @param cookie */ public static void set(Cookie cookie) { ensureInit(); Current.setCookie(cookie.name(), cookie); } /** * Get a cookie from current context by name * * @param name * @return a cookie with the name specified */ public static Cookie get(String name) { ensureInit(); return Current.getCookie(name); } /** * Returns all cookies from current context */ public static Collection<Cookie> all() { ensureInit(); return Current.cookies(); } /** * The function object namespace */ public static enum F { ; public static final $.F2<Cookie, Response, Void> ADD_TO_RESPONSE = new $.F2<Cookie, Response, Void>() { @Override public Void apply(Cookie cookie, Response response) throws NotAppliedException, $.Break { response.addCookie(cookie); return null; } }; } } // eof Cookie public static class KV<T extends KV> implements Serializable { private static final long serialVersionUID = 891504755320699989L; protected Map<String, String> data = C.newMap(); private boolean dirty = false; private KV() {} private KV(Map<String, String> data) { E.NPE(data); this.data = data; } /** * Associate a string value with the key specified during * initialization. The difference between calling {@code load} * and {@link #put(String, String)} is the former will not change * the dirty tag */ public T load(String key, String val) { E.illegalArgumentIf(key.contains(":")); data.put(key, val); return me(); } /** * Associate a string value with the key specified. */ public T put(String key, String val) { E.illegalArgumentIf(key.contains(":")); dirty = true; return load(key, val); } /** * Associate an Object value's String representation with the * key specified. If the object is {@code null} then {@code null} * is associated with the key specified */ public T put(String key, Object val) { String valStr = null == val ? 
null : val.toString(); return put(key, valStr); } /** * Returns the string value associated with the key specified */ public String get(String key) { return data.get(key); } /** * Returns the key set of internal data map */ public Set<String> keySet() { return data.keySet(); } /** * Returns {@code true} if internal data map is empty */ public boolean isEmpty() { return data.isEmpty(); } /** * Indicate if the KV has been changed * * @return {@code true} if this instance has been changed */ public boolean dirty() { return dirty; } /** * Alias of {@link #dirty()} */ public boolean changed() { return dirty; } /** * Returns true if the internal data map is empty */ public boolean empty() { return data.isEmpty(); } /** * Returns true if an association with key specified exists in * the internal map */ public boolean containsKey(String key) { return data.containsKey(key); } /** * Alias of {@link #containsKey(String)} */ public boolean contains(String key) { return containsKey(key); } /** * Returns the number of assoications stored in the internal map */ public int size() { return data.size(); } /** * Release an association with key specified * @param key specify the k-v pair that should be removed from internal map * @return this instance */ public T remove(String key) { data.remove(key); return me(); } /** * Clear the internal data map. In other words, all * Key/Value association stored in this instance has been * release * * @return this instance */ public T clear() { data.clear(); return me(); } @Override public String toString() { return data.toString(); } protected T me() { return (T) this; } } /** * Defines a data structure to encapsulate a stateless session which * accept only {@code String} type value, and will be persisted at * client side as a cookie. This means the entire size of the * information stored in session including names and values shall * not exceed 4096 bytes. * <p/> * <p>To store typed value or big value, use the cache methods * of the session class. However it is subject to the implementation * to decide whether cache methods are provided and how it is * implemented</p> */ public static final class Session extends KV<Session> { /** * Session identifier */ public static final String KEY_ID = "___ID"; /** * Stores the expiration date in the session */ public static final String KEY_EXPIRATION = "___TS"; /** * Stores the authenticity token in the session */ public static final String KEY_AUTHENTICITY_TOKEN = "___AT"; /** * Used to mark if a session has just expired */ public static final String KEY_EXPIRE_INDICATOR = "___expired"; /** * Stores the fingerprint to the session */ public static final String KEY_FINGER_PRINT = "__FP"; private static final long serialVersionUID = -423716328552054481L; private String id; public Session() { } /** * Returns the session identifier */ public String id() { if (null == id) { id = data.get(KEY_ID); if (null == id) { id = UUID.randomUUID().toString(); put(KEY_ID, id()); } } return id; } // ------- regular session attribute operations --- /** * Returns {@code true} if the session is empty. e.g. * does not contain anything else than the timestamp */ public boolean empty() { return super.empty() || (containsKey(KEY_EXPIRATION) && size() == 1); } /** * Check if the session is expired. 
A session is considered * to be expired if it has a timestamp and the timestamp is * non negative number and is less than {@link System#currentTimeMillis()} * * @return {@code true} if the session is expired */ public boolean expired() { long expiry = expiry(); if (expiry < 0) return false; return (expiry < System.currentTimeMillis()); } /** * Returns the expiration time in milliseconds of this session. If * there is no expiration set up, then this method return {@code -1} * * @return the difference, measured in milliseconds, between * the expiry of the session and midnight, January 1, * 1970 UTC, or {@code -1} if the session has no * expiry */ public long expiry() { String s = get(KEY_EXPIRATION); if (S.blank(s)) return -1; return Long.parseLong(s); } /** * Set session expiry in milliseconds * * @param expiry the difference, measured in milliseconds, between * the expiry and midnight, January 1, 1970 UTC. * @return the session instance */ public Session expireOn(long expiry) { put(KEY_EXPIRATION, S.string(expiry)); return this; } // ------- eof regular session attribute operations --- // ------- cache operations ------ /* * Attach session id to a cache key */ private String k(String key) { return S.builder(id()).append(key).toString(); } private static volatile CacheService cs; private static CacheService cs() { if (null != cs) return cs; synchronized (H.class) { if (null == cs) { cs = HttpConfig.cacheService(); } return cs; } } /** * Store an object into cache using key specified. The key will be * appended with session id, so that it distinct between caching * using the same key but in different user sessions. * <p/> * <p>The object is cached for {@link org.osgl.cache.CacheService#setDefaultTTL(int) default} ttl</p> * * @param key the key to cache the object * @param obj the object to be cached * @return this session instance */ public Session cache(String key, Object obj) { cs().put(k(key), obj); return this; } /** * Store an object into cache with expiration specified * * @param key the key to cache the object * @param obj the object to be cached * @param expiration specify the cache expiration in seconds * @return this session instance * @see #cache(String, Object) */ public Session cache(String key, Object obj, int expiration) { cs().put(k(key), obj, expiration); return this; } /** * Store an object into cache for 1 hour * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor1Hr(String key, Object obj) { return cache(key, obj, 60 * 60); } /** * Store an object into cache for 30 minutes * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor30Min(String key, Object obj) { return cache(key, obj, 30 * 60); } /** * Store an object into cache for 10 minutes * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor10Min(String key, Object obj) { return cache(key, obj, 10 * 60); } /** * Store an object into cache for 1 minutes * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor1Min(String key, Object obj) { return cache(key, obj, 60); } /** * Evict an object from cache * * @param key the key to cache the object * @return this session instance */ public Session evict(String key) { cs().evict(k(key)); return this; } /** * Retrieve an object from cache by key. 
The key * will be attached with session id * * @param key the key to get the cached object * @param <T> the object type * @return the object in the cache, or {@code null} * if it cannot find the object by key * specified * @see #cache(String, Object) */ public <T> T cached(String key) { return cs().get(k(key)); } /** * Retrieve an object from cache by key. The key * will be attached with session id * * @param key the key to get the cached object * @param clz the class to specify the return type * @param <T> the object type * @return the object in the cache, or {@code null} * if it cannot find the object by key * specified * @see #cache(String, Object) */ public <T> T cached(String key, Class<T> clz) { return cs().get(k(key)); } // ------- eof cache operations ------ /** * Return a session instance of the current execution context, * For example from a {@link java.lang.ThreadLocal} * * @return the current session instance */ public static Session current() { return Current.session(); } /** * Set a session instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param session the session to be set to current execution context */ public static void current(Session session) { Current.session(session); } // used to parse session data persisted in the cookie value private static final Pattern _PARSER = Pattern.compile(S.HSEP + "([^:]*):([^" + S.HSEP + "]*)" + S.HSEP); /** * Resolve a Session instance from a session cookie * * @param sessionCookie the cookie corresponding to a session * @param ttl session time to live in seconds * @return a Session instance * @see #serialize(String) */ public static Session resolve(Cookie sessionCookie, int ttl) { Session session = new Session(); long expiration = System.currentTimeMillis() + ttl * 1000; boolean hasTtl = ttl > -1; String value = null == sessionCookie ? null : sessionCookie.value(); if (S.blank(value)) { if (hasTtl) { session.expireOn(expiration); } } else { int firstDashIndex = value.indexOf("-"); if (firstDashIndex > -1) { String signature = value.substring(0, firstDashIndex); String data = value.substring(firstDashIndex + 1); if (S.eq(signature, sign(data))) { String sessionData = Codec.decodeUrl(data, Charsets.UTF_8); Matcher matcher = _PARSER.matcher(sessionData); while (matcher.find()) { session.put(matcher.group(1), matcher.group(2)); } } } if (hasTtl && session.expired()) { session = new Session().expireOn(expiration); } } return session; } /** * Serialize this session into a cookie. Note the cookie * returned has only name, value maxAge been set. 
It's up * to the caller to set the secure, httpOnly and path * attributes * * @param sessionKey the cookie name for the session cookie * @return a cookie captures this session's information or {@code null} if * this session is empty or this session hasn't been changed and * there is no expiry * @see #resolve(org.osgl.http.H.Cookie, int) */ public Cookie serialize(String sessionKey) { long expiry = expiry(); boolean hasTtl = expiry > -1; boolean expired = !hasTtl && expiry < System.currentTimeMillis(); if (!changed() && !hasTtl) return null; if (empty() || expired) { // empty session, delete the session cookie return new H.Cookie(sessionKey).maxAge(0); } StringBuilder sb = S.builder(); for (String k : keySet()) { sb.append(S.HSEP); sb.append(k); sb.append(":"); sb.append(get(k)); sb.append(S.HSEP); } String data = Codec.encodeUrl(sb.toString(), Charsets.UTF_8); String sign = sign(data); String value = S.builder(sign).append("-").append(data).toString(); Cookie cookie = new Cookie(sessionKey).value(value); if (expiry > -1L) { int ttl = (int) ((expiry - System.currentTimeMillis()) / 1000); cookie.maxAge(ttl); } return cookie; } private static String sign(String s) { return Crypto.sign(s, s.getBytes(Charsets.UTF_8)); } } // eof Session /** * A Flash represent a storage scope that attributes inside is valid only * for one session interaction. This feature of flash makes it very good * for server to pass one time information to client, e.g. form submission * error message etc. * <p/> * <p>Like {@link org.osgl.http.H.Session}, you can store only String type * information to flash, and the total number of information stored * including keys and values shall not exceed 4096 bytes as flash is * persisted as cookie in browser</p> */ public static final class Flash extends KV<Flash> { // used to parse flash data persisted in the cookie value private static final Pattern _PARSER = Session._PARSER; private static final long serialVersionUID = 5609789840171619780L; /** * Stores the data that will be output to cookie so next time the user's request income * they will be available for the application to access */ private Map<String, String> out = C.newMap(); /** * Add an attribute to the flash scope. The data is * added to both data buffer and the out buffer * * @param key the key to index the attribute * @param value the value of the attribute * @return the flash instance */ public Flash put(String key, String value) { out.put(key, value); return super.put(key, value); } /** * Add an attribute to the flash scope. The value is in Object * type, however it will be convert to its {@link Object#toString() string * representation} before put into the flash * * @param key the key to index the attribute * @param value the value to be put into the flash * @return this flash instance */ public Flash put(String key, Object value) { return put(key, null == value ? null : value.toString()); } /** * Add an attribute to the flash's current scope. Meaning when next time * the user request to the server, the attribute will not be there anymore. 
* * @param key the attribute key * @param value the attribute value * @return the flash instance */ public Flash now(String key, String value) { return super.put(key, value); } /** * Add an "error" message to the flash scope * * @param message the error message * @return the flash instance * @see #put(String, Object) */ public Flash error(String message) { return put("error", message); } /** * Add an "error" message to the flash scope, with * optional format arguments * * @param message the message template * @param args the format arguments * @return this flash instance */ public Flash error(String message, Object... args) { return put("error", S.fmt(message, args)); } /** * Get the "error" message that has been added to * the flash scope. * * @return the "error" message or {@code null} if * no error message has been added to the flash */ public String error() { return get("error"); } /** * Add a "success" message to the flash scope * * @param message the error message * @return the flash instance * @see #put(String, Object) */ public Flash success(String message) { return put("success", message); } /** * Add a "success" message to the flash scope, with * optional format arguments * * @param message the message template * @param args the format arguments * @return this flash instance */ public Flash success(String message, Object... args) { return put("success", S.fmt(message, args)); } /** * Get the "success" message that has been added to * the flash scope. * * @return the "success" message or {@code null} if * no success message has been added to the flash */ public String success() { return get("success"); } /** * Discard a data from the output buffer of the flash but * the data buffer is remain untouched. Meaning * the app can still get the data {@link #put(String, Object)} * into the flash scope, however they will NOT * be write to the client cookie, thus the next * time client request the server, the app will * not be able to get the info anymore * * @param key the key to the data to be discarded * @return the flash instance */ public Flash discard(String key) { out.remove(key); return this; } /** * Discard the whole output buffer of the flash but * the data buffer is remain untouched. Meaning * the app can still get the data {@link #put(String, Object)} * into the flash scope, however they will NOT * be write to the client cookie, thus the next * time client request the server, the app will * not be able to get those info anymore * * @return the flash instance */ public Flash discard() { out.clear(); return this; } /** * Keep a data that has been {@link #put(String, Object) put} * into the flash for one time. The data that has been kept * will be persistent to client cookie for one time, thus * the next time when user request the server, the app * can still get the data, but only for one time unless * the app call {@code keep} method again * * @param key the key to identify the data to be kept * @see #keep() */ public Flash keep(String key) { if (super.containsKey(key)) { out.put(key, get(key)); } return this; } /** * Keep all data that has been {@link #put(String, Object) put} * into the flash for one time. 
The data that has been kept * will be persisted to the client cookie one more time, thus * the next time the user requests the server, the app * can still get the data, but only for that one time unless * the app calls the {@code keep} method again * * @return the flash instance */ public Flash keep() { out.putAll(data); return this; } public KV out() { return new KV(out); } /** * Return the flash instance of the current execution context, * for example from a {@link java.lang.ThreadLocal} * * @return the current flash instance */ public static Flash current() { return Current.flash(); } /** * Set a flash instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param flash the flash to be set to the current execution context */ public static void current(Flash flash) { Current.flash(flash); } /** * Resolve a Flash instance from a cookie. If the cookie supplied * is {@code null} then an empty Flash instance is returned * * @param flashCookie the flash cookie * @return a Flash instance * @see #serialize(String) */ public static Flash resolve(Cookie flashCookie) { Flash flash = new Flash(); if (null != flashCookie) { String value = flashCookie.value(); if (S.notBlank(value)) { String s = Codec.decodeUrl(value, Charsets.UTF_8); Matcher m = _PARSER.matcher(s); while (m.find()) { flash.data.put(m.group(1), m.group(2)); } } } return flash; } /** * Serialize this Flash instance into a Cookie. Note * the cookie returned has only name, value and maxAge * set. It's up to the caller to set the secure, path * and httpOnly attributes. * * @param flashKey the cookie name * @return a Cookie representing this flash instance * @see #resolve(org.osgl.http.H.Cookie) */ public Cookie serialize(String flashKey) { if (out.isEmpty()) { return new Cookie(flashKey).maxAge(0); } StringBuilder sb = S.builder(); for (String key : out.keySet()) { sb.append(S.HSEP); sb.append(key); sb.append(":"); sb.append(out.get(key)); sb.append(S.HSEP); } String value = Codec.encodeUrl(sb.toString(), Charsets.UTF_8); return new Cookie(flashKey).value(value); } } // eof Flash /** * Defines the HTTP request trait * * @param <T> the type of the implementation class */ public static abstract class Request<T extends Request> { private static SimpleDateFormat dateFormat; static { dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US); dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); } /** * Returns the class of the implementation. Not to be used * by the application */ protected abstract Class<T> _impl(); private Format accept; private Format contentType; private String ip; private int port = -1; private State state = State.NONE; private Object context; private String etag; protected volatile InputStream inputStream; protected volatile Reader reader; private Map<String, Cookie> cookies = C.newMap(); /** * Attach a context object to the request instance * @param context the context object * @return the request instance itself */ public T context(Object context) { this.context = $.notNull(context); return me(); } /** * Get the context object from the request instance * @param <CONTEXT> the generic type of the context object * @return the context object */ public <CONTEXT> CONTEXT context() { return (CONTEXT) context; } /** * Returns the HTTP method of the request */ public abstract Method method(); /** * Set the HTTP method on this request. 
Used by the framework to "override" * an HTTP method * @param method the method to set * @return this request instance */ public abstract T method(Method method); /** * Returns the header content by name. If there are * multiple headers with the same name, then the first * one is returned. If there is no header with the name * then {@code null} is returned * <p/> * <p>Note header name is case insensitive</p> * * @param name the name of the header * @return the header content */ public abstract String header(String name); /** * Returns all header content by name. This method returns * the content of all headers with the name specified, in * an {@link java.lang.Iterable} of {@code String}. If there * is no header with the name specified, then an empty iterable * is returned. * <p/> * <p>Note header name is case insensitive</p> * * @param name the name of the header * @return all values of the header */ public abstract Iterable<String> headers(String name); /** * Return the request {@link org.osgl.http.H.Format accept} * * @return the request accept */ public Format accept() { if (null == accept) { resolveAcceptFormat(); } return accept; } /** * Set the {@link org.osgl.http.H.Format accept} format on the request * @param fmt the accept format * @return this request */ public T accept(Format fmt) { E.NPE(fmt); this.accept = fmt; return me(); } public String etag() { if (null == etag) { etag = method().safe() ? header(IF_NONE_MATCH) : header(IF_MATCH); } return etag; } public boolean etagMatches(String etag) { return S.eq(this.etag(), etag); } /** * Check if the request is an ajax call * * @return {@code true} if it is an ajax call */ public boolean isAjax() { return S.eq(header(X_REQUESTED_WITH), "XMLHttpRequest"); } /** * Returns the path of the request. This does not include the * context path. The path is a composite of * {@link javax.servlet.http.HttpServletRequest#getServletPath()} * and {@link javax.servlet.http.HttpServletRequest#getPathInfo()} * <p/> * <p> * The path starts with "/" but does not end with "/" * </p> */ public abstract String path(); /** * Returns the context path of the request. * The context path starts with "/" but does not end * with "/". If there is no context path * then an empty "" is returned */ public abstract String contextPath(); /** * Returns the full URI path. It is composed of * {@link #contextPath()} and {@link #path()}. * The full path starts with "/" */ public String fullPath() { return Path.url(path(), this); } /** * Alias of {@link #fullPath()} * * @return the full URL path of the request */ public String url() { return fullPath(); } /** * Returns the full URL including scheme, domain, port and * full request path plus query string * * @return the absolute URL */ public String fullUrl() { return Path.fullUrl(path(), this); } /** * Returns the query string or an empty String if the request * doesn't contain a query string */ public abstract String query(); /** * Check if the request was made on a secure channel * * @return {@code true} if this is a secure request */ public abstract boolean secure(); /** * Returns the scheme of the request, i.e. either * "http" or "https" * * @return the scheme of the request */ public String scheme() { return secure() ? 
"https" : "http"; } protected void _setCookie(String name, Cookie cookie) { cookies.put(name, cookie); } private String domain; /** * Returns the domain of the request */ public String domain() { if (null == domain) resolveHostPort(); return domain; } /** * Returns the port */ public int port() { if (-1 == port) resolveHostPort(); return port; } /** * Returns remote ip address */ protected abstract String _ip(); private static boolean ipOk(String s) { return S.notEmpty(s) && S.neq("unknown", s); } private void resolveIp() { String rmt = _ip(); if (!HttpConfig.isXForwardedAllowed(rmt)) { ip = rmt; return; } String s = header(X_FORWARDED_FOR); if (!ipOk(s)) { if (HttpConfig.allowExtensiveRemoteAddrResolving()) { s = header(PROXY_CLIENT_IP); if (!ipOk(s)) { s = header(WL_PROXY_CLIENT_IP); if (!ipOk(s)) { s = header(HTTP_CLIENT_IP); if (!ipOk(s)) { s = header(HTTP_X_FORWARDED_FOR); if (!ipOk(s)) { ip = rmt; return; } } } } } else { ip = rmt; return; } } // in case there are multiple ip addresses (due to cascade proxies), then use the first one. if (s.length() > 15) { int pos = s.indexOf(","); if (pos > 0) { s = s.substring(0, pos); } } ip = s; } private void resolveHostPort() { String host = header(X_FORWARDED_HOST); if (S.empty(host)) { host = header(HOST); } if (null != host) { FastStr fs = FastStr.unsafeOf(host); if (fs.contains(':')) { domain = fs.beforeFirst(':').toString(); try { port = Integer.parseInt(fs.afterFirst(':').toString()); } catch (NumberFormatException e) { port = defPort(); } } else { domain = host; port = defPort(); } } else { domain = ""; port = defPort(); } } private int defPort() { return secure() ? 443 : 80; } public String ip() { if (null == ip) { resolveIp(); } return ip; } public String userAgentStr() { return header(USER_AGENT); } public UserAgent userAgent() { return UserAgent.parse(userAgentStr()); } protected abstract void _initCookieMap(); /** * Returns cookie by it's name * * @param name the cookie name * @return the cookie or {@code null} if not found */ public H.Cookie cookie(String name) { if (cookies.isEmpty()) { _initCookieMap(); } return cookies.get(name); } /** * Returns all cookies of the request in Iterable */ public List<H.Cookie> cookies() { if (cookies.isEmpty()) { _initCookieMap(); } return C.list(cookies.values()); } /** * resolve the request accept * * @return this request instance */ private T resolveAcceptFormat() { String accept = header(ACCEPT); this.accept = Format.resolve(accept); return (T) this; } /** * Check if the requested resource is modified with etag and * last timestamp (usually the timestamp of a static file e.g.) 
* * @param etag the etag to compare with "If_None_Match" * header in browser * @param since the last timestamp to compare with * "If_Modified_Since" header in browser * @return {@code true} if the resource has changed * or {@code false} otherwise */ public boolean isModified(String etag, long since) { String browserEtag = header(IF_NONE_MATCH); if (null == browserEtag) return true; if (!S.eq(browserEtag, etag)) { return true; } String s = header(IF_MODIFIED_SINCE); if (null == s) return true; try { Date browserDate = dateFormat.parse(s); if (browserDate.getTime() >= since) { return false; } } catch (ParseException ex) { logger.error(ex, "Can't parse date: %s", s); } return true; } private void parseContentTypeAndEncoding() { String type = header(CONTENT_TYPE); if (null == type) { contentType = Format.HTML; encoding = "utf-8"; } else { String[] contentTypeParts = type.split(";"); String _contentType = contentTypeParts[0].trim().toLowerCase(); String _encoding = "utf-8"; // check for encoding-info if (contentTypeParts.length >= 2) { String[] encodingInfoParts = contentTypeParts[1].split(("=")); if (encodingInfoParts.length == 2 && encodingInfoParts[0].trim().equalsIgnoreCase("charset")) { // encoding-info was found in request _encoding = encodingInfoParts[1].trim(); if (S.notBlank(_encoding) && ((_encoding.startsWith("\"") && _encoding.endsWith("\"")) || (_encoding.startsWith("'") && _encoding.endsWith("'"))) ) { _encoding = _encoding.substring(1, _encoding.length() - 1).trim(); } } } contentType = Format.resolve(_contentType); encoding = _encoding; } } /** * Return content type of the request */ public Format contentType() { if (null == contentType) { parseContentTypeAndEncoding(); } return contentType; } private String encoding; /** * Returns encoding of the request */ public String characterEncoding() { if (null == encoding) { parseContentTypeAndEncoding(); } return encoding; } private C.List<Locale> locales; private void parseLocales() { String s = header(ACCEPT_LANGUAGE); if (S.blank(s)) { locales = C.list(HttpConfig.defaultLocale()); return; } // preprocess to remove all blanks s = S.str(s).remove(new $.F1<Character, Boolean>() { @Override public Boolean apply(Character character) { char c = character; return c == ' ' || c == '\t'; } }).toString(); ListBuilder<Locale> lb = ListBuilder.create(); // parse things like "da,en-gb;q=0.8,en;q=0.7" String[] sa = s.split(","); for (String s0 : sa) { String[] arr = s0.trim().split(";"); //Parse the locale Locale locale; String[] l = arr[0].split("-"); switch(l.length){ case 2: locale = new Locale(l[0], l[1]); break; case 3: locale = new Locale(l[0], l[1], l[2]); break; default: locale = new Locale(l[0]); break; } lb.add(locale); } if (lb.isEmpty()) lb.add(HttpConfig.defaultLocale()); locales = lb.toList(); } /** * Returns locale of the request */ public Locale locale() { if (null == locales) parseLocales(); return locales.get(0); } /** * Returns all locales of the request */ public C.List<Locale> locales() { if (null == locales) parseLocales(); return locales; } private long len = -2; /** * Returns the content length of the request */ public long contentLength() { if (len > -2) return len; String s = header(CONTENT_LENGTH); if (S.blank(s)) { len = -1; } else { try { len = Long.parseLong(s); } catch (NumberFormatException e) { len = -1; logger.error("Error parsing content-length: %s", s); } } return len; } public boolean readerCreated() { return state == State.READER; } protected abstract InputStream createInputStream(); /** * Returns body of the 
request as binary data using {@link java.io.InputStream} * * @throws IllegalStateException if {@link #reader()} has already * been called on this request instance */ public InputStream inputStream() throws IllegalStateException { return state.inputStream(this); } private void createReader() { if (null != reader) { return; } synchronized (this) { if (null != reader) { return; } createInputStream(); String charset = characterEncoding(); Charset cs = null == charset ? Charsets.UTF_8 : Charset.forName(charset); reader = new InputStreamReader(inputStream(), cs); } } /** * Returns body of the request as binary data using {@link java.io.Reader} * * @throws IllegalStateException if {@link #inputStream()} has already * been called on this request instance */ public Reader reader() throws IllegalStateException { return state.reader(this); } /** * Return a request parameter value by name. If there is no parameter * found with the name specified, then {@code null} is returned. If * there are multiple values associated with the name, then the * first one is returned * * @param name the parameter name * @return the parameter value of {@code null} if not found */ public abstract String paramVal(String name); /** * Returns all values associated with the name specified in the * http request. If there is no parameter found with the name, * then {@code new String[0]} shall be returned * * @param name the parameter name * @return all values of the parameter */ public abstract String[] paramVals(String name); /** * Return all parameter names * * @return an {@link java.lang.Iterable} of parameter names */ public abstract Iterable<String> paramNames(); private void parseAuthorization() { if (null != user) return; user = ""; password = ""; String s = header(AUTHORIZATION); if (null != s && s.startsWith("Basic")) { String data = s.substring(6); String[] decodedData = new String(Codec.decodeBASE64(data)).split(":"); user = decodedData.length > 0 ? decodedData[0] : null; password = decodedData.length > 1 ? 
decodedData[1] : null; } } private String user; /** * The Http Basic user */ public String user() { if (null == user) parseAuthorization(); return user; } private String password; /** * the Http Basic password */ public String password() { if (null == password) parseAuthorization(); return password; } protected final T me() { return (T) this; } /** * Return a request instance of the current execution context, * For example from a {@link java.lang.ThreadLocal} * * @return the current request instance */ @SuppressWarnings("unchecked") public static <T extends Request> T current() { return (T) Current.request(); } /** * Set a request instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param request the request to be set to current execution context */ public static <T extends Request> void current(T request) { Current.request(request); } private enum State { NONE, STREAM() { @Override Reader reader(Request req) { throw new IllegalStateException("reader() already called"); } }, READER() { @Override InputStream inputStream(Request req) { throw new IllegalStateException("inputStream() already called"); } }; InputStream inputStream(Request req) { req.inputStream = req.createInputStream(); req.state = STREAM; return req.inputStream; } Reader reader(Request req) { req.createReader(); req.state = READER; return req.reader; } } } // eof Request /** * Defines the HTTP response trait */ public static abstract class Response<T extends Response> { private State state = State.NONE; protected volatile OutputStream outputStream; protected volatile Writer writer; private Object context; /** * Attach a context object to the response instance * @param context the context object * @return the response instance itself */ public T context(Object context) { this.context = $.notNull(context); return (T)this; } /** * Get the context object from the response instance * @param <CONTEXT> the generic type of the context object * @return the context object */ public <CONTEXT> CONTEXT context() { return (CONTEXT) context; } /** * Returns the class of the implementation. Not to be used * by application */ protected abstract Class<T> _impl(); public boolean writerCreated() { return state == State.WRITER; } protected abstract OutputStream createOutputStream(); private void createWriter() { if (null != writer) { return; } synchronized (this) { if (null != writer) { return; } outputStream = createOutputStream(); String charset = characterEncoding(); Charset cs = null == charset ? 
Charsets.UTF_8 : Charset.forName(charset); writer = new OutputStreamWriter(outputStream, cs); } } /** * Returns the output stream to write to the response * * @throws java.lang.IllegalStateException if * {@link #writer()} is called already * @throws org.osgl.exception.UnexpectedIOException if * there are output exception */ public OutputStream outputStream() throws IllegalStateException, UnexpectedIOException { return state.outputStream(this); } /** * Returns the writer to write to the response * * @throws java.lang.IllegalStateException if {@link #outputStream()} is called already * @throws org.osgl.exception.UnexpectedIOException if there are output exception */ public Writer writer() throws IllegalStateException, UnexpectedIOException { return state.writer(this); } /** * Returns a print writer to write to the response * * @throws IllegalStateException if {@link #outputStream()} is called already * @throws UnexpectedIOException if there are output exception */ public PrintWriter printWriter() { Writer w = writer(); if (w instanceof PrintWriter) { return (PrintWriter) w; } else { return new PrintWriter(w); } } /** * Returns the name of the character encoding (MIME charset) * used for the body sent in this response. * The character encoding may have been specified explicitly * using the {@link #characterEncoding(String)} or * {@link #contentType(String)} methods, or implicitly using the * {@link #locale(java.util.Locale)} method. Explicit specifications take * precedence over implicit specifications. Calls made * to these methods after <code>getWriter</code> has been * called or after the response has been committed have no * effect on the character encoding. If no character encoding * has been specified, <code>ISO-8859-1</code> is returned. * <p>See RFC 2047 (http://www.ietf.org/rfc/rfc2047.txt) * for more information about character encoding and MIME. * * @return a <code>String</code> specifying the * name of the character encoding, for * example, <code>UTF-8</code> */ public abstract String characterEncoding(); /** * Returns the content type used for the MIME body * sent in this response. The content type proper must * have been specified using {@link #contentType(String)} * before the response is committed. If no content type * has been specified, this method returns null. * If a content type has been specified, and a * character encoding has been explicitly or implicitly * specified as described in {@link #characterEncoding()} * or {@link #writer()} has been called, * the charset parameter is included in the string returned. * If no character encoding has been specified, the * charset parameter is omitted. * * @return a <code>String</code> specifying the * content type, for example, * <code>text/html; charset=UTF-8</code>, * or null */ public abstract T characterEncoding(String encoding); /** * Set the length of the content to be write to the response * * @param len an long value specifying the length of the * content being returned to the client; sets * the Content-Length header * @return the response it self * @see #outputStream * @see #writer */ public abstract T contentLength(long len); /** * Sub class to overwrite this method to set content type to * the response * * @param type a <code>String</code> specifying the MIME * type of the content */ protected abstract void _setContentType(String type); private String contentType; /** * Sets the content type of the response being sent to * the client. 
The content type may include the type of character * encoding used, for example, <code>text/html; charset=ISO-8859-4</code>. * If content type has already been set to the response, this method * will update the content type with the new value * <p/> * <p>this method must be called before calling {@link #writer()} * or {@link #outputStream()}</p> * * @param type a <code>String</code> specifying the MIME * type of the content * @return the response it self * @see #outputStream * @see #writer * @see #initContentType(String) */ public T contentType(String type) { _setContentType(type); contentType = type; return (T) this; } /** * This method set the content type to the response if there * is no content type been set already. * * @param type a <code>String</code> specifying the MIME * type of the content * @return the response it self * @see #contentType(String) */ public T initContentType(String type) { return (null == contentType) ? contentType(type) : (T) this; } /** * Set the etag header * @param etag the etag content * @return this response */ public T etag(String etag) { header(ETAG, etag); return (T)this; } /** * Sets the locale of the response, setting the headers (including the * Content-Type's charset) as appropriate. This method should be called * before a call to {@link #writer()}. By default, the response locale * is the default locale for the server. * * @param loc the locale of the response * @see #locale() */ protected abstract void _setLocale(Locale loc); public T locale(Locale locale) { _setLocale(locale); return (T) this; } /** * Returns the locale assigned to the response. * * @see #locale(java.util.Locale) */ public abstract Locale locale(); /** * Adds the specified cookie to the response. This method can be called * multiple times to add more than one cookie. * * @param cookie the Cookie to return to the client */ public abstract void addCookie(H.Cookie cookie); /** * Returns a boolean indicating whether the named response header * has already been set. * * @param name the header name * @return <code>true</code> if the named response header * has already been set; * <code>false</code> otherwise */ public abstract boolean containsHeader(String name); /** * Sends an error response to the client using the specified * status. The server defaults to creating the * response to look like an HTML-formatted server error page * containing the specified message, setting the content type * to "text/html", leaving cookies and other headers unmodified. * <p/> * If an error-page declaration has been made for the web application * corresponding to the status code passed in, it will be served back in * preference to the suggested msg parameter. * <p/> * <p>If the response has already been committed, this method throws * an IllegalStateException. * After using this method, the response should be considered * to be committed and should not be written to. 
* * @param sc the error status code * @param msg the descriptive message * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If an input or output exception occurs * @throws IllegalStateException If the response was committed */ public abstract T sendError(int sc, String msg); /** * Sames as {@link #sendError(int, String)} but accept message format * arguments * * @param sc the error status code * @param msg the descriptive message template * @param args the descriptive message arguments * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If an input or output exception occurs * @throws IllegalStateException If the response was committed */ public T sendError(int sc, String msg, Object... args) { return sendError(sc, S.fmt(msg, args)); } /** * Sends an error response to the client using the specified status * code and clearing the buffer. * <p>If the response has already been committed, this method throws * an IllegalStateException. * After using this method, the response should be considered * to be committed and should not be written to. * * @param sc the error status code * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If the response was committed before this method call */ public abstract T sendError(int sc); /** * Sends a temporary redirect response to the client using the * specified redirect location URL. This method can accept relative URLs; * the servlet container must convert the relative URL to an absolute URL * before sending the response to the client. If the location is relative * without a leading '/' the container interprets it as relative to * the current request URI. If the location is relative with a leading * '/' the container interprets it as relative to the servlet container root. * <p/> * <p>If the response has already been committed, this method throws * an IllegalStateException. * After using this method, the response should be considered * to be committed and should not be written to. * * @param location the redirect location URL * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If the response was committed before this method call * @throws IllegalStateException If the response was committed or * if a partial URL is given and cannot be converted into a valid URL */ public abstract T sendRedirect(String location); /** * Sets a response header with the given name and value. * If the header had already been set, the new value overwrites the * previous one. The <code>containsHeader</code> method can be * used to test for the presence of a header before setting its * value. * * @param name the name of the header * @param value the header value If it contains octet string, * it should be encoded according to RFC 2047 * (http://www.ietf.org/rfc/rfc2047.txt) * @return the response itself * @see #containsHeader * @see #addHeader */ public abstract T header(String name, String value); /** * Sets the status code for this response. This method is used to * set the return status code when there is no error (for example, * for the status codes SC_OK or SC_MOVED_TEMPORARILY). If there * is an error, and the caller wishes to invoke an error-page defined * in the web application, the <code>sendError</code> method should be used * instead. * <p> The container clears the buffer and sets the Location header, preserving * cookies and other headers. 
* * @param sc the status code * @return the response itself * @see #sendError * @see #status(Status) */ public abstract T status(int sc); /** * Sets the status for this response. This method is used to * set the return status code when there is no error (for example, * for the status OK or MOVED_TEMPORARILY). If there * is an error, and the caller wishes to invoke an error-page defined * in the web application, the <code>sendError</code> method should be used * instead. * <p> The container clears the buffer and sets the Location header, preserving * cookies and other headers. * * @param s the status * @return the response itself * @see #sendError */ public T status(Status s) { status(s.code()); return (T) this; } /** * Adds a response header with the given name and value. * This method allows response headers to have multiple values. * * @param name the name of the header * @param value the additional header value If it contains * octet string, it should be encoded * according to RFC 2047 * (http://www.ietf.org/rfc/rfc2047.txt) * @return this response itself * @see #header(String, String) */ public abstract T addHeader(String name, String value); /** * Adds a response header with given name and value if the header * with the same name has not been added yet * @param name the name of the header * @param value the header value * @return this response itself * @see #addHeader(String, String) */ public T addHeaderIfNotAdded(String name, String value) { if (!containsHeader(name)) { addHeader(name, value); } return (T) this; } /** * Write a string to the response * * @param s the string to write to the response * @return this response itself */ public T writeContent(String s) { try { IO.write(s.getBytes(characterEncoding()), outputStream()); } catch (UnsupportedEncodingException e) { throw E.encodingException(e); } return (T) this; } /** * Write content to the response * * @param content the content to write * @return the response itself */ public T writeText(String content) { _setContentType(Format.TXT.contentType()); return writeContent(content); } /** * Write content to the response * * @param content the content to write * @return the response itself */ public T writeHtml(String content) { _setContentType(Format.HTML.contentType()); return writeContent(content); } /** * Write content to the response * * @param content the content to write * @return the response itself */ public T writeJSON(String content) { _setContentType(Format.JSON.contentType()); return writeContent(content); } /** * Calling this method commits the response, meaning the status * code and headers will be written to the client */ public abstract void commit(); /** * Return a request instance of the current execution context, * For example from a {@link java.lang.ThreadLocal} * * @return the current request instance */ @SuppressWarnings("unchecked") public static <T extends Response> T current() { return (T) Current.response(); } /** * Set a request instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param response the request to be set to current execution context */ public static <T extends Response> void current(T response) { Current.response(response); } private enum State { NONE, STREAM() { @Override Writer writer(Response resp) { throw new IllegalStateException("writer() already called"); } }, WRITER() { @Override OutputStream outputStream(Response resp) { throw new IllegalStateException("outputStream() already called"); } }; OutputStream outputStream(Response resp) { 
resp.outputStream = resp.createOutputStream(); resp.state = STREAM; return resp.outputStream; } Writer writer(Response resp) { resp.createWriter(); resp.state = WRITER; return resp.writer; } } } // eof Response H() { } /** * Clear all current context */ public static void cleanUp() { Current.clear(); } }
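// ---------------------------------------------------------------------------
// Illustrative usage sketch (hypothetical; not part of the published API):
// a minimal example of how the Flash/Cookie round trip and the Status/Format
// helpers documented above might be used. The class name "UsageSketch" and the
// cookie name "osgl_flash" are assumptions made for illustration only.
// ---------------------------------------------------------------------------
class UsageSketch {
    public static void main(String[] args) {
        // resolving with no incoming cookie yields an empty flash scope
        H.Flash flash = H.Flash.resolve(null);

        // put a one-time error message into the flash; it lands in the output buffer
        flash.error("Form submission failed: %s", "email is required");

        // serialize the flash into a cookie; secure/path/httpOnly are left to the caller
        H.Cookie flashCookie = flash.serialize("osgl_flash");
        System.out.println("flash cookie value: " + flashCookie.value());

        // on the next request the cookie can be resolved back into a flash scope
        H.Flash next = H.Flash.resolve(flashCookie);
        System.out.println("error carried over: " + next.error());

        // status and format helpers
        System.out.println(H.Status.of(404).isClientError());      // expected: true
        System.out.println(H.Format.resolve("application/json"));  // expected: json
    }
}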
src/main/java/org/osgl/http/H.java
package org.osgl.http; import org.apache.commons.codec.Charsets; import org.osgl.$; import org.osgl.cache.CacheService; import org.osgl.exception.NotAppliedException; import org.osgl.exception.UnexpectedIOException; import org.osgl.http.util.Path; import org.osgl.logging.L; import org.osgl.logging.Logger; import org.osgl.util.*; import org.osgl.web.util.UserAgent; import java.io.*; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.charset.Charset; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import static org.osgl.http.H.Header.Names.*; /** * The namespace to access Http features. * Alias of {@link org.osgl.http.Http} */ public class H { protected static final Logger logger = L.get(Http.class); public enum Method { GET, HEAD, POST, DELETE, PUT, PATCH, TRACE, OPTIONS, CONNECT; private static EnumSet<Method> unsafeMethods = EnumSet.of(POST, DELETE, PUT, PATCH); private static EnumSet<Method> actionMethods = EnumSet.of(GET, POST, PUT, DELETE); /** * Returns if this http method is safe, meaning it * won't change the state of the server * * @see #unsafe() */ public boolean safe() { return !unsafe(); } /** * Returns if this http method is unsafe, meaning * it will change the state of the server * * @see #safe() */ public boolean unsafe() { return unsafeMethods.contains(this); } public static Method valueOfIgnoreCase(String method) { return valueOf(method.toUpperCase()); } public static EnumSet<Method> actionMethods() { return actionMethods.clone(); } } // eof Method public static final class Status implements Serializable, Comparable<Status> { private static final Map<Integer, Status> predefinedStatus = new LinkedHashMap<Integer, Status>(); private static final long serialVersionUID = -286619406116817809L; private int code; private Status(int code) { this(code, true); } private Status(int code, boolean predefined) { this.code = code; if (predefined) { predefinedStatus.put(code, this); } } /** * Returns the int value of the status */ public final int code() { return code; } /** * Returns {@code true} if the status is either a {@link #isClientError() client error} * or {@link #isServerError() server error} */ public boolean isError() { return isClientError() || isServerError(); } /** * Returns true if the status is server error (5xx) */ public boolean isServerError() { return code / 100 == 5; } /** * Returns true if the status is client error (4xx) */ public boolean isClientError() { return code / 100 == 4; } /** * Returns true if the status is success series (2xx) */ public boolean isSuccess() { return code / 100 == 2; } /** * Returns true if the status is redirect series (3xx) */ public boolean isRedirect() { return code / 100 == 3; } /** * Returns true if the status is informational series (1xx) */ public boolean isInformational() { return code / 100 == 1; } /** * Return a string representation of this status code. 
*/ @Override public String toString() { return Integer.toString(code); } @Override public int hashCode() { return code; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj instanceof Status) { Status that = (Status) obj; return that.code() == code; } return false; } @Override public int compareTo(Status o) { return code - o.code; } protected final Object clone() throws CloneNotSupportedException { throw new CloneNotSupportedException(); } private Object readResolve() { Status predefined = predefinedStatus.get(code); return null != predefined ? predefined : this; } /** * Alias of {@link #valueOf(int)} * @param n * @return */ public static Status of(int n) { return valueOf(n); } public static Status valueOf(int n) { E.illegalArgumentIf(n < 100 || n > 599, "invalid http status code: %s", n); Status retVal = predefinedStatus.get(n); if (null == retVal) { retVal = new Status(n, false); } return retVal; } public static List<Status> predefined() { return C.list(predefinedStatus.values()); } // 1xx Informational /** * {@code 100 Continue}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.1.1">HTTP/1.1</a> */ public static final Status CONTINUE = new Status(100); /** * {@code 101 Switching Protocols}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.1.2">HTTP/1.1</a> */ public static final Status SWITCHING_PROTOCOLS = new Status(101); /** * {@code 102 Processing}. * * @see <a href="http://tools.ietf.org/html/rfc2518#section-10.1">WebDAV</a> */ public static final Status PROCESSING = new Status(102); /** * {@code 103 Checkpoint}. * * @see <a href="http://code.google.com/p/gears/wiki/ResumableHttpRequestsProposal">A proposal for supporting * resumable POST/PUT HTTP requests in HTTP/1.0</a> */ public static final Status CHECKPOINT = new Status(103); // 2xx Success /** * {@code 200 OK}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.1">HTTP/1.1</a> */ public static final Status OK = new Status(200); /** * {@code 201 Created}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.2">HTTP/1.1</a> */ public static final Status CREATED = new Status(201); /** * {@code 202 Accepted}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.3">HTTP/1.1</a> */ public static final Status ACCEPTED = new Status(202); /** * {@code 203 Non-Authoritative Information}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.4">HTTP/1.1</a> */ public static final Status NON_AUTHORITATIVE_INFORMATION = new Status(203); /** * {@code 204 No Content}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.5">HTTP/1.1</a> */ public static final Status NO_CONTENT = new Status(204); /** * {@code 205 Reset Content}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.6">HTTP/1.1</a> */ public static final Status RESET_CONTENT = new Status(205); /** * {@code 206 Partial Content}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.2.7">HTTP/1.1</a> */ public static final Status PARTIAL_CONTENT = new Status(206); /** * {@code 207 Multi-Status}. * * @see <a href="http://tools.ietf.org/html/rfc4918#section-13">WebDAV</a> */ public static final Status MULTI_STATUS = new Status(207); /** * {@code 208 Already Reported}. * * @see <a href="http://tools.ietf.org/html/rfc5842#section-7.1">WebDAV Binding Extensions</a> */ public static final Status ALREADY_REPORTED = new Status(208); /** * {@code 226 IM Used}. 
* * @see <a href="http://tools.ietf.org/html/rfc3229#section-10.4.1">Delta encoding in HTTP</a> */ public static final Status IM_USED = new Status(226); /** * {@code 278} Faked http status to handle redirection on ajax case * @see <a href="http://stackoverflow.com/questions/199099/how-to-manage-a-redirect-request-after-a-jquery-ajax-call">this</a> stackoverflow */ public static final Status FOUND_AJAX = new Status(278); // 3xx Redirection /** * {@code 300 Multiple Choices}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.1">HTTP/1.1</a> */ public static final Status MULTIPLE_CHOICES = new Status(300); /** * {@code 301 Moved Permanently}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.2">HTTP/1.1</a> */ public static final Status MOVED_PERMANENTLY = new Status(301); /** * {@code 302 Found}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.3">HTTP/1.1</a> */ public static final Status FOUND = new Status(302); /** * {@code 302 Moved Temporarily}. * * @see <a href="http://tools.ietf.org/html/rfc1945#section-9.3">HTTP/1.0</a> * @deprecated In favor of {@link #FOUND} which will be returned from {@code Status.valueOf(302)} */ @Deprecated public static final Status MOVED_TEMPORARILY = new Status(302); /** * {@code 303 See Other}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.4">HTTP/1.1</a> */ public static final Status SEE_OTHER = new Status(303); /** * {@code 304 Not Modified}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.5">HTTP/1.1</a> */ public static final Status NOT_MODIFIED = new Status(304); /** * {@code 305 Use Proxy}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.6">HTTP/1.1</a> */ public static final Status USE_PROXY = new Status(305); /** * {@code 307 Temporary Redirect}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.3.8">HTTP/1.1</a> */ public static final Status TEMPORARY_REDIRECT = new Status(307); /** * {@code 308 Resume Incomplete}. * * @see <a href="http://code.google.com/p/gears/wiki/ResumableHttpRequestsProposal">A proposal for supporting * resumable POST/PUT HTTP requests in HTTP/1.0</a> */ public static final Status RESUME_INCOMPLETE = new Status(308); // --- 4xx Client Error --- /** * {@code 400 Bad Request}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.1">HTTP/1.1</a> */ public static final Status BAD_REQUEST = new Status(400); /** * {@code 401 Unauthorized}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.2">HTTP/1.1</a> */ public static final Status UNAUTHORIZED = new Status(401); /** * {@code 402 Payment Required}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.3">HTTP/1.1</a> */ public static final Status PAYMENT_REQUIRED = new Status(402); /** * {@code 403 Forbidden}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.4">HTTP/1.1</a> */ public static final Status FORBIDDEN = new Status(403); /** * {@code 404 Not Found}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.5">HTTP/1.1</a> */ public static final Status NOT_FOUND = new Status(404); /** * {@code 405 Method Not Allowed}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.6">HTTP/1.1</a> */ public static final Status METHOD_NOT_ALLOWED = new Status(405); /** * {@code 406 Not Acceptable}. 
* * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.7">HTTP/1.1</a> */ public static final Status NOT_ACCEPTABLE = new Status(406); /** * {@code 407 Proxy Authentication Required}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.8">HTTP/1.1</a> */ public static final Status PROXY_AUTHENTICATION_REQUIRED = new Status(407); /** * {@code 408 Request Timeout}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.9">HTTP/1.1</a> */ public static final Status REQUEST_TIMEOUT = new Status(408); /** * {@code 409 Conflict}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.10">HTTP/1.1</a> */ public static final Status CONFLICT = new Status(409); /** * {@code 410 Gone}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.11">HTTP/1.1</a> */ public static final Status GONE = new Status(410); /** * {@code 411 Length Required}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.12">HTTP/1.1</a> */ public static final Status LENGTH_REQUIRED = new Status(411); /** * {@code 412 Precondition failed}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.13">HTTP/1.1</a> */ public static final Status PRECONDITION_FAILED = new Status(412); /** * {@code 413 Request Entity Too Large}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.14">HTTP/1.1</a> */ public static final Status REQUEST_ENTITY_TOO_LARGE = new Status(413); /** * {@code 414 Request-URI Too Long}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.15">HTTP/1.1</a> */ public static final Status REQUEST_URI_TOO_LONG = new Status(414); /** * {@code 415 Unsupported Media Type}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.16">HTTP/1.1</a> */ public static final Status UNSUPPORTED_MEDIA_TYPE = new Status(415); /** * {@code 416 Requested Range Not Satisfiable}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.17">HTTP/1.1</a> */ public static final Status REQUESTED_RANGE_NOT_SATISFIABLE = new Status(416); /** * {@code 417 Expectation Failed}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.4.18">HTTP/1.1</a> */ public static final Status EXPECTATION_FAILED = new Status(417); /** * {@code 418 I'm a teapot}. * * @see <a href="http://tools.ietf.org/html/rfc2324#section-2.3.2">HTCPCP/1.0</a> */ public static final Status I_AM_A_TEAPOT = new Status(418); /** * @deprecated See <a href="http://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt">WebDAV Draft Changes</a> */ @Deprecated public static final Status INSUFFICIENT_SPACE_ON_RESOURCE = new Status(419); /** * @deprecated See <a href="http://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt">WebDAV Draft Changes</a> */ @Deprecated public static final Status METHOD_FAILURE = new Status(420); /** * @deprecated See <a href="http://tools.ietf.org/rfcdiff?difftype=--hwdiff&url2=draft-ietf-webdav-protocol-06.txt">WebDAV Draft Changes</a> */ @Deprecated public static final Status DESTINATION_LOCKED = new Status(421); /** * {@code 422 Unprocessable Entity}. * * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.2">WebDAV</a> */ public static final Status UNPROCESSABLE_ENTITY = new Status(422); /** * {@code 423 Locked}. * * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.3">WebDAV</a> */ public static final Status LOCKED = new Status(423); /** * {@code 424 Failed Dependency}. 
* * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.4">WebDAV</a> */ public static final Status FAILED_DEPENDENCY = new Status(424); /** * {@code 426 Upgrade Required}. * * @see <a href="http://tools.ietf.org/html/rfc2817#section-6">Upgrading to TLS Within HTTP/1.1</a> */ public static final Status UPGRADE_REQUIRED = new Status(426); /** * {@code 428 Precondition Required}. * * @see <a href="http://tools.ietf.org/html/rfc6585#section-3">Additional HTTP Status Codes</a> */ public static final Status PRECONDITION_REQUIRED = new Status(428); /** * {@code 429 Too Many Requests}. * * @see <a href="http://tools.ietf.org/html/rfc6585#section-4">Additional HTTP Status Codes</a> */ public static final Status TOO_MANY_REQUESTS = new Status(429); /** * {@code 431 Request Header Fields Too Large}. * * @see <a href="http://tools.ietf.org/html/rfc6585#section-5">Additional HTTP Status Codes</a> */ public static final Status REQUEST_HEADER_FIELDS_TOO_LARGE = new Status(431); // --- 5xx Server Error --- /** * {@code 500 Internal Server Error}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.1">HTTP/1.1</a> */ public static final Status INTERNAL_SERVER_ERROR = new Status(500); /** * {@code 501 Not Implemented}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.2">HTTP/1.1</a> */ public static final Status NOT_IMPLEMENTED = new Status(501); /** * {@code 502 Bad Gateway}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.3">HTTP/1.1</a> */ public static final Status BAD_GATEWAY = new Status(502); /** * {@code 503 Service Unavailable}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.4">HTTP/1.1</a> */ public static final Status SERVICE_UNAVAILABLE = new Status(503); /** * {@code 504 Gateway Timeout}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.5">HTTP/1.1</a> */ public static final Status GATEWAY_TIMEOUT = new Status(504); /** * {@code 505 HTTP Version Not Supported}. * * @see <a href="http://tools.ietf.org/html/rfc2616#section-10.5.6">HTTP/1.1</a> */ public static final Status HTTP_VERSION_NOT_SUPPORTED = new Status(505); /** * {@code 506 Variant Also Negotiates} * * @see <a href="http://tools.ietf.org/html/rfc2295#section-8.1">Transparent Content Negotiation</a> */ public static final Status VARIANT_ALSO_NEGOTIATES = new Status(506); /** * {@code 507 Insufficient Storage} * * @see <a href="http://tools.ietf.org/html/rfc4918#section-11.5">WebDAV</a> */ public static final Status INSUFFICIENT_STORAGE = new Status(507); /** * {@code 508 Loop Detected} * * @see <a href="http://tools.ietf.org/html/rfc5842#section-7.2">WebDAV Binding Extensions</a> */ public static final Status LOOP_DETECTED = new Status(508); /** * {@code 509 Bandwidth Limit Exceeded} */ public static final Status BANDWIDTH_LIMIT_EXCEEDED = new Status(509); /** * {@code 510 Not Extended} * * @see <a href="http://tools.ietf.org/html/rfc2774#section-7">HTTP Extension Framework</a> */ public static final Status NOT_EXTENDED = new Status(510); /** * {@code 511 Network Authentication Required}. 
* * @see <a href="http://tools.ietf.org/html/rfc6585#section-6">Additional HTTP Status Codes</a> */ public static final Status NETWORK_AUTHENTICATION_REQUIRED = new Status(511); } public static Status status(int n) { return Status.valueOf(n); } public static final class Header implements Serializable { private static final long serialVersionUID = -3987421318751857114L; public static final class Names { /** * {@code "Accept"} */ public static final String ACCEPT = "accept"; /** * {@code "Accept-Charset"} */ public static final String ACCEPT_CHARSET = "accept-charset"; /** * {@code "Accept-Encoding"} */ public static final String ACCEPT_ENCODING = "accept-encoding"; /** * {@code "Accept-Language"} */ public static final String ACCEPT_LANGUAGE = "accept-language"; /** * {@code "Accept-Ranges"} */ public static final String ACCEPT_RANGES = "accept-ranges"; /** * {@code "Accept-Patch"} */ public static final String ACCEPT_PATCH = "accept-patch"; /** * {@code "Access-Control-Allow-Origin"} */ public static final String ACCESS_CONTROL_ALLOW_ORIGIN = "access-control-allow-origin"; /** * {@code "Access-Control-Allow-Methods"} */ public static final String ACCESS_CONTROL_ALLOW_METHODS = "access-control-allow-methods"; /** * {@code "Access-Control-Allow-Headers"} */ public static final String ACCESS_CONTROL_ALLOW_HEADERS = "access-control-allow-headers"; /** * {@code "Access-Control-Allow-Credentials"} */ public static final String ACCESS_CONTROL_ALLOW_CREDENTIALS = "access-control-allow-Credentials"; /** * {@code "Access-Control-Expose-Headers"} */ public static final String ACCESS_CONTROL_EXPOSE_HEADERS = "access-control-expose-headers"; /** * {@code "Access-Control-Max-Age"} */ public static final String ACCESS_CONTROL_MAX_AGE = "access-control-max-age"; /** * {@code "Access-Control-Request-Method"} */ public static final String ACCESS_CONTROL_REQUEST_METHOD = "access-control-request-method"; /** * {@code "Access-Control-Request-Headers"} */ public static final String ACCESS_CONTROL_REQUEST_HEADERS = "access-control-request-headers"; /** * {@code "Age"} */ public static final String AGE = "age"; /** * {@code "Allow"} */ public static final String ALLOW = "allow"; /** * {@code "Authorization"} */ public static final String AUTHORIZATION = "authorization"; /** * {@code "Cache-Control"} */ public static final String CACHE_CONTROL = "cache-control"; /** * {@code "Connection"} */ public static final String CONNECTION = "connection"; /** * {@code "Content-Base"} */ public static final String CONTENT_BASE = "content-base"; /** * {@code "Content-Disposition"} */ public static final String CONTENT_DISPOSITION = "content-disposition"; /** * {@code "Content-Encoding"} */ public static final String CONTENT_ENCODING = "content-encoding"; /** * {@code "Content-Language"} */ public static final String CONTENT_LANGUAGE = "content-language"; /** * {@code "Content-Length"} */ public static final String CONTENT_LENGTH = "content-length"; /** * {@code "Content-Location"} */ public static final String CONTENT_LOCATION = "content-location"; /** * {@code "Content-Transfer-Encoding"} */ public static final String CONTENT_TRANSFER_ENCODING = "Content-Transfer-Encoding"; /** * {@code "Content-MD5"} */ public static final String CONTENT_MD5 = "content-md5"; /** * {@code "Content-Range"} */ public static final String CONTENT_RANGE = "content-range"; /** * {@code "Content-Type"} */ public static final String CONTENT_TYPE = "content-type"; /** * {@code "Cookie"} */ public static final String COOKIE = "cookie"; /** * {@code 
"Date"} */ public static final String DATE = "date"; /** * {@code "ETag"} */ public static final String ETAG = "etag"; /** * {@code "Expect"} */ public static final String EXPECT = "expect"; /** * {@code "Expires"} */ public static final String EXPIRES = "expires"; /** * {@code "From"} */ public static final String FROM = "from"; /** * {@code "Front-End-Https"} */ public static final String FRONT_END_HTTPS = "front-end-https"; /** * {@code "Host"} */ public static final String HOST = "host"; /** * {@code "HTTP_CLIENT_IP"} */ public static final String HTTP_CLIENT_IP = "http_client_ip"; /** * {@code "HTTP_X_FORWARDED_FOR"} */ public static final String HTTP_X_FORWARDED_FOR = "http_x_forwarded_for"; /** * {@code "If-Match"} */ public static final String IF_MATCH = "if-match"; /** * {@code "If-Modified-Since"} */ public static final String IF_MODIFIED_SINCE = "if-modified-since"; /** * {@code "If-None-Match"} */ public static final String IF_NONE_MATCH = "if-none-match"; /** * {@code "If-Range"} */ public static final String IF_RANGE = "if-range"; /** * {@code "If-Unmodified-Since"} */ public static final String IF_UNMODIFIED_SINCE = "if-unmodified-since"; /** * {@code "Last-Modified"} */ public static final String LAST_MODIFIED = "last-modified"; /** * {@code "Location"} */ public static final String LOCATION = "location"; /** * {@code "Max-Forwards"} */ public static final String MAX_FORWARDS = "max-forwards"; /** * {@code "Origin"} */ public static final String ORIGIN = "origin"; /** * {@code "Pragma"} */ public static final String PRAGMA = "pragma"; /** * {@code "Proxy-Authenticate"} */ public static final String PROXY_AUTHENTICATE = "proxy-authenticate"; /** * {@code "Proxy-Authorization"} */ public static final String PROXY_AUTHORIZATION = "proxy-authorization"; /** * {@code "Proxy-Client-IP"} */ public static final String PROXY_CLIENT_IP = "proxy-client-ip"; /** * {@code "Proxy-Connection"} */ public static final String PROXY_CONNECTION = "proxy_connection"; /** * {@code "Range"} */ public static final String RANGE = "range"; /** * {@code "Referer"} */ public static final String REFERER = "referer"; /** * {@code "Retry-After"} */ public static final String RETRY_AFTER = "retry-after"; /** * the header used to put the real ip by load balancers like F5 * {@code "rlnclientipaddr"} */ public static final String RLNCLIENTIPADDR = "rlnclientipaddr"; /** * {@code "sec-websocket-Key1"} */ public static final String SEC_WEBSOCKET_KEY1 = "sec-websocket-key1"; /** * {@code "sec-websocket-Key2"} */ public static final String SEC_WEBSOCKET_KEY2 = "sec-websocket-key2"; /** * {@code "sec-websocket-Location"} */ public static final String SEC_WEBSOCKET_LOCATION = "sec-websocket-location"; /** * {@code "sec-websocket-Origin"} */ public static final String SEC_WEBSOCKET_ORIGIN = "sec-websocket-origin"; /** * {@code "sec-websocket-Protocol"} */ public static final String SEC_WEBSOCKET_PROTOCOL = "sec-websocket-protocol"; /** * {@code "sec-websocket-Version"} */ public static final String SEC_WEBSOCKET_VERSION = "sec-websocket-version"; /** * {@code "sec-websocket-Key"} */ public static final String SEC_WEBSOCKET_KEY = "sec-websocket-key"; /** * {@code "sec-websocket-Accept"} */ public static final String SEC_WEBSOCKET_ACCEPT = "sec-websocket-accept"; /** * {@code "Server"} */ public static final String SERVER = "server"; /** * {@code "Set-Cookie"} */ public static final String SET_COOKIE = "set-cookie"; /** * {@code "Set-Cookie2"} */ public static final String SET_COOKIE2 = "set-cookie2"; /** * {@code 
"TE"} */ public static final String TE = "te"; /** * {@code "Trailer"} */ public static final String TRAILER = "trailer"; /** * {@code "Transfer-Encoding"} */ public static final String TRANSFER_ENCODING = "transfer-encoding"; /** * {@code "Upgrade"} */ public static final String UPGRADE = "upgrade"; /** * {@code "User-Agent"} */ public static final String USER_AGENT = "user-agent"; /** * {@code "Vary"} */ public static final String VARY = "vary"; /** * {@code "Via"} */ public static final String VIA = "via"; /** * {@code "Warning"} */ public static final String WARNING = "warning"; /** * {@code "WebSocket-Location"} */ public static final String WEBSOCKET_LOCATION = "websocket-location"; /** * {@code "WebSocket-Origin"} */ public static final String WEBSOCKET_ORIGIN = "webwocket-origin"; /** * {@code "WebSocket-Protocol"} */ public static final String WEBSOCKET_PROTOCOL = "websocket-protocol"; /** * {@code "WL-Proxy-Client-IP"} */ public static final String WL_PROXY_CLIENT_IP = "wl-proxy-client-ip"; /** * {@code "WWW-Authenticate"} */ public static final String WWW_AUTHENTICATE = "www-authenticate"; /** * {@code "X_Requested_With"} */ public static final String X_REQUESTED_WITH = "x-requested-with"; /** * {@code "X-Forwarded-Host"} */ public static final String X_FORWARDED_HOST = "x-forwarded-host"; /** * {@code "X_Forwared_For"} */ public static final String X_FORWARDED_FOR = "x-forwarded-for"; /** * {@code "X_Forwared_Proto"} */ public static final String X_FORWARDED_PROTO = "x-forwarded-proto"; /** * {@code "X-Forwarded-Ssl"} */ public static final String X_FORWARDED_SSL = "x-forwarded-ssl"; /** * {@code "X-Http-Method-Override"} */ public static final String X_HTTP_METHOD_OVERRIDE = "x-http-method-override"; /** * {@code "X-Url-Scheme"} */ public static final String X_URL_SCHEME = "x-url-scheme"; /** * {@code "X-Xsrf-Token"} */ public static final String X_XSRF_TOKEN = "x-xsrf-token"; private Names() { super(); } } private String name; private C.List<String> values; public Header(String name, String value) { E.NPE(name); this.name = name; this.values = C.list(value); } public Header(String name, String... values) { E.NPE(name); this.name = name; this.values = C.listOf(values); } public Header(String name, Iterable<String> values) { E.NPE(name); this.name = name; this.values = C.list(values); } public String name() { return name; } public String value() { return values.get(0); } public C.List<String> values() { return values; } @Override public String toString() { return values.toString(); } } // eof Header /** * Specify the format of the requested content type */ public static class Format implements Serializable { private static final Map<String, Format> predefined = new LinkedHashMap<String, Format>(); private static volatile Properties types; private int ordinal; private String name; private String contentType; private Format(String name, String contentType) { this(name, contentType, true); } private Format(String name, String contentType, boolean predefined) { this.name = name.toLowerCase(); this.contentType = contentType; if (predefined) { Format.predefined.put(name, this); this.ordinal = ordinal(name); } else { this.ordinal = -1; } } public final String name() { return name; } public final int ordinal() { return ordinal; } /** * Returns the content type string * * @return the content type string of this format */ public String contentType() { return contentType; } /** * Deprecated. 
Please use {@link #contentType()} * @return the content type string of the format */ @Deprecated public final String toContentType() { return contentType(); } public final String getName() { return name(); } public final String getContentType() { return contentType(); } /** * Returns the error message * * @param message * @return the message directly */ public String errorMessage(String message) { return message; } @Override public int hashCode() { if (ordinal != -1) { return ordinal; } return $.hc(name, contentType); } @Override public String toString() { return name(); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj instanceof Format) { Format that = (Format) obj; return $.eq(that.name, this.name) && $.eq(that.contentType, this.contentType); } return false; } private Object readResolve() { if (ordinal == -1) { return this; } return predefined.get(name); } /** * Deprecated. please Use {@link #predefined()} * @return an array of predefined Formats */ public static Format[] values() { Format[] retVal = new Format[predefined.size()]; return predefined.values().toArray(retVal); } public static List<Format> predefined() { return C.list(predefined.values()); } public static Format of(String name) { return valueOf(name); } public static Format of(String name, String contentType) { return valueOf(name, contentType); } public static Format valueOf(String name) { name = name.toLowerCase(); if (name.startsWith(".")) { name = S.afterLast(name, "."); } return predefined.get(name.toLowerCase()); } public static Format valueOf(String name, String contentType) { Format retVal = valueOf(name); if (null != retVal) { return retVal; } E.illegalArgumentIf(S.blank(name), "name cannot be blank string"); E.illegalArgumentIf(S.blank(contentType), "content type cannot be blank string"); name = name.toLowerCase(); if (name.startsWith(".")) { name = S.afterLast(name, "."); } return new Format(name, contentType, false); } public static Format resolve(Format def, String accept) { E.NPE(def); return resolve_(def, accept); } public static Format resolve(Iterable<String> accepts) { return resolve(Format.HTML, accepts); } public static Format resolve(Format def, Iterable<String> accepts) { Format retVal; for (String s : accepts) { retVal = resolve_(null, s); if (null != retVal) { return retVal; } } return $.ifNullThen(def, Format.HTML); } public static Format resolve(String... accepts) { return resolve(Format.HTML, accepts); } public static Format resolve(Format def, String... accepts) { Format retVal; for (String s : accepts) { retVal = resolve_(null, s); if (null != retVal) { return retVal; } } return $.ifNullThen(def, Format.HTML); } /** * Resolve {@code Format} instance out of an http "Accept" header. 
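* <p>A minimal usage sketch (the accept header value below is made up for illustration):</p>
* <pre>{@code
* Format fmt = Format.resolve("application/json, text/javascript;q=0.9");
* // fmt is Format.JSON, so fmt.contentType() returns "application/json"
* }</pre>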
* * @param accept the value of http "Accept" header * @return an {@code Format} instance */ public static Format resolve(String accept) { return resolve_(Format.UNKNOWN, accept); } public static String toContentType(String fmt) { Format f = predefined.get(fmt.toLowerCase()); if (null == f) { f = HTML; } return f.contentType(); } private static int ordinal(String s) { int l = s.length(), h = 0; for (int i = 0; i < l; ++i) { char c = s.charAt(i); h = 31 * h + c; } return h; } private static Format resolve_(Format def, String contentType) { Format fmt = def; if (S.blank(contentType)) { fmt = HTML; } else if (contentType.contains("application/xhtml") || contentType.contains("text/html") || contentType.startsWith("*/*")) { fmt = HTML; } else if (contentType.contains("application/xml") || contentType.contains("text/xml")) { fmt = XML; } else if (contentType.contains("application/json") || contentType.contains("text/javascript")) { fmt = JSON; } else if (contentType.contains("application/x-www-form-urlencoded")) { fmt = FORM_URL_ENCODED; } else if (contentType.contains("multipart/form-data") || contentType.contains("multipart/mixed")) { fmt = FORM_MULTIPART_DATA; } else if (contentType.contains("text/plain")) { fmt = TXT; } else if (contentType.contains("csv") || contentType.contains("comma-separated-values")) { fmt = CSV; } else if (contentType.contains("ms-excel")) { fmt = XLS; } else if (contentType.contains("spreadsheetml")) { fmt = XLSX; } else if (contentType.contains("pdf")) { fmt = PDF; } else if (contentType.contains("msword")) { fmt = DOC; } else if (contentType.contains("wordprocessingml")) { fmt = DOCX; } else if (contentType.contains("rtf")) { fmt = RTF; } return fmt; } static { try { InputStream is = H.class.getResourceAsStream("mime-types.properties"); Properties types = new Properties(); types.load(is); for (Object k : types.keySet()) { String fmt = k.toString(); String contentType = types.getProperty(fmt); new Format(fmt, contentType); } } catch (IOException e) { throw E.ioException(e); } } /** * The "text/html" content format */ public static final Format HTML = valueOf("html"); /** * Deprecated, please use {@link #HTML} */ @Deprecated public static final Format html = HTML; /** * The "text/xml" content format */ public static final Format XML = valueOf("xml"); /** * Deprecated, please use {@link #XML} */ @Deprecated public static final Format xml = XML; /** * The "application/json" content format */ public static final Format JSON = new Format("json", "application/json") { @Override public String errorMessage(String message) { return S.fmt("{\"error\": \"%s\"}", message); } }; /** * Deprecated. Please use {@link #JSON} */ @Deprecated public static final Format json = JSON; /** * The "text/css" content format */ public static final Format CSS = new Format("css", "text/css"); /** * The "application/javascript" content format */ public static final Format JAVASCRIPT = new Format("javascript", "application/javascript") { @Override public String errorMessage(String message) { return "alert(" + message + ");"; } }; /** * The "application/vnd.ms-excel" content format */ public static final Format XLS = valueOf("xls"); /** * Deprecated. Please use {@link #XLS} */ public static final Format xls = XLS; /** * The "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" content format */ public static final Format XLSX = valueOf("xlsx"); /** * Deprecated. 
Please use {@link #XLSX} */ @Deprecated public static final Format xlsx = XLSX; /** * The "application/vnd.ms-word" content format */ public static final Format DOC = valueOf("doc"); /** * Deprecated. Please use {@link #DOC} */ @Deprecated public static final Format doc = DOC; /** * The "application/vnd.openxmlformats-officedocument.wordprocessingml.document" content format */ public static final Format DOCX = valueOf("docx"); /** * Deprecated. Please use {@link #DOCX} */ @Deprecated public static final Format docx = DOCX; /** * The "text/csv" content format */ public static final Format CSV = valueOf("csv"); /** * Deprecated, please use {@link #CSV} */ @Deprecated public static final Format csv = CSV; /** * The "text/plain" content format */ public static final Format TXT = valueOf("txt"); /** * Deprecated, please use {@link #TXT} */ @Deprecated public static final Format txt = TXT; /** * The "application/pdf" content format */ public static final Format PDF = valueOf("pdf"); /** * Deprecated, please use {@link #PDF} */ @Deprecated public static final Format pdf = PDF; /** * The "application/rtf" content format */ public static final Format RTF = valueOf("rtf"); /** * Deprecated, please use {@link #RTF} */ @Deprecated public static final Format rtf = RTF; /** * The "application/x-www-form-urlencoded" content format */ public static final Format FORM_URL_ENCODED = new Format("form_url_encoded", "application/x-www-form-urlencoded"); /** * Deprecated, please use {@link #FORM_URL_ENCODED} */ @Deprecated public static final Format form_url_encoded = FORM_URL_ENCODED; /** * The "multipart/form-data" content format */ public static final Format FORM_MULTIPART_DATA = new Format("form_multipart_data", "multipart/form-data"); /** * Deprecated, please use {@link #FORM_MULTIPART_DATA} */ @Deprecated public static final Format form_multipart_data = FORM_MULTIPART_DATA; /** * The "unknown" content format. Use default content type: "text/html" */ public static final Format UNKNOWN = new Format("unknown", "text/html") { @Override public String contentType() { String s = Current.format(); if (!S.blank(s)) { return toContentType(s); } return "text/html"; } @Override public String toString() { String s = Current.format(); return null == s ?
name() : s; } }; /** * Deprecated, please use {@link #UNKNOWN} */ @Deprecated public static final Format unknown = UNKNOWN; public static final class Ordinal { public static final int HTML = Format.HTML.ordinal; public static final int XML = Format.XML.ordinal; public static final int JSON = Format.JSON.ordinal; public static final int XLS = Format.XLS.ordinal; public static final int XLSX = Format.XLSX.ordinal; public static final int DOC = Format.DOC.ordinal; public static final int DOCX = Format.DOCX.ordinal; public static final int CSV = Format.CSV.ordinal; public static final int TXT = Format.TXT.ordinal; public static final int PDF = Format.PDF.ordinal; public static final int RTF = Format.RTF.ordinal; public static final int FORM_URL_ENCODED = Format.FORM_URL_ENCODED.ordinal; public static final int FORM_MULTIPART_DATA = Format.FORM_MULTIPART_DATA.ordinal; } } public static Format format(String name) { return Format.valueOf(name); } public static Format format(String name, String contentType) { return Format.valueOf(name, contentType); } /** * The HTTP cookie */ public static class Cookie implements Serializable { private static final long serialVersionUID = 5325872881041347558L; private String name; // default is non-persistent cookie private int maxAge = -1; private boolean secure; private String path; private String domain; private String value; private boolean httpOnly; private int version; private Date expires; private String comment; public Cookie(String name) { this(name, ""); } public Cookie(String name, String value) { E.NPE(name); this.name = name; this.value = null == value ? "" : value; } public Cookie(String name, String value, int maxAge, boolean secure, String path, String domain, boolean httpOnly) { this(name, value); this.maxAge = maxAge; this.secure = secure; this.path = path; this.domain = domain; this.httpOnly = httpOnly; } /** * Returns the name of the cookie. Cookie name * cannot be changed after created */ public String name() { return name; } /** * Returns the value of the cookie */ public String value() { return value; } /** * Set a value to a cookie and the return {@code this} cookie * * @param value the value to be set to the cookie * @return this cookie */ public Cookie value(String value) { this.value = value; return this; } /** * Returns the domain of the cookie */ public String domain() { return domain; } /** * Set the domain of the cookie * * @param domain the domain string * @return this cookie */ public Cookie domain(String domain) { this.domain = domain; return this; } /** * Returns the path on the server * to which the browser returns this cookie. The * cookie is visible to all subpaths on the server. * * @see #path(String) */ public String path() { return path; } /** * Specifies a path for the cookie * to which the client should return the cookie. * <p/> * <p>The cookie is visible to all the pages in the directory * you specify, and all the pages in that directory's subdirectories. * <p/> * <p>Consult RFC 2109 (available on the Internet) for more * information on setting path names for cookies. * * @param uri a <code>String</code> specifying a path * @return this cookie after path is set * @see #path */ public Cookie path(String uri) { this.path = uri; return this; } /** * Returns the maximum age of cookie specified in seconds. If * maxAge is set to {@code -1} then the cookie will persist until * browser shutdown */ public int maxAge() { return maxAge; } /** * Set the max age of the cookie in seconds. 
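* <p>For illustration (the cookie name and value are made up), a cookie kept for one hour could be built as:</p>
* <pre>{@code
* H.Cookie cookie = new H.Cookie("uid", "42").maxAge(60 * 60).path("/").httpOnly(true);
* }</pre>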
* <p>A positive value indicates that the cookie will expire * after that many seconds have passed. Note that the value is * the <i>maximum</i> age when the cookie will expire, not the cookie's * current age. * <p/> * <p>A negative value means * that the cookie is not stored persistently and will be deleted * when the Web browser exits. A zero value causes the cookie * to be deleted. * * @see #maxAge() */ public Cookie maxAge(int maxAge) { this.maxAge = maxAge; return this; } public Date expires() { if (null != expires) { return expires; } if (maxAge < 0) { return null; } return new Date($.ms() + maxAge * 1000); } public Cookie expires(Date expires) { this.expires = expires; if (null != expires && -1 == maxAge) { maxAge = (int) ((expires.getTime() - $.ms()) / 1000); } return this; } /** * Returns <code>true</code> if the browser is sending cookies * only over a secure protocol, or <code>false</code> if the * browser can send cookies using any protocol. * * @see #secure(boolean) */ public boolean secure() { return secure; } /** * Indicates to the browser whether the cookie should only be sent * using a secure protocol, such as HTTPS or SSL. * <p/> * <p>The default value is <code>false</code>. * * @param secure the cookie secure requirement * @return this cookie instance */ public Cookie secure(boolean secure) { this.secure = secure; return this; } /** * Returns the version of the protocol this cookie complies * with. Version 1 complies with RFC 2109, * and version 0 complies with the original * cookie specification drafted by Netscape. Cookies provided * by a browser use and identify the browser's cookie version. * * @return 0 if the cookie complies with the * original Netscape specification; 1 * if the cookie complies with RFC 2109 * @see #version(int) */ public int version() { return version; } /** * Sets the version of the cookie protocol that this Cookie complies * with. * <p/> * <p>Version 0 complies with the original Netscape cookie * specification. Version 1 complies with RFC 2109. * <p/> * <p>Since RFC 2109 is still somewhat new, consider * version 1 as experimental; do not use it yet on production sites. 
* * @param v 0 if the cookie should comply with the original Netscape * specification; 1 if the cookie should comply with RFC 2109 * @see #version() */ public Cookie version(int v) { this.version = v; return this; } public boolean httpOnly() { return httpOnly; } public Cookie httpOnly(boolean httpOnly) { this.httpOnly = httpOnly; return this; } public String comment() { return comment; } public Cookie comment(String comment) { this.comment = comment; return this; } private static void ensureInit() { if (!Current.cookieMapInitialized()) { Request req = Request.current(); E.illegalStateIf(null == req); req._initCookieMap(); } } /** * Add a cookie to the current context * * @param cookie */ public static void set(Cookie cookie) { ensureInit(); Current.setCookie(cookie.name(), cookie); } /** * Get a cookie from current context by name * * @param name * @return a cookie with the name specified */ public static Cookie get(String name) { ensureInit(); return Current.getCookie(name); } /** * Returns all cookies from current context */ public static Collection<Cookie> all() { ensureInit(); return Current.cookies(); } /** * The function object namespace */ public static enum F { ; public static final $.F2<Cookie, Response, Void> ADD_TO_RESPONSE = new $.F2<Cookie, Response, Void>() { @Override public Void apply(Cookie cookie, Response response) throws NotAppliedException, $.Break { response.addCookie(cookie); return null; } }; } } // eof Cookie public static class KV<T extends KV> implements Serializable { private static final long serialVersionUID = 891504755320699989L; protected Map<String, String> data = C.newMap(); private boolean dirty = false; private KV() {} private KV(Map<String, String> data) { E.NPE(data); this.data = data; } /** * Associate a string value with the key specified during * initialization. The difference between calling {@code load} * and {@link #put(String, String)} is the former will not change * the dirty tag */ public T load(String key, String val) { E.illegalArgumentIf(key.contains(":")); data.put(key, val); return me(); } /** * Associate a string value with the key specified. */ public T put(String key, String val) { E.illegalArgumentIf(key.contains(":")); dirty = true; return load(key, val); } /** * Associate an Object value's String representation with the * key specified. If the object is {@code null} then {@code null} * is associated with the key specified */ public T put(String key, Object val) { String valStr = null == val ? 
null : val.toString(); return put(key, valStr); } /** * Returns the string value associated with the key specified */ public String get(String key) { return data.get(key); } /** * Returns the key set of internal data map */ public Set<String> keySet() { return data.keySet(); } /** * Returns {@code true} if internal data map is empty */ public boolean isEmpty() { return data.isEmpty(); } /** * Indicate if the KV has been changed * * @return {@code true} if this instance has been changed */ public boolean dirty() { return dirty; } /** * Alias of {@link #dirty()} */ public boolean changed() { return dirty; } /** * Returns true if the internal data map is empty */ public boolean empty() { return data.isEmpty(); } /** * Returns true if an association with key specified exists in * the internal map */ public boolean containsKey(String key) { return data.containsKey(key); } /** * Alias of {@link #containsKey(String)} */ public boolean contains(String key) { return containsKey(key); } /** * Returns the number of assoications stored in the internal map */ public int size() { return data.size(); } /** * Release an association with key specified * @param key specify the k-v pair that should be removed from internal map * @return this instance */ public T remove(String key) { data.remove(key); return me(); } /** * Clear the internal data map. In other words, all * Key/Value association stored in this instance has been * release * * @return this instance */ public T clear() { data.clear(); return me(); } @Override public String toString() { return data.toString(); } protected T me() { return (T) this; } } /** * Defines a data structure to encapsulate a stateless session which * accept only {@code String} type value, and will be persisted at * client side as a cookie. This means the entire size of the * information stored in session including names and values shall * not exceed 4096 bytes. * <p/> * <p>To store typed value or big value, use the cache methods * of the session class. However it is subject to the implementation * to decide whether cache methods are provided and how it is * implemented</p> */ public static final class Session extends KV<Session> { /** * Session identifier */ public static final String KEY_ID = "___ID"; /** * Stores the expiration date in the session */ public static final String KEY_EXPIRATION = "___TS"; /** * Stores the authenticity token in the session */ public static final String KEY_AUTHENTICITY_TOKEN = "___AT"; /** * Used to mark if a session has just expired */ public static final String KEY_EXPIRE_INDICATOR = "___expired"; /** * Stores the fingerprint to the session */ public static final String KEY_FINGER_PRINT = "__FP"; private static final long serialVersionUID = -423716328552054481L; private String id; public Session() { } /** * Returns the session identifier */ public String id() { if (null == id) { id = data.get(KEY_ID); if (null == id) { id = UUID.randomUUID().toString(); put(KEY_ID, id()); } } return id; } // ------- regular session attribute operations --- /** * Returns {@code true} if the session is empty. e.g. * does not contain anything else than the timestamp */ public boolean empty() { return super.empty() || (containsKey(KEY_EXPIRATION) && size() == 1); } /** * Check if the session is expired. 
A session is considered * to be expired if it has a timestamp and the timestamp is * non negative number and is less than {@link System#currentTimeMillis()} * * @return {@code true} if the session is expired */ public boolean expired() { long expiry = expiry(); if (expiry < 0) return false; return (expiry < System.currentTimeMillis()); } /** * Returns the expiration time in milliseconds of this session. If * there is no expiration set up, then this method return {@code -1} * * @return the difference, measured in milliseconds, between * the expiry of the session and midnight, January 1, * 1970 UTC, or {@code -1} if the session has no * expiry */ public long expiry() { String s = get(KEY_EXPIRATION); if (S.blank(s)) return -1; return Long.parseLong(s); } /** * Set session expiry in milliseconds * * @param expiry the difference, measured in milliseconds, between * the expiry and midnight, January 1, 1970 UTC. * @return the session instance */ public Session expireOn(long expiry) { put(KEY_EXPIRATION, S.string(expiry)); return this; } // ------- eof regular session attribute operations --- // ------- cache operations ------ /* * Attach session id to a cache key */ private String k(String key) { return S.builder(id()).append(key).toString(); } private static volatile CacheService cs; private static CacheService cs() { if (null != cs) return cs; synchronized (H.class) { if (null == cs) { cs = HttpConfig.cacheService(); } return cs; } } /** * Store an object into cache using key specified. The key will be * appended with session id, so that it distinct between caching * using the same key but in different user sessions. * <p/> * <p>The object is cached for {@link org.osgl.cache.CacheService#setDefaultTTL(int) default} ttl</p> * * @param key the key to cache the object * @param obj the object to be cached * @return this session instance */ public Session cache(String key, Object obj) { cs().put(k(key), obj); return this; } /** * Store an object into cache with expiration specified * * @param key the key to cache the object * @param obj the object to be cached * @param expiration specify the cache expiration in seconds * @return this session instance * @see #cache(String, Object) */ public Session cache(String key, Object obj, int expiration) { cs().put(k(key), obj, expiration); return this; } /** * Store an object into cache for 1 hour * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor1Hr(String key, Object obj) { return cache(key, obj, 60 * 60); } /** * Store an object into cache for 30 minutes * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor30Min(String key, Object obj) { return cache(key, obj, 30 * 60); } /** * Store an object into cache for 10 minutes * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor10Min(String key, Object obj) { return cache(key, obj, 10 * 60); } /** * Store an object into cache for 1 minutes * * @param key the key to cache the object * @param obj the object to be cached * @return the session instance */ public Session cacheFor1Min(String key, Object obj) { return cache(key, obj, 60); } /** * Evict an object from cache * * @param key the key to cache the object * @return this session instance */ public Session evict(String key) { cs().evict(k(key)); return this; } /** * Retrieve an object from cache by key. 
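* <p>A round-trip sketch (the key and the {@code Report} type are made up):</p>
* <pre>{@code
* session.cache("report", report, 10 * 60); // cache for ten minutes
* Report r = session.cached("report");      // later; may be null once evicted
* }</pre>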
The key * will be attached with session id * * @param key the key to get the cached object * @param <T> the object type * @return the object in the cache, or {@code null} * if it cannot find the object by key * specified * @see #cache(String, Object) */ public <T> T cached(String key) { return cs().get(k(key)); } /** * Retrieve an object from cache by key. The key * will be attached with session id * * @param key the key to get the cached object * @param clz the class to specify the return type * @param <T> the object type * @return the object in the cache, or {@code null} * if it cannot find the object by key * specified * @see #cache(String, Object) */ public <T> T cached(String key, Class<T> clz) { return cs().get(k(key)); } // ------- eof cache operations ------ /** * Return a session instance of the current execution context, * For example from a {@link java.lang.ThreadLocal} * * @return the current session instance */ public static Session current() { return Current.session(); } /** * Set a session instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param session the session to be set to current execution context */ public static void current(Session session) { Current.session(session); } // used to parse session data persisted in the cookie value private static final Pattern _PARSER = Pattern.compile(S.HSEP + "([^:]*):([^" + S.HSEP + "]*)" + S.HSEP); /** * Resolve a Session instance from a session cookie * * @param sessionCookie the cookie corresponding to a session * @param ttl session time to live in seconds * @return a Session instance * @see #serialize(String) */ public static Session resolve(Cookie sessionCookie, int ttl) { Session session = new Session(); long expiration = System.currentTimeMillis() + ttl * 1000; boolean hasTtl = ttl > -1; String value = null == sessionCookie ? null : sessionCookie.value(); if (S.blank(value)) { if (hasTtl) { session.expireOn(expiration); } } else { int firstDashIndex = value.indexOf("-"); if (firstDashIndex > -1) { String signature = value.substring(0, firstDashIndex); String data = value.substring(firstDashIndex + 1); if (S.eq(signature, sign(data))) { String sessionData = Codec.decodeUrl(data, Charsets.UTF_8); Matcher matcher = _PARSER.matcher(sessionData); while (matcher.find()) { session.put(matcher.group(1), matcher.group(2)); } } } if (hasTtl && session.expired()) { session = new Session().expireOn(expiration); } } return session; } /** * Serialize this session into a cookie. Note the cookie * returned has only name, value maxAge been set. 
It's up * to the caller to set the secure, httpOnly and path * attributes * * @param sessionKey the cookie name for the session cookie * @return a cookie captures this session's information or {@code null} if * this session is empty or this session hasn't been changed and * there is no expiry * @see #resolve(org.osgl.http.H.Cookie, int) */ public Cookie serialize(String sessionKey) { long expiry = expiry(); boolean hasTtl = expiry > -1; boolean expired = !hasTtl && expiry < System.currentTimeMillis(); if (!changed() && !hasTtl) return null; if (empty() || expired) { // empty session, delete the session cookie return new H.Cookie(sessionKey).maxAge(0); } StringBuilder sb = S.builder(); for (String k : keySet()) { sb.append(S.HSEP); sb.append(k); sb.append(":"); sb.append(get(k)); sb.append(S.HSEP); } String data = Codec.encodeUrl(sb.toString(), Charsets.UTF_8); String sign = sign(data); String value = S.builder(sign).append("-").append(data).toString(); Cookie cookie = new Cookie(sessionKey).value(value); if (expiry > -1L) { int ttl = (int) ((expiry - System.currentTimeMillis()) / 1000); cookie.maxAge(ttl); } return cookie; } private static String sign(String s) { return Crypto.sign(s, s.getBytes(Charsets.UTF_8)); } } // eof Session /** * A Flash represent a storage scope that attributes inside is valid only * for one session interaction. This feature of flash makes it very good * for server to pass one time information to client, e.g. form submission * error message etc. * <p/> * <p>Like {@link org.osgl.http.H.Session}, you can store only String type * information to flash, and the total number of information stored * including keys and values shall not exceed 4096 bytes as flash is * persisted as cookie in browser</p> */ public static final class Flash extends KV<Flash> { // used to parse flash data persisted in the cookie value private static final Pattern _PARSER = Session._PARSER; private static final long serialVersionUID = 5609789840171619780L; /** * Stores the data that will be output to cookie so next time the user's request income * they will be available for the application to access */ private Map<String, String> out = C.newMap(); /** * Add an attribute to the flash scope. The data is * added to both data buffer and the out buffer * * @param key the key to index the attribute * @param value the value of the attribute * @return the flash instance */ public Flash put(String key, String value) { out.put(key, value); return super.put(key, value); } /** * Add an attribute to the flash scope. The value is in Object * type, however it will be convert to its {@link Object#toString() string * representation} before put into the flash * * @param key the key to index the attribute * @param value the value to be put into the flash * @return this flash instance */ public Flash put(String key, Object value) { return put(key, null == value ? null : value.toString()); } /** * Add an attribute to the flash's current scope. Meaning when next time * the user request to the server, the attribute will not be there anymore. 
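* <p>Sketch of the difference from {@link #put(String, String)} (keys and values below are illustrative):</p>
* <pre>{@code
* flash.put("success", "profile saved");  // survives into the next request
* flash.now("hint", "this request only"); // not written to the flash cookie
* }</pre>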
* * @param key the attribute key * @param value the attribute value * @return the flash instance */ public Flash now(String key, String value) { return super.put(key, value); } /** * Add an "error" message to the flash scope * * @param message the error message * @return the flash instance * @see #put(String, Object) */ public Flash error(String message) { return put("error", message); } /** * Add an "error" message to the flash scope, with * optional format arguments * * @param message the message template * @param args the format arguments * @return this flash instance */ public Flash error(String message, Object... args) { return put("error", S.fmt(message, args)); } /** * Get the "error" message that has been added to * the flash scope. * * @return the "error" message or {@code null} if * no error message has been added to the flash */ public String error() { return get("error"); } /** * Add a "success" message to the flash scope * * @param message the error message * @return the flash instance * @see #put(String, Object) */ public Flash success(String message) { return put("success", message); } /** * Add a "success" message to the flash scope, with * optional format arguments * * @param message the message template * @param args the format arguments * @return this flash instance */ public Flash success(String message, Object... args) { return put("success", S.fmt(message, args)); } /** * Get the "success" message that has been added to * the flash scope. * * @return the "success" message or {@code null} if * no success message has been added to the flash */ public String success() { return get("success"); } /** * Discard a data from the output buffer of the flash but * the data buffer is remain untouched. Meaning * the app can still get the data {@link #put(String, Object)} * into the flash scope, however they will NOT * be write to the client cookie, thus the next * time client request the server, the app will * not be able to get the info anymore * * @param key the key to the data to be discarded * @return the flash instance */ public Flash discard(String key) { out.remove(key); return this; } /** * Discard the whole output buffer of the flash but * the data buffer is remain untouched. Meaning * the app can still get the data {@link #put(String, Object)} * into the flash scope, however they will NOT * be write to the client cookie, thus the next * time client request the server, the app will * not be able to get those info anymore * * @return the flash instance */ public Flash discard() { out.clear(); return this; } /** * Keep a data that has been {@link #put(String, Object) put} * into the flash for one time. The data that has been kept * will be persistent to client cookie for one time, thus * the next time when user request the server, the app * can still get the data, but only for one time unless * the app call {@code keep} method again * * @param key the key to identify the data to be kept * @see #keep() */ public Flash keep(String key) { if (super.containsKey(key)) { out.put(key, get(key)); } return this; } /** * Keep all data that has been {@link #put(String, Object) put} * into the flash for one time. 
The data that has been kept * will be persistent to client cookie for one time, thus * the next time when user request the server, the app * can still get the data, but only for one time unless * the app call {@code keep} method again * * @return the flash instance */ public Flash keep() { out.putAll(data); return this; } public KV out() { return new KV(out); } /** * Return a flash instance of the current execution context, * For example from a {@link java.lang.ThreadLocal} * * @return the current flash instance */ public static Flash current() { return Current.flash(); } /** * Set a flash instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param flash the flash to be set to current execution context */ public static void current(Flash flash) { Current.flash(flash); } /** * Resolve a Flash instance from a cookie. If the cookie supplied * is {@code null} then an empty Flash instance is returned * * @param flashCookie the flash cookie * @return a Flash instance * @see #serialize(String) */ public static Flash resolve(Cookie flashCookie) { Flash flash = new Flash(); if (null != flashCookie) { String value = flashCookie.value(); if (S.notBlank(value)) { String s = Codec.decodeUrl(value, Charsets.UTF_8); Matcher m = _PARSER.matcher(s); while (m.find()) { flash.data.put(m.group(1), m.group(2)); } } } return flash; } /** * Serialize this Flash instance into a Cookie. Note * the cookie returned has only name, value and max Age * been set. It's up to the caller to set secure, path * and httpOnly attributes. * * @param flashKey the cookie name * @return a Cookie represent this flash instance * @see #resolve(org.osgl.http.H.Cookie) */ public Cookie serialize(String flashKey) { if (out.isEmpty()) { return new Cookie(flashKey).maxAge(0); } StringBuilder sb = S.builder(); for (String key : out.keySet()) { sb.append(S.HSEP); sb.append(key); sb.append(":"); sb.append(out.get(key)); sb.append(S.HSEP); } String value = Codec.encodeUrl(sb.toString(), Charsets.UTF_8); return new Cookie(flashKey).value(value); } } // eof Flash /** * Defines the HTTP request trait * * @param <T> the type of the implementation class */ public static abstract class Request<T extends Request> { private static SimpleDateFormat dateFormat; static { dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US); dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); } /** * Returns the class of the implementation. Not to be used * by application */ protected abstract Class<T> _impl(); private Format accept; private Format contentType; private String ip; private int port = -1; private State state = State.NONE; private Object context; private String etag; protected volatile InputStream inputStream; protected volatile Reader reader; private Map<String, Cookie> cookies = C.newMap(); /** * Attach a context object to the request instance * @param context the context object * @return the request instance itself */ public T context(Object context) { this.context = $.notNull(context); return me(); } /** * Get the context object from the request instance * @param <CONTEXT> the generic type of the context object * @return the context object */ public <CONTEXT> CONTEXT context() { return (CONTEXT) context; } /** * Returns the HTTP method of the request */ public abstract Method method(); /** * Set the Http method on this request. 
Used by framework to "override" * a HTTP method * @param method the method to set * @return this request instance */ public abstract T method(Method method); /** * Returns the header content by name. If there are * multiple headers with the same name, then the first * one is returned. If there is no header has the name * then {@code null} is returned * <p/> * <p>Note header name is case insensitive</p> * * @param name the name of the header * @return the header content */ public abstract String header(String name); /** * Returns all header content by name. This method returns * content of all header with the same name specified in * an {@link java.lang.Iterable} of {@code String}. If there * is no header has the name specified, then an empty iterable * is returned. * <p/> * <p>Note header name is case insensitive</p> * * @param name the name of the header * @return all values of the header */ public abstract Iterable<String> headers(String name); /** * Return the request {@link org.osgl.http.H.Format accept} * * @return the request accept */ public Format accept() { if (null == accept) { resolveAcceptFormat(); } return accept; } /** * Set {@link org.osgl.http.H.Format accept} to the request * @param fmt * @return this request */ public T accept(Format fmt) { E.NPE(fmt); this.accept = fmt; return me(); } public String etag() { if (null == etag) { etag = method().safe() ? header(IF_NONE_MATCH) : header(IF_MATCH); } return etag; } public boolean etagMatches(String etag) { return S.eq(this.etag(), etag); } /** * Check if the request is an ajax call * * @return {@code true} if it is an ajax call */ public boolean isAjax() { return S.eq(header(X_REQUESTED_WITH), "XMLHttpRequest"); } /** * Returns the path of the request. This does not include the * context path. The path is a composite of * {@link javax.servlet.http.HttpServletRequest#getServletPath()} * and {@link javax.servlet.http.HttpServletRequest#getPathInfo()} * <p/> * <p> * The path starts with "/" but not end with "/" * </p> */ public abstract String path(); /** * Returns the context path of the request. * The context path starts with "/" but not end * with "/". If there is no context path * then and empty "" is returned */ public abstract String contextPath(); /** * Returns the full URI path. It's composed of * {@link #contextPath()} and {@link #path()} * The full path starts with "/" */ public String fullPath() { return Path.url(path(), this); } /** * Alias of {@link #fullPath()} * * @return the full URL path of the request */ public String url() { return fullPath(); } /** * Returns the full URL including scheme, domain, port and * full request path plus query string * * @return the absolute URL */ public String fullUrl() { return Path.fullUrl(path(), this); } /** * Returns query string or an empty String if the request * doesn't contains a query string */ public abstract String query(); /** * Check if the request was made on a secure channel * * @return {@code true} if this is a secure request */ public abstract boolean secure(); /** * Returns the scheme of the request, specifically one of the * "http" and "https" * * @return the scheme of the request */ public String scheme() { return secure() ? 
"https" : "http"; } protected void _setCookie(String name, Cookie cookie) { cookies.put(name, cookie); } private String domain; /** * Returns the domain of the request */ public String domain() { if (null == domain) resolveHostPort(); return domain; } /** * Returns the port */ public int port() { if (-1 == port) resolveHostPort(); return port; } /** * Returns remote ip address */ protected abstract String _ip(); private static boolean ipOk(String s) { return S.notEmpty(s) && S.neq("unknown", s); } private void resolveIp() { String rmt = _ip(); if (!HttpConfig.isXForwardedAllowed(rmt)) { ip = rmt; return; } String s = header(X_FORWARDED_FOR); if (!ipOk(s)) { if (HttpConfig.allowExtensiveRemoteAddrResolving()) { s = header(PROXY_CLIENT_IP); if (!ipOk(s)) { s = header(WL_PROXY_CLIENT_IP); if (!ipOk(s)) { s = header(HTTP_CLIENT_IP); if (!ipOk(s)) { s = header(HTTP_X_FORWARDED_FOR); if (!ipOk(s)) { ip = rmt; return; } } } } } else { ip = rmt; return; } } // in case there are multiple ip addresses (due to cascade proxies), then use the first one. if (s.length() > 15) { int pos = s.indexOf(","); if (pos > 0) { s = s.substring(0, pos); } } ip = s; } private void resolveHostPort() { String host = header(X_FORWARDED_HOST); if (S.empty(host)) { host = header(HOST); } if (null != host) { FastStr fs = FastStr.unsafeOf(host); if (fs.contains(':')) { domain = fs.beforeFirst(':').toString(); try { port = Integer.parseInt(fs.afterFirst(':').toString()); } catch (NumberFormatException e) { port = defPort(); logger.error(e, "Error parsing port number: %s", S.after(host, ":")); } } else { domain = host; port = defPort(); } } else { domain = ""; port = defPort(); } } private int defPort() { return secure() ? 80 : 443; } public String ip() { if (null == ip) { resolveIp(); } return ip; } public String userAgentStr() { return header(USER_AGENT); } public UserAgent userAgent() { return UserAgent.parse(userAgentStr()); } protected abstract void _initCookieMap(); /** * Returns cookie by it's name * * @param name the cookie name * @return the cookie or {@code null} if not found */ public H.Cookie cookie(String name) { if (cookies.isEmpty()) { _initCookieMap(); } return cookies.get(name); } /** * Returns all cookies of the request in Iterable */ public List<H.Cookie> cookies() { if (cookies.isEmpty()) { _initCookieMap(); } return C.list(cookies.values()); } /** * resolve the request accept * * @return this request instance */ private T resolveAcceptFormat() { String accept = header(ACCEPT); this.accept = Format.resolve(accept); return (T) this; } /** * Check if the requested resource is modified with etag and * last timestamp (usually the timestamp of a static file e.g.) 
* * @param etag the etag to compare with "If_None_Match" * header in browser * @param since the last timestamp to compare with * "If_Modified_Since" header in browser * @return {@code true} if the resource has changed * or {@code false} otherwise */ public boolean isModified(String etag, long since) { String browserEtag = header(IF_NONE_MATCH); if (null == browserEtag) return true; if (!S.eq(browserEtag, etag)) { return true; } String s = header(IF_MODIFIED_SINCE); if (null == s) return true; try { Date browserDate = dateFormat.parse(s); if (browserDate.getTime() >= since) { return false; } } catch (ParseException ex) { logger.error(ex, "Can't parse date: %s", s); } return true; } private void parseContentTypeAndEncoding() { String type = header(CONTENT_TYPE); if (null == type) { contentType = Format.HTML; encoding = "utf-8"; } else { String[] contentTypeParts = type.split(";"); String _contentType = contentTypeParts[0].trim().toLowerCase(); String _encoding = "utf-8"; // check for encoding-info if (contentTypeParts.length >= 2) { String[] encodingInfoParts = contentTypeParts[1].split(("=")); if (encodingInfoParts.length == 2 && encodingInfoParts[0].trim().equalsIgnoreCase("charset")) { // encoding-info was found in request _encoding = encodingInfoParts[1].trim(); if (S.notBlank(_encoding) && ((_encoding.startsWith("\"") && _encoding.endsWith("\"")) || (_encoding.startsWith("'") && _encoding.endsWith("'"))) ) { _encoding = _encoding.substring(1, _encoding.length() - 1).trim(); } } } contentType = Format.resolve(_contentType); encoding = _encoding; } } /** * Return content type of the request */ public Format contentType() { if (null == contentType) { parseContentTypeAndEncoding(); } return contentType; } private String encoding; /** * Returns encoding of the request */ public String characterEncoding() { if (null == encoding) { parseContentTypeAndEncoding(); } return encoding; } private C.List<Locale> locales; private void parseLocales() { String s = header(ACCEPT_LANGUAGE); if (S.blank(s)) { locales = C.list(HttpConfig.defaultLocale()); return; } // preprocess to remove all blanks s = S.str(s).remove(new $.F1<Character, Boolean>() { @Override public Boolean apply(Character character) { char c = character; return c == ' ' || c == '\t'; } }).toString(); ListBuilder<Locale> lb = ListBuilder.create(); // parse things like "da,en-gb;q=0.8,en;q=0.7" String[] sa = s.split(","); for (String s0 : sa) { String[] arr = s0.trim().split(";"); //Parse the locale Locale locale; String[] l = arr[0].split("-"); switch(l.length){ case 2: locale = new Locale(l[0], l[1]); break; case 3: locale = new Locale(l[0], l[1], l[2]); break; default: locale = new Locale(l[0]); break; } lb.add(locale); } if (lb.isEmpty()) lb.add(HttpConfig.defaultLocale()); locales = lb.toList(); } /** * Returns locale of the request */ public Locale locale() { if (null == locales) parseLocales(); return locales.get(0); } /** * Returns all locales of the request */ public C.List<Locale> locales() { if (null == locales) parseLocales(); return locales; } private long len = -2; /** * Returns the content length of the request */ public long contentLength() { if (len > -2) return len; String s = header(CONTENT_LENGTH); if (S.blank(s)) { len = -1; } else { try { len = Long.parseLong(s); } catch (NumberFormatException e) { len = -1; logger.error("Error parsing content-length: %s", s); } } return len; } public boolean readerCreated() { return state == State.READER; } protected abstract InputStream createInputStream(); /** * Returns body of the 
request as binary data using {@link java.io.InputStream} * * @throws IllegalStateException if {@link #reader()} has already * been called on this request instance */ public InputStream inputStream() throws IllegalStateException { return state.inputStream(this); } private void createReader() { if (null != reader) { return; } synchronized (this) { if (null != reader) { return; } createInputStream(); String charset = characterEncoding(); Charset cs = null == charset ? Charsets.UTF_8 : Charset.forName(charset); reader = new InputStreamReader(inputStream(), cs); } } /** * Returns body of the request as binary data using {@link java.io.Reader} * * @throws IllegalStateException if {@link #inputStream()} has already * been called on this request instance */ public Reader reader() throws IllegalStateException { return state.reader(this); } /** * Return a request parameter value by name. If there is no parameter * found with the name specified, then {@code null} is returned. If * there are multiple values associated with the name, then the * first one is returned * * @param name the parameter name * @return the parameter value of {@code null} if not found */ public abstract String paramVal(String name); /** * Returns all values associated with the name specified in the * http request. If there is no parameter found with the name, * then {@code new String[0]} shall be returned * * @param name the parameter name * @return all values of the parameter */ public abstract String[] paramVals(String name); /** * Return all parameter names * * @return an {@link java.lang.Iterable} of parameter names */ public abstract Iterable<String> paramNames(); private void parseAuthorization() { if (null != user) return; user = ""; password = ""; String s = header(AUTHORIZATION); if (null != s && s.startsWith("Basic")) { String data = s.substring(6); String[] decodedData = new String(Codec.decodeBASE64(data)).split(":"); user = decodedData.length > 0 ? decodedData[0] : null; password = decodedData.length > 1 ? 
decodedData[1] : null; } } private String user; /** * The Http Basic user */ public String user() { if (null == user) parseAuthorization(); return user; } private String password; /** * the Http Basic password */ public String password() { if (null == password) parseAuthorization(); return password; } protected final T me() { return (T) this; } /** * Return a request instance of the current execution context, * For example from a {@link java.lang.ThreadLocal} * * @return the current request instance */ @SuppressWarnings("unchecked") public static <T extends Request> T current() { return (T) Current.request(); } /** * Set a request instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param request the request to be set to current execution context */ public static <T extends Request> void current(T request) { Current.request(request); } private enum State { NONE, STREAM() { @Override Reader reader(Request req) { throw new IllegalStateException("reader() already called"); } }, READER() { @Override InputStream inputStream(Request req) { throw new IllegalStateException("inputStream() already called"); } }; InputStream inputStream(Request req) { req.inputStream = req.createInputStream(); req.state = STREAM; return req.inputStream; } Reader reader(Request req) { req.createReader(); req.state = READER; return req.reader; } } } // eof Request /** * Defines the HTTP response trait */ public static abstract class Response<T extends Response> { private State state = State.NONE; protected volatile OutputStream outputStream; protected volatile Writer writer; private Object context; /** * Attach a context object to the response instance * @param context the context object * @return the response instance itself */ public T context(Object context) { this.context = $.notNull(context); return (T)this; } /** * Get the context object from the response instance * @param <CONTEXT> the generic type of the context object * @return the context object */ public <CONTEXT> CONTEXT context() { return (CONTEXT) context; } /** * Returns the class of the implementation. Not to be used * by application */ protected abstract Class<T> _impl(); public boolean writerCreated() { return state == State.WRITER; } protected abstract OutputStream createOutputStream(); private void createWriter() { if (null != writer) { return; } synchronized (this) { if (null != writer) { return; } outputStream = createOutputStream(); String charset = characterEncoding(); Charset cs = null == charset ? 
Charsets.UTF_8 : Charset.forName(charset); writer = new OutputStreamWriter(outputStream, cs); } } /** * Returns the output stream to write to the response * * @throws java.lang.IllegalStateException if * {@link #writer()} is called already * @throws org.osgl.exception.UnexpectedIOException if * there are output exception */ public OutputStream outputStream() throws IllegalStateException, UnexpectedIOException { return state.outputStream(this); } /** * Returns the writer to write to the response * * @throws java.lang.IllegalStateException if {@link #outputStream()} is called already * @throws org.osgl.exception.UnexpectedIOException if there are output exception */ public Writer writer() throws IllegalStateException, UnexpectedIOException { return state.writer(this); } /** * Returns a print writer to write to the response * * @throws IllegalStateException if {@link #outputStream()} is called already * @throws UnexpectedIOException if there are output exception */ public PrintWriter printWriter() { Writer w = writer(); if (w instanceof PrintWriter) { return (PrintWriter) w; } else { return new PrintWriter(w); } } /** * Returns the name of the character encoding (MIME charset) * used for the body sent in this response. * The character encoding may have been specified explicitly * using the {@link #characterEncoding(String)} or * {@link #contentType(String)} methods, or implicitly using the * {@link #locale(java.util.Locale)} method. Explicit specifications take * precedence over implicit specifications. Calls made * to these methods after <code>getWriter</code> has been * called or after the response has been committed have no * effect on the character encoding. If no character encoding * has been specified, <code>ISO-8859-1</code> is returned. * <p>See RFC 2047 (http://www.ietf.org/rfc/rfc2047.txt) * for more information about character encoding and MIME. * * @return a <code>String</code> specifying the * name of the character encoding, for * example, <code>UTF-8</code> */ public abstract String characterEncoding(); /** * Returns the content type used for the MIME body * sent in this response. The content type proper must * have been specified using {@link #contentType(String)} * before the response is committed. If no content type * has been specified, this method returns null. * If a content type has been specified, and a * character encoding has been explicitly or implicitly * specified as described in {@link #characterEncoding()} * or {@link #writer()} has been called, * the charset parameter is included in the string returned. * If no character encoding has been specified, the * charset parameter is omitted. * * @return a <code>String</code> specifying the * content type, for example, * <code>text/html; charset=UTF-8</code>, * or null */ public abstract T characterEncoding(String encoding); /** * Set the length of the content to be write to the response * * @param len an long value specifying the length of the * content being returned to the client; sets * the Content-Length header * @return the response it self * @see #outputStream * @see #writer */ public abstract T contentLength(long len); /** * Sub class to overwrite this method to set content type to * the response * * @param type a <code>String</code> specifying the MIME * type of the content */ protected abstract void _setContentType(String type); private String contentType; /** * Sets the content type of the response being sent to * the client. 
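* <p>For instance (the MIME string below is only an example):</p>
* <pre>{@code
* resp.contentType("application/json; charset=UTF-8");
* }</pre>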
The content type may include the type of character * encoding used, for example, <code>text/html; charset=ISO-8859-4</code>. * If content type has already been set to the response, this method * will update the content type with the new value * <p/> * <p>this method must be called before calling {@link #writer()} * or {@link #outputStream()}</p> * * @param type a <code>String</code> specifying the MIME * type of the content * @return the response it self * @see #outputStream * @see #writer * @see #initContentType(String) */ public T contentType(String type) { _setContentType(type); contentType = type; return (T) this; } /** * This method set the content type to the response if there * is no content type been set already. * * @param type a <code>String</code> specifying the MIME * type of the content * @return the response it self * @see #contentType(String) */ public T initContentType(String type) { return (null == contentType) ? contentType(type) : (T) this; } /** * Set the etag header * @param etag the etag content * @return this response */ public T etag(String etag) { header(ETAG, etag); return (T)this; } /** * Sets the locale of the response, setting the headers (including the * Content-Type's charset) as appropriate. This method should be called * before a call to {@link #writer()}. By default, the response locale * is the default locale for the server. * * @param loc the locale of the response * @see #locale() */ protected abstract void _setLocale(Locale loc); public T locale(Locale locale) { _setLocale(locale); return (T) this; } /** * Returns the locale assigned to the response. * * @see #locale(java.util.Locale) */ public abstract Locale locale(); /** * Adds the specified cookie to the response. This method can be called * multiple times to add more than one cookie. * * @param cookie the Cookie to return to the client */ public abstract void addCookie(H.Cookie cookie); /** * Returns a boolean indicating whether the named response header * has already been set. * * @param name the header name * @return <code>true</code> if the named response header * has already been set; * <code>false</code> otherwise */ public abstract boolean containsHeader(String name); /** * Sends an error response to the client using the specified * status. The server defaults to creating the * response to look like an HTML-formatted server error page * containing the specified message, setting the content type * to "text/html", leaving cookies and other headers unmodified. * <p/> * If an error-page declaration has been made for the web application * corresponding to the status code passed in, it will be served back in * preference to the suggested msg parameter. * <p/> * <p>If the response has already been committed, this method throws * an IllegalStateException. * After using this method, the response should be considered * to be committed and should not be written to. 
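* <p>Illustrative call (the message text is made up):</p>
* <pre>{@code
* resp.sendError(404, "no such order");
* }</pre>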
* * @param sc the error status code * @param msg the descriptive message * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If an input or output exception occurs * @throws IllegalStateException If the response was committed */ public abstract T sendError(int sc, String msg); /** * Sames as {@link #sendError(int, String)} but accept message format * arguments * * @param sc the error status code * @param msg the descriptive message template * @param args the descriptive message arguments * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If an input or output exception occurs * @throws IllegalStateException If the response was committed */ public T sendError(int sc, String msg, Object... args) { return sendError(sc, S.fmt(msg, args)); } /** * Sends an error response to the client using the specified status * code and clearing the buffer. * <p>If the response has already been committed, this method throws * an IllegalStateException. * After using this method, the response should be considered * to be committed and should not be written to. * * @param sc the error status code * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If the response was committed before this method call */ public abstract T sendError(int sc); /** * Sends a temporary redirect response to the client using the * specified redirect location URL. This method can accept relative URLs; * the servlet container must convert the relative URL to an absolute URL * before sending the response to the client. If the location is relative * without a leading '/' the container interprets it as relative to * the current request URI. If the location is relative with a leading * '/' the container interprets it as relative to the servlet container root. * <p/> * <p>If the response has already been committed, this method throws * an IllegalStateException. * After using this method, the response should be considered * to be committed and should not be written to. * * @param location the redirect location URL * @return the response itself * @throws org.osgl.exception.UnexpectedIOException If the response was committed before this method call * @throws IllegalStateException If the response was committed or * if a partial URL is given and cannot be converted into a valid URL */ public abstract T sendRedirect(String location); /** * Sets a response header with the given name and value. * If the header had already been set, the new value overwrites the * previous one. The <code>containsHeader</code> method can be * used to test for the presence of a header before setting its * value. * * @param name the name of the header * @param value the header value If it contains octet string, * it should be encoded according to RFC 2047 * (http://www.ietf.org/rfc/rfc2047.txt) * @return the response itself * @see #containsHeader * @see #addHeader */ public abstract T header(String name, String value); /** * Sets the status code for this response. This method is used to * set the return status code when there is no error (for example, * for the status codes SC_OK or SC_MOVED_TEMPORARILY). If there * is an error, and the caller wishes to invoke an error-page defined * in the web application, the <code>sendError</code> method should be used * instead. * <p> The container clears the buffer and sets the Location header, preserving * cookies and other headers. 
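* <p>A small sketch writing a created-response (the JSON payload is an assumption):</p>
* <pre>{@code
* resp.status(201);
* resp.writeJSON("{\"id\": 42}");
* }</pre>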
* * @param sc the status code * @return the response itself * @see #sendError * @see #status(Status) */ public abstract T status(int sc); /** * Sets the status for this response. This method is used to * set the return status code when there is no error (for example, * for the status OK or MOVED_TEMPORARILY). If there * is an error, and the caller wishes to invoke an error-page defined * in the web application, the <code>sendError</code> method should be used * instead. * <p> The container clears the buffer and sets the Location header, preserving * cookies and other headers. * * @param s the status * @return the response itself * @see #sendError */ public T status(Status s) { status(s.code()); return (T) this; } /** * Adds a response header with the given name and value. * This method allows response headers to have multiple values. * * @param name the name of the header * @param value the additional header value. If it contains * an octet string, it should be encoded * according to RFC 2047 * (http://www.ietf.org/rfc/rfc2047.txt) * @return this response itself * @see #header(String, String) */ public abstract T addHeader(String name, String value); /** * Adds a response header with the given name and value if a header * with the same name has not been added yet * @param name the name of the header * @param value the header value * @return this response itself * @see #addHeader(String, String) */ public T addHeaderIfNotAdded(String name, String value) { if (!containsHeader(name)) { addHeader(name, value); } return (T) this; } /** * Write a string to the response * * @param s the string to write to the response * @return this response itself */ public T writeContent(String s) { try { IO.write(s.getBytes(characterEncoding()), outputStream()); } catch (UnsupportedEncodingException e) { throw E.encodingException(e); } return (T) this; } /** * Write content to the response * * @param content the content to write * @return the response itself */ public T writeText(String content) { _setContentType(Format.TXT.contentType()); return writeContent(content); } /** * Write content to the response * * @param content the content to write * @return the response itself */ public T writeHtml(String content) { _setContentType(Format.HTML.contentType()); return writeContent(content); } /** * Write content to the response * * @param content the content to write * @return the response itself */ public T writeJSON(String content) { _setContentType(Format.JSON.contentType()); return writeContent(content); } /** * Calling this method commits the response, meaning the status * code and headers will be written to the client */ public abstract void commit(); /** * Return a response instance of the current execution context, * for example from a {@link java.lang.ThreadLocal} * * @return the current response instance */ @SuppressWarnings("unchecked") public static <T extends Response> T current() { return (T) Current.response(); } /** * Set a response instance into the current execution context, * for example into a {@link java.lang.ThreadLocal} * * @param response the response to be set to the current execution context */ public static <T extends Response> void current(T response) { Current.response(response); } private enum State { NONE, STREAM() { @Override Writer writer(Response resp) { throw new IllegalStateException("writer() already called"); } }, WRITER() { @Override OutputStream outputStream(Response resp) { throw new IllegalStateException("outputStream() already called"); } }; OutputStream outputStream(Response resp) {
resp.outputStream = resp.createOutputStream(); resp.state = STREAM; return resp.outputStream; } Writer writer(Response resp) { resp.createWriter(); resp.state = WRITER; return resp.writer; } } } // eof Response H() { } /** * Clear all current context */ public static void cleanUp() { Current.clear(); } }
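The fluent Response API documented above is easiest to follow with a concrete call sequence, so here is a minimal usage sketch. It is not taken from the library itself: the status code, header and JSON payload are invented, the generic self-type is used raw to keep the sketch short, and a response is assumed to have been attached to the current execution context; only methods defined above (current, status, header, writeJSON, commit) are exercised.

    H.Response resp = H.Response.current();   // the response bound to the current execution context
    resp.status(200)                          // set the status before the body is written
        .header("Cache-Control", "no-cache")  // header() replaces any previous value; addHeader() appends
        .writeJSON("{\"ok\":true}");          // sets the JSON content type, then writes the body
    resp.commit();                            // writes the status code and headers to the client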
fix default port issue
src/main/java/org/osgl/http/H.java
fix default port issue
Java
bsd-3-clause
93e9cebf9d769b99031cba8a253640dc5be60c6f
0
NCIP/catissue-core,krishagni/openspecimen,krishagni/openspecimen,krishagni/openspecimen,NCIP/catissue-core,asamgir/openspecimen,NCIP/catissue-core,asamgir/openspecimen,asamgir/openspecimen
/** * <p>Title: SimpleQueryBizLogic Class> * <p>Description: SimpleQueryBizLogic contains the bizlogic required for simple query interface.</p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Gautam Shetty * @version 1.00 */ package edu.wustl.catissuecore.bizlogic; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.Vector; import edu.wustl.catissuecore.dao.JDBCDAO; import edu.wustl.catissuecore.query.DataElement; import edu.wustl.catissuecore.query.Operator; import edu.wustl.catissuecore.query.Query; import edu.wustl.catissuecore.query.SimpleConditionsNode; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.catissuecore.util.global.Utility; import edu.wustl.common.beans.QueryResultObjectData; import edu.wustl.common.util.dbManager.DAOException; import edu.wustl.common.util.logger.Logger; /** * SimpleQueryBizLogic contains the bizlogic required for simple query interface. * @author gautam_shetty */ public class SimpleQueryBizLogic extends DefaultBizLogic { /** * Adds single quotes (') for string and date type attributes in the condition collecion * and the returns the Set of objects to which the condition attributes belong. * @param simpleConditionNodeCollection The condition collection. * @return the Set of objects to which the condition attributes belong. * @throws DAOException */ public Set handleStringAndDateConditions(Collection simpleConditionNodeCollection, Set fromTables) throws DAOException { //Adding single quotes to strings and date values. Iterator iterator = simpleConditionNodeCollection.iterator(); while (iterator.hasNext()) { SimpleConditionsNode simpleConditionsNode = (SimpleConditionsNode) iterator.next(); // Add all the objects selected in UI to the fromtables Set. fromTables.add(simpleConditionsNode.getCondition().getDataElement().getTable()); // Adds single quotes to the value of attributes whose type is string or date. String tableInPath = addSingleQuotes(simpleConditionsNode); //Get the tables in path for this field and add it in the fromTables Set. if (tableInPath != null) { addTablesInPathToFromSet(fromTables, tableInPath); } fromTables.add(simpleConditionsNode.getCondition().getDataElement().getTable()); } return fromTables; } /** * Gets the alias names of the tables in path and adds them in the fromTables Set passed. * @param fromTables The Set to which the alias names are to be added. * @param tableInPath The ids of tables in path separated by : * @throws DAOException */ private void addTablesInPathToFromSet(Set fromTables, String tableInPath) throws DAOException { StringTokenizer tableInPathTokenizer = new StringTokenizer(tableInPath, ":"); while (tableInPathTokenizer.hasMoreTokens()) { Long tableId = Long.valueOf(tableInPathTokenizer.nextToken()); QueryBizLogic bizLogic = (QueryBizLogic)BizLogicFactory .getBizLogic(Constants.SIMPLE_QUERY_INTERFACE_ID); String aliasName = bizLogic.getAliasNameFromTableId(tableId); if (aliasName != null) { fromTables.add(aliasName); } } } /** * Adds quotes to the value of the attribute whose type is string or date * and returns the tables in path for that object. * @param simpleConditionsNode The conditio node to be checked. * @return The tables in path for that object. 
*/ private String addSingleQuotes(SimpleConditionsNode simpleConditionsNode) { String columnName = simpleConditionsNode.getCondition().getDataElement().getField(); StringTokenizer stringToken = new StringTokenizer(columnName, "."); simpleConditionsNode.getCondition().getDataElement().setTable(stringToken.nextToken()); simpleConditionsNode.getCondition().getDataElement().setField(stringToken.nextToken()); String fieldType = stringToken.nextToken(); String value = simpleConditionsNode.getCondition().getValue(); String tableInPath = null; if (stringToken.hasMoreTokens()) { tableInPath = stringToken.nextToken(); } // For operators STARTS_WITH, ENDS_WITH, CONTAINS. String operator = simpleConditionsNode.getCondition().getOperator().getOperator(); if(operator.equals(Operator.STARTS_WITH)) { value = value+"%"; simpleConditionsNode.getCondition().getOperator().setOperator(Operator.LIKE); } else if(operator.equals(Operator.ENDS_WITH)) { value = "%"+value; simpleConditionsNode.getCondition().getOperator().setOperator(Operator.LIKE); } else if(operator.equals(Operator.CONTAINS)) { value = "%"+value+"%"; simpleConditionsNode.getCondition().getOperator().setOperator(Operator.LIKE); } if (fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_VARCHAR) || fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_DATE) || fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_TEXT)) { if (fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_VARCHAR) || fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_TEXT)) { value = "'" + value + "'"; } else { value = "STR_TO_DATE('" + value + "','" + Constants.MYSQL_DATE_PATTERN + "')"; } simpleConditionsNode.getCondition().setValue(value); } return tableInPath; } /** * Adds the activity status conditions for all the objects in the from clause. * @param simpleConditionNodeCollection The SimpleConditionsNode Collection. * @param fromTables Set of tables in the from clause of the query. * @param simpleConditionsNode The last condition in the simpleConditionNode's Collection. */ public void addActivityStatusConditions(Collection simpleConditionNodeCollection, Set fromTables) { // Creating aliasName set with full package names. // Required for checking the activityStatus. Set fromTablesWithPackageNames = new HashSet(); Iterator fromTableSetIterator = fromTables.iterator(); while (fromTableSetIterator.hasNext()) { String tableName = "edu.wustl.catissuecore.domain."+fromTableSetIterator.next(); fromTablesWithPackageNames.add(tableName); } // Check and get the activity status conditions for all the objects in the conditions. List activityStatusConditionList = new ArrayList(); Iterator aliasNameIterator = fromTablesWithPackageNames.iterator(); while (aliasNameIterator.hasNext()) { String fullyQualifiedClassName = (String) aliasNameIterator.next(); SimpleConditionsNode activityStatusCondition = getActivityStatusCondition(fullyQualifiedClassName); if (activityStatusCondition != null) { activityStatusCondition.getOperator().setOperator(Constants.AND_JOIN_CONDITION); activityStatusConditionList.add(activityStatusCondition); } } if (activityStatusConditionList.isEmpty() == false) { // Set the next operator of the last simple condition nodes as AND. Iterator iterator = simpleConditionNodeCollection.iterator(); SimpleConditionsNode simpleConditionsNode = null; while (iterator.hasNext()) { simpleConditionsNode = (SimpleConditionsNode) iterator.next(); } simpleConditionsNode.getOperator().setOperator(Constants.AND_JOIN_CONDITION); // Add the activity status conditions in the simple conditions node collection. 
simpleConditionNodeCollection.addAll(activityStatusConditionList); } } /** * Returns SimpleConditionsNode if the object named aliasName contains the activityStatus * data member, else returns null. * @param fullyQualifiedClassName The fully qualified name of the class in which * activity status field is to be searched. * @return SimpleConditionsNode if the object named aliasName contains the activityStatus * data member, else returns null. */ private SimpleConditionsNode getActivityStatusCondition(String fullyQualifiedClassName) { SimpleConditionsNode activityStatusCondition = null; //Returns the Class object if it is a valid class else returns null. Class className = edu.wustl.common.util.Utility.getClassObject(fullyQualifiedClassName); if (className != null) { Field[] objectFields = className.getDeclaredFields(); for (int i = 0; i < objectFields.length; i++) { if (objectFields[i].getName().equals(Constants.ACTIVITY_STATUS)) { activityStatusCondition = new SimpleConditionsNode(); activityStatusCondition.getCondition().getDataElement().setTable( Utility.parseClassName(fullyQualifiedClassName)); activityStatusCondition.getCondition().getDataElement().setField( Constants.ACTIVITY_STATUS_COLUMN); activityStatusCondition.getCondition().getOperator().setOperator("!="); activityStatusCondition.getCondition().setValue( "'" + Constants.ACTIVITY_STATUS_DISABLED + "'"); } } if ((activityStatusCondition == null) && (className.getSuperclass().getName().equals( "edu.wustl.catissuecore.domain.AbstractDomainObject") == false)) { activityStatusCondition = getActivityStatusCondition(className.getSuperclass() .getName()); } } return activityStatusCondition; } private Vector getViewElements(String []selectedColumnsList) { /*Split the string which is in the form TableAlias.columnNames.columnDisplayNames * and set the dataelement object. */ Vector vector = new Vector(); for(int i=0;i<selectedColumnsList.length;i++) { StringTokenizer st= new StringTokenizer(selectedColumnsList[i],"."); DataElement dataElement = new DataElement(); while (st.hasMoreTokens()) { dataElement.setTable(st.nextToken()); String field = st.nextToken(); Logger.out.debug(st.nextToken()); String tableInPath = null; if (st.hasMoreTokens()) { tableInPath = st.nextToken(); field = field+"."+tableInPath; Logger.out.debug("Field with the table id......."+field); } dataElement.setField(field); } vector.add(dataElement); } return vector; } private List getColumnDisplayNames(String []selectedColumnsList) { /*Split the string which is in the form TableAlias.columnNames.columnDisplayNames * and set the dataelement object. */ List columnDisplayNames = new ArrayList(); for(int i=0;i<selectedColumnsList.length;i++) { StringTokenizer st= new StringTokenizer(selectedColumnsList[i],"."); DataElement dataElement = new DataElement(); while(st.hasMoreTokens()) { st.nextToken(); st.nextToken(); String displayName = st.nextToken(); columnDisplayNames.add(displayName); Logger.out.debug("columnDisplayNames"+displayName); if(st.hasMoreTokens()) st.nextToken(); } } return columnDisplayNames; } //set the result view for the query. public Vector getSelectDataElements(String[] selectedColumns, Set tableSet, List columnNames) throws DAOException { Vector selectDataElements = null; //If columns not conigured, set to default. if(selectedColumns==null) { selectDataElements = getViewElements(tableSet, columnNames); } //else set to the configured columns. 
else { selectDataElements = getViewElements(selectedColumns); List columnNamesList = getColumnDisplayNames(selectedColumns); columnNames.addAll(columnNamesList); } // Getting the aliasNames of the table ids in the tables in path. Set forFromSet = configureSelectDataElements(selectDataElements); tableSet.addAll(forFromSet); return selectDataElements; } /** * Gets the fields from select clause of the query and returns * Set of objects of that attributes to be added in the from clause. * @param query The query object whose select fields are to be get. * @return Set of objects of that attributes to be added in the from clause. * @throws DAOException */ private Set configureSelectDataElements(Vector selectDataElements) throws DAOException { Set forFromSet = new HashSet(); Iterator iterator = selectDataElements.iterator(); QueryBizLogic bizLogic = (QueryBizLogic)BizLogicFactory .getBizLogic(Constants.SIMPLE_QUERY_INTERFACE_ID); while (iterator.hasNext()) { DataElement dataElement = (DataElement) iterator.next(); String fieldName = dataElement.getField(); StringTokenizer stringToken = new StringTokenizer(dataElement.getField(), "."); dataElement.setField(stringToken.nextToken()); forFromSet.add(dataElement.getTable()); if (stringToken.hasMoreElements()) { String tableInPath = stringToken.nextToken(); addTablesInPathToFromSet(forFromSet, tableInPath); } } return forFromSet; } /** * Returns the Vector of DataElement objects for the select clause of the query. * And also list the column names in the columnList list. * @param aliasNameSet The Set of the alias names for which the DataElements are to be created. * @param columnList List of column names to be shown in the spreadsheet view. * @return the Vector of DataElement objects for the select clause of the query. 
* @throws DAOException */ public Vector getViewElements(Set aliasNameSet, List columnList) throws DAOException { Vector vector = new Vector(); try { JDBCDAO jdbcDao = new JDBCDAO(); jdbcDao.openSession(null); Iterator aliasNameIterator = aliasNameSet.iterator(); while (aliasNameIterator.hasNext()) { String aliasName = (String) aliasNameIterator.next(); String sql =" SELECT tableData2.ALIAS_NAME, temp.COLUMN_NAME, temp.TABLES_IN_PATH, temp.DISPLAY_NAME " + " from CATISSUE_QUERY_INTERFACE_TABLE_DATA tableData2 join " + " ( SELECT columnData.COLUMN_NAME, columnData.TABLE_ID, displayData.DISPLAY_NAME, relationData.TABLES_IN_PATH " + " FROM CATISSUE_QUERY_INTERFACE_COLUMN_DATA columnData, " + " CATISSUE_TABLE_RELATION relationData, " + " CATISSUE_QUERY_INTERFACE_TABLE_DATA tableData, " + " CATISSUE_SEARCH_DISPLAY_DATA displayData " + " where relationData.CHILD_TABLE_ID = columnData.TABLE_ID and " + " relationData.PARENT_TABLE_ID = tableData.TABLE_ID and " + " relationData.RELATIONSHIP_ID = displayData.RELATIONSHIP_ID and " + " columnData.IDENTIFIER = displayData.COL_ID and " + " tableData.ALIAS_NAME = '"+aliasName+"') as temp " + " on temp.TABLE_ID = tableData2.TABLE_ID"; Logger.out.debug("DATA ELEMENT SQL : "+sql); List list = jdbcDao.executeQuery(sql, null, false, null); Logger.out.debug("list.size()************************"+list.size()); String [] columnNames = new String[list.size()]; Iterator iterator = list.iterator(); int i = 0; while(iterator.hasNext()) { List rowList = (List) iterator.next(); DataElement dataElement = new DataElement(); dataElement.setTable((String)rowList.get(0)); dataElement.setField((String)rowList.get(1)+"."+(String)rowList.get(2)); vector.add(dataElement); columnList.add((String)rowList.get(3)); } } jdbcDao.closeSession(); } catch(ClassNotFoundException classExp) { throw new DAOException(classExp.getMessage(),classExp); } return vector; } /** * @param fromAliasNameValue * @return * @throws DAOException */ public QueryResultObjectData createQueryResultObjectData(String fromAliasNameValue) throws DAOException { QueryResultObjectData queryResultObjectData; queryResultObjectData = new QueryResultObjectData(); queryResultObjectData.setAliasName(fromAliasNameValue); //Aarti: getting related tables that should be dependent //on main object for authorization Vector relatedTables = new Vector(); relatedTables = QueryBizLogic .getRelatedTableAliases(fromAliasNameValue); // Aarti: Get main query objects which should have individual checks //for authorization and should not be dependent on others Vector mainQueryObjects = QueryBizLogic.getMainObjectsOfQuery(); String queryObject; //Aarti: remove independent query objects from related objects //vector and add them to tableSet so that their authorization //is checked individually for (int i = 0; i < mainQueryObjects.size(); i++) { queryObject = (String) mainQueryObjects.get(i); if (relatedTables.contains(queryObject)) { relatedTables.remove(queryObject); // tableSet.add(queryObject); if(!queryObject.equals(fromAliasNameValue)) { queryResultObjectData.addRelatedQueryResultObject(new QueryResultObjectData(queryObject)); } } } //Aarti: Map all related tables to the main table // relatedTablesMap.put(fromAliasNameValue, relatedTables); queryResultObjectData.setDependentObjectAliases(relatedTables); return queryResultObjectData; } /** * @param queryResultObjectDataMap * @param query */ public List addObjectIdentifierColumnsToQuery(Map queryResultObjectDataMap, Query query) { DataElement identifierDataElement; List columnNames = new 
ArrayList(); Set keySet = queryResultObjectDataMap.keySet(); Iterator keyIterator = keySet.iterator(); QueryResultObjectData queryResultObjectData2; QueryResultObjectData queryResultObjectData3; Vector queryObjects; Vector queryObjectNames; int initialColumnNumbers = query.getResultView().size(); Map columnIdsMap; int columnId =0; for(int i=0;keyIterator.hasNext();i++) { queryResultObjectData2 = (QueryResultObjectData) queryResultObjectDataMap.get(keyIterator.next()); queryObjects = queryResultObjectData2.getIndependentQueryObjects(); queryObjectNames = queryResultObjectData2.getIndependentObjectAliases(); for(int j = 0 ; j<queryObjects.size();j++) { columnIdsMap = query.getIdentifierColumnIds(queryObjectNames); queryResultObjectData3 = (QueryResultObjectData) queryObjects.get(j); identifierDataElement = new DataElement(queryResultObjectData3.getAliasName(), Constants.IDENTIFIER); // query.addElementToView(columnId, identifierDataElement); // queryResultObjectData3.setIdentifierColumnId(columnId++); queryResultObjectData3.setIdentifierColumnId(((Integer)columnIdsMap.get(queryResultObjectData3.getAliasName())).intValue()-1); // columnNames.add(queryResultObjectData3.getAliasName() + " ID"); } } int columnsAdded = query.getResultView().size() - initialColumnNumbers; for(int i=0;i<columnsAdded;i++) { columnNames.add(" ID"); } return columnNames; } /** * @param queryResultObjectDataMap * @param query */ public void setDependentIdentifiedColumnIds(Map queryResultObjectDataMap, Query query) { Iterator keyIterator; QueryResultObjectData queryResultObjectData2; QueryResultObjectData queryResultObjectData3; Vector queryObjects; Set keySet2 = queryResultObjectDataMap.keySet(); keyIterator = keySet2.iterator(); for(int i=0;keyIterator.hasNext();i++) { queryResultObjectData2 = (QueryResultObjectData) queryResultObjectDataMap.get(keyIterator.next()); queryObjects = queryResultObjectData2.getIndependentQueryObjects(); for(int j = 0 ; j<queryObjects.size();j++) { queryResultObjectData3 = (QueryResultObjectData) queryObjects.get(j); queryResultObjectData3.setDependentColumnIds(query.getColumnIds(queryResultObjectData3.getAliasName(),queryResultObjectData3.getDependentObjectAliases())); queryResultObjectData3.setIdentifiedDataColumnIds(query.getIdentifiedColumnIds(queryResultObjectData3.getAliasName(),queryResultObjectData3.getDependentObjectAliases())); Logger.out.debug(" table:"+queryResultObjectData3.getAliasName()+" columnIds:"+queryResultObjectData3.getDependentColumnIds()); } } } /** * @param fromTables * @param queryResultObjectDataMap * @param query */ public void createQueryResultObjectData(Set fromTables, Map queryResultObjectDataMap, Query query) throws DAOException{ Iterator iterator = fromTables.iterator(); String tableAlias; QueryResultObjectData queryResultObjectData; while (iterator.hasNext()) { tableAlias = (String) iterator.next(); queryResultObjectData = createQueryResultObjectData(tableAlias); if(query.getColumnIds(tableAlias,queryResultObjectData.getDependentObjectAliases()).size()!=0) { queryResultObjectDataMap.put(tableAlias,queryResultObjectData); } } } }
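The operator rewriting inside addSingleQuotes() is the part of the class that is easiest to misread, so here is a small standalone sketch of just that step. It is illustrative only: the method name toSqlLikeValue and the plain String operator codes are invented for the example, while the wildcard placement and quoting mirror the code above (STARTS_WITH appends %, ENDS_WITH prepends %, CONTAINS wraps the value, and string values end up wrapped in single quotes so they can be dropped into a LIKE clause).

    // Illustrative sketch, not part of SimpleQueryBizLogic.
    static String toSqlLikeValue(String operator, String value) {
        if ("STARTS_WITH".equals(operator)) {
            return "'" + value + "%'";       // name STARTS_WITH Gau  ->  name LIKE 'Gau%'
        } else if ("ENDS_WITH".equals(operator)) {
            return "'%" + value + "'";       // name ENDS_WITH tty    ->  name LIKE '%tty'
        } else if ("CONTAINS".equals(operator)) {
            return "'%" + value + "%'";      // name CONTAINS auta    ->  name LIKE '%auta%'
        }
        return "'" + value + "'";            // other operators keep the quoted value unchanged
    }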
WEB-INF/src/edu/wustl/catissuecore/bizlogic/SimpleQueryBizLogic.java
/** * <p>Title: SimpleQueryBizLogic Class> * <p>Description: SimpleQueryBizLogic contains the bizlogic required for simple query interface.</p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Gautam Shetty * @version 1.00 */ package edu.wustl.catissuecore.bizlogic; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.Vector; import edu.wustl.catissuecore.dao.JDBCDAO; import edu.wustl.catissuecore.query.DataElement; import edu.wustl.catissuecore.query.Operator; import edu.wustl.catissuecore.query.Query; import edu.wustl.catissuecore.query.SimpleConditionsNode; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.catissuecore.util.global.Utility; import edu.wustl.common.beans.QueryResultObjectData; import edu.wustl.common.util.dbManager.DAOException; import edu.wustl.common.util.logger.Logger; /** * SimpleQueryBizLogic contains the bizlogic required for simple query interface. * @author gautam_shetty */ public class SimpleQueryBizLogic extends DefaultBizLogic { /** * Adds single quotes (') for string and date type attributes in the condition collecion * and the returns the Set of objects to which the condition attributes belong. * @param simpleConditionNodeCollection The condition collection. * @return the Set of objects to which the condition attributes belong. * @throws DAOException */ public Set handleStringAndDateConditions(Collection simpleConditionNodeCollection, Set fromTables) throws DAOException { //Adding single quotes to strings and date values. Iterator iterator = simpleConditionNodeCollection.iterator(); while (iterator.hasNext()) { SimpleConditionsNode simpleConditionsNode = (SimpleConditionsNode) iterator.next(); // Add all the objects selected in UI to the fromtables Set. fromTables.add(simpleConditionsNode.getCondition().getDataElement().getTable()); // Adds single quotes to the value of attributes whose type is string or date. String tableInPath = addSingleQuotes(simpleConditionsNode); //Get the tables in path for this field and add it in the fromTables Set. if (tableInPath != null) { addTablesInPathToFromSet(fromTables, tableInPath); } fromTables.add(simpleConditionsNode.getCondition().getDataElement().getTable()); } return fromTables; } /** * Gets the alias names of the tables in path and adds them in the fromTables Set passed. * @param fromTables The Set to which the alias names are to be added. * @param tableInPath The ids of tables in path separated by : * @throws DAOException */ private void addTablesInPathToFromSet(Set fromTables, String tableInPath) throws DAOException { StringTokenizer tableInPathTokenizer = new StringTokenizer(tableInPath, ":"); while (tableInPathTokenizer.hasMoreTokens()) { Long tableId = Long.valueOf(tableInPathTokenizer.nextToken()); QueryBizLogic bizLogic = (QueryBizLogic)BizLogicFactory .getBizLogic(Constants.SIMPLE_QUERY_INTERFACE_ID); String aliasName = bizLogic.getAliasNameFromTableId(tableId); if (aliasName != null) { fromTables.add(aliasName); } } } /** * Adds quotes to the value of the attribute whose type is string or date * and returns the tables in path for that object. * @param simpleConditionsNode The conditio node to be checked. * @return The tables in path for that object. 
*/ private String addSingleQuotes(SimpleConditionsNode simpleConditionsNode) { String columnName = simpleConditionsNode.getCondition().getDataElement().getField(); StringTokenizer stringToken = new StringTokenizer(columnName, "."); simpleConditionsNode.getCondition().getDataElement().setTable(stringToken.nextToken()); simpleConditionsNode.getCondition().getDataElement().setField(stringToken.nextToken()); String fieldType = stringToken.nextToken(); String value = simpleConditionsNode.getCondition().getValue(); String tableInPath = null; if (stringToken.hasMoreTokens()) { tableInPath = stringToken.nextToken(); } // For operators STARTS_WITH, ENDS_WITH, CONTAINS. String operator = simpleConditionsNode.getCondition().getOperator().getOperator(); if(operator.equals(Operator.STARTS_WITH)) { value = value+"%"; simpleConditionsNode.getCondition().getOperator().setOperator(Operator.LIKE); } else if(operator.equals(Operator.ENDS_WITH)) { value = "%"+value; simpleConditionsNode.getCondition().getOperator().setOperator(Operator.LIKE); } else if(operator.equals(Operator.CONTAINS)) { value = "%"+value+"%"; simpleConditionsNode.getCondition().getOperator().setOperator(Operator.LIKE); } if (fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_VARCHAR) || fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_DATE) || fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_TEXT)) { if (fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_VARCHAR)) { value = "'" + value + "'"; } else { value = "STR_TO_DATE('" + value + "','" + Constants.MYSQL_DATE_PATTERN + "')"; } simpleConditionsNode.getCondition().setValue(value); } return tableInPath; } /** * Adds the activity status conditions for all the objects in the from clause. * @param simpleConditionNodeCollection The SimpleConditionsNode Collection. * @param fromTables Set of tables in the from clause of the query. * @param simpleConditionsNode The last condition in the simpleConditionNode's Collection. */ public void addActivityStatusConditions(Collection simpleConditionNodeCollection, Set fromTables) { // Creating aliasName set with full package names. // Required for checking the activityStatus. Set fromTablesWithPackageNames = new HashSet(); Iterator fromTableSetIterator = fromTables.iterator(); while (fromTableSetIterator.hasNext()) { String tableName = "edu.wustl.catissuecore.domain."+fromTableSetIterator.next(); fromTablesWithPackageNames.add(tableName); } // Check and get the activity status conditions for all the objects in the conditions. List activityStatusConditionList = new ArrayList(); Iterator aliasNameIterator = fromTablesWithPackageNames.iterator(); while (aliasNameIterator.hasNext()) { String fullyQualifiedClassName = (String) aliasNameIterator.next(); SimpleConditionsNode activityStatusCondition = getActivityStatusCondition(fullyQualifiedClassName); if (activityStatusCondition != null) { activityStatusCondition.getOperator().setOperator(Constants.AND_JOIN_CONDITION); activityStatusConditionList.add(activityStatusCondition); } } if (activityStatusConditionList.isEmpty() == false) { // Set the next operator of the last simple condition nodes as AND. Iterator iterator = simpleConditionNodeCollection.iterator(); SimpleConditionsNode simpleConditionsNode = null; while (iterator.hasNext()) { simpleConditionsNode = (SimpleConditionsNode) iterator.next(); } simpleConditionsNode.getOperator().setOperator(Constants.AND_JOIN_CONDITION); // Add the activity status conditions in the simple conditions node collection. 
simpleConditionNodeCollection.addAll(activityStatusConditionList); } } /** * Returns SimpleConditionsNode if the object named aliasName contains the activityStatus * data member, else returns null. * @param fullyQualifiedClassName The fully qualified name of the class in which * activity status field is to be searched. * @return SimpleConditionsNode if the object named aliasName contains the activityStatus * data member, else returns null. */ private SimpleConditionsNode getActivityStatusCondition(String fullyQualifiedClassName) { SimpleConditionsNode activityStatusCondition = null; //Returns the Class object if it is a valid class else returns null. Class className = edu.wustl.common.util.Utility.getClassObject(fullyQualifiedClassName); if (className != null) { Field[] objectFields = className.getDeclaredFields(); for (int i = 0; i < objectFields.length; i++) { if (objectFields[i].getName().equals(Constants.ACTIVITY_STATUS)) { activityStatusCondition = new SimpleConditionsNode(); activityStatusCondition.getCondition().getDataElement().setTable( Utility.parseClassName(fullyQualifiedClassName)); activityStatusCondition.getCondition().getDataElement().setField( Constants.ACTIVITY_STATUS_COLUMN); activityStatusCondition.getCondition().getOperator().setOperator("!="); activityStatusCondition.getCondition().setValue( "'" + Constants.ACTIVITY_STATUS_DISABLED + "'"); } } if ((activityStatusCondition == null) && (className.getSuperclass().getName().equals( "edu.wustl.catissuecore.domain.AbstractDomainObject") == false)) { activityStatusCondition = getActivityStatusCondition(className.getSuperclass() .getName()); } } return activityStatusCondition; } private Vector getViewElements(String []selectedColumnsList) { /*Split the string which is in the form TableAlias.columnNames.columnDisplayNames * and set the dataelement object. */ Vector vector = new Vector(); for(int i=0;i<selectedColumnsList.length;i++) { StringTokenizer st= new StringTokenizer(selectedColumnsList[i],"."); DataElement dataElement = new DataElement(); while (st.hasMoreTokens()) { dataElement.setTable(st.nextToken()); String field = st.nextToken(); Logger.out.debug(st.nextToken()); String tableInPath = null; if (st.hasMoreTokens()) { tableInPath = st.nextToken(); field = field+"."+tableInPath; Logger.out.debug("Field with the table id......."+field); } dataElement.setField(field); } vector.add(dataElement); } return vector; } private List getColumnDisplayNames(String []selectedColumnsList) { /*Split the string which is in the form TableAlias.columnNames.columnDisplayNames * and set the dataelement object. */ List columnDisplayNames = new ArrayList(); for(int i=0;i<selectedColumnsList.length;i++) { StringTokenizer st= new StringTokenizer(selectedColumnsList[i],"."); DataElement dataElement = new DataElement(); while(st.hasMoreTokens()) { st.nextToken(); st.nextToken(); String displayName = st.nextToken(); columnDisplayNames.add(displayName); Logger.out.debug("columnDisplayNames"+displayName); if(st.hasMoreTokens()) st.nextToken(); } } return columnDisplayNames; } //set the result view for the query. public Vector getSelectDataElements(String[] selectedColumns, Set tableSet, List columnNames) throws DAOException { Vector selectDataElements = null; //If columns not conigured, set to default. if(selectedColumns==null) { selectDataElements = getViewElements(tableSet, columnNames); } //else set to the configured columns. 
else { selectDataElements = getViewElements(selectedColumns); List columnNamesList = getColumnDisplayNames(selectedColumns); columnNames.addAll(columnNamesList); } // Getting the aliasNames of the table ids in the tables in path. Set forFromSet = configureSelectDataElements(selectDataElements); tableSet.addAll(forFromSet); return selectDataElements; } /** * Gets the fields from select clause of the query and returns * Set of objects of that attributes to be added in the from clause. * @param query The query object whose select fields are to be get. * @return Set of objects of that attributes to be added in the from clause. * @throws DAOException */ private Set configureSelectDataElements(Vector selectDataElements) throws DAOException { Set forFromSet = new HashSet(); Iterator iterator = selectDataElements.iterator(); QueryBizLogic bizLogic = (QueryBizLogic)BizLogicFactory .getBizLogic(Constants.SIMPLE_QUERY_INTERFACE_ID); while (iterator.hasNext()) { DataElement dataElement = (DataElement) iterator.next(); String fieldName = dataElement.getField(); StringTokenizer stringToken = new StringTokenizer(dataElement.getField(), "."); dataElement.setField(stringToken.nextToken()); forFromSet.add(dataElement.getTable()); if (stringToken.hasMoreElements()) { String tableInPath = stringToken.nextToken(); addTablesInPathToFromSet(forFromSet, tableInPath); } } return forFromSet; } /** * Returns the Vector of DataElement objects for the select clause of the query. * And also list the column names in the columnList list. * @param aliasNameSet The Set of the alias names for which the DataElements are to be created. * @param columnList List of column names to be shown in the spreadsheet view. * @return the Vector of DataElement objects for the select clause of the query. 
* @throws DAOException */ public Vector getViewElements(Set aliasNameSet, List columnList) throws DAOException { Vector vector = new Vector(); try { JDBCDAO jdbcDao = new JDBCDAO(); jdbcDao.openSession(null); Iterator aliasNameIterator = aliasNameSet.iterator(); while (aliasNameIterator.hasNext()) { String aliasName = (String) aliasNameIterator.next(); String sql =" SELECT tableData2.ALIAS_NAME, temp.COLUMN_NAME, temp.TABLES_IN_PATH, temp.DISPLAY_NAME " + " from CATISSUE_QUERY_INTERFACE_TABLE_DATA tableData2 join " + " ( SELECT columnData.COLUMN_NAME, columnData.TABLE_ID, displayData.DISPLAY_NAME, relationData.TABLES_IN_PATH " + " FROM CATISSUE_QUERY_INTERFACE_COLUMN_DATA columnData, " + " CATISSUE_TABLE_RELATION relationData, " + " CATISSUE_QUERY_INTERFACE_TABLE_DATA tableData, " + " CATISSUE_SEARCH_DISPLAY_DATA displayData " + " where relationData.CHILD_TABLE_ID = columnData.TABLE_ID and " + " relationData.PARENT_TABLE_ID = tableData.TABLE_ID and " + " relationData.RELATIONSHIP_ID = displayData.RELATIONSHIP_ID and " + " columnData.IDENTIFIER = displayData.COL_ID and " + " tableData.ALIAS_NAME = '"+aliasName+"') as temp " + " on temp.TABLE_ID = tableData2.TABLE_ID"; Logger.out.debug("DATA ELEMENT SQL : "+sql); List list = jdbcDao.executeQuery(sql, null, false, null); Logger.out.debug("list.size()************************"+list.size()); String [] columnNames = new String[list.size()]; Iterator iterator = list.iterator(); int i = 0; while(iterator.hasNext()) { List rowList = (List) iterator.next(); DataElement dataElement = new DataElement(); dataElement.setTable((String)rowList.get(0)); dataElement.setField((String)rowList.get(1)+"."+(String)rowList.get(2)); vector.add(dataElement); columnList.add((String)rowList.get(3)); } } jdbcDao.closeSession(); } catch(ClassNotFoundException classExp) { throw new DAOException(classExp.getMessage(),classExp); } return vector; } /** * @param fromAliasNameValue * @return * @throws DAOException */ public QueryResultObjectData createQueryResultObjectData(String fromAliasNameValue) throws DAOException { QueryResultObjectData queryResultObjectData; queryResultObjectData = new QueryResultObjectData(); queryResultObjectData.setAliasName(fromAliasNameValue); //Aarti: getting related tables that should be dependent //on main object for authorization Vector relatedTables = new Vector(); relatedTables = QueryBizLogic .getRelatedTableAliases(fromAliasNameValue); // Aarti: Get main query objects which should have individual checks //for authorization and should not be dependent on others Vector mainQueryObjects = QueryBizLogic.getMainObjectsOfQuery(); String queryObject; //Aarti: remove independent query objects from related objects //vector and add them to tableSet so that their authorization //is checked individually for (int i = 0; i < mainQueryObjects.size(); i++) { queryObject = (String) mainQueryObjects.get(i); if (relatedTables.contains(queryObject)) { relatedTables.remove(queryObject); // tableSet.add(queryObject); if(!queryObject.equals(fromAliasNameValue)) { queryResultObjectData.addRelatedQueryResultObject(new QueryResultObjectData(queryObject)); } } } //Aarti: Map all related tables to the main table // relatedTablesMap.put(fromAliasNameValue, relatedTables); queryResultObjectData.setDependentObjectAliases(relatedTables); return queryResultObjectData; } /** * @param queryResultObjectDataMap * @param query */ public List addObjectIdentifierColumnsToQuery(Map queryResultObjectDataMap, Query query) { DataElement identifierDataElement; List columnNames = new 
ArrayList(); Set keySet = queryResultObjectDataMap.keySet(); Iterator keyIterator = keySet.iterator(); QueryResultObjectData queryResultObjectData2; QueryResultObjectData queryResultObjectData3; Vector queryObjects; Vector queryObjectNames; int initialColumnNumbers = query.getResultView().size(); Map columnIdsMap; int columnId =0; for(int i=0;keyIterator.hasNext();i++) { queryResultObjectData2 = (QueryResultObjectData) queryResultObjectDataMap.get(keyIterator.next()); queryObjects = queryResultObjectData2.getIndependentQueryObjects(); queryObjectNames = queryResultObjectData2.getIndependentObjectAliases(); for(int j = 0 ; j<queryObjects.size();j++) { columnIdsMap = query.getIdentifierColumnIds(queryObjectNames); queryResultObjectData3 = (QueryResultObjectData) queryObjects.get(j); identifierDataElement = new DataElement(queryResultObjectData3.getAliasName(), Constants.IDENTIFIER); // query.addElementToView(columnId, identifierDataElement); // queryResultObjectData3.setIdentifierColumnId(columnId++); queryResultObjectData3.setIdentifierColumnId(((Integer)columnIdsMap.get(queryResultObjectData3.getAliasName())).intValue()-1); // columnNames.add(queryResultObjectData3.getAliasName() + " ID"); } } int columnsAdded = query.getResultView().size() - initialColumnNumbers; for(int i=0;i<columnsAdded;i++) { columnNames.add(" ID"); } return columnNames; } /** * @param queryResultObjectDataMap * @param query */ public void setDependentIdentifiedColumnIds(Map queryResultObjectDataMap, Query query) { Iterator keyIterator; QueryResultObjectData queryResultObjectData2; QueryResultObjectData queryResultObjectData3; Vector queryObjects; Set keySet2 = queryResultObjectDataMap.keySet(); keyIterator = keySet2.iterator(); for(int i=0;keyIterator.hasNext();i++) { queryResultObjectData2 = (QueryResultObjectData) queryResultObjectDataMap.get(keyIterator.next()); queryObjects = queryResultObjectData2.getIndependentQueryObjects(); for(int j = 0 ; j<queryObjects.size();j++) { queryResultObjectData3 = (QueryResultObjectData) queryObjects.get(j); queryResultObjectData3.setDependentColumnIds(query.getColumnIds(queryResultObjectData3.getAliasName(),queryResultObjectData3.getDependentObjectAliases())); queryResultObjectData3.setIdentifiedDataColumnIds(query.getIdentifiedColumnIds(queryResultObjectData3.getAliasName(),queryResultObjectData3.getDependentObjectAliases())); Logger.out.debug(" table:"+queryResultObjectData3.getAliasName()+" columnIds:"+queryResultObjectData3.getDependentColumnIds()); } } } /** * @param fromTables * @param queryResultObjectDataMap * @param query */ public void createQueryResultObjectData(Set fromTables, Map queryResultObjectDataMap, Query query) throws DAOException{ Iterator iterator = fromTables.iterator(); String tableAlias; QueryResultObjectData queryResultObjectData; while (iterator.hasNext()) { tableAlias = (String) iterator.next(); queryResultObjectData = createQueryResultObjectData(tableAlias); if(query.getColumnIds(tableAlias,queryResultObjectData.getDependentObjectAliases()).size()!=0) { queryResultObjectDataMap.put(tableAlias,queryResultObjectData); } } } }
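The only functional difference between the two versions of this file is the inner quoting check in addSingleQuotes(): in the older code above only VARCHAR values were quoted, so TEXT-typed values fell through to the STR_TO_DATE branch; the newer version quotes VARCHAR and TEXT alike. A reduced excerpt of the two checks, side by side:

    // old inner check: TEXT values fell through to the date branch
    if (fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_VARCHAR)) { value = "'" + value + "'"; }
    // new inner check: TEXT values are quoted the same way as VARCHAR
    if (fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_VARCHAR) || fieldType.equalsIgnoreCase(Constants.FIELD_TYPE_TEXT)) { value = "'" + value + "'"; }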
adding quotes for text fields. SVN-Revision: 2220
WEB-INF/src/edu/wustl/catissuecore/bizlogic/SimpleQueryBizLogic.java
adding quotes for text fields.
Java
bsd-3-clause
c5dc2318c90554311b0b4d56cff93c805c72be29
0
gutomaia/steam-condenser-java
/** * This code is free software; you can redistribute it and/or modify it under * the terms of the new BSD License. * * Copyright (c) 2009-2010, Sebastian Staudt */ package steamcondenser.steam.community.l4d; import java.util.ArrayList; import java.util.HashMap; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import steamcondenser.SteamCondenserException; import steamcondenser.steam.community.GameWeapon; public class L4DStats extends AbstractL4DStats { /** * Creates a L4DStats object by calling the super constructor with the game * name "l4d" * @param steamId The custom URL or the 64bit Steam ID of the user * @throws SteamCondenserException If an error occurs */ public L4DStats(Object steamId) throws SteamCondenserException { super(steamId, "l4d"); } /** * @return A HashMap of Survival statistics for this user like revived * teammates. * If the Survival statistics haven't been parsed already, parsing is done * now. */ public HashMap<String, Object> getSurvivalStats() throws SteamCondenserException { if(!this.isPublic()) { return null; } if(this.survivalStats == null) { super.getSurvivalStats(); Element survivalStatsElement = (Element) ((Element) this.xmlData.getElementsByTagName("stats").item(0)).getElementsByTagName("survival").item(0); HashMap<String, L4DMap> mapsHash = new HashMap<String, L4DMap>(); NodeList mapNodes = survivalStatsElement.getElementsByTagName("maps").item(0).getChildNodes(); for(int i = 0; i < mapNodes.getLength(); i++) { Element mapData = (Element) mapNodes.item(i); mapsHash.put(mapData.getNodeName(), new L4DMap(mapData)); } this.survivalStats.put("maps", mapsHash); } return this.survivalStats; } /** * @return A HashMap of L4DWeapon for this user containing all Left4Dead * weapons. * If the weapons haven't been parsed already, parsing is done now. */ public HashMap<String, GameWeapon> getWeaponStats() { if(!this.isPublic()) { return null; } if(this.weaponStats == null) { Element weaponStatsElement = (Element) ((Element) this.xmlData.getElementsByTagName("stats").item(0)).getElementsByTagName("weapons").item(0); this.weaponStats = new HashMap<String, GameWeapon>(); NodeList weaponNodes = weaponStatsElement.getChildNodes(); for(int i = 0; i < weaponNodes.getLength(); i++) { Element weaponData = (Element) weaponNodes.item(i); String weaponName = weaponData.getNodeName(); GameWeapon weapon; if(!weaponName.equals("molotov") && !weaponName.equals("pipes")) { weapon = new L4DWeapon(weaponData); } else { weapon = new L4DExplosive(weaponData); } this.weaponStats.put(weaponName, weapon); } } return this.weaponStats; } }
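A short, hedged usage sketch for the class above: the Steam ID string is a placeholder, error handling is reduced to the declared exception, and only the keys of the weapon map are printed because GameWeapon's accessors are not shown here. getWeaponStats() returns null for non-public profiles, which the sketch checks first.

    // Illustrative only; "some-steam-id" stands in for a custom URL or 64bit Steam ID.
    public static void main(String[] args) throws SteamCondenserException {
        L4DStats stats = new L4DStats("some-steam-id");
        HashMap<String, GameWeapon> weapons = stats.getWeaponStats();
        if (weapons != null) {                 // null when the profile is not public
            for (String name : weapons.keySet()) {
                System.out.println(name);
            }
        }
    }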
java/src/steamcondenser/steam/community/l4d/L4DStats.java
/** * This code is free software; you can redistribute it and/or modify it under * the terms of the new BSD License. * * Copyright (c) 2009-2010, Sebastian Staudt */ package steamcondenser.steam.community.l4d; import java.util.ArrayList; import java.util.HashMap; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import steamcondenser.SteamCondenserException; import steamcondenser.steam.community.GameWeapon; public class L4DStats extends AbstractL4DStats { /** * Creates a L4DStats object by calling the super constructor with the game * name "l4d" * @param steamId The custom URL or the 64bit Steam ID of the user * @throws SteamCondenserException If an error occurs */ public L4DStats(Object steamId) throws SteamCondenserException { super(steamId, "l4d"); } /** * @return A HashMap of Survival statistics for this user like revived * teammates. * If the Survival statistics haven't been parsed already, parsing is done * now. */ public HashMap<String, Object> getSurvivalStats() { if(!this.isPublic()) { return null; } if(this.survivalStats == null) { super.getSurvivalStats(); Element survivalStatsElement = (Element) ((Element) this.xmlData.getElementsByTagName("stats").item(0)).getElementsByTagName("survival").item(0); HashMap<String, L4DMap> mapsHash = new HashMap<String, L4DMap>(); NodeList mapNodes = survivalStatsElement.getElementsByTagName("maps").item(0).getChildNodes(); for(int i = 0; i < mapNodes.getLength(); i++) { Element mapData = (Element) mapNodes.item(i); mapsHash.put(mapData.getNodeName(), new L4DMap(mapData)); } this.survivalStats.put("maps", mapsHash); } return this.survivalStats; } /** * @return A HashMap of L4DWeapon for this user containing all Left4Dead * weapons. * If the weapons haven't been parsed already, parsing is done now. */ public HashMap<String, GameWeapon> getWeaponStats() { if(!this.isPublic()) { return null; } if(this.weaponStats == null) { Element weaponStatsElement = (Element) ((Element) this.xmlData.getElementsByTagName("stats").item(0)).getElementsByTagName("weapons").item(0); this.weaponStats = new HashMap<String, GameWeapon>(); NodeList weaponNodes = weaponStatsElement.getChildNodes(); for(int i = 0; i < weaponNodes.getLength(); i++) { Element weaponData = (Element) weaponNodes.item(i); String weaponName = weaponData.getNodeName(); GameWeapon weapon; if(!weaponName.equals("molotov") && !weaponName.equals("pipes")) { weapon = new L4DWeapon(weaponData); } else { weapon = new L4DExplosive(weaponData); } this.weaponStats.put(weaponName, weapon); } } return this.weaponStats; } }
Java: Added missing throws declaration in L4DStats#getSurvivalStats
java/src/steamcondenser/steam/community/l4d/L4DStats.java
Java: Added missing throws declaration in L4DStats#getSurvivalStats
Java
mit
a43f9d12d98b075ebcd632721f35c7c5715d3b6c
0
bcgit/bc-java,bcgit/bc-java,bcgit/bc-java
package org.bouncycastle.crypto.encodings; import java.security.SecureRandom; import org.bouncycastle.crypto.AsymmetricBlockCipher; import org.bouncycastle.crypto.CipherParameters; import org.bouncycastle.crypto.CryptoServicesRegistrar; import org.bouncycastle.crypto.DataLengthException; import org.bouncycastle.crypto.Digest; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.params.ParametersWithRandom; import org.bouncycastle.crypto.util.DigestFactory; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.Pack; /** * Optimal Asymmetric Encryption Padding (OAEP) - see PKCS 1 V 2. */ public class OAEPEncoding implements AsymmetricBlockCipher { private byte[] defHash; private Digest mgf1Hash; private AsymmetricBlockCipher engine; private SecureRandom random; private boolean forEncryption; public OAEPEncoding( AsymmetricBlockCipher cipher) { this(cipher, DigestFactory.createSHA1(), null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash) { this(cipher, hash, null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, byte[] encodingParams) { this(cipher, hash, hash, encodingParams); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, Digest mgf1Hash, byte[] encodingParams) { this.engine = cipher; this.mgf1Hash = mgf1Hash; this.defHash = new byte[hash.getDigestSize()]; hash.reset(); if (encodingParams != null) { hash.update(encodingParams, 0, encodingParams.length); } hash.doFinal(defHash, 0); } public AsymmetricBlockCipher getUnderlyingCipher() { return engine; } public void init( boolean forEncryption, CipherParameters param) { if (param instanceof ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; this.random = rParam.getRandom(); } else { this.random = CryptoServicesRegistrar.getSecureRandom(); } engine.init(forEncryption, param); this.forEncryption = forEncryption; } public int getInputBlockSize() { int baseBlockSize = engine.getInputBlockSize(); if (forEncryption) { return baseBlockSize - 1 - 2 * defHash.length; } else { return baseBlockSize; } } public int getOutputBlockSize() { int baseBlockSize = engine.getOutputBlockSize(); if (forEncryption) { return baseBlockSize; } else { return baseBlockSize - 1 - 2 * defHash.length; } } public byte[] processBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { if (forEncryption) { return encodeBlock(in, inOff, inLen); } else { return decodeBlock(in, inOff, inLen); } } public byte[] encodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { if (inLen > getInputBlockSize()) { throw new DataLengthException("input data too long"); } byte[] block = new byte[getInputBlockSize() + 1 + 2 * defHash.length]; // // copy in the message // System.arraycopy(in, inOff, block, block.length - inLen, inLen); // // add sentinel // block[block.length - inLen - 1] = 0x01; // // as the block is already zeroed - there's no need to add PS (the >= 0 pad of 0) // // // add the hash of the encoding params. // System.arraycopy(defHash, 0, block, defHash.length, defHash.length); // // generate the seed. // byte[] seed = new byte[defHash.length]; random.nextBytes(seed); // // mask the message block. // byte[] mask = maskGeneratorFunction1(seed, 0, seed.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // add in the seed // System.arraycopy(seed, 0, block, 0, defHash.length); // // mask the seed. 
// mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } return engine.processBlock(block, 0, block.length); } /** * @exception InvalidCipherTextException if the decrypted block turns out to * be badly formatted. */ public byte[] decodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { byte[] data = engine.processBlock(in, inOff, inLen); byte[] block = new byte[engine.getOutputBlockSize()]; // // as we may have zeros in our leading bytes for the block we produced // on encryption, we need to make sure our decrypted block comes back // the same size. // boolean wrongData = (block.length < (2 * defHash.length) + 1); if (data.length <= block.length) { System.arraycopy(data, 0, block, block.length - data.length, data.length); } else { System.arraycopy(data, 0, block, 0, block.length); wrongData = true; } // // unmask the seed. // byte[] mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } // // unmask the message block. // mask = maskGeneratorFunction1(block, 0, defHash.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // check the hash of the encoding params. // long check to try to avoid this been a source of a timing attack. // boolean defHashWrong = false; for (int i = 0; i != defHash.length; i++) { defHashWrong |= defHash[i] != block[defHash.length + i]; } // // find the data block // int start = block.length; for (int index = 2 * defHash.length; index != block.length; index++) { if (block[index] != 0 & start == block.length) { start = index; } } boolean dataStartWrong = (start > (block.length - 1) | block[start] != 1); start++; if (defHashWrong | wrongData | dataStartWrong) { Arrays.fill(block, (byte)0); throw new InvalidCipherTextException("data wrong"); } // // extract the data block // byte[] output = new byte[block.length - start]; System.arraycopy(block, start, output, 0, output.length); Arrays.fill(block, (byte)0); return output; } /** * mask generator function, as described in PKCS1v2. */ private byte[] maskGeneratorFunction1( byte[] Z, int zOff, int zLen, int length) { byte[] mask = new byte[length]; byte[] hashBuf = new byte[mgf1Hash.getDigestSize()]; byte[] C = new byte[4]; int counter = 0; mgf1Hash.reset(); while (counter < (length / hashBuf.length)) { Pack.intToBigEndian(counter, C, 0); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, hashBuf.length); counter++; } if ((counter * hashBuf.length) < length) { Pack.intToBigEndian(counter, C, 0); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, mask.length - (counter * hashBuf.length)); } return mask; } }
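For context, here is a minimal wiring sketch of how this encoding class is typically used; it is not part of the file. It assumes an RSA raw engine from the same crypto package and a key pair (pubKey / privKey as CipherParameters) obtained elsewhere, e.g. from a key pair generator; processBlock can throw InvalidCipherTextException, which the surrounding code would have to handle.

    // Sketch only; pubKey / privKey and message are assumed to exist.
    AsymmetricBlockCipher oaep = new OAEPEncoding(new RSAEngine());    // defaults to SHA-1 for hash and MGF1
    oaep.init(true, pubKey);                                           // true = encode/encrypt
    byte[] cipherText = oaep.processBlock(message, 0, message.length);
    oaep.init(false, privKey);                                         // false = decode/decrypt
    byte[] recovered = oaep.processBlock(cipherText, 0, cipherText.length);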
core/src/main/java/org/bouncycastle/crypto/encodings/OAEPEncoding.java
package org.bouncycastle.crypto.encodings; import java.security.SecureRandom; import org.bouncycastle.crypto.AsymmetricBlockCipher; import org.bouncycastle.crypto.CipherParameters; import org.bouncycastle.crypto.CryptoServicesRegistrar; import org.bouncycastle.crypto.DataLengthException; import org.bouncycastle.crypto.Digest; import org.bouncycastle.crypto.InvalidCipherTextException; import org.bouncycastle.crypto.params.ParametersWithRandom; import org.bouncycastle.crypto.util.DigestFactory; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.Pack; /** * Optimal Asymmetric Encryption Padding (OAEP) - see PKCS 1 V 2. */ public class OAEPEncoding implements AsymmetricBlockCipher { private byte[] defHash; private Digest mgf1Hash; private AsymmetricBlockCipher engine; private SecureRandom random; private boolean forEncryption; public OAEPEncoding( AsymmetricBlockCipher cipher) { this(cipher, DigestFactory.createSHA1(), null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash) { this(cipher, hash, null); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, byte[] encodingParams) { this(cipher, hash, hash, encodingParams); } public OAEPEncoding( AsymmetricBlockCipher cipher, Digest hash, Digest mgf1Hash, byte[] encodingParams) { this.engine = cipher; this.mgf1Hash = mgf1Hash; this.defHash = new byte[hash.getDigestSize()]; hash.reset(); if (encodingParams != null) { hash.update(encodingParams, 0, encodingParams.length); } hash.doFinal(defHash, 0); } public AsymmetricBlockCipher getUnderlyingCipher() { return engine; } public void init( boolean forEncryption, CipherParameters param) { if (param instanceof ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; this.random = rParam.getRandom(); } else { this.random = CryptoServicesRegistrar.getSecureRandom(); } engine.init(forEncryption, param); this.forEncryption = forEncryption; } public int getInputBlockSize() { int baseBlockSize = engine.getInputBlockSize(); if (forEncryption) { return baseBlockSize - 1 - 2 * defHash.length; } else { return baseBlockSize; } } public int getOutputBlockSize() { int baseBlockSize = engine.getOutputBlockSize(); if (forEncryption) { return baseBlockSize; } else { return baseBlockSize - 1 - 2 * defHash.length; } } public byte[] processBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { if (forEncryption) { return encodeBlock(in, inOff, inLen); } else { return decodeBlock(in, inOff, inLen); } } public byte[] encodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { if (inLen > getInputBlockSize()) { throw new DataLengthException("input data too long"); } byte[] block = new byte[getInputBlockSize() + 1 + 2 * defHash.length]; // // copy in the message // System.arraycopy(in, inOff, block, block.length - inLen, inLen); // // add sentinel // block[block.length - inLen - 1] = 0x01; // // as the block is already zeroed - there's no need to add PS (the >= 0 pad of 0) // // // add the hash of the encoding params. // System.arraycopy(defHash, 0, block, defHash.length, defHash.length); // // generate the seed. // byte[] seed = new byte[defHash.length]; random.nextBytes(seed); // // mask the message block. // byte[] mask = maskGeneratorFunction1(seed, 0, seed.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // add in the seed // System.arraycopy(seed, 0, block, 0, defHash.length); // // mask the seed. 
// mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } return engine.processBlock(block, 0, block.length); } /** * @exception InvalidCipherTextException if the decrypted block turns out to * be badly formatted. */ public byte[] decodeBlock( byte[] in, int inOff, int inLen) throws InvalidCipherTextException { byte[] data = engine.processBlock(in, inOff, inLen); byte[] block = new byte[engine.getOutputBlockSize()]; // // as we may have zeros in our leading bytes for the block we produced // on encryption, we need to make sure our decrypted block comes back // the same size. // boolean wrongData = (block.length < (2 * defHash.length) + 1); if (data.length <= block.length) { System.arraycopy(data, 0, block, block.length - data.length, data.length); } else { System.arraycopy(data, 0, block, 0, block.length); wrongData = true; } // // unmask the seed. // byte[] mask = maskGeneratorFunction1( block, defHash.length, block.length - defHash.length, defHash.length); for (int i = 0; i != defHash.length; i++) { block[i] ^= mask[i]; } // // unmask the message block. // mask = maskGeneratorFunction1(block, 0, defHash.length, block.length - defHash.length); for (int i = defHash.length; i != block.length; i++) { block[i] ^= mask[i - defHash.length]; } // // check the hash of the encoding params. // long check to try to avoid this been a source of a timing attack. // boolean defHashWrong = false; for (int i = 0; i != defHash.length; i++) { if (defHash[i] != block[defHash.length + i]) { defHashWrong = true; } } // // find the data block // int start = block.length; for (int index = 2 * defHash.length; index != block.length; index++) { if (block[index] != 0 & start == block.length) { start = index; } } boolean dataStartWrong = (start > (block.length - 1) | block[start] != 1); start++; if (defHashWrong | wrongData | dataStartWrong) { Arrays.fill(block, (byte)0); throw new InvalidCipherTextException("data wrong"); } // // extract the data block // byte[] output = new byte[block.length - start]; System.arraycopy(block, start, output, 0, output.length); Arrays.fill(block, (byte)0); return output; } /** * mask generator function, as described in PKCS1v2. */ private byte[] maskGeneratorFunction1( byte[] Z, int zOff, int zLen, int length) { byte[] mask = new byte[length]; byte[] hashBuf = new byte[mgf1Hash.getDigestSize()]; byte[] C = new byte[4]; int counter = 0; mgf1Hash.reset(); while (counter < (length / hashBuf.length)) { Pack.intToBigEndian(counter, C, 0); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, hashBuf.length); counter++; } if ((counter * hashBuf.length) < length) { Pack.intToBigEndian(counter, C, 0); mgf1Hash.update(Z, zOff, zLen); mgf1Hash.update(C, 0, C.length); mgf1Hash.doFinal(hashBuf, 0); System.arraycopy(hashBuf, 0, mask, counter * hashBuf.length, mask.length - (counter * hashBuf.length)); } return mask; } }
Update to long check.
core/src/main/java/org/bouncycastle/crypto/encodings/OAEPEncoding.java
Update to long check.
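The commit above ("Update to long check.") concerns decodeBlock in OAEPEncoding, where the hash of the encoding parameters and the position of the 0x01 sentinel are verified by scanning the whole block and accumulating a failure flag, instead of returning at the first mismatch, so decryption time does not reveal where the check failed. The following is only an illustrative sketch of that constant-time comparison style; the class and method names are hypothetical and this is not BouncyCastle code.

```java
// Illustrative sketch of a constant-time byte-array comparison in the style
// of the "long check" in OAEPEncoding.decodeBlock. Hypothetical names, not
// BouncyCastle API.
final class ConstantTimeCheck {

    /**
     * Compares expected against actual[off .. off + expected.length) without
     * branching on the data, so the running time does not depend on where the
     * first mismatch occurs.
     */
    static boolean matches(byte[] expected, byte[] actual, int off) {
        if (off < 0 || off + expected.length > actual.length) {
            return false;
        }
        int diff = 0;
        for (int i = 0; i != expected.length; i++) {
            diff |= expected[i] ^ actual[off + i];   // accumulate all differences
        }
        return diff == 0;                            // single decision at the end
    }

    public static void main(String[] args) {
        byte[] hash  = {1, 2, 3, 4};
        byte[] block = {0, 0, 1, 2, 3, 4, 9};
        System.out.println(matches(hash, block, 2)); // true
        System.out.println(matches(hash, block, 3)); // false
    }
}
```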
Java
mit
0269a6115fb756eb2330f0cc11cecded093d1c82
0
CCI-MIT/XCoLab,CCI-MIT/XCoLab,CCI-MIT/XCoLab,CCI-MIT/XCoLab
package org.xcolab.view.pages.loginregister; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.modules.junit4.PowerMockRunnerDelegate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest; import org.springframework.context.annotation.ComponentScan; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.web.servlet.MockMvc; import org.xcolab.client.activity.IActivityClient; import org.xcolab.client.activity.StaticActivityContext; import org.xcolab.client.admin.IAdminClient; import org.xcolab.client.admin.IContestTypeClient; import org.xcolab.client.admin.IEmailTemplateClient; import org.xcolab.client.admin.StaticAdminContext; import org.xcolab.client.contest.ContestClientUtil; import org.xcolab.client.members.MembersClient; import org.xcolab.client.members.MessagingClient; import org.xcolab.util.http.ServiceRequestUtils; import org.xcolab.view.util.clienthelpers.AdminClientMockerHelper; import org.xcolab.view.util.clienthelpers.ContestTypeClientMockerHelper; import org.xcolab.view.util.clienthelpers.EmailTemplateClientMockerHelper; import org.xcolab.view.util.clienthelpers.MembersClientMockerHelper; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.forwardedUrl; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @RunWith(PowerMockRunner.class) @PowerMockRunnerDelegate(SpringJUnit4ClassRunner.class) @PowerMockIgnore("javax.management.*") @WebMvcTest(LoginRegisterController.class) @ComponentScan("org.xcolab.view.theme") @ComponentScan("org.xcolab.view.auth") @ComponentScan("org.xcolab.view.pages.proposals.interceptors") @ComponentScan("org.xcolab.view.pages.proposals.utils.context") @ComponentScan("org.xcolab.view.pages.loginregister") @ComponentScan("org.xcolab.view.pages.redballoon") @ComponentScan("org.xcolab.view.config") @ComponentScan("org.xcolab.view.i18n") @ComponentScan("org.xcolab.client") @TestPropertySource( properties = { "cache.enabled=false" } ) @PrepareForTest({ ContestClientUtil.class, MembersClient.class, MessagingClient.class, }) @ActiveProfiles("test") public class LoginRegisterControllerTest { @Autowired private MockMvc mockMvc; @Before public void setup() throws Exception { ServiceRequestUtils.setInitialized(true); PowerMockito.mockStatic(ContestClientUtil.class); PowerMockito.mockStatic(MessagingClient.class); StaticActivityContext.setActivityClient(Mockito.mock(IActivityClient.class)); MembersClientMockerHelper.mockMembersClient(); IAdminClient adminClient = 
AdminClientMockerHelper.mockAdminClient(); IEmailTemplateClient emailTemplateClient = EmailTemplateClientMockerHelper.mockEmailTemplateClient(); IContestTypeClient contestTypeClient = ContestTypeClientMockerHelper.mockContestTypeClient(); StaticAdminContext.setClients(adminClient, contestTypeClient, emailTemplateClient); } @Test public void shouldReturnRegisterForm() throws Exception { this.mockMvc.perform(get("/register")).andExpect(status().isOk()) .andExpect(forwardedUrl("/WEB-INF/jsp/loginregister/register.jspx")); } @Test public void registrationFailsWhenInvalidDataPostedAndSendsUserBackToForm() throws Exception { this.mockMvc.perform(post("/register") .with(csrf()) .param("screenName", "") .param("email", "") .param("imageId", "") .param("file", "") .param("firstName", "") .param("lastName", "") .param("password", "") .param("retypePassword", "") .param("country", "") .param("shortBio", "")) .andExpect(forwardedUrl("/WEB-INF/jsp/loginregister/register.jspx")) .andExpect(model().hasErrors()); } @Test public void registrationWorksAndDoLoginAndUserRedirectedToHome() throws Exception { this.mockMvc.perform(post("/register") .with(csrf()) .param("screenName", "username") .param("email", "[email protected]") .param("imageId", "") .param("file", "") .param("firstName", "User") .param("lastName", "Name") .param("password", "username") .param("retypePassword", "username") .param("language", "en") .param("country", "BR") .param("shortBio", "shortbio")) .andExpect(redirectedUrl("/")); } }
view/src/test/java/org/xcolab/view/pages/loginregister/LoginRegisterControllerTest.java
package org.xcolab.view.pages.loginregister; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.modules.junit4.PowerMockRunnerDelegate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest; import org.springframework.context.annotation.ComponentScan; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.web.servlet.MockMvc; import org.xcolab.client.activity.IActivityClient; import org.xcolab.client.activity.StaticActivityContext; import org.xcolab.client.admin.IAdminClient; import org.xcolab.client.admin.IContestTypeClient; import org.xcolab.client.admin.IEmailTemplateClient; import org.xcolab.client.admin.StaticAdminContext; import org.xcolab.client.contest.ContestClientUtil; import org.xcolab.client.members.MembersClient; import org.xcolab.client.members.MessagingClient; import org.xcolab.util.http.ServiceRequestUtils; import org.xcolab.view.util.clienthelpers.AdminClientMockerHelper; import org.xcolab.view.util.clienthelpers.ContestTypeClientMockerHelper; import org.xcolab.view.util.clienthelpers.EmailTemplateClientMockerHelper; import org.xcolab.view.util.clienthelpers.MembersClientMockerHelper; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.forwardedUrl; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @RunWith(PowerMockRunner.class) @PowerMockRunnerDelegate(SpringJUnit4ClassRunner.class) @PowerMockIgnore("javax.management.*") @WebMvcTest(LoginRegisterController.class) @ComponentScan("org.xcolab.view.theme") @ComponentScan("org.xcolab.view.auth") @ComponentScan("org.xcolab.view.pages.proposals.interceptors") @ComponentScan("org.xcolab.view.pages.proposals.utils.context") @ComponentScan("org.xcolab.view.pages.loginregister") @ComponentScan("org.xcolab.view.pages.redballoon") @ComponentScan("org.xcolab.view.config") @ComponentScan("org.xcolab.view.i18n") @ComponentScan("org.xcolab.client") @TestPropertySource( properties = { "cache.enabled=false" } ) @PrepareForTest({ IActivityClient.class, ContestClientUtil.class, MembersClient.class, MessagingClient.class, }) @ActiveProfiles("test") public class LoginRegisterControllerTest { @Autowired private MockMvc mockMvc; @Before public void setup() throws Exception { ServiceRequestUtils.setInitialized(true); PowerMockito.mockStatic(IActivityClient.class); PowerMockito.mockStatic(ContestClientUtil.class); PowerMockito.mockStatic(MessagingClient.class); StaticActivityContext.setActivityClient(Mockito.mock(IActivityClient.class)); 
MembersClientMockerHelper.mockMembersClient(); IAdminClient adminClient = AdminClientMockerHelper.mockAdminClient(); IEmailTemplateClient emailTemplateClient = EmailTemplateClientMockerHelper.mockEmailTemplateClient(); IContestTypeClient contestTypeClient = ContestTypeClientMockerHelper.mockContestTypeClient(); StaticAdminContext.setClients(adminClient, contestTypeClient, emailTemplateClient); } @Test public void shouldReturnRegisterForm() throws Exception { this.mockMvc.perform(get("/register")).andExpect(status().isOk()) .andExpect(forwardedUrl("/WEB-INF/jsp/loginregister/register.jspx")); } @Test public void registrationFailsWhenInvalidDataPostedAndSendsUserBackToForm() throws Exception { this.mockMvc.perform(post("/register") .with(csrf()) .param("screenName", "") .param("email", "") .param("imageId", "") .param("file", "") .param("firstName", "") .param("lastName", "") .param("password", "") .param("retypePassword", "") .param("country", "") .param("shortBio", "")) .andExpect(forwardedUrl("/WEB-INF/jsp/loginregister/register.jspx")) .andExpect(model().hasErrors()); } @Test public void registrationWorksAndDoLoginAndUserRedirectedToHome() throws Exception { this.mockMvc.perform(post("/register") .with(csrf()) .param("screenName", "username") .param("email", "[email protected]") .param("imageId", "") .param("file", "") .param("firstName", "User") .param("lastName", "Name") .param("password", "username") .param("retypePassword", "username") .param("language", "en") .param("country", "BR") .param("shortBio", "shortbio")) .andExpect(redirectedUrl("/")); } }
[COLAB-2918]-[activity]: remove obsolete mocking in test
view/src/test/java/org/xcolab/view/pages/loginregister/LoginRegisterControllerTest.java
[COLAB-2918]-[activity]: remove obsolete mocking in test
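The commit above removes the obsolete PowerMockito.mockStatic(IActivityClient.class) call and its @PrepareForTest entry: IActivityClient is an interface, so the test now supplies a plain Mockito mock through StaticActivityContext. A minimal sketch of that setup pattern follows, using only the IActivityClient and StaticActivityContext types that appear in the test above; the enclosing class name is illustrative.

```java
// Minimal sketch of the pattern the commit moves to: mock the interface with
// plain Mockito and register it on the static context holder, with no
// PowerMock bytecode manipulation involved.
import org.junit.Before;
import org.mockito.Mockito;
import org.xcolab.client.activity.IActivityClient;
import org.xcolab.client.activity.StaticActivityContext;

public class ActivityClientTestSetup {

    private IActivityClient activityClient;

    @Before
    public void setUp() {
        // An interface needs no static mocking to be replaced in tests.
        activityClient = Mockito.mock(IActivityClient.class);
        StaticActivityContext.setActivityClient(activityClient);
    }
}
```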
Java
mit
64aa6afbe025d44c6a40f1cdc2c43d87df033a04
0
hudson/hudson-2.x,hudson/hudson-2.x,hudson/hudson-2.x,hudson/hudson-2.x,hudson/hudson-2.x,hudson/hudson-2.x
package org.jvnet.hudson.maven.plugins.hpi; /* * Copyright 2001-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import com.thoughtworks.qdox.JavaDocBuilder; import com.thoughtworks.qdox.model.JavaClass; import com.thoughtworks.qdox.model.JavaSource; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.resolver.filter.ScopeArtifactFilter; import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.archiver.jar.Manifest.Attribute; import org.codehaus.plexus.archiver.jar.Manifest.Section; import org.codehaus.plexus.archiver.jar.ManifestException; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; import org.codehaus.plexus.util.DirectoryScanner; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.InterpolationFilterReader; import org.codehaus.plexus.util.StringUtils; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.Writer; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Set; public abstract class AbstractHpiMojo extends AbstractMojo { /** * The directory for the generated WAR. * * @parameter expression="${project.build.directory}" * @required */ protected String outputDirectory; /** * The maven project. * * @parameter expression="${project}" * @required * @readonly */ protected MavenProject project; /** * The directory containing generated classes. * * @parameter expression="${project.build.outputDirectory}" * @required * @readonly */ private File classesDirectory; /** * Name of the plugin that Hudson uses for display purpose. * It should be one line text. * * @parameter expression="${project.name}" * @required * @readonly */ protected String pluginName; /** * The directory where the webapp is built. * * @parameter expression="${project.build.directory}/${project.build.finalName}" * @required */ private File webappDirectory; /** * Single directory for extra files to include in the WAR. * * @parameter expression="${basedir}/src/main/webapp" * @required */ protected File warSourceDirectory; /** * The list of webResources we want to transfer. * * @parameter */ private Resource[] webResources; /** * @parameter expression="${project.build.filters}" */ private List<String> filters; /** * The path to the context.xml file to use. 
* * @parameter expression="${maven.war.containerConfigXML}" */ private File containerConfigXML; /** * Directory to unpack dependent WARs into if needed * * @parameter expression="${project.build.directory}/war/work" * @required */ private File workDirectory; /** * To look up Archiver/UnArchiver implementations * * @parameter expression="${component.org.codehaus.plexus.archiver.manager.ArchiverManager}" * @required */ protected ArchiverManager archiverManager; private static final String WEB_INF = "WEB-INF"; private static final String META_INF = "META-INF"; private static final String[] DEFAULT_INCLUDES = {"**/**"}; /** * The comma separated list of tokens to include in the WAR. * Default is '**'. * * @parameter alias="includes" */ private String warSourceIncludes = "**"; /** * The comma separated list of tokens to exclude from the WAR. * * @parameter alias="excludes" */ private String warSourceExcludes; /** * The comma separated list of tokens to include when doing * a war overlay. * Default is '**' * * @parameter */ private String dependentWarIncludes = "**"; /** * The comma separated list of tokens to exclude when doing * a way overlay. * * @parameter */ private String dependentWarExcludes; private static final String[] EMPTY_STRING_ARRAY = {}; public File getClassesDirectory() { return classesDirectory; } public void setClassesDirectory(File classesDirectory) { this.classesDirectory = classesDirectory; } public File getWebappDirectory() { return webappDirectory; } public void setWebappDirectory(File webappDirectory) { this.webappDirectory = webappDirectory; } public void setWarSourceDirectory(File warSourceDirectory) { this.warSourceDirectory = warSourceDirectory; } public File getContainerConfigXML() { return containerConfigXML; } public void setContainerConfigXML(File containerConfigXML) { this.containerConfigXML = containerConfigXML; } /** * Returns a string array of the excludes to be used * when assembling/copying the war. * * @return an array of tokens to exclude */ protected String[] getExcludes() { List<String> excludeList = new ArrayList<String>(); if (StringUtils.isNotEmpty(warSourceExcludes)) { excludeList.addAll(Arrays.asList(StringUtils.split(warSourceExcludes, ","))); } // if contextXML is specified, omit the one in the source directory if (containerConfigXML != null && StringUtils.isNotEmpty(containerConfigXML.getName())) { excludeList.add("**/" + META_INF + "/" + containerConfigXML.getName()); } return excludeList.toArray(EMPTY_STRING_ARRAY); } /** * Returns a string array of the includes to be used * when assembling/copying the war. * * @return an array of tokens to include */ protected String[] getIncludes() { return StringUtils.split(StringUtils.defaultString(warSourceIncludes), ","); } /** * Returns a string array of the excludes to be used * when adding dependent wars as an overlay onto this war. * * @return an array of tokens to exclude */ protected String[] getDependentWarExcludes() { String[] excludes; if (StringUtils.isNotEmpty(dependentWarExcludes)) { excludes = StringUtils.split(dependentWarExcludes, ","); } else { excludes = EMPTY_STRING_ARRAY; } return excludes; } /** * Returns a string array of the includes to be used * when adding dependent wars as an overlay onto this war. 
* * @return an array of tokens to include */ protected String[] getDependentWarIncludes() { return StringUtils.split(StringUtils.defaultString(dependentWarIncludes), ","); } public void buildExplodedWebapp(File webappDirectory) throws MojoExecutionException { getLog().info("Exploding webapp..."); webappDirectory.mkdirs(); File webinfDir = new File(webappDirectory, WEB_INF); webinfDir.mkdirs(); File metainfDir = new File(webappDirectory, META_INF); metainfDir.mkdirs(); try { List<Resource> webResources = this.webResources != null ? Arrays.asList(this.webResources) : null; if (webResources != null && webResources.size() > 0) { Properties filterProperties = getBuildFilterProperties(); for (Resource resource : webResources) { copyResources(resource, webappDirectory, filterProperties); } } copyResources(warSourceDirectory, webappDirectory); if (containerConfigXML != null && StringUtils.isNotEmpty(containerConfigXML.getName())) { metainfDir = new File(webappDirectory, META_INF); String xmlFileName = containerConfigXML.getName(); copyFileIfModified(containerConfigXML, new File(metainfDir, xmlFileName)); } buildWebapp(project, webappDirectory); } catch (IOException e) { throw new MojoExecutionException("Could not explode webapp...", e); } } private Properties getBuildFilterProperties() throws MojoExecutionException { // System properties Properties filterProperties = new Properties(System.getProperties()); // Project properties filterProperties.putAll(project.getProperties()); for (String filter : filters) { try { Properties properties = PropertyUtils.loadPropertyFile(new File(filter), true, true); filterProperties.putAll(properties); } catch (IOException e) { throw new MojoExecutionException("Error loading property file '" + filter + "'", e); } } return filterProperties; } /** * Copies webapp webResources from the specified directory. * <p/> * Note that the <tt>webXml</tt> parameter could be null and may * specify a file which is not named <tt>web.xml<tt>. If the file * exists, it will be copied to the <tt>META-INF</tt> directory and * renamed accordingly. * * @param resource the resource to copy * @param webappDirectory the target directory * @param filterProperties * @throws java.io.IOException if an error occurred while copying webResources */ public void copyResources(Resource resource, File webappDirectory, Properties filterProperties) throws IOException { if (!resource.getDirectory().equals(webappDirectory.getPath())) { getLog().info("Copy webapp webResources to " + webappDirectory.getAbsolutePath()); if (webappDirectory.exists()) { String[] fileNames = getWarFiles(resource); for (String fileName : fileNames) { if (resource.isFiltering()) { copyFilteredFile(new File(resource.getDirectory(), fileName), new File(webappDirectory, fileName), null, getFilterWrappers(), filterProperties); } else { copyFileIfModified(new File(resource.getDirectory(), fileName), new File(webappDirectory, fileName)); } } } } } /** * Copies webapp webResources from the specified directory. * <p/> * Note that the <tt>webXml</tt> parameter could be null and may * specify a file which is not named <tt>web.xml<tt>. If the file * exists, it will be copied to the <tt>META-INF</tt> directory and * renamed accordingly. 
* * @param sourceDirectory the source directory * @param webappDirectory the target directory * @throws java.io.IOException if an error occurred while copying webResources */ public void copyResources(File sourceDirectory, File webappDirectory) throws IOException { if (!sourceDirectory.equals(webappDirectory)) { getLog().info("Copy webapp webResources to " + webappDirectory.getAbsolutePath()); if (warSourceDirectory.exists()) { String[] fileNames = getWarFiles(sourceDirectory); for (String fileName : fileNames) { copyFileIfModified(new File(sourceDirectory, fileName), new File(webappDirectory, fileName)); } } } } /** * Builds the webapp for the specified project. * <p/> * Classes, libraries and tld files are copied to * the <tt>webappDirectory</tt> during this phase. * * @param project the maven project * @param webappDirectory * @throws java.io.IOException if an error occurred while building the webapp */ public void buildWebapp(MavenProject project, File webappDirectory) throws MojoExecutionException, IOException { getLog().info("Assembling webapp " + project.getArtifactId() + " in " + webappDirectory); File libDirectory = new File(webappDirectory, WEB_INF + "/lib"); File tldDirectory = new File(webappDirectory, WEB_INF + "/tld"); File webappClassesDirectory = new File(webappDirectory, WEB_INF + "/classes"); if (classesDirectory.exists() && !classesDirectory.equals(webappClassesDirectory)) { copyDirectoryStructureIfModified(classesDirectory, webappClassesDirectory); } Set<Artifact> artifacts = project.getArtifacts(); List duplicates = findDuplicates(artifacts); List<File> dependentWarDirectories = new ArrayList<File>(); for (Artifact artifact : artifacts) { String targetFileName = getDefaultFinalName(artifact); getLog().debug("Processing: " + targetFileName); if (duplicates.contains(targetFileName)) { getLog().debug("Duplicate found: " + targetFileName); targetFileName = artifact.getGroupId() + "-" + targetFileName; getLog().debug("Renamed to: " + targetFileName); } // TODO: utilise appropriate methods from project builder ScopeArtifactFilter filter = new ScopeArtifactFilter(Artifact.SCOPE_RUNTIME); if (!artifact.isOptional() && filter.include(artifact)) { String type = artifact.getType(); if ("tld".equals(type)) { copyFileIfModified(artifact.getFile(), new File(tldDirectory, targetFileName)); } else { if ("jar".equals(type) || "ejb".equals(type) || "ejb-client".equals(type)) { copyFileIfModified(artifact.getFile(), new File(libDirectory, targetFileName)); } else { if ("par".equals(type)) { targetFileName = targetFileName.substring(0, targetFileName.lastIndexOf('.')) + ".jar"; getLog().debug( "Copying " + artifact.getFile() + " to " + new File(libDirectory, targetFileName)); copyFileIfModified(artifact.getFile(), new File(libDirectory, targetFileName)); } else { if ("war".equals(type)) { dependentWarDirectories.add(unpackWarToTempDirectory(artifact)); } else { getLog().debug("Skipping artifact of type " + type + " for WEB-INF/lib"); } } } } } } if (dependentWarDirectories.size() > 0) { getLog().info("Overlaying " + dependentWarDirectories.size() + " war(s)."); // overlay dependent wars for (Iterator iter = dependentWarDirectories.iterator(); iter.hasNext();) { copyDependentWarContents((File) iter.next(), webappDirectory); } } } /** * Searches a set of artifacts for duplicate filenames and returns a list of duplicates. 
* * @param artifacts set of artifacts * @return List of duplicated artifacts */ private List<String> findDuplicates(Set<Artifact> artifacts) { List<String> duplicates = new ArrayList<String>(); List<String> identifiers = new ArrayList<String>(); for (Artifact artifact : artifacts) { String candidate = getDefaultFinalName(artifact); if (identifiers.contains(candidate)) { duplicates.add(candidate); } else { identifiers.add(candidate); } } return duplicates; } /** * Unpacks war artifacts into a temporary directory inside <tt>workDirectory</tt> * named with the name of the war. * * @param artifact War artifact to unpack. * @return Directory containing the unpacked war. * @throws MojoExecutionException */ private File unpackWarToTempDirectory(Artifact artifact) throws MojoExecutionException { String name = artifact.getFile().getName(); File tempLocation = new File(workDirectory, name.substring(0, name.length() - 4)); boolean process = false; if (!tempLocation.exists()) { tempLocation.mkdirs(); process = true; } else if (artifact.getFile().lastModified() > tempLocation.lastModified()) { process = true; } if (process) { File file = artifact.getFile(); try { unpack(file, tempLocation); } catch (NoSuchArchiverException e) { this.getLog().info("Skip unpacking dependency file with unknown extension: " + file.getPath()); } } return tempLocation; } /** * Unpacks the archive file. * * @param file File to be unpacked. * @param location Location where to put the unpacked files. */ private void unpack(File file, File location) throws MojoExecutionException, NoSuchArchiverException { String archiveExt = FileUtils.getExtension(file.getAbsolutePath()).toLowerCase(); try { UnArchiver unArchiver = archiverManager.getUnArchiver(archiveExt); unArchiver.setSourceFile(file); unArchiver.setDestDirectory(location); unArchiver.extract(); } catch (IOException e) { throw new MojoExecutionException("Error unpacking file: " + file + "to: " + location, e); } catch (ArchiverException e) { throw new MojoExecutionException("Error unpacking file: " + file + "to: " + location, e); } } /** * Recursively copies contents of <tt>srcDir</tt> into <tt>targetDir</tt>. * This will not overwrite any existing files. * * @param srcDir Directory containing unpacked dependent war contents * @param targetDir Directory to overlay srcDir into */ private void copyDependentWarContents(File srcDir, File targetDir) throws MojoExecutionException { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(srcDir); scanner.setExcludes(getDependentWarExcludes()); scanner.addDefaultExcludes(); scanner.setIncludes(getDependentWarIncludes()); scanner.scan(); for (String dir : scanner.getIncludedDirectories()) { new File(targetDir, dir).mkdirs(); } for (String file : scanner.getIncludedFiles()) { File targetFile = new File(targetDir, file); // Do not overwrite existing files. if (!targetFile.exists()) { try { targetFile.getParentFile().mkdirs(); copyFileIfModified(new File(srcDir, file), targetFile); } catch (IOException e) { throw new MojoExecutionException("Error copying file '" + file + "' to '" + targetFile + "'", e); } } } } /** * Returns a list of filenames that should be copied * over to the destination directory. 
* * @param sourceDir the directory to be scanned * @return the array of filenames, relative to the sourceDir */ private String[] getWarFiles(File sourceDir) { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(sourceDir); scanner.setExcludes(getExcludes()); scanner.addDefaultExcludes(); scanner.setIncludes(getIncludes()); scanner.scan(); return scanner.getIncludedFiles(); } /** * Returns a list of filenames that should be copied * over to the destination directory. * * @param resource the resource to be scanned * @return the array of filenames, relative to the sourceDir */ private String[] getWarFiles(Resource resource) { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(resource.getDirectory()); if (resource.getIncludes() != null && !resource.getIncludes().isEmpty()) { scanner.setIncludes((String[]) resource.getIncludes().toArray(EMPTY_STRING_ARRAY)); } else { scanner.setIncludes(DEFAULT_INCLUDES); } if (resource.getExcludes() != null && !resource.getExcludes().isEmpty()) { scanner.setExcludes((String[]) resource.getExcludes().toArray(EMPTY_STRING_ARRAY)); } scanner.addDefaultExcludes(); scanner.scan(); return scanner.getIncludedFiles(); } /** * Copy file from source to destination only if source is newer than the target file. * If <code>destinationDirectory</code> does not exist, it * (and any parent directories) will be created. If a file <code>source</code> in * <code>destinationDirectory</code> exists, it will be overwritten. * * @param source An existing <code>File</code> to copy. * @param destinationDirectory A directory to copy <code>source</code> into. * @throws java.io.FileNotFoundException if <code>source</code> isn't a normal file. * @throws IllegalArgumentException if <code>destinationDirectory</code> isn't a directory. * @throws java.io.IOException if <code>source</code> does not exist, the file in * <code>destinationDirectory</code> cannot be written to, or an IO error occurs during copying. * <p/> * TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyFileToDirectoryIfModified(File source, File destinationDirectory) throws IOException { // TO DO: Remove this method and use the method in WarFileUtils when Maven 2 changes // to plexus-utils 1.2. if (destinationDirectory.exists() && !destinationDirectory.isDirectory()) { throw new IllegalArgumentException("Destination is not a directory"); } copyFileIfModified(source, new File(destinationDirectory, source.getName())); } private FilterWrapper[] getFilterWrappers() { return new FilterWrapper[]{ // support ${token} new FilterWrapper() { public Reader getReader(Reader fileReader, Properties filterProperties) { return new InterpolationFilterReader(fileReader, filterProperties, "${", "}"); } }, // support @token@ new FilterWrapper() { public Reader getReader(Reader fileReader, Properties filterProperties) { return new InterpolationFilterReader(fileReader, filterProperties, "@", "@"); } }}; } /** * @param from * @param to * @param encoding * @param wrappers * @param filterProperties * @throws IOException TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyFilteredFile(File from, File to, String encoding, FilterWrapper[] wrappers, Properties filterProperties) throws IOException { // buffer so it isn't reading a byte at a time! 
Reader fileReader = null; Writer fileWriter = null; try { // fix for MWAR-36, ensures that the parent dir are created first to.getParentFile().mkdirs(); if (encoding == null || encoding.length() < 1) { fileReader = new BufferedReader(new FileReader(from)); fileWriter = new FileWriter(to); } else { FileInputStream instream = new FileInputStream(from); FileOutputStream outstream = new FileOutputStream(to); fileReader = new BufferedReader(new InputStreamReader(instream, encoding)); fileWriter = new OutputStreamWriter(outstream, encoding); } Reader reader = fileReader; for (FilterWrapper wrapper : wrappers) { reader = wrapper.getReader(reader, filterProperties); } IOUtil.copy(reader, fileWriter); } finally { IOUtil.close(fileReader); IOUtil.close(fileWriter); } } /** * Copy file from source to destination only if source timestamp is later than the destination timestamp. * The directories up to <code>destination</code> will be created if they don't already exist. * <code>destination</code> will be overwritten if it already exists. * * @param source An existing non-directory <code>File</code> to copy bytes from. * @param destination A non-directory <code>File</code> to write bytes to (possibly * overwriting). * @throws IOException if <code>source</code> does not exist, <code>destination</code> cannot be * written to, or an IO error occurs during copying. * @throws java.io.FileNotFoundException if <code>destination</code> is a directory * <p/> * TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyFileIfModified(File source, File destination) throws IOException { // TO DO: Remove this method and use the method in WarFileUtils when Maven 2 changes // to plexus-utils 1.2. if (destination.lastModified() < source.lastModified()) { FileUtils.copyFile(source, destination); } } /** * Copies a entire directory structure but only source files with timestamp later than the destinations'. * <p/> * Note: * <ul> * <li>It will include empty directories. * <li>The <code>sourceDirectory</code> must exists. * </ul> * * @param sourceDirectory * @param destinationDirectory * @throws IOException TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyDirectoryStructureIfModified(File sourceDirectory, File destinationDirectory) throws IOException { if (!sourceDirectory.exists()) { throw new IOException("Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")."); } String sourcePath = sourceDirectory.getAbsolutePath(); for (File file : sourceDirectory.listFiles()) { String dest = file.getAbsolutePath(); dest = dest.substring(sourcePath.length() + 1); File destination = new File(destinationDirectory, dest); if (file.isFile()) { destination = destination.getParentFile(); copyFileToDirectoryIfModified(file, destination); } else if (file.isDirectory()) { if (!destination.exists() && !destination.mkdirs()) { throw new IOException( "Could not create destination directory '" + destination.getAbsolutePath() + "'."); } copyDirectoryStructureIfModified(file, destination); } else { throw new IOException("Unknown file type: " + file.getAbsolutePath()); } } } /** * TO DO: Remove this interface when Maven moves to plexus-utils version 1.4 */ private interface FilterWrapper { Reader getReader(Reader fileReader, Properties filterProperties); } /** * Converts the filename of an artifact to artifactId-version.type format. 
* * @param artifact * @return converted filename of the artifact */ private String getDefaultFinalName(Artifact artifact) { return artifact.getArtifactId() + "-" + artifact.getVersion() + "." + artifact.getArtifactHandler().getExtension(); } protected void setAttributes(Section mainSection) throws MojoExecutionException, ManifestException { JavaClass javaClass = findPluginClass(); if(javaClass==null) throw new MojoExecutionException("Unable to find a plugin class. Did you put @plugin in javadoc?"); mainSection.addAttributeAndCheck(new Attribute("Plugin-Class", javaClass.getPackage()+"."+javaClass.getName())); mainSection.addAttributeAndCheck(new Attribute("Long-Name",pluginName)); String v = project.getVersion(); if(v.endsWith("-SNAPSHOT")) { String dt = new SimpleDateFormat("MM/dd/yyyy hh:mm").format(new Date()); v += " (private-"+dt+"-"+System.getProperty("user.name")+")"; } mainSection.addAttributeAndCheck(new Attribute("Plugin-Version",v)); String dep = findDependencyProjects(); if(dep.length()>0) mainSection.addAttributeAndCheck(new Attribute("Plugin-Dependencies",dep)); } /** * Find a class that has "@plugin" marker. */ private JavaClass findPluginClass() { JavaDocBuilder builder = new JavaDocBuilder(); for (Object o : project.getCompileSourceRoots()) builder.addSourceTree(new File((String) o)); // look for a class that extends Plugin for( JavaSource js : builder.getSources() ) { JavaClass jc = js.getClasses()[0]; if(jc.getTagByName("plugin")!=null) return jc; } return null; } /** * Finds and lists dependency plugins. */ private String findDependencyProjects() { StringBuilder buf = new StringBuilder(); for(Object o : project.getArtifacts()) { Artifact a = (Artifact)o; if(a.getType().equals("hpi")) { if(buf.length()>0) buf.append(' '); buf.append(a.getArtifactId()); buf.append(':'); buf.append(a.getVersion()); } } return buf.toString(); } }
src/main/java/org/jvnet/hudson/maven/plugins/hpi/AbstractHpiMojo.java
package org.jvnet.hudson.maven.plugins.hpi; /* * Copyright 2001-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.DependencyResolutionRequiredException; import org.apache.maven.artifact.resolver.filter.ScopeArtifactFilter; import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.project.MavenProject; import org.apache.maven.archiver.MavenArchiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.archiver.jar.Manifest; import org.codehaus.plexus.archiver.jar.ManifestException; import org.codehaus.plexus.archiver.jar.Manifest.Section; import org.codehaus.plexus.archiver.jar.Manifest.Attribute; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; import org.codehaus.plexus.util.DirectoryScanner; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.InterpolationFilterReader; import org.codehaus.plexus.util.StringUtils; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.Writer; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Set; import java.util.Date; import java.text.SimpleDateFormat; import com.thoughtworks.qdox.model.JavaClass; import com.thoughtworks.qdox.model.JavaSource; import com.thoughtworks.qdox.JavaDocBuilder; public abstract class AbstractHpiMojo extends AbstractMojo { /** * The directory for the generated WAR. * * @parameter expression="${project.build.directory}" * @required */ protected String outputDirectory; /** * The maven project. * * @parameter expression="${project}" * @required * @readonly */ protected MavenProject project; /** * The directory containing generated classes. * * @parameter expression="${project.build.outputDirectory}" * @required * @readonly */ private File classesDirectory; /** * Name of the plugin that Hudson uses for display purpose. * It should be one line text. * * @parameter expression="${project.name}" * @required * @readonly */ protected String pluginName; /** * The directory where the webapp is built. * * @parameter expression="${project.build.directory}/${project.build.finalName}" * @required */ private File webappDirectory; /** * Single directory for extra files to include in the WAR. * * @parameter expression="${basedir}/src/main/webapp" * @required */ protected File warSourceDirectory; /** * The list of webResources we want to transfer. 
* * @parameter */ private Resource[] webResources; /** * @parameter expression="${project.build.filters}" */ private List<String> filters; /** * The path to the context.xml file to use. * * @parameter expression="${maven.war.containerConfigXML}" */ private File containerConfigXML; /** * Directory to unpack dependent WARs into if needed * * @parameter expression="${project.build.directory}/war/work" * @required */ private File workDirectory; /** * To look up Archiver/UnArchiver implementations * * @parameter expression="${component.org.codehaus.plexus.archiver.manager.ArchiverManager}" * @required */ protected ArchiverManager archiverManager; private static final String WEB_INF = "WEB-INF"; private static final String META_INF = "META-INF"; private static final String[] DEFAULT_INCLUDES = {"**/**"}; /** * The comma separated list of tokens to include in the WAR. * Default is '**'. * * @parameter alias="includes" */ private String warSourceIncludes = "**"; /** * The comma separated list of tokens to exclude from the WAR. * * @parameter alias="excludes" */ private String warSourceExcludes; /** * The comma separated list of tokens to include when doing * a war overlay. * Default is '**' * * @parameter */ private String dependentWarIncludes = "**"; /** * The comma separated list of tokens to exclude when doing * a way overlay. * * @parameter */ private String dependentWarExcludes; private static final String[] EMPTY_STRING_ARRAY = {}; public File getClassesDirectory() { return classesDirectory; } public void setClassesDirectory(File classesDirectory) { this.classesDirectory = classesDirectory; } public File getWebappDirectory() { return webappDirectory; } public void setWebappDirectory(File webappDirectory) { this.webappDirectory = webappDirectory; } public void setWarSourceDirectory(File warSourceDirectory) { this.warSourceDirectory = warSourceDirectory; } public File getContainerConfigXML() { return containerConfigXML; } public void setContainerConfigXML(File containerConfigXML) { this.containerConfigXML = containerConfigXML; } /** * Returns a string array of the excludes to be used * when assembling/copying the war. * * @return an array of tokens to exclude */ protected String[] getExcludes() { List<String> excludeList = new ArrayList<String>(); if (StringUtils.isNotEmpty(warSourceExcludes)) { excludeList.addAll(Arrays.asList(StringUtils.split(warSourceExcludes, ","))); } // if contextXML is specified, omit the one in the source directory if (containerConfigXML != null && StringUtils.isNotEmpty(containerConfigXML.getName())) { excludeList.add("**/" + META_INF + "/" + containerConfigXML.getName()); } return excludeList.toArray(EMPTY_STRING_ARRAY); } /** * Returns a string array of the includes to be used * when assembling/copying the war. * * @return an array of tokens to include */ protected String[] getIncludes() { return StringUtils.split(StringUtils.defaultString(warSourceIncludes), ","); } /** * Returns a string array of the excludes to be used * when adding dependent wars as an overlay onto this war. * * @return an array of tokens to exclude */ protected String[] getDependentWarExcludes() { String[] excludes; if (StringUtils.isNotEmpty(dependentWarExcludes)) { excludes = StringUtils.split(dependentWarExcludes, ","); } else { excludes = EMPTY_STRING_ARRAY; } return excludes; } /** * Returns a string array of the includes to be used * when adding dependent wars as an overlay onto this war. 
* * @return an array of tokens to include */ protected String[] getDependentWarIncludes() { return StringUtils.split(StringUtils.defaultString(dependentWarIncludes), ","); } public void buildExplodedWebapp(File webappDirectory) throws MojoExecutionException { getLog().info("Exploding webapp..."); webappDirectory.mkdirs(); File webinfDir = new File(webappDirectory, WEB_INF); webinfDir.mkdirs(); File metainfDir = new File(webappDirectory, META_INF); metainfDir.mkdirs(); try { List<Resource> webResources = this.webResources != null ? Arrays.asList(this.webResources) : null; if (webResources != null && webResources.size() > 0) { Properties filterProperties = getBuildFilterProperties(); for (Resource resource : webResources) { copyResources(resource, webappDirectory, filterProperties); } } copyResources(warSourceDirectory, webappDirectory); if (containerConfigXML != null && StringUtils.isNotEmpty(containerConfigXML.getName())) { metainfDir = new File(webappDirectory, META_INF); String xmlFileName = containerConfigXML.getName(); copyFileIfModified(containerConfigXML, new File(metainfDir, xmlFileName)); } buildWebapp(project, webappDirectory); } catch (IOException e) { throw new MojoExecutionException("Could not explode webapp...", e); } } private Properties getBuildFilterProperties() throws MojoExecutionException { // System properties Properties filterProperties = new Properties(System.getProperties()); // Project properties filterProperties.putAll(project.getProperties()); for (String filter : filters) { try { Properties properties = PropertyUtils.loadPropertyFile(new File(filter), true, true); filterProperties.putAll(properties); } catch (IOException e) { throw new MojoExecutionException("Error loading property file '" + filter + "'", e); } } return filterProperties; } /** * Copies webapp webResources from the specified directory. * <p/> * Note that the <tt>webXml</tt> parameter could be null and may * specify a file which is not named <tt>web.xml<tt>. If the file * exists, it will be copied to the <tt>META-INF</tt> directory and * renamed accordingly. * * @param resource the resource to copy * @param webappDirectory the target directory * @param filterProperties * @throws java.io.IOException if an error occurred while copying webResources */ public void copyResources(Resource resource, File webappDirectory, Properties filterProperties) throws IOException { if (!resource.getDirectory().equals(webappDirectory.getPath())) { getLog().info("Copy webapp webResources to " + webappDirectory.getAbsolutePath()); if (webappDirectory.exists()) { String[] fileNames = getWarFiles(resource); for (String fileName : fileNames) { if (resource.isFiltering()) { copyFilteredFile(new File(resource.getDirectory(), fileName), new File(webappDirectory, fileName), null, getFilterWrappers(), filterProperties); } else { copyFileIfModified(new File(resource.getDirectory(), fileName), new File(webappDirectory, fileName)); } } } } } /** * Copies webapp webResources from the specified directory. * <p/> * Note that the <tt>webXml</tt> parameter could be null and may * specify a file which is not named <tt>web.xml<tt>. If the file * exists, it will be copied to the <tt>META-INF</tt> directory and * renamed accordingly. 
* * @param sourceDirectory the source directory * @param webappDirectory the target directory * @throws java.io.IOException if an error occurred while copying webResources */ public void copyResources(File sourceDirectory, File webappDirectory) throws IOException { if (!sourceDirectory.equals(webappDirectory)) { getLog().info("Copy webapp webResources to " + webappDirectory.getAbsolutePath()); if (warSourceDirectory.exists()) { String[] fileNames = getWarFiles(sourceDirectory); for (String fileName : fileNames) { copyFileIfModified(new File(sourceDirectory, fileName), new File(webappDirectory, fileName)); } } } } /** * Builds the webapp for the specified project. * <p/> * Classes, libraries and tld files are copied to * the <tt>webappDirectory</tt> during this phase. * * @param project the maven project * @param webappDirectory * @throws java.io.IOException if an error occurred while building the webapp */ public void buildWebapp(MavenProject project, File webappDirectory) throws MojoExecutionException, IOException { getLog().info("Assembling webapp " + project.getArtifactId() + " in " + webappDirectory); File libDirectory = new File(webappDirectory, WEB_INF + "/lib"); File tldDirectory = new File(webappDirectory, WEB_INF + "/tld"); File webappClassesDirectory = new File(webappDirectory, WEB_INF + "/classes"); if (classesDirectory.exists() && !classesDirectory.equals(webappClassesDirectory)) { copyDirectoryStructureIfModified(classesDirectory, webappClassesDirectory); } Set<Artifact> artifacts = project.getArtifacts(); List duplicates = findDuplicates(artifacts); List<File> dependentWarDirectories = new ArrayList<File>(); for (Artifact artifact : artifacts) { String targetFileName = getDefaultFinalName(artifact); getLog().debug("Processing: " + targetFileName); if (duplicates.contains(targetFileName)) { getLog().debug("Duplicate found: " + targetFileName); targetFileName = artifact.getGroupId() + "-" + targetFileName; getLog().debug("Renamed to: " + targetFileName); } // TODO: utilise appropriate methods from project builder ScopeArtifactFilter filter = new ScopeArtifactFilter(Artifact.SCOPE_RUNTIME); if (!artifact.isOptional() && filter.include(artifact)) { String type = artifact.getType(); if ("tld".equals(type)) { copyFileIfModified(artifact.getFile(), new File(tldDirectory, targetFileName)); } else { if ("jar".equals(type) || "ejb".equals(type) || "ejb-client".equals(type)) { copyFileIfModified(artifact.getFile(), new File(libDirectory, targetFileName)); } else { if ("par".equals(type)) { targetFileName = targetFileName.substring(0, targetFileName.lastIndexOf('.')) + ".jar"; getLog().debug( "Copying " + artifact.getFile() + " to " + new File(libDirectory, targetFileName)); copyFileIfModified(artifact.getFile(), new File(libDirectory, targetFileName)); } else { if ("war".equals(type)) { dependentWarDirectories.add(unpackWarToTempDirectory(artifact)); } else { getLog().debug("Skipping artifact of type " + type + " for WEB-INF/lib"); } } } } } } if (dependentWarDirectories.size() > 0) { getLog().info("Overlaying " + dependentWarDirectories.size() + " war(s)."); // overlay dependent wars for (Iterator iter = dependentWarDirectories.iterator(); iter.hasNext();) { copyDependentWarContents((File) iter.next(), webappDirectory); } } } /** * Searches a set of artifacts for duplicate filenames and returns a list of duplicates. 
* * @param artifacts set of artifacts * @return List of duplicated artifacts */ private List<String> findDuplicates(Set<Artifact> artifacts) { List<String> duplicates = new ArrayList<String>(); List<String> identifiers = new ArrayList<String>(); for (Artifact artifact : artifacts) { String candidate = getDefaultFinalName(artifact); if (identifiers.contains(candidate)) { duplicates.add(candidate); } else { identifiers.add(candidate); } } return duplicates; } /** * Unpacks war artifacts into a temporary directory inside <tt>workDirectory</tt> * named with the name of the war. * * @param artifact War artifact to unpack. * @return Directory containing the unpacked war. * @throws MojoExecutionException */ private File unpackWarToTempDirectory(Artifact artifact) throws MojoExecutionException { String name = artifact.getFile().getName(); File tempLocation = new File(workDirectory, name.substring(0, name.length() - 4)); boolean process = false; if (!tempLocation.exists()) { tempLocation.mkdirs(); process = true; } else if (artifact.getFile().lastModified() > tempLocation.lastModified()) { process = true; } if (process) { File file = artifact.getFile(); try { unpack(file, tempLocation); } catch (NoSuchArchiverException e) { this.getLog().info("Skip unpacking dependency file with unknown extension: " + file.getPath()); } } return tempLocation; } /** * Unpacks the archive file. * * @param file File to be unpacked. * @param location Location where to put the unpacked files. */ private void unpack(File file, File location) throws MojoExecutionException, NoSuchArchiverException { String archiveExt = FileUtils.getExtension(file.getAbsolutePath()).toLowerCase(); try { UnArchiver unArchiver = archiverManager.getUnArchiver(archiveExt); unArchiver.setSourceFile(file); unArchiver.setDestDirectory(location); unArchiver.extract(); } catch (IOException e) { throw new MojoExecutionException("Error unpacking file: " + file + "to: " + location, e); } catch (ArchiverException e) { throw new MojoExecutionException("Error unpacking file: " + file + "to: " + location, e); } } /** * Recursively copies contents of <tt>srcDir</tt> into <tt>targetDir</tt>. * This will not overwrite any existing files. * * @param srcDir Directory containing unpacked dependent war contents * @param targetDir Directory to overlay srcDir into */ private void copyDependentWarContents(File srcDir, File targetDir) throws MojoExecutionException { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(srcDir); scanner.setExcludes(getDependentWarExcludes()); scanner.addDefaultExcludes(); scanner.setIncludes(getDependentWarIncludes()); scanner.scan(); for (String dir : scanner.getIncludedDirectories()) { new File(targetDir, dir).mkdirs(); } for (String file : scanner.getIncludedFiles()) { File targetFile = new File(targetDir, file); // Do not overwrite existing files. if (!targetFile.exists()) { try { targetFile.getParentFile().mkdirs(); copyFileIfModified(new File(srcDir, file), targetFile); } catch (IOException e) { throw new MojoExecutionException("Error copying file '" + file + "' to '" + targetFile + "'", e); } } } } /** * Returns a list of filenames that should be copied * over to the destination directory. 
* * @param sourceDir the directory to be scanned * @return the array of filenames, relative to the sourceDir */ private String[] getWarFiles(File sourceDir) { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(sourceDir); scanner.setExcludes(getExcludes()); scanner.addDefaultExcludes(); scanner.setIncludes(getIncludes()); scanner.scan(); return scanner.getIncludedFiles(); } /** * Returns a list of filenames that should be copied * over to the destination directory. * * @param resource the resource to be scanned * @return the array of filenames, relative to the sourceDir */ private String[] getWarFiles(Resource resource) { DirectoryScanner scanner = new DirectoryScanner(); scanner.setBasedir(resource.getDirectory()); if (resource.getIncludes() != null && !resource.getIncludes().isEmpty()) { scanner.setIncludes((String[]) resource.getIncludes().toArray(EMPTY_STRING_ARRAY)); } else { scanner.setIncludes(DEFAULT_INCLUDES); } if (resource.getExcludes() != null && !resource.getExcludes().isEmpty()) { scanner.setExcludes((String[]) resource.getExcludes().toArray(EMPTY_STRING_ARRAY)); } scanner.addDefaultExcludes(); scanner.scan(); return scanner.getIncludedFiles(); } /** * Copy file from source to destination only if source is newer than the target file. * If <code>destinationDirectory</code> does not exist, it * (and any parent directories) will be created. If a file <code>source</code> in * <code>destinationDirectory</code> exists, it will be overwritten. * * @param source An existing <code>File</code> to copy. * @param destinationDirectory A directory to copy <code>source</code> into. * @throws java.io.FileNotFoundException if <code>source</code> isn't a normal file. * @throws IllegalArgumentException if <code>destinationDirectory</code> isn't a directory. * @throws java.io.IOException if <code>source</code> does not exist, the file in * <code>destinationDirectory</code> cannot be written to, or an IO error occurs during copying. * <p/> * TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyFileToDirectoryIfModified(File source, File destinationDirectory) throws IOException { // TO DO: Remove this method and use the method in WarFileUtils when Maven 2 changes // to plexus-utils 1.2. if (destinationDirectory.exists() && !destinationDirectory.isDirectory()) { throw new IllegalArgumentException("Destination is not a directory"); } copyFileIfModified(source, new File(destinationDirectory, source.getName())); } private FilterWrapper[] getFilterWrappers() { return new FilterWrapper[]{ // support ${token} new FilterWrapper() { public Reader getReader(Reader fileReader, Properties filterProperties) { return new InterpolationFilterReader(fileReader, filterProperties, "${", "}"); } }, // support @token@ new FilterWrapper() { public Reader getReader(Reader fileReader, Properties filterProperties) { return new InterpolationFilterReader(fileReader, filterProperties, "@", "@"); } }}; } /** * @param from * @param to * @param encoding * @param wrappers * @param filterProperties * @throws IOException TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyFilteredFile(File from, File to, String encoding, FilterWrapper[] wrappers, Properties filterProperties) throws IOException { // buffer so it isn't reading a byte at a time! 
Reader fileReader = null; Writer fileWriter = null; try { // fix for MWAR-36, ensures that the parent dir are created first to.getParentFile().mkdirs(); if (encoding == null || encoding.length() < 1) { fileReader = new BufferedReader(new FileReader(from)); fileWriter = new FileWriter(to); } else { FileInputStream instream = new FileInputStream(from); FileOutputStream outstream = new FileOutputStream(to); fileReader = new BufferedReader(new InputStreamReader(instream, encoding)); fileWriter = new OutputStreamWriter(outstream, encoding); } Reader reader = fileReader; for (FilterWrapper wrapper : wrappers) { reader = wrapper.getReader(reader, filterProperties); } IOUtil.copy(reader, fileWriter); } finally { IOUtil.close(fileReader); IOUtil.close(fileWriter); } } /** * Copy file from source to destination only if source timestamp is later than the destination timestamp. * The directories up to <code>destination</code> will be created if they don't already exist. * <code>destination</code> will be overwritten if it already exists. * * @param source An existing non-directory <code>File</code> to copy bytes from. * @param destination A non-directory <code>File</code> to write bytes to (possibly * overwriting). * @throws IOException if <code>source</code> does not exist, <code>destination</code> cannot be * written to, or an IO error occurs during copying. * @throws java.io.FileNotFoundException if <code>destination</code> is a directory * <p/> * TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyFileIfModified(File source, File destination) throws IOException { // TO DO: Remove this method and use the method in WarFileUtils when Maven 2 changes // to plexus-utils 1.2. if (destination.lastModified() < source.lastModified()) { FileUtils.copyFile(source, destination); } } /** * Copies a entire directory structure but only source files with timestamp later than the destinations'. * <p/> * Note: * <ul> * <li>It will include empty directories. * <li>The <code>sourceDirectory</code> must exists. * </ul> * * @param sourceDirectory * @param destinationDirectory * @throws IOException TO DO: Remove this method when Maven moves to plexus-utils version 1.4 */ private static void copyDirectoryStructureIfModified(File sourceDirectory, File destinationDirectory) throws IOException { if (!sourceDirectory.exists()) { throw new IOException("Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")."); } String sourcePath = sourceDirectory.getAbsolutePath(); for (File file : sourceDirectory.listFiles()) { String dest = file.getAbsolutePath(); dest = dest.substring(sourcePath.length() + 1); File destination = new File(destinationDirectory, dest); if (file.isFile()) { destination = destination.getParentFile(); copyFileToDirectoryIfModified(file, destination); } else if (file.isDirectory()) { if (!destination.exists() && !destination.mkdirs()) { throw new IOException( "Could not create destination directory '" + destination.getAbsolutePath() + "'."); } copyDirectoryStructureIfModified(file, destination); } else { throw new IOException("Unknown file type: " + file.getAbsolutePath()); } } } /** * TO DO: Remove this interface when Maven moves to plexus-utils version 1.4 */ private interface FilterWrapper { Reader getReader(Reader fileReader, Properties filterProperties); } /** * Converts the filename of an artifact to artifactId-version.type format. 
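 * For example, an artifact with artifactId {@code my-plugin}, version {@code 1.0} and a
 * handler extension of {@code hpi} (illustrative values) is converted to {@code my-plugin-1.0.hpi}.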
 *
 * @param artifact the artifact whose file name is converted
 * @return converted filename of the artifact
 */
private String getDefaultFinalName(Artifact artifact) {
    return artifact.getArtifactId() + "-" + artifact.getVersion() + "."
            + artifact.getArtifactHandler().getExtension();
}

protected void setAttributes(Section mainSection) throws MojoExecutionException, ManifestException {
    JavaClass javaClass = findPluginClass();
    if (javaClass == null)
        throw new MojoExecutionException("Unable to find a plugin class. Did you put @plugin in javadoc?");
    mainSection.addAttributeAndCheck(new Attribute("Plugin-Class",
            javaClass.getPackage() + "." + javaClass.getName()));
    mainSection.addAttributeAndCheck(new Attribute("Long-Name", pluginName));
    String v = project.getVersion();
    if (v.endsWith("-SNAPSHOT")) {
        String dt = new SimpleDateFormat("MM/dd/yyyy hh:mm").format(new Date());
        v += " (private-" + dt + "-" + System.getProperty("user.name") + ")";
    }
    mainSection.addAttributeAndCheck(new Attribute("Plugin-Version", v));
}

/**
 * Finds a class that carries the "@plugin" javadoc tag.
 */
private JavaClass findPluginClass() {
    JavaDocBuilder builder = new JavaDocBuilder();
    for (Object o : project.getCompileSourceRoots())
        builder.addSourceTree(new File((String) o));

    // look for the first class in each source file that declares the @plugin javadoc tag
    for (JavaSource js : builder.getSources()) {
        JavaClass jc = js.getClasses()[0];
        if (jc.getTagByName("plugin") != null)
            return jc;
    }
    return null;
}
}
added "Plugin-Dependencies" entry
src/main/java/org/jvnet/hudson/maven/plugins/hpi/AbstractHpiMojo.java
added "Plugin-Dependencies" entry
Java
mit
3c4e8372f91de82b12da8e4357105eeb50ff210f
0
jajja/jorm
/* * Copyright (C) 2013 Jajja Communications AB * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.jajja.jorm; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.jajja.jorm.Composite.Value; import com.jajja.jorm.generator.Generator; /** * <p> * Records provide the main interface for viewing and modifying data stored in * database tables. For generic SQL queries, examine {@link Transaction} * instead. For SQL syntax, examine {@link Query} * </p> * <p> * Records are not thread-safe! In fact, shared records will use thread-local * transactions with possibly unpredictable results for seemingly synchronized * execution. * </p> * <h3>Object relational mapping</h3> * <p> * Record template implementations can be automated according to JDBC types, * using the {@link Generator#string(String)} and * {@link Generator#string(String, String)} methods for targeted tables. 
 * </p>
 * <p>
 * <strong>From SQL:</strong>
 *
 * <pre>
 * CREATE TABLE phrases (
 *     id        serial  NOT NULL,
 *     phrase    varchar NOT NULL,
 *     locale_id integer NOT NULL,
 *     PRIMARY KEY (id),
 *     UNIQUE (phrase, locale_id),
 *     FOREIGN KEY (locale_id) REFERENCES locales (id) ON DELETE CASCADE
 * )
 * </pre>
 *
 * </p>
 * <p>
 * <strong>To Java:</strong>
 *
 * <pre>
 * &#064;Table(database = &quot;default&quot;, table = &quot;phrases&quot;, id = &quot;id&quot;)
 * public class Phrase extends Record {
 *
 *     public Integer getId() {
 *         return get(&quot;id&quot;, Integer.class);
 *     }
 *
 *     public void setId(Integer id) {
 *         set(&quot;id&quot;, id);
 *     }
 *
 *     public String getPhrase() {
 *         return get(&quot;phrase&quot;, String.class);
 *     }
 *
 *     public void setPhrase(String phrase) {
 *         set(&quot;phrase&quot;, phrase);
 *     }
 *
 *     public Integer getLocaleId() {
 *         return get(&quot;locale_id&quot;, Integer.class);
 *     }
 *
 *     public void setLocaleId(Integer id) {
 *         set(&quot;locale_id&quot;, id);
 *     }
 *
 *     public Locale getLocale() {
 *         return get(&quot;locale_id&quot;, Locale.class);
 *     }
 *
 *     public void setLocale(Locale locale) {
 *         set(&quot;locale_id&quot;, locale);
 *     }
 *
 * }
 * </pre>
 *
 * </p>
 * <p>
 * Note that related records are cached by the method
 * {@link Record#get(String, Class)}. Cache invalidation upon change of foreign
 * keys is maintained in records. Further control can be achieved by overriding
 * {@link Record#notifyFieldChanged(Symbol, Object)}.
 * </p>
 *
 * @see Jorm
 * @see Query
 * @author Andreas Allerdahl <[email protected]>
 * @author Martin Korinth <[email protected]>
 * @author Daniel Adolfsson <[email protected]>
 * @since 1.0.0
 */
public abstract class Record {
    Map<Symbol, Field> fields = new HashMap<Symbol, Field>();
    private Table table;
    private boolean isStale = false;
    private boolean isReadOnly = false;
    private static Map<Class<? extends Record>, Log> logs = new ConcurrentHashMap<Class<? extends Record>, Log>(16, 0.75f, 1);

    public static enum ResultMode {
        /** For both INSERTs and UPDATEs, fully repopulate record(s). This is the default. */
        REPOPULATE,
        /** For INSERTs, fetch only generated keys, mark record(s) as stale. For UPDATEs, this is equivalent to NO_RESULT. */
        ID_ONLY,
        /** Fetch nothing, mark record as stale and assume the primary key value is accurate. */
        NO_RESULT;
    }

    public static class Field {
        private Object value = null;
        private boolean isChanged = false;
        private Record reference = null;

        private Field() {}

        void setValue(Object value) {
            this.value = value;
        }

        public Object getValue() {
            return value;
        }

        void setChanged(boolean isChanged) {
            this.isChanged = isChanged;
        }

        boolean isChanged() {
            return isChanged;
        }

        void setReference(Record reference) {
            this.reference = reference;
        }

        public Record getReference() {
            return reference;
        }
    }

    private Field getOrCreateField(Symbol symbol) {
        Field field = fields.get(symbol);
        if (field == null) {
            field = new Field();
            fields.put(symbol, field);
        }
        return field;
    }

    /**
     * Provides a cached log for the specified class. Stores a new log in the
     * cache if no preceding call with the given class has been made.
     *
     * @param clazz
     *            the class defining log instance.
     * @return the class specific cached log.
     */
    public static Log log(Class<? extends Record> clazz) {
        Log log = logs.get(clazz);
        if (log == null) {
            synchronized (logs) {
                log = logs.get(clazz);
                if (log == null) {
                    log = LogFactory.getLog(clazz);
                    logs.put(clazz, log);
                }
            }
        }
        return log;
    }

    /**
     * Provides the cached log for the instance class according to {@link Record#log(Class)}.
*/ public Log log() { return log(getClass()); } /** * Constructs a mapped record. Depends on {@link Jorm} annotation for table * mapping. */ public Record() { table = Table.get(getClass()); } /** * Constructs a mapped record. Mainly intended for anonymous record * instantiation such as the results from the transaction select methods * {@link Transaction#select(Query)}, * {@link Transaction#select(String, Object...)}, * {@link Transaction#selectAll(Query)} and * {@link Transaction#selectAll(String, Object...)}. * * @param table * the table mapping. */ public Record(Table table) { this.table = table; } /** * Instantiates a record class of the specified type. */ public static <T extends Record> T construct(Class<T> clazz) { try { return clazz.newInstance(); } catch (Exception e) { throw new RuntimeException("Failed to instantiate " + clazz, e); } } /** * Notifies field changes. The default implementation is empty, but provides * the option to override and act upon changes. This method is called * whenever {@link #set(String, Object)} or {@link #set(Symbol, Object)} * changes a field, or {@link #populate(ResultSet)} is called. * * @param symbol * the symbol of the column. * @param object * the value of the field after change. */ protected void notifyFieldChanged(Symbol symbol, Object object) { } public Value id() { return get(primaryKey()); } public Composite primaryKey() { return table.getPrimaryKey(); } public static Composite primaryKey(Class<? extends Record> clazz) { return Table.get(clazz).getPrimaryKey(); } public Value get(Composite composite) { return composite.valueFrom(this); } /** * Provides the table mapping for the record. * * @return the table mapping. */ public Table table() { return table; } /** * Provides an immutable view of the fields of the record. * * @return the fields. */ public Map<Symbol, Field> fields() { return Collections.unmodifiableMap(fields); } /** * <p> * Opens a thread local transaction to the database mapped by the record * class. If an open transaction already exists for the record class, it is * reused. This method is idempotent when called from the same thread. * </p> * <p> * This is corresponds to a call to {@link Database#open(String)} for the * database named by the class mapping of the record. Requires the given * class to be mapped by {@link Jorm}. * </p> * * @param clazz * the mapped record class. * @return the open transaction. */ public static Transaction open(Class<? extends Record> clazz) { return Database.open(Table.get(clazz).getDatabase()); } /** * <p> * Commits the thread local transaction to the named database mapped by the * record class, if it has been opened. * </p> * <p> * This is corresponds to a call to {@link Database#commit(String)} for the * database named by the class mapping of the record. Requires the given * class to be mapped by {@link Jorm}. * </p> * * @param clazz * the mapped record class. * @return the committed transaction or null for no active transaction. */ public static Transaction commit(Class<? extends Record> clazz) throws SQLException { return Database.commit(Table.get(clazz).getDatabase()); } /** * <p> * Closes the thread local transaction to the named database mapped by the * record class, if it has been opened. This method is idempotent when * called from the same thread. * </p> * <p> * This is corresponds to a call to {@link Database#close(String)} for the * database named by the class mapping of the record. Requires the given * class to be mapped by {@link Jorm}. 
* </p> * * @param clazz * the mapped record class. * @return the closed transaction or null for no active transaction. */ public static Transaction close(Class<? extends Record> clazz) { return Database.close(Table.get(clazz).getDatabase()); } /** * <p> * Opens a thread local transaction to the named database mapped by the * record. If an open transaction already exists for the record, it is * reused. This method is idempotent when called from the same thread. * </p> * <p> * This is corresponds to a call to {@link Database#open(String)} for the * database named by the table mapping of the record. * </p> * * @return the open transaction. */ public Transaction open() { return Database.open(table.getDatabase()); } /** * <p> * Commits the thread local transaction to the named database mapped by the * record, if it has been opened. * </p> * <p> * This is corresponds to a call to {@link Database#commit(String)} for the * database named by the table mapping of the record. * </p> * <p> * <strong>Note:</strong> This may cause changes of other records to be * persisted to the mapped database of the record, since all records mapped * to the same named database share transaction in the context of the * current thread. * </p> * * @throws SQLException * if a database access error occurs. * @return the committed transaction or null for no active transaction. */ public Transaction commit() throws SQLException { return Database.commit(table.getDatabase()); } /** * <p> * Closes the thread local transaction to the named database mapped by the * record, if it has been opened. This method is idempotent when called from * the same thread. * </p> * <p> * This is corresponds to a call to {@link Database#close(String)} for the * database named by the table mapping of the record. * </p> * <p> * <strong>Note:</strong> This may cause changes of other records to be * discarded in the mapped database of the record, since all records mapped * to the same named database share transaction in the context of the * current thread. * </p> * * @return the closed transaction or null for no active transaction. */ public Transaction close() { return Database.close(table.getDatabase()); } /** * Populates the record with the first result for which the given column name * matches the given value. * * @param symbol * the column symbol. * @param value * the value to match. * @return true if the record could be updated with a matching row from the * table. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean populateByComposite(Composite composite, Value value) throws SQLException { return selectInto(getSelectQuery(getClass(), composite, value)); } /** * Populates the record with the result for which the id column matches the * given value. * * @param id * the id value to match. * @return true if the record could be updated with a matching row from the * table. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. 
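 * <p>
 * A minimal usage sketch (the {@code Phrase} mapping from the class documentation and the
 * id value are illustrative):
 * <pre>
 * Phrase phrase = new Phrase();
 * boolean found = phrase.populateById(phrase.primaryKey().value(42));
 * </pre>
 * </p>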
*/ public boolean populateById(Value id) throws SQLException { return populateByComposite(primaryKey(), id); } private static <T extends Record> Query getSelectQuery(Class<T> clazz) { return Table.get(clazz).getSelectQuery(open(clazz).getDialect()); } private static <T extends Record> Query getSelectQuery(Class<T> clazz, Composite composite, Object value) { Value v; if (value instanceof Value) { v = (Value)value; } else { v = primaryKey(clazz).value(value); } composite.assertCompatible(v); Dialect dialect = open(clazz).getDialect(); Query query = Table.get(clazz).getSelectQuery(dialect); query.append("WHERE "); query.append(dialect.toSqlExpression(composite, v)); return query; } /** * Builds a generic SQL query for the record. * * @param sql * the SQL statement to represent the query. * @return the built query. */ public Query build(String sql) { return new Query(open().getDialect(), sql); } /** * Builds a generic SQL query for the record and quotes identifiers from the * given parameters according to the SQL dialect of the mapped database of * the record. * * @param sql * the Jorm SQL statement to represent the query. * @param params * the parameters applying to the SQL hash markup. * @return the built query. */ public Query build(String sql, Object... params) { return new Query(open().getDialect(), sql, params); } /** * Builds a generic SQL query for a given record class. * * @param clazz * the mapped record class. * @param sql * the SQL statement to represent the query. * @return the built query. */ public static Query build(Class<? extends Record> clazz, String sql) { return new Query(open(clazz).getDialect(), sql); } /** * Builds a generic SQL query for a given record class and quotes * identifiers from the given parameters according to the SQL dialect of the * mapped database of the record class. * * @param clazz * the mapped record class. * @param sql * the Jorm SQL statement to represent the query. * @param params * the parameters applying to the SQL hash markup. * @return the built query. * @return the built query. */ public static Query build(Class<? extends Record> clazz, String sql, Object... params) { return new Query(open(clazz).getDialect(), sql, params); } /** * Provides a selected record from the mapped database table, populated with * the first result for which the primary key matches. * * @param clazz * the class defining the table mapping. * @param composite * the composite key * @param value * the composite key value * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T find(Class<T> clazz, Composite composite, Object value) throws SQLException { return select(clazz, getSelectQuery(clazz, composite, value)); } /** * Provides a complete list of selected records from the mapped database * table, populated with the results for which the composite key matches. * * @param clazz * the class defining the table mapping. * @param composite * the composite key * @param value * the composite key value * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. 
*/ public static <T extends Record> List<T> findAll(Class<T> clazz, Composite composite, Value value) throws SQLException { return selectAll(clazz, getSelectQuery(clazz, composite, value)); } public static <T extends Record> List<T> findAll(Class<T> clazz) throws SQLException { return selectAll(clazz, getSelectQuery(clazz)); } /** * Provides a complete list of selected reference records of a given class * referring to the mapped record through a given foreign key column. * * @param clazz * the class of the records referring to the mapped record. * @param column * the column defining the foreign key for the reference records. * @return the matched references. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public <T extends Record> List<T> findReferences(Class<T> clazz, String column) throws SQLException { return findReferences(clazz, Symbol.get(column)); } /** * Provides a complete list of selected reference records of a given class * referring to the mapped record through a given foreign key column. * * @param clazz * the class of the records referring to the mapped record. * @param symbol * the symbol of the column defining the foreign key for the * reference records. * @return the matched references. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public <T extends Record> List<T> findReferences(Class<T> clazz, Symbol symbol) throws SQLException { Table table = Table.get(clazz); return selectAll(clazz, "SELECT * FROM #1# WHERE #:2# = #3#", table, symbol, get(symbol)); } /** * Provides a selected record, populated with the result for which the primary key * column matches the given id value. * * @param clazz * the class defining the table mapping. * @param id * the primary key value (can be either a {@link Composite.Value} or a single column value). * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T findById(Class<T> clazz, Object id) throws SQLException { return find(clazz, primaryKey(clazz), id); } /** * Provides a selected record, populated with the first result from the * query given by a plain SQL statement and applicable parameters. * * @param clazz * the class defining the table mapping. * @param sql * the plain SQL statement. * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T select(Class<T> clazz, String sql) throws SQLException { return select(clazz, new Query(open(clazz).getDialect(), sql)); } /** * Provides a selected record, populated with the first result from the * query given by a Jorm SQL statement and applicable parameters. * * @param clazz * the class defining the table mapping. * @param sql * the Jorm SQL statement. * @param params * the applicable parameters. * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T select(Class<T> clazz, String sql, Object... params) throws SQLException { return select(clazz, new Query(open(clazz).getDialect(), sql, params)); } /** * Provides a selected record, populated with the first result from the * given query. 
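 * <p>
 * A hypothetical usage sketch, reusing the {@code Phrase} mapping from the class
 * documentation and the hash markup used elsewhere in this class:
 * <pre>
 * Query query = Record.build(Phrase.class, &quot;SELECT * FROM #1# WHERE #:2# = #3#&quot;,
 *         Table.get(Phrase.class), Symbol.get(&quot;locale_id&quot;), 1);
 * Phrase phrase = Record.select(Phrase.class, query);
 * </pre>
 * </p>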
* * @param clazz * the class defining the table mapping. * @param query * the query. * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T select(Class<T> clazz, Query query) throws SQLException { T record = construct(clazz); if (record.selectInto(query)) { return record; } return null; } /** * Provides a list of selected records, populated with the results from the * query given by a plain SQL statement. * * @param clazz * the class defining the table mapping. * @param sql * the plain SQL statement. * @return the matched records * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> selectAll(Class<T> clazz, String sql) throws SQLException { return selectAll(clazz, new Query(open(clazz).getDialect(), sql)); } /** * Provides a list of selected records, populated with the results from the * query given by a Jorm SQL statement and applicable parameters. * * @param clazz * the class defining the table mapping. * @param sql * the Jorm SQL statement. * @param params * the applicable parameters. * @return the matched records * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> selectAll(Class<T> clazz, String sql, Object... params) throws SQLException { return selectAll(clazz, new Query(open(clazz).getDialect(), sql, params)); } /** * Provides a list of selected records, populated with the results from the * given query. * * @param clazz * the class defining the table mapping. * @param query * the query. * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> selectAll(Class<T> clazz, Query query) throws SQLException { PreparedStatement preparedStatement = open(clazz).prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; LinkedList<T> records = new LinkedList<T>(); try { resultSet = preparedStatement.executeQuery(); SymbolMap symbolMap = new SymbolMap(resultSet.getMetaData()); while (resultSet.next()) { T record = construct(clazz); symbolMap.populate(record, resultSet); records.add(record); } } catch (SQLException sqlException) { open(clazz).getDialect().rethrow(sqlException, query.getSql()); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { preparedStatement.close(); } } return records; } /** * Provides a hash map of selected records, populated with the results from the * given query. * * @param clazz * the class defining the table mapping. * @param column * the column to use as key. * @param query * the query. * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. 
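 * <p>
 * A hypothetical usage sketch, keyed by the primary key of the assumed {@code Phrase} mapping:
 * <pre>
 * Map&lt;Composite.Value, Phrase&gt; phrasesById = Record.selectAsMap(Phrase.class,
 *         Record.primaryKey(Phrase.class), false,
 *         Record.build(Phrase.class, &quot;SELECT * FROM #1#&quot;, Table.get(Phrase.class)));
 * </pre>
 * </p>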
*/ public static <T extends Record> Map<Composite.Value, T> selectAsMap(Class<T> clazz, Composite compositeKey, boolean allowDuplicates, Query query) throws SQLException { PreparedStatement preparedStatement = open(clazz).prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; HashMap<Composite.Value, T> records = new HashMap<Composite.Value, T>(); try { resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { T record = construct(clazz); record.populate(resultSet); Value value = compositeKey.valueFrom(record); if (records.put(value, record) != null && !allowDuplicates) { throw new IllegalStateException("Duplicate key " + value); } } } catch (SQLException sqlException) { open(clazz).getDialect().rethrow(sqlException, query.getSql()); } finally { if (resultSet != null) resultSet.close(); preparedStatement.close(); } return records; } public static <T extends Record> Map<Composite.Value, T> selectAsMap(Class<T> clazz, Composite compositeKey, boolean allowDuplicates, String sql, Object... params) throws SQLException { return selectAsMap(clazz, compositeKey, allowDuplicates, new Query(open(clazz).getDialect(), sql, params)); } /** * Provides a hash map of selected records, populated with the results from the * given query. * * @param clazz * the class defining the table mapping. * @param column * the column to use as key. * @param query * the query. * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> Map<Composite.Value, List<T>> selectAllAsMap(Class<T> clazz, Composite compositeKey, Query query) throws SQLException { PreparedStatement preparedStatement = open(clazz).prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; HashMap<Composite.Value, List<T>> records = new HashMap<Composite.Value, List<T>>(); try { resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { T record = construct(clazz); record.populate(resultSet); Value value = compositeKey.valueFrom(record); List<T> list = records.get(value); if (list == null) { list = new LinkedList<T>(); records.put(value, list); } list.add(record); } } catch (SQLException sqlException) { open(clazz).getDialect().rethrow(sqlException, query.getSql()); } finally { if (resultSet != null) resultSet.close(); preparedStatement.close(); } return records; } public static <T extends Record> Map<Composite.Value, List<T>> selectAllAsMap(Class<T> clazz, Composite compositeKey, String sql, Object... params) throws SQLException { return selectAllAsMap(clazz, compositeKey, new Query(open(clazz).getDialect(), sql, params)); } /** * Executes the query given by a plain SQL statement and applicable * parameters and populates the record with the first row of the result. Any * values in the record object are cleared if the record was previously * populated. * * @param sql * the plain SQL statement. * @return true if the record was populated, otherwise false. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean selectInto(String sql) throws SQLException { return selectInto(new Query(open().getDialect(), sql)); } /** * Executes the query given by a Jorm SQL statement and applicable * parameters and populates the record with the first row of the result. Any * values in the record object are cleared if the record was previously * populated. * * @param sql * the Jorm SQL statement. 
* @param params * the applicable parameters. * @return true if the record was populated, otherwise false. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean selectInto(String sql, Object... params) throws SQLException { return selectInto(new Query(open().getDialect(), sql, params)); } /** * Executes the given query and populates the record with the first row of * the result. Any values in the record object are cleared if the record was * previously populated. * * @param query * the query. * @return true if the record was populated, otherwise false. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean selectInto(Query query) throws SQLException { PreparedStatement preparedStatement = open().prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; try { resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { populate(resultSet); return true; } } catch (SQLException sqlException) { open().getDialect().rethrow(sqlException, query.getSql()); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { preparedStatement.close(); } } return false; } /** * Populates all records in the given collection of records with a single * prefetched reference of the given record class. Existing cached * references are not overwritten. * * @param records * the records to populate with prefetched references. * @param foreignKeySymbol * the symbol defining the foreign key to the referenced records. * @param clazz * the class of the referenced records. * @param referredSymbol * the symbol defining the referred column of the referenced * records. * @return the prefetched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> prefetch(Collection<? extends Record> records, Symbol foreignKeySymbol, Class<T> clazz, Symbol referredSymbol) throws SQLException { Set<Object> values = new HashSet<Object>(); for (Record record : records) { Field field = record.fields.get(foreignKeySymbol); if (field != null && field.getValue() != null && field.getReference() == null) { values.add(field.getValue()); } } if (values.isEmpty()) { return new LinkedList<T>(); } List<T> referenceRecords = selectAll(clazz, "SELECT * FROM #1# WHERE #2# IN (#3#)", Table.get(clazz), referredSymbol, values); Map<Object, Record> map = new HashMap<Object, Record>(); for (Record referenceRecord : referenceRecords) { map.put(referenceRecord.get(referredSymbol), referenceRecord); } for (Record record : records) { Field field = record.fields.get(foreignKeySymbol); if (field != null && field.getValue() != null && field.getReference() == null) { Record referenceRecord = map.get(field.getValue()); if (referenceRecord == null) { throw new IllegalStateException(field.getValue() + " not present in " + Table.get(clazz).getTable() + "." + referredSymbol.getName()); } record.set(foreignKeySymbol, referenceRecord); } } return referenceRecords; } /** * Populates all records in the given collection of records with a single * prefetched reference of the given record class. Existing cached * references are not overwritten. * * @param records * the records to populate with prefetched references. * @param foreignKeySymbol * the column name defining the foreign key to the referenced records. 
* @param clazz * the class of the referenced records. * @param referredSymbol * the column name defining the referred column of the referenced * records. * @return the prefetched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> prefetch(Collection<? extends Record> records, String foreignKeySymbol, Class<T> clazz, String referredSymbol) throws SQLException { return prefetch(records, Symbol.get(foreignKeySymbol), clazz, Symbol.get(referredSymbol)); } /** * Populates the record with the first row of the result. Any values in the * record object are cleared if the record was previously populated. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void populate(ResultSet resultSet) throws SQLException { SymbolMap symbolMap = new SymbolMap(resultSet.getMetaData()); symbolMap.populate(this, resultSet); } public static class SymbolMap { private Symbol[] symbols; private Set<Symbol> symbolSet = new HashSet<Symbol>(); public SymbolMap(ResultSetMetaData resultSetMetaData) throws SQLException { symbols = new Symbol[resultSetMetaData.getColumnCount()]; symbolSet = new HashSet<Symbol>(symbols.length + 1, 1.0f); // + 1 to prevent resize for (int i = 0; i < symbols.length; i++) { symbols[i] = Symbol.get(resultSetMetaData.getColumnLabel(i + 1)); symbolSet.add(symbols[i]); } } public void populate(Record record, ResultSet resultSet) throws SQLException { for (int i = 0; i < symbols.length; i++) { record.isStale = false; try { record.put(symbols[i], resultSet.getObject(i + 1)); } catch (SQLException sqlException) { record.open().getDialect().rethrow(sqlException); } finally { record.isStale = true; // lol exception } record.isStale = false; } Iterator<Symbol> i = record.fields.keySet().iterator(); while (i.hasNext()) { Symbol symbol = i.next(); if (!contains(symbol)) { record.unset(symbol); } } record.purify(); } public boolean contains(Symbol symbol) { return symbolSet.contains(symbol); } } private boolean isPrimaryKeyNullOrChanged() { for (Symbol symbol : primaryKey().getSymbols()) { Field field = fields.get(symbol); if (field == null || field.getValue() == null || field.isChanged()) { return true; } } return false; } private boolean isPrimaryKeyNull() { for (Symbol symbol : primaryKey().getSymbols()) { Field field = fields.get(symbol); if (field == null || field.getValue() == null) { return true; } } return false; } private void assertPrimaryKeyNotNull() { if (isPrimaryKeyNull()) { throw new IllegalStateException("Primary key contains NULL value(s)"); } } /** * Save the record. This is done by a call to {@link #insert()} if the id * field is null, unset or changed, otherwise by a call to {@link #update()}. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void save(ResultMode mode) throws SQLException { checkReadOnly(); if (isPrimaryKeyNullOrChanged()) { insert(mode); } else { update(mode); } } public void save() throws SQLException { save(ResultMode.REPOPULATE); } /** * Batch saves the records. This is done by a call to {@link #insert()} if the id * field is null, unset or changed, otherwise by a call to {@link #update()}. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void save(Collection<? 
extends Record> records, int batchSize, ResultMode mode) throws SQLException { List<Record> insertRecords = new LinkedList<Record>(); List<Record> updateRecords = new LinkedList<Record>(); for (Record record : records) { if (record.isPrimaryKeyNullOrChanged()) { insertRecords.add(record); } else { updateRecords.add(record); } } insert(insertRecords, batchSize, mode); update(updateRecords, batchSize, mode); } /** * Batch saves the records. This is done by a call to {@link #insert()} if the id * field is null, unset or changed, otherwise by a call to {@link #update()}. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void save(Collection<? extends Record> records) throws SQLException { save(records, 0, ResultMode.REPOPULATE); } /** * Deletes the record row from the database by executing the SQL query "DELETE FROM [tableName] WHERE [primaryKey] = [primaryKeyColumnValue]". * The primary key column value is also set to null. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void delete() throws SQLException { checkReadOnly(); Dialect dialect = open().getDialect(); Composite primaryKey = primaryKey(); Query query = new Query(dialect, "DELETE FROM #1# WHERE #2#", table, dialect.toSqlExpression(primaryKey, id())); PreparedStatement preparedStatement = open().prepare(query); try { preparedStatement.execute(); } finally { preparedStatement.close(); } for (Symbol symbol : primaryKey.getSymbols()) { put(symbol, null); } } /** * Deletes multiple records by exeuting a DELETE FROM table WHERE id IN (...) * * @param records List of records to delete (must be of the same class, and bound to the same Database) * @throws SQLException * if a database access error occurs. */ public static void delete(Collection<? extends Record> records) throws SQLException { Record template = null; String database = null; for (Record record : records) { if (template != null) { if (!template.getClass().equals(record.getClass())) { throw new IllegalArgumentException("all records must be of the same class"); } if (!database.equals(record.table.getDatabase())) { throw new IllegalArgumentException("all records must be bound to the same Database"); } } else { template = record; database = record.table.getDatabase(); } record.checkReadOnly(); } if (template == null) { return; } Query query = new Query(template.open(), "DELETE FROM #1# WHERE", template.getClass()); Composite primaryKey = template.primaryKey(); Dialect dialect = template.open().getDialect(); if (primaryKey.isSingle()) { query.append("#:1# IN (#2:@#)", primaryKey, records); } else { if (dialect.isRowWiseComparisonSupported()) { query.append(" (#:1#) IN (", primaryKey); boolean isFirst = true; for (Record record : records) { query.append(isFirst ? "(#1#)" : ", (#1#)", record.id()); isFirst = false; } query.append(")"); } else { boolean isFirst = true; for (Record record : records) { query.append(isFirst ? " (#1#)" : " OR (#1#)", dialect.toSqlExpression(primaryKey, record.id())); isFirst = false; } } } template.open().execute(query); } /** * Marks all fields as changed. */ public void taint() { for (Entry<Symbol, Field> entry : fields.entrySet()) { Symbol symbol = entry.getKey(); Field field = entry.getValue(); if (!table.isImmutable(symbol) && !primaryKey().contains(symbol)) { field.setChanged(true); } } } /** * Marks all fields as unchanged. 
*/ public void purify() { for (Field field : fields.values()) { field.setChanged(false); } } /** * Determines whether the record has been changed or not. * * @return true if at least one field has been changed, otherwise false. */ public boolean isChanged() { for (Field field : fields.values()) { if (field.isChanged()) { return true; } } return false; } /** * Marks this record as stale. It will be re-populated on the next call to * {@link #set(String, Object)}, {@link #set(Symbol, Object)}, * {@link #get(String)}, {@link #get(Symbol)} or {@link #refresh()}, * whichever comes first. */ public void markStale() { isStale = true; } /** * Determines whether the record is stale or not, i.e. needs to be * re-populated in any upcoming call to {@link #set(String, Object)}, * {@link #set(Symbol, Object)}, {@link #get(String)}, {@link #get(Symbol)} * or {@link #refresh()}, whichever comes first. * * @return true if the record is stale otherwise false. */ public boolean isStale() { return isStale; } private static List<? extends Record> batchChunk(Iterator<? extends Record> iterator, int size) { List<Record> records = null; if (iterator.hasNext()) { do { Record record = iterator.next(); if (record.isChanged()) { if (records == null) { records = new ArrayList<Record>(size); } records.add(record); size--; } } while (size > 0 && iterator.hasNext()); } return records; } private static class BatchInfo { private Set<Symbol> columns = new HashSet<Symbol>(); private Record template = null; } private static BatchInfo batchInfo(Collection<? extends Record> records) { BatchInfo batchInfo = new BatchInfo(); for (Record record : records) { record.checkReadOnly(); if (batchInfo.template == null) { batchInfo.template = record; } if (!batchInfo.template.getClass().equals(record.getClass())) { throw new IllegalArgumentException("all records must be of the same class"); } if (!batchInfo.template.table.getDatabase().equals(record.table.getDatabase())) { throw new IllegalArgumentException("all records must be bound to the same Database"); } batchInfo.columns.addAll( record.fields.keySet() ); } String immutablePrefix = batchInfo.template.table.getImmutablePrefix(); if (batchInfo.template != null && immutablePrefix != null) { for (Symbol symbol : batchInfo.columns) { if (symbol.getName().startsWith(immutablePrefix)) { batchInfo.columns.remove(symbol); } } } return batchInfo; } private static void batchExecute(Query query, Collection<? extends Record> records, ResultMode mode) throws SQLException { PreparedStatement preparedStatement = null; ResultSet resultSet = null; Record template = records.iterator().next(); Transaction transaction = template.open(); Table table = template.table(); Composite primaryKey = template.primaryKey(); Dialect dialect = transaction.getDialect(); // XXX UPDATE + REPOPULATE? 
if (mode != ResultMode.NO_RESULT && !primaryKey.isSingle() && !dialect.isReturningSupported()) { throw new UnsupportedOperationException("Batch operations on composite primary keys not supported by JDBC, and possibly your database (consider using ResultMode.NO_RESULT)"); } try { boolean useReturning = (mode == ResultMode.REPOPULATE) && dialect.isReturningSupported(); Map<Object, Record> map = null; if (useReturning) { query.append(" RETURNING *"); // XXX ID_ONLY support preparedStatement = transaction.prepare(query.getSql(), query.getParams()); resultSet = preparedStatement.executeQuery(); } else { preparedStatement = transaction.prepare(query.getSql(), query.getParams(), true); preparedStatement.execute(); resultSet = preparedStatement.getGeneratedKeys(); if (mode == ResultMode.REPOPULATE) { map = new HashMap<Object, Record>(); } } SymbolMap symbolMap = null; for (Record record : records) { if (!resultSet.next()) { throw new IllegalStateException("too few rows returned?"); } if (useReturning) { // RETURNING rocks! if (symbolMap == null) { symbolMap = new SymbolMap(resultSet.getMetaData()); } symbolMap.populate(record, resultSet); } else { Field field = record.getOrCreateField(primaryKey.getSymbol()); field.setValue(resultSet.getObject(1)); field.setChanged(false); if (mode == ResultMode.REPOPULATE) { if (map == null) throw new IllegalStateException("bug"); map.put(field.getValue(), record); record.isStale = false; // actually still stale } } } if (!useReturning && mode == ResultMode.REPOPULATE) { if (map == null) throw new IllegalStateException("bug"); resultSet.close(); resultSet = null; preparedStatement.close(); preparedStatement = null; // records must not be stale, or Query will generate SELECTs Query q = table.getSelectQuery(dialect).append("WHERE #1# IN (#2:@#)", primaryKey.getSymbol(), records); preparedStatement = transaction.prepare(q); resultSet = preparedStatement.executeQuery(); int idColumn = resultSet.findColumn(primaryKey.getSymbol().getName()); if (Dialect.DatabaseProduct.MYSQL.equals(dialect.getDatabaseProduct())) { while (resultSet.next()) { map.get(resultSet.getLong(idColumn)).populate(resultSet); } } else { while (resultSet.next()) { map.get(resultSet.getObject(idColumn)).populate(resultSet); } } } } catch (SQLException sqlException) { // records are in an unknown state, mark them stale for (Record record : records) { record.markStale(); } dialect.rethrow(sqlException); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { if (preparedStatement != null) { preparedStatement.close(); } } } } /** * Inserts the record's changed values into the database by executing an SQL INSERT query. * The record's primary key value is set to the primary key generated by the database. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void insert(ResultMode mode) throws SQLException { checkReadOnly(); if (isStale) { return; } if (mode != ResultMode.NO_RESULT && !primaryKey().isSingle() && !open().getDialect().isReturningSupported()) { throw new UnsupportedOperationException("INSERT with composite primary key not supported by JDBC, and possibly your database (consider using ResultMode.NO_RESULT)"); } Query query = new Query(open().getDialect()); query.append("INSERT INTO #1# (", table); boolean isFirst = true; for (Entry<Symbol, Field> entry : fields.entrySet()) { if (entry.getValue().isChanged()) { query.append(isFirst ? 
"#:1#" : ", #:1#", entry.getKey()); isFirst = false; } } if (isFirst) { // No fields are marked as changed, but we need to insert something... INSERT INTO foo DEFAULT VALUES is not supported on all databases query.append("#1#", primaryKey()); for (int i = 0; i < primaryKey().getSymbols().length; i++) { query.append(i == 0 ? ") VALUES (DEFAULT" : ", DEFAULT"); } } else { query.append(") VALUES ("); isFirst = true; for (Field field : fields.values()) { if (field.isChanged()) { if (field.getValue() instanceof Query) { query.append(isFirst ? "#1#" : ", #1#", field.getValue()); } else { query.append(isFirst ? "#?1#" : ", #?1#", field.getValue()); } isFirst = false; } } query.append(")"); } markStale(); if (open().getDialect().isReturningSupported()) { query.append(" RETURNING *"); // XXX ID_ONLY support selectInto(query); } else { PreparedStatement preparedStatement = open().prepare(query.getSql(), query.getParams(), true); ResultSet resultSet = null; Object id = null; try { preparedStatement.execute(); resultSet = preparedStatement.getGeneratedKeys(); if (resultSet.next()) { id = resultSet.getObject(1); } } catch (SQLException e) { throw open().getDialect().rethrow(e, query.getSql()); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { preparedStatement.close(); } } if (id == null) { throw new RuntimeException("INSERT to " + table.toString() + " did not generate a key (AKA insert id): " + query.getSql()); } Field field = getOrCreateField(primaryKey().getSymbol()); field.setValue(id); field.setChanged(false); } } public void insert() throws SQLException { insert(ResultMode.REPOPULATE); } /** * Executes a batch INSERT (INSERT INTO ... (columns...) VALUES (row1), (row2), (row3), ...) and repopulates the list with stored entities. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void insert(Collection<? extends Record> records, ResultMode mode) throws SQLException { insert(records, 0, mode); } public static void insert(Collection<? extends Record> records) throws SQLException { insert(records, 0, ResultMode.REPOPULATE); } /** * Executes a batch INSERT (INSERT INTO ... (columns...) VALUES (row1), (row2), (row3), ...). * * For large sets of records, the use of chunkSize is recommended to avoid out-of-memory errors and too long SQL queries. * * Setting isFullRepopulate to true will re-populate the record fields with fresh values. This will generate * an additional SELECT query for every chunk of records for databases that do not support RETURNING. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @param chunkSize Splits the records into chunks, <= 0 disables * @param isFullRepopulate Whether or not to fully re-populate the record fields, or just update their primary key value and markStale() * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void insert(Collection<? extends Record> records, int chunkSize, ResultMode mode) throws SQLException { BatchInfo batchInfo = batchInfo(records); if (records.isEmpty()) { return; } if (chunkSize <= 0) { batchInsert(batchInfo, records, mode); } else { Iterator<? extends Record> iterator = records.iterator(); List<? 
extends Record> batch; while ((batch = batchChunk(iterator, chunkSize)) != null) { batchInsert(batchInfo, batch, mode); } } } private static void batchInsert(BatchInfo batchInfo, Collection<? extends Record> records, ResultMode mode) throws SQLException { Table table = batchInfo.template.table; Transaction transaction = batchInfo.template.open(); Dialect dialect = transaction.getDialect(); Query query = new Query(dialect); for (Symbol symbol : table.getPrimaryKey().getSymbols()) { batchInfo.columns.add(symbol); } query.append("INSERT INTO #1# (", table); boolean isFirst = true; for (Symbol column : batchInfo.columns) { query.append(isFirst ? "#:1#" : ", #:1#", column); isFirst = false; } if (isFirst) { throw new RuntimeException("zero columns to insert!"); } query.append(") VALUES "); isFirst = true; for (Record record : records) { query.append(isFirst ? "(" : ", ("); isFirst = false; boolean isColumnFirst = true; for (Symbol column : batchInfo.columns) { if (record.isFieldChanged(column)) { Object value = record.get(column); if (value instanceof Query) { query.append(isColumnFirst ? "#1#" : ", #1#", value); } else { query.append(isColumnFirst ? "#?1#" : ", #?1#", value); } } else { query.append(isColumnFirst ? "DEFAULT" : ", DEFAULT"); } isColumnFirst = false; } query.append(")"); record.markStale(); } batchExecute(query, records, mode); } /** * Updates the record's changed column values by executing an SQL UPDATE query. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void update(ResultMode mode) throws SQLException { checkReadOnly(); if (!isChanged()) { return; } if (isStale) { //throw new IllegalStateException("Attempting to update a stale record!"); return; } Query query = new Query(open().getDialect()); query.append("UPDATE #1# SET ", table); boolean isFirst = true; for (Entry<Symbol, Field> entry : fields.entrySet()) { Field field = entry.getValue(); if (field.isChanged()) { if (field.getValue() instanceof Query) { query.append(isFirst ? "#:1# = #2#" : ", #:1# = #2#", entry.getKey(), field.getValue()); } else { query.append(isFirst ? "#:1# = #?2#" : ", #:1# = #?2#", entry.getKey(), field.getValue()); } isFirst = false; } } assertPrimaryKeyNotNull(); query.append(" WHERE #1#", open().getDialect().toSqlExpression(primaryKey(), id())); markStale(); if (open().getDialect().isReturningSupported() && mode == ResultMode.REPOPULATE) { query.append(" RETURNING *"); selectInto(query); } else { open().executeUpdate(query); } } public void update() throws SQLException { update(ResultMode.REPOPULATE); } /** * Executes a batch UPDATE (UPDATE ... SET x = s.x, y = s.y FROM (values, ...) s WHERE id = s.id). * * Currently, this is only supported on PostgreSQL. The method will fall back to using individual update()s on other databases. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @throws SQLException * if a database access error occurs */ public static void update(Collection<? extends Record> records) throws SQLException { update(records, 0, ResultMode.REPOPULATE); } /** * Executes a batch UPDATE (UPDATE ... SET x = s.x, y = s.y FROM (values, ...) s WHERE id = s.id). * * For large sets of records, the use of chunkSize is recommended to avoid out-of-memory errors and too long SQL queries. * * Setting isFullRepopulate to true will re-populate the record fields with fresh values. * * Currently, this is only supported on PostgreSQL. 
The method will fall back to using individual update()s on other databases. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @param chunkSize Splits the records into chunks, <= 0 disables * @param isFullRepopulate Whether or not to fully re-populate the record fields, or just update their primary key value and markStale() * @throws SQLException * if a database access error occurs */ public static void update(Collection<? extends Record> records, int chunkSize, ResultMode mode) throws SQLException { BatchInfo batchInfo = batchInfo(records); if (records.isEmpty()) { return; } if (batchInfo.columns.isEmpty()) { throw new IllegalArgumentException("No columns to update"); } Dialect dialect = records.iterator().next().open().getDialect(); if (!Dialect.DatabaseProduct.POSTGRESQL.equals(dialect.getDatabaseProduct())) { for (Record record : records) { record.update(); } return; } if (chunkSize <= 0) { batchUpdate(batchInfo, records, mode); } else { Iterator<? extends Record> iterator = records.iterator(); List<? extends Record> batch; while ((batch = batchChunk(iterator, chunkSize)) != null) { batchUpdate(batchInfo, batch, mode); } } } private static void batchUpdate(final BatchInfo batchInfo, Collection<? extends Record> records, ResultMode mode) throws SQLException { Table table = batchInfo.template.table(); Transaction transaction = batchInfo.template.open(); Query query = new Query(transaction); String vTable = table.getTable().equals("v") ? "v2" : "v"; query.append("UPDATE #1# SET ", table); boolean isFirstColumn = true; for (Symbol column : batchInfo.columns) { query.append(isFirstColumn ? "#1# = #!2#.#1#" : ", #1# = #!2#.#1#", column, vTable); isFirstColumn = false; } query.append(" FROM (VALUES "); boolean isFirstValue = true; for (Record record : records) { if (record.isPrimaryKeyNull()) { throw new IllegalArgumentException("Record has unset or NULL primary key: " + record); } isFirstColumn = true; query.append(isFirstValue ? "(" : ", ("); for (Symbol column : batchInfo.columns) { Object value = record.get(column); if (value instanceof Query) { query.append(isFirstColumn ? "#1#" : ", #1#", value); } else { query.append(isFirstColumn ? "#?1#" : ", #?1#", value); } isFirstColumn = false; } query.append(")"); isFirstValue = false; } query.append(") #!1# (", vTable); isFirstColumn = true; for (Symbol column : batchInfo.columns) { query.append(isFirstColumn ? "#1#" : ", #1#", column); isFirstColumn = false; } query.append(") WHERE"); boolean isFirst = true; for (Symbol symbol : table.getPrimaryKey().getSymbols()) { if (isFirst) { isFirst = false; } else { query.append(" AND"); } query.append(" #1#.#2# = #:3#.#2#", table, symbol, vTable); } batchExecute(query, records, mode); } /** * Determines whether a field has been changed or not. * * @param symbol * the symbol of the column name defining the field. * @return true if the field has been changed, false otherwise. */ public boolean isFieldChanged(Symbol symbol) { Field field = fields.get(symbol); if (field == null) { return false; } return field.isChanged(); } /** * Returns true if specified class is a subclass of Record.class. */ public static boolean isRecordSubclass(Class<?> clazz) { return Record.class.isAssignableFrom(clazz) && !clazz.equals(Record.class); } /** * Re-populates a stale record with fresh database values by a select query. 
* A record is considered stale after a call to either * {@link Record#insert()} or {@link Record#update()}, if the SQL dialect of * the mapped database does not support returning. A record mapped to a * table in a Postgres database is thus never stale. * * * @throws RuntimeException * whenever a SQLException occurs. */ public void refresh() { if (isStale) { try { Value value = primaryKey().valueFrom(this, false); boolean allNull = true; for (Object v : value.getValues()) { if (v != null) { allNull = false; } } if (allNull) { throw new NullPointerException("Attempted to refresh record with null primary key value"); } populateById(primaryKey().valueFrom(this, false)); } catch (SQLException e) { throw new RuntimeException("Failed to refresh stale record", e); } isStale = false; } } /** * Sets the record as read only according to the given value. * * @param isReadOnly * the value determining read only state of the record. * @throws RuntimeException * whenever a record is set to read only without table mapping * provided by an {@link Jorm} annotation, i.e. on anonymous * records retrieved through calls to * {@link Transaction#select(Query)}, * {@link Transaction#select(String, Object...)}, * {@link Transaction#selectAll(Query)} and * {@link Transaction#selectAll(String, Object...)}. */ public void readOnly(boolean isReadOnly) { if (primaryKey() == null && isReadOnly) { throw new RuntimeException("Cannot mark anonymous records as read only!"); } this.isReadOnly = isReadOnly; } /** * Returns true if this record is read only. */ public boolean isReadOnly() { return isReadOnly; } private void checkReadOnly() { if (isReadOnly) { throw new RuntimeException("Record is read only!"); } } private boolean isChanged(Symbol symbol, Object newValue) { if (isReadOnly || table.isImmutable(symbol)) { return false; } Field field = fields.get(symbol); if (field == null) { return true; } Object oldValue = field.getValue(); if (oldValue == null && newValue == null) { return false; } else { return oldValue == null || !oldValue.equals(newValue); } } private void put(Symbol symbol, Object value) { refresh(); boolean isChanged; Field field = fields.get(symbol); if (field == null) { field = new Field(); } if (value != null && isRecordSubclass(value.getClass())) { Record record = (Record)value; if (!record.primaryKey().isSingle()) { throw new UnsupportedOperationException("Composite foreign key references are not supported"); } Object id = record.id().getValue(); if (id == null) { throw new NullPointerException("While setting " + record + "." + symbol.getName() + " = " + value + " -- id (primary key) is null -- perhaps you need to save()?"); } isChanged = isChanged(symbol, id); if (isChanged) { notifyFieldChanged(symbol, value); } field.setReference(record); field.setValue(id); } else { isChanged = isChanged(symbol, value); if (isChanged) { notifyFieldChanged(symbol, value); } if (isChanged) { field.setReference(null); // invalidate cached reference } field.setValue(value); } if (isChanged) { // it's OK to mark the id column as changed here field.setChanged(true); } fields.put(symbol, field); } /** * Sets the specified field corresponding to a column of the mapped record. * Any field values extending {@link Record} are cached until the field is * changed again, and the mapped id of the record is set as field value * instead. * * @param column * the name of the column corresponding to the field to set. * @param value * the value.
*/ public void set(String column, Object value) { set(Symbol.get(column), value); } /** * Sets the specified field corresponding to a column of the mapped record. * Any field values extending {@link Record} are cached until the field is * changed again, and the mapped id of the record is set as field value * instead. * * @param symbol * the symbol of the column corresponding to the field to set. * @param value * the value. */ public void set(Symbol symbol, Object value) { checkReadOnly(); put(symbol, value); } /** * Unsets the specified field corresponding to a column of the mapped record. * * @param column * the name of the column corresponding to the field to set. */ public void unset(String column) { unset(Symbol.get(column)); } /** * Unsets the specified field corresponding to a column of the mapped record. * * @param symbol * the symbol of the column corresponding to the field to set. */ public void unset(Symbol symbol) { checkReadOnly(); Field field; refresh(); field = fields.get(symbol); if (field != null) { notifyFieldChanged(symbol, null); fields.remove(symbol); } } /** * Determines whether the field corresponding to a given column name is set * or not. * * @param column * the name of the column corresponding to the field to set. * @return true if the field is set, false otherwise. */ public boolean isSet(String column) { return isSet(Symbol.get(column)); } /** * Determines whether the field corresponding to a given column name is set * or not. * * @param symbol * the symbol of the column corresponding to the field to set. * @return true if the field is set, false otherwise. */ public boolean isSet(Symbol symbol) { refresh(); return fields.get(symbol) != null; } /** * Provides a cached instance of a record represented by a field defined by * a given column name. If the record has not previously been cached it is * fetched from the database and cached. * * @param column * the column name. * @param clazz * the expected class of the cached record. * @return the cached record corresponding to the given symbol. */ public <T> T get(String column, Class<T> clazz) { try { return getField(Symbol.get(column), clazz, false, false); } catch (SQLException e) { // UNREACHABLE throw new IllegalStateException(e); } } public <T extends Record> T ref(String column, Class<T> clazz) throws SQLException { return getField(Symbol.get(column), clazz, false, true); } /** * Provides a cached instance of a record represented by a field defined by * a given symbol for a column name. If the record has not previously been * cached it is fetched from the database and cached. * * @param symbol * the symbol defining the column name. * @param clazz * the expected class of the cached record. * @return the cached record corresponding to the given symbol. */ public <T> T get(Symbol symbol, Class<T> clazz) { try { return getField(symbol, clazz, false, false); } catch (SQLException e) { // UNREACHABLE throw new IllegalStateException(e); } } public <T extends Record> T ref(Symbol symbol, Class<T> clazz) throws SQLException { return getField(symbol, clazz, false, true); } /** * Provides a cached instance of a record represented by a field defined by * a given symbol for a column name. * * @param symbol * the symbol defining the column name. * @param clazz * the expected class of the cached record. * @param isCacheOnly only retrieves previously cached values. * @return the cached record corresponding to the given symbol. 
*/ public <T extends Record> T get(Symbol symbol, Class<T> clazz, boolean isCacheOnly) throws SQLException { return getField(symbol, clazz, isCacheOnly, true); } @SuppressWarnings("unchecked") private <T> T getField(Symbol symbol, Class<T> clazz, boolean isReferenceCacheOnly, boolean throwSqlException) throws SQLException { refresh(); Field field = fields.get(symbol); if (field == null) { return null; } Object value = field.getValue(); if (value != null) { if (isRecordSubclass(clazz)) { // Load foreign key if ((field.getReference() == null) && !isReferenceCacheOnly) { try { Record reference = Record.findById((Class<? extends Record>)clazz, value); field.setReference(reference); value = reference; } catch (SQLException e) { if (throwSqlException) { throw e; } throw new RuntimeException("failed to findById(" + clazz + ", " + value + ")", e); } } else { value = field.getReference(); } } else if (!clazz.isAssignableFrom(value.getClass())) { throw new RuntimeException("column " + symbol.getName() + " is of type " + value.getClass() + ", but " + clazz + " was requested"); } } return (T) value; } /** * Provides the value of the field defined by a given column name. * * @param column * the name of the column defining the field. * @throws RuntimeException * if the column does not exist (or has not been set) */ public Object get(String column) { return get(Symbol.get(column)); } /** * Provides the value of the field defined by a given symbol for a column * name. * * @param symbol * the symbol of the column defining the field. * @throws RuntimeException * if the column does not exist (or has not been set) */ public Object get(Symbol symbol) { refresh(); Field field = fields.get(symbol); if (field == null) { throw new RuntimeException("column '" + symbol.getName() + "' does not exist, or has not yet been set"); } return field.getValue(); } @Override public String toString() { StringBuilder stringBuilder = new StringBuilder(); boolean isFirst = true; if (table.getSchema() != null) { stringBuilder.append(table.getSchema()); stringBuilder.append('.'); } if (table.getTable() != null) { stringBuilder.append(table.getTable()); } if (isStale) { stringBuilder.append("stale"); } if (isReadOnly) { stringBuilder.append("read-only"); } stringBuilder.append(" { "); for (Entry<Symbol, Field> entry : fields.entrySet()) { if (isFirst) { isFirst = false; } else { stringBuilder.append(", "); } stringBuilder.append(entry.getKey().getName()); stringBuilder.append(" => "); stringBuilder.append(entry.getValue().getValue()); } stringBuilder.append(" }"); return stringBuilder.toString(); } @Override public boolean equals(Object object) { if (getClass().isInstance(object)) { return id().equals(((Record)object).id()); } return false; } @Override public int hashCode() { return id().hashCode(); } }
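// A minimal usage sketch (illustrative only, not part of the library source), assuming
// the Phrase record and "phrases" table from the class-level Javadoc example; all calls
// shown exist in the Record API above and may throw SQLException.
//
//     Phrase phrase = new Phrase();
//     phrase.set("phrase", "Hello");
//     phrase.set("locale_id", 1);
//     phrase.save();                                             // primary key unset: INSERT
//
//     Phrase fetched = Record.findById(Phrase.class, phrase.get("id"));
//     fetched.set("phrase", "Hello, world");
//     fetched.save();                                            // primary key set and unchanged: UPDATE
//
//     List<Phrase> phrases = Record.findAll(Phrase.class);
//     for (Phrase p : phrases) {
//         p.set("phrase", p.get("phrase", String.class).trim());
//     }
//     Record.update(phrases, 500, Record.ResultMode.NO_RESULT);  // chunked batch UPDATE
//     Record.commit(Phrase.class);                               // commit the thread-local transaction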
src/main/java/com/jajja/jorm/Record.java
/* * Copyright (C) 2013 Jajja Communications AB * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.jajja.jorm; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.jajja.jorm.Composite.Value; /** * <p> * Records provide the main interface for viewing and modifying data stored in * database tables. For generic SQL queries, examine {@link Transaction} * instead. For SQL syntax, examine {@link Query} * </p> * <p> * Records are not thread-safe! In fact, shared records will use thread-local * transactions with possibly unpredictable results for seemingly synchronized * execution. * </p> * <h3>Object relational mapping</h3> * <p> * Record template implementations can be automated according to JDBC types, * using the {@link Generator#string(String)} and * {@link Generator#string(String, String)} methods for targeted tables. 
* </p> * <p> * <strong>From SQL:</strong> * * <pre> * CREATE TABLE phrases ( * id serial NOT NULL, * phrase varchar NOT NULL, * locale_id integer NOT NULL, * PRIMARY KEY (id), * UNIQUE (phrase, locale_id), * FOREIGN KEY (locale_id) REFERENCES locales (id) ON DELETE CASCADE * ) * </pre> * * </p> * <p> * <strong>To Java:</strong> * * <pre> * &#064;Table(database = &quot;default&quot;, table = &quot;phrases&quot;, id = &quot;id&quot;) * public class Phrase extends Record { * * public Integer getId() { * return get(&quot;id&quot;, Integer.class); * } * * public void setId(Integer id) { * set(&quot;id&quot;, id); * } * * public String getPhrase() { * return get(&quot;phrase&quot;, String.class); * } * * public void setPhrase(String phrase) { * set(&quot;phrase&quot;, phrase); * } * * public Integer getLocaleId() { * return get(&quot;locale_id&quot;, Integer.class); * } * * public void setLocaleId(Integer id) { * set(&quot;locale_id&quot;, id); * } * * public Locale getLocale() { * return get(&quot;locale_id&quot;, Locale.class); * } * * public void setLocale(Locale Locale) { * set(&quot;locale_id&quot;, locale); * } * * } * </pre> * * </p> * <p> * Note that related records are cached by the method * {@link Record#get(String, Class)}. Cache invalidation upon change of foreign * keys is maintained in records. Further control can be achieved by overriding * {@link Record#notifyFieldChanged(Symbol, Object)}. * </p> * * @see Jorm * @see Query * @author Andreas Allerdahl <[email protected]> * @author Martin Korinth <[email protected]> * @author Daniel Adolfsson <[email protected]> * @since 1.0.0 */ public abstract class Record { Map<Symbol, Field> fields = new HashMap<Symbol, Field>(); private Table table; private boolean isStale = false; private boolean isReadOnly = false; private static Map<Class<? extends Record>, Log> logs = new ConcurrentHashMap<Class<? extends Record>, Log>(16, 0.75f, 1); public static enum ResultMode { /** For both INSERTs and UPDATEs, fully repopulate record(s). This is the default. */ REPOPULATE, /** For INSERTs, fetch only generated keys, mark record(s) as stale. For UPDATEs, this is equivalent to NO_RESULT. */ ID_ONLY, /** Fetch nothing, mark record as stale and assume the primary key value is accurate. */ NO_RESULT; } public static class Field { private Object value = null; private boolean isChanged = false; private Record reference = null; private Field() {} void setValue(Object value) { this.value = value; } public Object getValue() { return value; } void setChanged(boolean isChanged) { this.isChanged = isChanged; } boolean isChanged() { return isChanged; } void setReference(Record reference) { this.reference = reference; } public Record getReference() { return reference; } } private Field getOrCreateField(Symbol symbol) { Field field = fields.get(symbol); if (field == null) { field = new Field(); fields.put(symbol, field); } return field; } /** * Provides a cached log for the specified class. Stores a new log in the * cache if no preceding call with the given class has been made. * * @param clazz * the class defining log instance. * @return the class specific cached log. */ public static Log log(Class<? extends Record> clazz) { Log log = logs.get(clazz); if (log == null) { synchronized (logs) { log = logs.get(clazz); if (log == null) { log = LogFactory.getLog(clazz); logs.put(clazz, log); } } } return log; } /** * Provides the cached log for the instance class according to {@link Record#log(Class)}. 
*/ public Log log() { return log(getClass()); } /** * Constructs a mapped record. Depends on {@link Jorm} annotation for table * mapping. */ public Record() { table = Table.get(getClass()); } /** * Constructs a mapped record. Mainly intended for anonymous record * instantiation such as the results from the transaction select methods * {@link Transaction#select(Query)}, * {@link Transaction#select(String, Object...)}, * {@link Transaction#selectAll(Query)} and * {@link Transaction#selectAll(String, Object...)}. * * @param table * the table mapping. */ public Record(Table table) { this.table = table; } /** * Instantiates a record class of the specified type. */ public static <T extends Record> T construct(Class<T> clazz) { try { return clazz.newInstance(); } catch (Exception e) { throw new RuntimeException("Failed to instantiate " + clazz, e); } } /** * Notifies field changes. The default implementation is empty, but provides * the option to override and act upon changes. This method is called * whenever {@link #set(String, Object)} or {@link #set(Symbol, Object)} * changes a field, or {@link #populate(ResultSet)} is called. * * @param symbol * the symbol of the column. * @param object * the value of the field after change. */ protected void notifyFieldChanged(Symbol symbol, Object object) { } public Value id() { return get(primaryKey()); } public Composite primaryKey() { return table.getPrimaryKey(); } public static Composite primaryKey(Class<? extends Record> clazz) { return Table.get(clazz).getPrimaryKey(); } public Value get(Composite composite) { return composite.valueFrom(this); } /** * Provides the table mapping for the record. * * @return the table mapping. */ public Table table() { return table; } /** * Provides an immutable view of the fields of the record. * * @return the fields. */ public Map<Symbol, Field> fields() { return Collections.unmodifiableMap(fields); } /** * <p> * Opens a thread local transaction to the database mapped by the record * class. If an open transaction already exists for the record class, it is * reused. This method is idempotent when called from the same thread. * </p> * <p> * This corresponds to a call to {@link Database#open(String)} for the * database named by the class mapping of the record. Requires the given * class to be mapped by {@link Jorm}. * </p> * * @param clazz * the mapped record class. * @return the open transaction. */ public static Transaction open(Class<? extends Record> clazz) { return Database.open(Table.get(clazz).getDatabase()); } /** * <p> * Commits the thread local transaction to the named database mapped by the * record class, if it has been opened. * </p> * <p> * This corresponds to a call to {@link Database#commit(String)} for the * database named by the class mapping of the record. Requires the given * class to be mapped by {@link Jorm}. * </p> * * @param clazz * the mapped record class. * @return the committed transaction or null for no active transaction. */ public static Transaction commit(Class<? extends Record> clazz) throws SQLException { return Database.commit(Table.get(clazz).getDatabase()); } /** * <p> * Closes the thread local transaction to the named database mapped by the * record class, if it has been opened. This method is idempotent when * called from the same thread. * </p> * <p> * This corresponds to a call to {@link Database#close(String)} for the * database named by the class mapping of the record. Requires the given * class to be mapped by {@link Jorm}. * </p> * * @param clazz * the mapped record class.
* @return the closed transaction or null for no active transaction. */ public static Transaction close(Class<? extends Record> clazz) { return Database.close(Table.get(clazz).getDatabase()); } /** * <p> * Opens a thread local transaction to the named database mapped by the * record. If an open transaction already exists for the record, it is * reused. This method is idempotent when called from the same thread. * </p> * <p> * This corresponds to a call to {@link Database#open(String)} for the * database named by the table mapping of the record. * </p> * * @return the open transaction. */ public Transaction open() { return Database.open(table.getDatabase()); } /** * <p> * Commits the thread local transaction to the named database mapped by the * record, if it has been opened. * </p> * <p> * This corresponds to a call to {@link Database#commit(String)} for the * database named by the table mapping of the record. * </p> * <p> * <strong>Note:</strong> This may cause changes of other records to be * persisted to the mapped database of the record, since all records mapped * to the same named database share a transaction in the context of the * current thread. * </p> * * @throws SQLException * if a database access error occurs. * @return the committed transaction or null for no active transaction. */ public Transaction commit() throws SQLException { return Database.commit(table.getDatabase()); } /** * <p> * Closes the thread local transaction to the named database mapped by the * record, if it has been opened. This method is idempotent when called from * the same thread. * </p> * <p> * This corresponds to a call to {@link Database#close(String)} for the * database named by the table mapping of the record. * </p> * <p> * <strong>Note:</strong> This may cause changes of other records to be * discarded in the mapped database of the record, since all records mapped * to the same named database share a transaction in the context of the * current thread. * </p> * * @return the closed transaction or null for no active transaction. */ public Transaction close() { return Database.close(table.getDatabase()); } /** * Populates the record with the first result for which the given composite * key matches the given value. * * @param composite * the composite key. * @param value * the composite key value to match. * @return true if the record could be updated with a matching row from the * table. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean populateByComposite(Composite composite, Value value) throws SQLException { return selectInto(getSelectQuery(getClass(), composite, value)); } /** * Populates the record with the result for which the id column matches the * given value. * * @param id * the id value to match. * @return true if the record could be updated with a matching row from the * table. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set.
*/ public boolean populateById(Value id) throws SQLException { return populateByComposite(primaryKey(), id); } private static <T extends Record> Query getSelectQuery(Class<T> clazz) { return Table.get(clazz).getSelectQuery(open(clazz).getDialect()); } private static <T extends Record> Query getSelectQuery(Class<T> clazz, Composite composite, Object value) { Value v; if (value instanceof Value) { v = (Value)value; } else { v = primaryKey(clazz).value(value); } composite.assertCompatible(v); Dialect dialect = open(clazz).getDialect(); Query query = Table.get(clazz).getSelectQuery(dialect); query.append("WHERE "); query.append(dialect.toSqlExpression(composite, v)); return query; } /** * Builds a generic SQL query for the record. * * @param sql * the SQL statement to represent the query. * @return the built query. */ public Query build(String sql) { return new Query(open().getDialect(), sql); } /** * Builds a generic SQL query for the record and quotes identifiers from the * given parameters according to the SQL dialect of the mapped database of * the record. * * @param sql * the Jorm SQL statement to represent the query. * @param params * the parameters applying to the SQL hash markup. * @return the built query. */ public Query build(String sql, Object... params) { return new Query(open().getDialect(), sql, params); } /** * Builds a generic SQL query for a given record class. * * @param clazz * the mapped record class. * @param sql * the SQL statement to represent the query. * @return the built query. */ public static Query build(Class<? extends Record> clazz, String sql) { return new Query(open(clazz).getDialect(), sql); } /** * Builds a generic SQL query for a given record class and quotes * identifiers from the given parameters according to the SQL dialect of the * mapped database of the record class. * * @param clazz * the mapped record class. * @param sql * the Jorm SQL statement to represent the query. * @param params * the parameters applying to the SQL hash markup. * @return the built query. */ public static Query build(Class<? extends Record> clazz, String sql, Object... params) { return new Query(open(clazz).getDialect(), sql, params); } /** * Provides a selected record from the mapped database table, populated with * the first result for which the given composite key matches. * * @param clazz * the class defining the table mapping. * @param composite * the composite key * @param value * the composite key value * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T find(Class<T> clazz, Composite composite, Object value) throws SQLException { return select(clazz, getSelectQuery(clazz, composite, value)); } /** * Provides a complete list of selected records from the mapped database * table, populated with the results for which the composite key matches. * * @param clazz * the class defining the table mapping. * @param composite * the composite key * @param value * the composite key value * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set.
*/ public static <T extends Record> List<T> findAll(Class<T> clazz, Composite composite, Value value) throws SQLException { return selectAll(clazz, getSelectQuery(clazz, composite, value)); } public static <T extends Record> List<T> findAll(Class<T> clazz) throws SQLException { return selectAll(clazz, getSelectQuery(clazz)); } /** * Provides a complete list of selected reference records of a given class * referring to the mapped record through a given foreign key column. * * @param clazz * the class of the records referring to the mapped record. * @param column * the column defining the foreign key for the reference records. * @return the matched references. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public <T extends Record> List<T> findReferences(Class<T> clazz, String column) throws SQLException { return findReferences(clazz, Symbol.get(column)); } /** * Provides a complete list of selected reference records of a given class * referring to the mapped record through a given foreign key column. * * @param clazz * the class of the records referring to the mapped record. * @param symbol * the symbol of the column defining the foreign key for the * reference records. * @return the matched references. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public <T extends Record> List<T> findReferences(Class<T> clazz, Symbol symbol) throws SQLException { Table table = Table.get(clazz); return selectAll(clazz, "SELECT * FROM #1# WHERE #:2# = #3#", table, symbol, get(symbol)); } /** * Provides a selected record, populated with the result for which the primary key * column matches the given id value. * * @param clazz * the class defining the table mapping. * @param id * the primary key value (can be either a {@link Composite.Value} or a single column value). * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T findById(Class<T> clazz, Object id) throws SQLException { return find(clazz, primaryKey(clazz), id); } /** * Provides a selected record, populated with the first result from the * query given by a plain SQL statement and applicable parameters. * * @param clazz * the class defining the table mapping. * @param sql * the plain SQL statement. * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T select(Class<T> clazz, String sql) throws SQLException { return select(clazz, new Query(open(clazz).getDialect(), sql)); } /** * Provides a selected record, populated with the first result from the * query given by a Jorm SQL statement and applicable parameters. * * @param clazz * the class defining the table mapping. * @param sql * the Jorm SQL statement. * @param params * the applicable parameters. * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T select(Class<T> clazz, String sql, Object... params) throws SQLException { return select(clazz, new Query(open(clazz).getDialect(), sql, params)); } /** * Provides a selected record, populated with the first result from the * given query. 
* * @param clazz * the class defining the table mapping. * @param query * the query. * @return the matched record or null for no match. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> T select(Class<T> clazz, Query query) throws SQLException { T record = construct(clazz); if (record.selectInto(query)) { return record; } return null; } /** * Provides a list of selected records, populated with the results from the * query given by a plain SQL statement. * * @param clazz * the class defining the table mapping. * @param sql * the plain SQL statement. * @return the matched records * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> selectAll(Class<T> clazz, String sql) throws SQLException { return selectAll(clazz, new Query(open(clazz).getDialect(), sql)); } /** * Provides a list of selected records, populated with the results from the * query given by a Jorm SQL statement and applicable parameters. * * @param clazz * the class defining the table mapping. * @param sql * the Jorm SQL statement. * @param params * the applicable parameters. * @return the matched records * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> selectAll(Class<T> clazz, String sql, Object... params) throws SQLException { return selectAll(clazz, new Query(open(clazz).getDialect(), sql, params)); } /** * Provides a list of selected records, populated with the results from the * given query. * * @param clazz * the class defining the table mapping. * @param query * the query. * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> selectAll(Class<T> clazz, Query query) throws SQLException { PreparedStatement preparedStatement = open(clazz).prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; LinkedList<T> records = new LinkedList<T>(); try { resultSet = preparedStatement.executeQuery(); SymbolMap symbolMap = new SymbolMap(resultSet.getMetaData()); while (resultSet.next()) { T record = construct(clazz); symbolMap.populate(record, resultSet); records.add(record); } } catch (SQLException sqlException) { open(clazz).getDialect().rethrow(sqlException, query.getSql()); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { preparedStatement.close(); } } return records; } /** * Provides a hash map of selected records, populated with the results from the * given query. * * @param clazz * the class defining the table mapping. * @param column * the column to use as key. * @param query * the query. * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. 
*/ public static <T extends Record> Map<Composite.Value, T> selectAsMap(Class<T> clazz, Composite compositeKey, boolean allowDuplicates, Query query) throws SQLException { PreparedStatement preparedStatement = open(clazz).prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; HashMap<Composite.Value, T> records = new HashMap<Composite.Value, T>(); try { resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { T record = construct(clazz); record.populate(resultSet); Value value = compositeKey.valueFrom(record); if (records.put(value, record) != null && !allowDuplicates) { throw new IllegalStateException("Duplicate key " + value); } } } catch (SQLException sqlException) { open(clazz).getDialect().rethrow(sqlException, query.getSql()); } finally { if (resultSet != null) resultSet.close(); preparedStatement.close(); } return records; } public static <T extends Record> Map<Composite.Value, T> selectAsMap(Class<T> clazz, Composite compositeKey, boolean allowDuplicates, String sql, Object... params) throws SQLException { return selectAsMap(clazz, compositeKey, allowDuplicates, new Query(open(clazz).getDialect(), sql, params)); } /** * Provides a hash map of selected records, populated with the results from the * given query. * * @param clazz * the class defining the table mapping. * @param column * the column to use as key. * @param query * the query. * @return the matched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> Map<Composite.Value, List<T>> selectAllAsMap(Class<T> clazz, Composite compositeKey, Query query) throws SQLException { PreparedStatement preparedStatement = open(clazz).prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; HashMap<Composite.Value, List<T>> records = new HashMap<Composite.Value, List<T>>(); try { resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { T record = construct(clazz); record.populate(resultSet); Value value = compositeKey.valueFrom(record); List<T> list = records.get(value); if (list == null) { list = new LinkedList<T>(); records.put(value, list); } list.add(record); } } catch (SQLException sqlException) { open(clazz).getDialect().rethrow(sqlException, query.getSql()); } finally { if (resultSet != null) resultSet.close(); preparedStatement.close(); } return records; } public static <T extends Record> Map<Composite.Value, List<T>> selectAllAsMap(Class<T> clazz, Composite compositeKey, String sql, Object... params) throws SQLException { return selectAllAsMap(clazz, compositeKey, new Query(open(clazz).getDialect(), sql, params)); } /** * Executes the query given by a plain SQL statement and applicable * parameters and populates the record with the first row of the result. Any * values in the record object are cleared if the record was previously * populated. * * @param sql * the plain SQL statement. * @return true if the record was populated, otherwise false. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean selectInto(String sql) throws SQLException { return selectInto(new Query(open().getDialect(), sql)); } /** * Executes the query given by a Jorm SQL statement and applicable * parameters and populates the record with the first row of the result. Any * values in the record object are cleared if the record was previously * populated. * * @param sql * the Jorm SQL statement. 
* @param params * the applicable parameters. * @return true if the record was populated, otherwise false. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean selectInto(String sql, Object... params) throws SQLException { return selectInto(new Query(open().getDialect(), sql, params)); } /** * Executes the given query and populates the record with the first row of * the result. Any values in the record object are cleared if the record was * previously populated. * * @param query * the query. * @return true if the record was populated, otherwise false. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public boolean selectInto(Query query) throws SQLException { PreparedStatement preparedStatement = open().prepare(query.getSql(), query.getParams()); ResultSet resultSet = null; try { resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { populate(resultSet); return true; } } catch (SQLException sqlException) { open().getDialect().rethrow(sqlException, query.getSql()); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { preparedStatement.close(); } } return false; } /** * Populates all records in the given collection of records with a single * prefetched reference of the given record class. Existing cached * references are not overwritten. * * @param records * the records to populate with prefetched references. * @param foreignKeySymbol * the symbol defining the foreign key to the referenced records. * @param clazz * the class of the referenced records. * @param referredSymbol * the symbol defining the referred column of the referenced * records. * @return the prefetched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> prefetch(Collection<? extends Record> records, Symbol foreignKeySymbol, Class<T> clazz, Symbol referredSymbol) throws SQLException { Set<Object> values = new HashSet<Object>(); for (Record record : records) { Field field = record.fields.get(foreignKeySymbol); if (field != null && field.getValue() != null && field.getReference() == null) { values.add(field.getValue()); } } if (values.isEmpty()) { return new LinkedList<T>(); } List<T> referenceRecords = selectAll(clazz, "SELECT * FROM #1# WHERE #2# IN (#3#)", Table.get(clazz), referredSymbol, values); Map<Object, Record> map = new HashMap<Object, Record>(); for (Record referenceRecord : referenceRecords) { map.put(referenceRecord.get(referredSymbol), referenceRecord); } for (Record record : records) { Field field = record.fields.get(foreignKeySymbol); if (field != null && field.getValue() != null && field.getReference() == null) { Record referenceRecord = map.get(field.getValue()); if (referenceRecord == null) { throw new IllegalStateException(field.getValue() + " not present in " + Table.get(clazz).getTable() + "." + referredSymbol.getName()); } record.set(foreignKeySymbol, referenceRecord); } } return referenceRecords; } /** * Populates all records in the given collection of records with a single * prefetched reference of the given record class. Existing cached * references are not overwritten. * * @param records * the records to populate with prefetched references. * @param foreignKeySymbol * the column name defining the foreign key to the referenced records. 
* @param clazz * the class of the referenced records. * @param referredSymbol * the column name defining the referred column of the referenced * records. * @return the prefetched records. * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static <T extends Record> List<T> prefetch(Collection<? extends Record> records, String foreignKeySymbol, Class<T> clazz, String referredSymbol) throws SQLException { return prefetch(records, Symbol.get(foreignKeySymbol), clazz, Symbol.get(referredSymbol)); } /** * Populates the record with the first row of the result. Any values in the * record object are cleared if the record was previously populated. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void populate(ResultSet resultSet) throws SQLException { SymbolMap symbolMap = new SymbolMap(resultSet.getMetaData()); symbolMap.populate(this, resultSet); } public static class SymbolMap { private Symbol[] symbols; private Set<Symbol> symbolSet = new HashSet<Symbol>(); public SymbolMap(ResultSetMetaData resultSetMetaData) throws SQLException { symbols = new Symbol[resultSetMetaData.getColumnCount()]; symbolSet = new HashSet<Symbol>(symbols.length + 1, 1.0f); // + 1 to prevent resize for (int i = 0; i < symbols.length; i++) { symbols[i] = Symbol.get(resultSetMetaData.getColumnLabel(i + 1)); symbolSet.add(symbols[i]); } } public void populate(Record record, ResultSet resultSet) throws SQLException { for (int i = 0; i < symbols.length; i++) { record.isStale = false; try { record.put(symbols[i], resultSet.getObject(i + 1)); } catch (SQLException sqlException) { record.open().getDialect().rethrow(sqlException); } finally { record.isStale = true; // lol exception } record.isStale = false; } Iterator<Symbol> i = record.fields.keySet().iterator(); while (i.hasNext()) { Symbol symbol = i.next(); if (!contains(symbol)) { record.unset(symbol); } } record.purify(); } public boolean contains(Symbol symbol) { return symbolSet.contains(symbol); } } private boolean isPrimaryKeyNullOrChanged() { for (Symbol symbol : primaryKey().getSymbols()) { Field field = fields.get(symbol); if (field == null || field.getValue() == null || field.isChanged()) { return true; } } return false; } private boolean isPrimaryKeyNull() { for (Symbol symbol : primaryKey().getSymbols()) { Field field = fields.get(symbol); if (field == null || field.getValue() == null) { return true; } } return false; } private void assertPrimaryKeyNotNull() { if (isPrimaryKeyNull()) { throw new IllegalStateException("Primary key contains NULL value(s)"); } } /** * Save the record. This is done by a call to {@link #insert()} if the id * field is null, unset or changed, otherwise by a call to {@link #update()}. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void save(ResultMode mode) throws SQLException { checkReadOnly(); if (isPrimaryKeyNullOrChanged()) { insert(mode); } else { update(mode); } } public void save() throws SQLException { save(ResultMode.REPOPULATE); } /** * Batch saves the records. This is done by a call to {@link #insert()} if the id * field is null, unset or changed, otherwise by a call to {@link #update()}. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void save(Collection<? 
extends Record> records, int batchSize, ResultMode mode) throws SQLException { List<Record> insertRecords = new LinkedList<Record>(); List<Record> updateRecords = new LinkedList<Record>(); for (Record record : records) { if (record.isPrimaryKeyNullOrChanged()) { insertRecords.add(record); } else { updateRecords.add(record); } } insert(insertRecords, batchSize, mode); update(updateRecords, batchSize, mode); } /** * Batch saves the records. This is done by a call to {@link #insert()} if the id * field is null, unset or changed, otherwise by a call to {@link #update()}. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void save(Collection<? extends Record> records) throws SQLException { save(records, 0, ResultMode.REPOPULATE); } /** * Deletes the record row from the database by executing the SQL query "DELETE FROM [tableName] WHERE [primaryKey] = [primaryKeyColumnValue]". * The primary key column value is also set to null. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void delete() throws SQLException { checkReadOnly(); Dialect dialect = open().getDialect(); Composite primaryKey = primaryKey(); Query query = new Query(dialect, "DELETE FROM #1# WHERE #2#", table, dialect.toSqlExpression(primaryKey, id())); PreparedStatement preparedStatement = open().prepare(query); try { preparedStatement.execute(); } finally { preparedStatement.close(); } for (Symbol symbol : primaryKey.getSymbols()) { put(symbol, null); } } /** * Deletes multiple records by executing a DELETE FROM table WHERE id IN (...) * * @param records List of records to delete (must be of the same class, and bound to the same Database) * @throws SQLException * if a database access error occurs. */ public static void delete(Collection<? extends Record> records) throws SQLException { Record template = null; String database = null; for (Record record : records) { if (template != null) { if (!template.getClass().equals(record.getClass())) { throw new IllegalArgumentException("all records must be of the same class"); } if (!database.equals(record.table.getDatabase())) { throw new IllegalArgumentException("all records must be bound to the same Database"); } } else { template = record; database = record.table.getDatabase(); } record.checkReadOnly(); } if (template == null) { return; } Query query = new Query(template.open(), "DELETE FROM #1# WHERE", template.getClass()); Composite primaryKey = template.primaryKey(); Dialect dialect = template.open().getDialect(); if (primaryKey.isSingle()) { query.append("#:1# IN (#2:@#)", primaryKey, records); } else { if (dialect.isRowWiseComparisonSupported()) { query.append(" (#:1#) IN (", primaryKey); boolean isFirst = true; for (Record record : records) { query.append(isFirst ? "(#1#)" : ", (#1#)", record.id()); isFirst = false; } query.append(")"); } else { boolean isFirst = true; for (Record record : records) { query.append(isFirst ? " (#1#)" : " OR (#1#)", dialect.toSqlExpression(primaryKey, record.id())); isFirst = false; } } } template.open().execute(query); } /** * Marks all fields as changed. */ public void taint() { for (Entry<Symbol, Field> entry : fields.entrySet()) { Symbol symbol = entry.getKey(); Field field = entry.getValue(); if (!table.isImmutable(symbol) && !primaryKey().contains(symbol)) { field.setChanged(true); } } } /** * Marks all fields as unchanged.
*/ public void purify() { for (Field field : fields.values()) { field.setChanged(false); } } /** * Determines whether the record has been changed or not. * * @return true if at least one field has been changed, otherwise false. */ public boolean isChanged() { for (Field field : fields.values()) { if (field.isChanged()) { return true; } } return false; } /** * Marks this record as stale. It will be re-populated on the next call to * {@link #set(String, Object)}, {@link #set(Symbol, Object)}, * {@link #get(String)}, {@link #get(Symbol)} or {@link #refresh()}, * whichever comes first. */ public void markStale() { isStale = true; } /** * Determines whether the record is stale or not, i.e. needs to be * re-populated in any upcoming call to {@link #set(String, Object)}, * {@link #set(Symbol, Object)}, {@link #get(String)}, {@link #get(Symbol)} * or {@link #refresh()}, whichever comes first. * * @return true if the record is stale otherwise false. */ public boolean isStale() { return isStale; } private static List<? extends Record> batchChunk(Iterator<? extends Record> iterator, int size) { List<Record> records = null; if (iterator.hasNext()) { do { Record record = iterator.next(); if (record.isChanged()) { if (records == null) { records = new ArrayList<Record>(size); } records.add(record); size--; } } while (size > 0 && iterator.hasNext()); } return records; } private static class BatchInfo { private Set<Symbol> columns = new HashSet<Symbol>(); private Record template = null; } private static BatchInfo batchInfo(Collection<? extends Record> records) { BatchInfo batchInfo = new BatchInfo(); for (Record record : records) { record.checkReadOnly(); if (batchInfo.template == null) { batchInfo.template = record; } if (!batchInfo.template.getClass().equals(record.getClass())) { throw new IllegalArgumentException("all records must be of the same class"); } if (!batchInfo.template.table.getDatabase().equals(record.table.getDatabase())) { throw new IllegalArgumentException("all records must be bound to the same Database"); } batchInfo.columns.addAll( record.fields.keySet() ); } String immutablePrefix = batchInfo.template.table.getImmutablePrefix(); if (batchInfo.template != null && immutablePrefix != null) { for (Symbol symbol : batchInfo.columns) { if (symbol.getName().startsWith(immutablePrefix)) { batchInfo.columns.remove(symbol); } } } return batchInfo; } private static void batchExecute(Query query, Collection<? extends Record> records, ResultMode mode) throws SQLException { PreparedStatement preparedStatement = null; ResultSet resultSet = null; Record template = records.iterator().next(); Transaction transaction = template.open(); Table table = template.table(); Composite primaryKey = template.primaryKey(); Dialect dialect = transaction.getDialect(); // XXX UPDATE + REPOPULATE? 
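        // Result handling for the batch statement built by the caller: with REPOPULATE on a
        // dialect that supports RETURNING, a single "... RETURNING *" query repopulates every
        // record from the result set. Otherwise the statement is executed with generated-key
        // retrieval, the generated key is written back into each record's primary key field,
        // and REPOPULATE falls back to one extra SELECT per chunk to refresh the remaining
        // columns. Composite primary keys cannot be read back through getGeneratedKeys(),
        // hence the guard directly below.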
if (mode != ResultMode.NO_RESULT && !primaryKey.isSingle() && !dialect.isReturningSupported()) { throw new UnsupportedOperationException("Batch operations on composite primary keys not supported by JDBC, and possibly your database (consider using ResultMode.NO_RESULT)"); } try { boolean useReturning = (mode == ResultMode.REPOPULATE) && dialect.isReturningSupported(); Map<Object, Record> map = null; if (useReturning) { query.append(" RETURNING *"); // XXX ID_ONLY support preparedStatement = transaction.prepare(query.getSql(), query.getParams()); resultSet = preparedStatement.executeQuery(); } else { preparedStatement = transaction.prepare(query.getSql(), query.getParams(), true); preparedStatement.execute(); resultSet = preparedStatement.getGeneratedKeys(); if (mode == ResultMode.REPOPULATE) { map = new HashMap<Object, Record>(); } } SymbolMap symbolMap = null; for (Record record : records) { if (!resultSet.next()) { throw new IllegalStateException("too few rows returned?"); } if (useReturning) { // RETURNING rocks! if (symbolMap == null) { symbolMap = new SymbolMap(resultSet.getMetaData()); } symbolMap.populate(record, resultSet); } else { Field field = record.getOrCreateField(primaryKey.getSymbol()); field.setValue(resultSet.getObject(1)); field.setChanged(false); if (mode == ResultMode.REPOPULATE) { if (map == null) throw new IllegalStateException("bug"); map.put(field.getValue(), record); record.isStale = false; // actually still stale } } } if (!useReturning && mode == ResultMode.REPOPULATE) { if (map == null) throw new IllegalStateException("bug"); resultSet.close(); resultSet = null; preparedStatement.close(); preparedStatement = null; // records must not be stale, or Query will generate SELECTs Query q = table.getSelectQuery(dialect).append("WHERE #1# IN (#2:@#)", primaryKey.getSymbol(), records); preparedStatement = transaction.prepare(q); resultSet = preparedStatement.executeQuery(); int idColumn = resultSet.findColumn(primaryKey.getSymbol().getName()); if (Dialect.DatabaseProduct.MYSQL.equals(dialect.getDatabaseProduct())) { while (resultSet.next()) { map.get(resultSet.getLong(idColumn)).populate(resultSet); } } else { while (resultSet.next()) { map.get(resultSet.getObject(idColumn)).populate(resultSet); } } } } catch (SQLException sqlException) { // records are in an unknown state, mark them stale for (Record record : records) { record.markStale(); } dialect.rethrow(sqlException); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { if (preparedStatement != null) { preparedStatement.close(); } } } } /** * Inserts the record's changed values into the database by executing an SQL INSERT query. * The record's primary key value is set to the primary key generated by the database. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void insert(ResultMode mode) throws SQLException { checkReadOnly(); if (isStale) { return; } if (mode != ResultMode.NO_RESULT && !primaryKey().isSingle() && !open().getDialect().isReturningSupported()) { throw new UnsupportedOperationException("INSERT with composite primary key not supported by JDBC, and possibly your database (consider using ResultMode.NO_RESULT)"); } Query query = new Query(open().getDialect()); query.append("INSERT INTO #1# (", table); boolean isFirst = true; for (Entry<Symbol, Field> entry : fields.entrySet()) { if (entry.getValue().isChanged()) { query.append(isFirst ? 
"#:1#" : ", #:1#", entry.getKey()); isFirst = false; } } if (isFirst) { // No fields are marked as changed, but we need to insert something... INSERT INTO foo DEFAULT VALUES is not supported on all databases query.append("#1#", primaryKey()); for (int i = 0; i < primaryKey().getSymbols().length; i++) { query.append(i == 0 ? ") VALUES (DEFAULT" : ", DEFAULT"); } } else { query.append(") VALUES ("); isFirst = true; for (Field field : fields.values()) { if (field.isChanged()) { if (field.getValue() instanceof Query) { query.append(isFirst ? "#1#" : ", #1#", field.getValue()); } else { query.append(isFirst ? "#?1#" : ", #?1#", field.getValue()); } isFirst = false; } } query.append(")"); } markStale(); if (open().getDialect().isReturningSupported()) { query.append(" RETURNING *"); // XXX ID_ONLY support selectInto(query); } else { PreparedStatement preparedStatement = open().prepare(query.getSql(), query.getParams(), true); ResultSet resultSet = null; Object id = null; try { preparedStatement.execute(); resultSet = preparedStatement.getGeneratedKeys(); if (resultSet.next()) { id = resultSet.getObject(1); } } catch (SQLException e) { throw open().getDialect().rethrow(e, query.getSql()); } finally { try { if (resultSet != null) { resultSet.close(); } } finally { preparedStatement.close(); } } if (id == null) { throw new RuntimeException("INSERT to " + table.toString() + " did not generate a key (AKA insert id): " + query.getSql()); } Field field = getOrCreateField(primaryKey().getSymbol()); field.setValue(id); field.setChanged(false); } } public void insert() throws SQLException { insert(ResultMode.REPOPULATE); } /** * Executes a batch INSERT (INSERT INTO ... (columns...) VALUES (row1), (row2), (row3), ...) and repopulates the list with stored entities. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void insert(Collection<? extends Record> records, ResultMode mode) throws SQLException { insert(records, 0, mode); } public static void insert(Collection<? extends Record> records) throws SQLException { insert(records, 0, ResultMode.REPOPULATE); } /** * Executes a batch INSERT (INSERT INTO ... (columns...) VALUES (row1), (row2), (row3), ...). * * For large sets of records, the use of chunkSize is recommended to avoid out-of-memory errors and too long SQL queries. * * Setting isFullRepopulate to true will re-populate the record fields with fresh values. This will generate * an additional SELECT query for every chunk of records for databases that do not support RETURNING. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @param chunkSize Splits the records into chunks, <= 0 disables * @param isFullRepopulate Whether or not to fully re-populate the record fields, or just update their primary key value and markStale() * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public static void insert(Collection<? extends Record> records, int chunkSize, ResultMode mode) throws SQLException { BatchInfo batchInfo = batchInfo(records); if (records.isEmpty()) { return; } if (chunkSize <= 0) { batchInsert(batchInfo, records, mode); } else { Iterator<? extends Record> iterator = records.iterator(); List<? 
extends Record> batch; while ((batch = batchChunk(iterator, chunkSize)) != null) { batchInsert(batchInfo, batch, mode); } } } private static void batchInsert(BatchInfo batchInfo, Collection<? extends Record> records, ResultMode mode) throws SQLException { Table table = batchInfo.template.table; Transaction transaction = batchInfo.template.open(); Dialect dialect = transaction.getDialect(); Query query = new Query(dialect); for (Symbol symbol : table.getPrimaryKey().getSymbols()) { batchInfo.columns.add(symbol); } query.append("INSERT INTO #1# (", table); boolean isFirst = true; for (Symbol column : batchInfo.columns) { query.append(isFirst ? "#:1#" : ", #:1#", column); isFirst = false; } if (isFirst) { throw new RuntimeException("zero columns to insert!"); } query.append(") VALUES "); isFirst = true; for (Record record : records) { query.append(isFirst ? "(" : ", ("); isFirst = false; boolean isColumnFirst = true; for (Symbol column : batchInfo.columns) { if (record.isFieldChanged(column)) { Object value = record.get(column); if (value instanceof Query) { query.append(isColumnFirst ? "#1#" : ", #1#", value); } else { query.append(isColumnFirst ? "#?1#" : ", #?1#", value); } } else { query.append(isColumnFirst ? "DEFAULT" : ", DEFAULT"); } isColumnFirst = false; } query.append(")"); record.markStale(); } batchExecute(query, records, mode); } /** * Updates the record's changed column values by executing an SQL UPDATE query. * * @throws SQLException * if a database access error occurs or the generated SQL * statement does not return a result set. */ public void update(ResultMode mode) throws SQLException { checkReadOnly(); if (!isChanged()) { return; } if (isStale) { //throw new IllegalStateException("Attempting to update a stale record!"); return; } Query query = new Query(open().getDialect()); query.append("UPDATE #1# SET ", table); boolean isFirst = true; for (Entry<Symbol, Field> entry : fields.entrySet()) { Field field = entry.getValue(); if (field.isChanged()) { if (field.getValue() instanceof Query) { query.append(isFirst ? "#:1# = #2#" : ", #:1# = #2#", entry.getKey(), field.getValue()); } else { query.append(isFirst ? "#:1# = #?2#" : ", #:1# = #?2#", entry.getKey(), field.getValue()); } isFirst = false; } } assertPrimaryKeyNotNull(); query.append(" WHERE #1#", open().getDialect().toSqlExpression(primaryKey(), id())); markStale(); if (open().getDialect().isReturningSupported() && mode == ResultMode.REPOPULATE) { query.append(" RETURNING *"); selectInto(query); } else { open().executeUpdate(query); } } public void update() throws SQLException { update(ResultMode.REPOPULATE); } /** * Executes a batch UPDATE (UPDATE ... SET x = s.x, y = s.y FROM (values, ...) s WHERE id = s.id). * * Currently, this is only supported on PostgreSQL. The method will fall back to using individual update()s on other databases. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @throws SQLException * if a database access error occurs */ public static void update(Collection<? extends Record> records) throws SQLException { update(records, 0, ResultMode.REPOPULATE); } /** * Executes a batch UPDATE (UPDATE ... SET x = s.x, y = s.y FROM (values, ...) s WHERE id = s.id). * * For large sets of records, the use of chunkSize is recommended to avoid out-of-memory errors and too long SQL queries. * * Setting isFullRepopulate to true will re-populate the record fields with fresh values. * * Currently, this is only supported on PostgreSQL. 
The method will fall back to using individual update()s on other databases. * * @param records List of records to insert (must be of the same class, and bound to the same Database) * @param chunkSize Splits the records into chunks, <= 0 disables * @param isFullRepopulate Whether or not to fully re-populate the record fields, or just update their primary key value and markStale() * @throws SQLException * if a database access error occurs */ public static void update(Collection<? extends Record> records, int chunkSize, ResultMode mode) throws SQLException { BatchInfo batchInfo = batchInfo(records); if (records.isEmpty()) { return; } if (batchInfo.columns.isEmpty()) { throw new IllegalArgumentException("No columns to update"); } Dialect dialect = records.iterator().next().open().getDialect(); if (!Dialect.DatabaseProduct.POSTGRESQL.equals(dialect.getDatabaseProduct())) { for (Record record : records) { record.update(); } return; } if (chunkSize <= 0) { batchUpdate(batchInfo, records, mode); } else { Iterator<? extends Record> iterator = records.iterator(); List<? extends Record> batch; while ((batch = batchChunk(iterator, chunkSize)) != null) { batchUpdate(batchInfo, batch, mode); } } } private static void batchUpdate(final BatchInfo batchInfo, Collection<? extends Record> records, ResultMode mode) throws SQLException { Table table = batchInfo.template.table(); Transaction transaction = batchInfo.template.open(); Query query = new Query(transaction); String vTable = table.getTable().equals("v") ? "v2" : "v"; query.append("UPDATE #1# SET ", table); boolean isFirstColumn = true; for (Symbol column : batchInfo.columns) { query.append(isFirstColumn ? "#1# = #!2#.#1#" : ", #1# = #!2#.#1#", column, vTable); isFirstColumn = false; } query.append(" FROM (VALUES "); boolean isFirstValue = true; for (Record record : records) { if (record.isPrimaryKeyNull()) { throw new IllegalArgumentException("Record has unset or NULL primary key: " + record); } isFirstColumn = true; query.append(isFirstValue ? "(" : ", ("); for (Symbol column : batchInfo.columns) { Object value = record.get(column); if (value instanceof Query) { query.append(isFirstColumn ? "#1#" : ", #1#", value); } else { query.append(isFirstColumn ? "#?1#" : ", #?1#", value); } isFirstColumn = false; } query.append(")"); isFirstValue = false; } query.append(") #!1# (", vTable); isFirstColumn = true; for (Symbol column : batchInfo.columns) { query.append(isFirstColumn ? "#1#" : ", #1#", column); isFirstColumn = false; } query.append(") WHERE"); boolean isFirst = true; for (Symbol symbol : table.getPrimaryKey().getSymbols()) { if (isFirst) { isFirst = false; } else { query.append(" AND"); } query.append(" #1#.#2# = #:3#.#2#", table, symbol, vTable); } batchExecute(query, records, mode); } /** * Determines whether a field has been changed or not. * * @param symbol * the symbol of the column name defining the field. * @return true if the field has been changed, false otherwise. */ public boolean isFieldChanged(Symbol symbol) { Field field = fields.get(symbol); if (field == null) { return false; } return field.isChanged(); } /** * Returns true if specified class is a subclass of Record.class. */ public static boolean isRecordSubclass(Class<?> clazz) { return Record.class.isAssignableFrom(clazz) && !clazz.equals(Record.class); } /** * Re-populates a stale record with fresh database values by a select query. 
* A record is considered stale after a call to either * {@link Record#insert()} or {@link Record#insert()}, if the SQL dialect of * the mapped database does not support returning. A record mapped to a * table in a Postgres database is thus never stale. * * * @throws RuntimeException * whenever a SQLException occurs. */ public void refresh() { if (isStale) { try { Value value = primaryKey().valueFrom(this, false); boolean allNull = true; for (Object v : value.getValues()) { if (v != null) { allNull = false; } } if (allNull) { throw new NullPointerException("Attempted to refresh record with null primary key value"); } populateById(primaryKey().valueFrom(this, false)); } catch (SQLException e) { throw new RuntimeException("Failed to refresh stale record", e); } isStale = false; } } /** * Sets the record as read only according to given value * * @param isReadOnly * the value determining read only state of the record. * @throws RuntimeException * whenever a record is set to read only without table mapping * provided by an {@link Jorm} annotation, i.e. on anonymous * records retrieved through calls to * {@link Transaction#select(Query)}, * {@link Transaction#select(String, Object...)}, * {@link Transaction#selectAll(Query)} and * {@link Transaction#selectAll(String, Object...)}. */ public void readOnly(boolean isReadOnly) { if (primaryKey() == null && isReadOnly) { throw new RuntimeException("Cannot mark anonymous records as read only!"); } this.isReadOnly = isReadOnly; } /** * Returns true if this record is read only. */ public boolean isReadOnly() { return isReadOnly; } private void checkReadOnly() { if (isReadOnly) { throw new RuntimeException("Record is read only!"); } } private boolean isChanged(Symbol symbol, Object newValue) { if (isReadOnly || table.isImmutable(symbol)) { return false; } Field field = fields.get(symbol); if (field == null) { return true; } Object oldValue = field.getValue(); if (oldValue == null && newValue == null) { return false; } else { return oldValue == null || !oldValue.equals(newValue); } } private void put(Symbol symbol, Object value) { refresh(); boolean isChanged; Field field = fields.get(symbol); if (field == null) { field = new Field(); } if (value != null && isRecordSubclass(value.getClass())) { Record record = (Record)value; if (!record.primaryKey().isSingle()) { throw new UnsupportedOperationException("Composite foreign key references are not supported"); } Object id = record.id().getValue(); if (id == null) { throw new NullPointerException("While setting " + record + "." + symbol.getName() + " = " + value + " -- id (primary key) is null -- perhaps you need to save()?"); } isChanged = isChanged(symbol, id); if (isChanged) { notifyFieldChanged(symbol, value); } field.setReference(record); field.setValue(id); } else { isChanged = isChanged(symbol, value); if (isChanged) { notifyFieldChanged(symbol, value); } if (isChanged) { field.setReference(null); // invalidate cached reference } field.setValue(value); } if (isChanged) { // it's OK to mark the id column as changed here field.setChanged(true); } fields.put(symbol, field); } /** * Sets the specified field corresponding to a column of the mapped record. * Any field values extending {@link Record} are cached until the field is * changed again, and the mapped id of the record is set as field value * instead. * * @param column * the name of the column corresponding to the field to set. * @param value * the value. 
*/ public void set(String column, Object value) { set(Symbol.get(column), value); } /** * Sets the specified field corresponding to a column of the mapped record. * Any field values extending {@link Record} are cached until the field is * changed again, and the mapped id of the record is set as field value * instead. * * @param symbol * the symbol of the column corresponding to the field to set. * @param value * the value. */ public void set(Symbol symbol, Object value) { checkReadOnly(); put(symbol, value); } /** * Unsets the specified field corresponding to a column of the mapped record. * * @param column * the name of the column corresponding to the field to set. */ public void unset(String column) { unset(Symbol.get(column)); } /** * Unsets the specified field corresponding to a column of the mapped record. * * @param symbol * the symbol of the column corresponding to the field to set. */ public void unset(Symbol symbol) { checkReadOnly(); Field field; refresh(); field = fields.get(symbol); if (field != null) { notifyFieldChanged(symbol, null); fields.remove(symbol); } } /** * Determines whether the field corresponding to a given column name is set * or not. * * @param column * the name of the column corresponding to the field to set. * @return true if the field is set, false otherwise. */ public boolean isSet(String column) { return isSet(Symbol.get(column)); } /** * Determines whether the field corresponding to a given column name is set * or not. * * @param symbol * the symbol of the column corresponding to the field to set. * @return true if the field is set, false otherwise. */ public boolean isSet(Symbol symbol) { refresh(); return fields.get(symbol) != null; } /** * Provides a cached instance of a record represented by a field defined by * a given column name. If the record has not previously been cached it is * fetched from the database and cached. * * @param column * the column name. * @param clazz * the expected class of the cached record. * @return the cached record corresponding to the given symbol. */ public <T> T get(String column, Class<T> clazz) { try { return getField(Symbol.get(column), clazz, false, false); } catch (SQLException e) { // UNREACHABLE throw new IllegalStateException(e); } } public <T extends Record> T ref(String column, Class<T> clazz) throws SQLException { return getField(Symbol.get(column), clazz, false, true); } /** * Provides a cached instance of a record represented by a field defined by * a given symbol for a column name. If the record has not previously been * cached it is fetched from the database and cached. * * @param symbol * the symbol defining the column name. * @param clazz * the expected class of the cached record. * @return the cached record corresponding to the given symbol. */ public <T> T get(Symbol symbol, Class<T> clazz) { try { return getField(symbol, clazz, false, false); } catch (SQLException e) { // UNREACHABLE throw new IllegalStateException(e); } } public <T extends Record> T ref(Symbol symbol, Class<T> clazz) throws SQLException { return getField(symbol, clazz, false, true); } /** * Provides a cached instance of a record represented by a field defined by * a given symbol for a column name. * * @param symbol * the symbol defining the column name. * @param clazz * the expected class of the cached record. * @param isCacheOnly only retrieves previously cached values. * @return the cached record corresponding to the given symbol. 
*/ public <T extends Record> T get(Symbol symbol, Class<T> clazz, boolean isCacheOnly) throws SQLException { return getField(symbol, clazz, isCacheOnly, true); } @SuppressWarnings("unchecked") private <T> T getField(Symbol symbol, Class<T> clazz, boolean isReferenceCacheOnly, boolean throwSqlException) throws SQLException { refresh(); Field field = fields.get(symbol); if (field == null) { return null; } Object value = field.getValue(); if (value != null) { if (isRecordSubclass(clazz)) { // Load foreign key if ((field.getReference() == null) && !isReferenceCacheOnly) { try { Record reference = Record.findById((Class<? extends Record>)clazz, value); field.setReference(reference); value = reference; } catch (SQLException e) { if (throwSqlException) { throw e; } throw new RuntimeException("failed to findById(" + clazz + ", " + value + ")", e); } } else { value = field.getReference(); } } else if (!clazz.isAssignableFrom(value.getClass())) { throw new RuntimeException("column " + symbol.getName() + " is of type " + value.getClass() + ", but " + clazz + " was requested"); } } return (T) value; } /** * Provides the value of the field defined by a given column name. * * @param column * the name of the column defining the field. * @throws RuntimeException * if the column does not exist (or has not been set) */ public Object get(String column) { return get(Symbol.get(column)); } /** * Provides the value of the field defined by a given symbol for a column * name. * * @param symbol * the symbol of the column defining the field. * @throws RuntimeException * if the column does not exist (or has not been set) */ public Object get(Symbol symbol) { refresh(); Field field = fields.get(symbol); if (field == null) { throw new RuntimeException("column '" + symbol.getName() + "' does not exist, or has not yet been set"); } return field.getValue(); } @Override public String toString() { StringBuilder stringBuilder = new StringBuilder(); boolean isFirst = true; if (table.getSchema() != null) { stringBuilder.append(table.getSchema()); stringBuilder.append('.'); } if (table.getTable() != null) { stringBuilder.append(table.getTable()); } if (isStale) { stringBuilder.append("stale"); } if (isReadOnly) { stringBuilder.append("read-only"); } stringBuilder.append(" { "); for (Entry<Symbol, Field> entry : fields.entrySet()) { if (isFirst) { isFirst = false; } else { stringBuilder.append(", "); } stringBuilder.append(entry.getKey().getName()); stringBuilder.append(" => "); stringBuilder.append(entry.getValue().getValue()); } stringBuilder.append(" }"); return stringBuilder.toString(); } @Override public boolean equals(Object object) { if (getClass().isInstance(object)) { return id().equals(((Record)object).id()); } return false; } @Override public int hashCode() { return id().hashCode(); } }
Fix stack overflow
src/main/java/com/jajja/jorm/Record.java
Fix stack overflow
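The batch insert() and update() helpers in the Record class above take a collection of records bound to the same database, optionally split the work into chunks, and repopulate generated keys according to the ResultMode. Below is a minimal usage sketch, not part of the commit: the User class, its @Jorm annotation attributes, the column names, the location of ResultMode, and the database setup are all assumptions for illustration.

```java
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import com.jajja.jorm.Jorm;               // assumed import paths
import com.jajja.jorm.Record;
import com.jajja.jorm.Record.ResultMode;  // location of ResultMode is an assumption

// Hypothetical record class; the exact @Jorm attribute names are assumptions.
@Jorm(database = "demo", table = "users")
class User extends Record {
}

public class BatchDemo {
    public static void main(String[] args) throws SQLException {
        // Database/transaction configuration is omitted here; the batch helpers
        // require all records to be bound to the same database.
        List<User> users = new ArrayList<>();
        for (int i = 0; i < 10_000; i++) {
            User u = new User();
            u.set("name", "user-" + i);   // column name is hypothetical
            users.add(u);
        }

        // Chunked batch INSERT: chunkSize > 0 splits the records into several
        // INSERT INTO users (...) VALUES (...), (...) statements.
        Record.insert(users, 500, ResultMode.REPOPULATE);

        // Batch UPDATE uses the UPDATE ... FROM (VALUES ...) form on PostgreSQL
        // and falls back to one update() per record on other databases.
        for (User u : users) {
            u.set("name", u.get("name", String.class).toUpperCase());
        }
        Record.update(users);
    }
}
```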
Java
mit
481f822dce06b830ed8300d602e346c9b1b41504
0
lpcsmath/QTReader
package de.csmath.QT; import java.util.*; import java.time.*; /** * The MvhdAtom class represents the QuickTime File Type Atom ('mvhd'). * It specifies the characteristics of an entire QuickTime movie. * @author lpfeiler */ public final class MvhdAtom extends QTAtom { /** * The size of the flag-array. */ public static int FLAGS_SIZE = 3; /** * The size of the reserved-array. */ public static int RESERVED_SIZE = 10; /** * The size of the matrix-array. */ public static int MATRIX_SIZE = 9; /** * The version of this movie header atom. */ private final byte version; /** * The space for future movie header flags. */ private final byte[] flags = new byte[FLAGS_SIZE]; /** * The date and time when the movie was created. */ private final ZonedDateTime creationTime; /** * The date and time when the movie was changed. */ private final ZonedDateTime modificationTime; /** * The number of time units that pass per second * in its time coordinate system. */ private final int timeScale; /** * The duration of the longest trak in time scale units (@see #timeScale). */ private final int duration; /** * The preferred rate at which to play this movie. */ private final int rate; /** * The preferred volume of the movie's sound. */ private final short volume; /** * This space is reserved by Apple. */ private final byte[] reserved = new byte[RESERVED_SIZE]; /** * The mapping of points from one coordinate space into another. */ private final int[] matrix = new int[MATRIX_SIZE]; /** * The time at which the preview begins. */ private final int prevTime; /** * The duration of the preview in time scale units. */ private final int prevDuration; /** * The time of the movie poster. */ private final int posterTime; /** * The start time of the current selection. */ private final int selectTime; /** * The duration of the current selection in time scale units. */ private final int selectDuration; /** * The current time position within the movie. */ private final int currTime; /** * The next track ID indicates the value to use for the next track * added to the movie. */ private final int nextTrackId; /** * Constructs a MvhdAtom. 
* @param size the size of the atom in the file * @param type the type of the atom, should be set to 'mvhd' * @param version the version of this movie header atom * @param flags the future movie header flags * @param creationTime the creation time * @param modificationTime the modification time * @param timeScale the time scale * @param duration the duration of the movie * @param rate the preferred rate to play the movie * @param volume the preferred the volume * @param reserved the reserved data * @param matrix the point mapping matrix * @param prevTime the preview start time * @param prevDuration the preview duration * @param posterTime the time of the movie poster * @param selectTime the time of the selection * @param selectDuration the duration of the selection * @param currTime the current time position in the movie * @param nextTrackId the next track ID to use */ public MvhdAtom(int size, int type, byte version, byte[] flags, int creationTime, int modificationTime, int timeScale, int duration, int rate, short volume, byte[] reserved, int[] matrix, int prevTime, int prevDuration, int posterTime, int selectTime, int selectDuration, int currTime, int nextTrackId) { super(size, type); if (type != QTAtom.MVHD) throw new IllegalArgumentException("no mvhd type"); this.version = version; for (int i=0; i < flags.length && i < this.flags.length; i++) { this.flags[i] = flags[i]; } this.creationTime = convertToDate(creationTime); this.modificationTime = convertToDate(modificationTime); this.timeScale = timeScale; this.duration = duration; this.rate = rate; this.volume = volume; for (int i=0; i < reserved.length && i < this.reserved.length; i++) { this.reserved[i] = reserved[i]; } for (int i=0; i < matrix.length && i < this.matrix.length; i++) { this.matrix[i] = matrix[i]; } this.prevTime = prevTime; this.prevDuration = prevDuration; this.posterTime = posterTime; this.selectTime = selectTime; this.selectDuration = selectDuration; this.currTime = currTime; this.nextTrackId = nextTrackId; } public MvhdAtom(int size, int type, int creationTime, int timeScale, int duration) { this(size,type,(byte)0,new byte[0],creationTime,0,timeScale,duration, 0,(short)0,new byte[0], new int[0], 0,0,0,0,0,0,0); } /** * Returns the version of the movie header atom. * @return the version of the movie header atom */ public byte getVersion() { return version; } /** * Returns the creation date and time of the movie. * @return the creation date and time of the movie */ public ZonedDateTime getCreationTime() { return creationTime; } /** * Converts the date/time value of the file (in seconds since 1904-01-01 0:00) * into a java.util.Date. * @param time date/time value (in seconds since 1904-01-01 0:00) * @return the date/time value as java.util.Date */ private ZonedDateTime convertToDate(int time) { ZonedDateTime dt = ZonedDateTime.of(1904,1,1,0,0,0,0,ZoneId.of("UTC")); long ct = ((1L << 32) + time); return dt.plusSeconds(ct); } /** * Returns the modification time. * @return the modification time */ public ZonedDateTime getModificationTime() { return modificationTime; } /** * Returns the time scale. * @return the time scale */ public int getTimeScale() { return timeScale; } /** * Returns the frames per seconds, derived from the time scale. * @return the frames per seconds */ public double getFps() { return timeScale / 1000.0; } /** * Returns the duration of the movie in time scale units. 
* @return the duration of the movie in time scale units */ public int getDuration() { return duration; } /** * Returns the duration of the movie in seconds. * @return the duration of the movie in seconds */ public int getDurationSec() { return duration / timeScale; } /** * Returns the preferred rate. * @return the preferred rate */ public int getRate() { return rate; } /** * Returns the preferred volume. * @return the preferred volume */ public short getVolume() { return volume; } /** * Returns an iterator over the matrix. * @return an iterator over the matrix */ public Iterator<Integer> getMatrix() { return new MatrixIterator(); } /** * Returns the start time of the preview. * @return the start time of the preview */ public int getPrevTime() { return prevTime; } /** * Returns the duration of the preview in time scale units. * @return the duration of the preview in time scale units */ public int getPrevDuration() { return prevDuration; } /** * Returns the time position of the movie poster. * @return the time position of the movie poster */ public int getPosterTime() { return posterTime; } /** * Returns the start time of the current selection. * @return the start time of the current selection */ public int getSelectTime() { return selectTime; } /** * Returns the duration of the current selection. * @return the duration of the current selection */ public int getSelectDuration() { return selectDuration; } /** * Returns the current time position. * @return the current time position */ public int getCurrTime() { return currTime; } /** * Returns the next track ID for added track. * @return the next track ID for added track */ public int getNextTrackId() { return nextTrackId; } /** * Iterator class over the matrix integers. */ private class MatrixIterator implements Iterator<Integer> { /** * The current index of the iterator. */ private int index = 0; @Override public boolean hasNext() { return index < matrix.length; } @Override public Integer next() { return matrix[index++]; } } }
src/main/java/de/csmath/QT/MvhdAtom.java
package de.csmath.QT; import java.time.temporal.TemporalAmount; import java.util.*; import java.time.*; /** * The MvhdAtom class represents the QuickTime File Type Atom ('mvhd'). * It specifies the characteristics of an entire QuickTime movie. * @author lpfeiler */ public final class MvhdAtom extends QTAtom { /** * The size of the flag-array. */ public static int FLAGS_SIZE = 3; /** * The size of the reserved-array. */ public static int RESERVED_SIZE = 10; /** * The size of the matrix-array. */ public static int MATRIX_SIZE = 9; /** * The version of this movie header atom. */ private final byte version; /** * The space for future movie header flags. */ private final byte[] flags = new byte[FLAGS_SIZE]; /** * The date and time when the movie was created. */ private final ZonedDateTime creationTime; /** * The date and time when the movie was changed. */ private final ZonedDateTime modificationTime; /** * The number of time units that pass per second * in its time coordinate system. */ private final int timeScale; /** * The duration of the longest trak in time scale units (@see #timeScale). */ private final int duration; /** * The preferred rate at which to play this movie. */ private final int rate; /** * The preferred volume of the movie's sound. */ private final short volume; /** * This space is reserved by Apple. */ private final byte[] reserved = new byte[RESERVED_SIZE]; /** * The mapping of points from one coordinate space into another. */ private final int[] matrix = new int[MATRIX_SIZE]; /** * The time at which the preview begins. */ private final int prevTime; /** * The duration of the preview in time scale units. */ private final int prevDuration; /** * The time of the movie poster. */ private final int posterTime; /** * The start time of the current selection. */ private final int selectTime; /** * The duration of the current selection in time scale units. */ private final int selectDuration; /** * The current time position within the movie. */ private final int currTime; /** * The next track ID indicates the value to use for the next track * added to the movie. */ private final int nextTrackId; /** * Constructs a MvhdAtom. 
* @param size the size of the atom in the file * @param type the type of the atom, should be set to 'mvhd' * @param version the version of this movie header atom * @param flags the future movie header flags * @param creationTime the creation time * @param modificationTime the modification time * @param timeScale the time scale * @param duration the duration of the movie * @param rate the preferred rate to play the movie * @param volume the preferred the volume * @param reserved the reserved data * @param matrix the point mapping matrix * @param prevTime the preview start time * @param prevDuration the preview duration * @param posterTime the time of the movie poster * @param selectTime the time of the selection * @param selectDuration the duration of the selection * @param currTime the current time position in the movie * @param nextTrackId the next track ID to use */ public MvhdAtom(int size, int type, byte version, byte[] flags, int creationTime, int modificationTime, int timeScale, int duration, int rate, short volume, byte[] reserved, int[] matrix, int prevTime, int prevDuration, int posterTime, int selectTime, int selectDuration, int currTime, int nextTrackId) { super(size, type); if (type != QTAtom.MVHD) throw new IllegalArgumentException("no mvhd type"); this.version = version; for (int i=0; i < flags.length && i < this.flags.length; i++) { this.flags[i] = flags[i]; } this.creationTime = convertToDate(creationTime); this.modificationTime = convertToDate(modificationTime); this.timeScale = timeScale; this.duration = duration; this.rate = rate; this.volume = volume; for (int i=0; i < reserved.length && i < this.reserved.length; i++) { this.reserved[i] = reserved[i]; } for (int i=0; i < matrix.length && i < this.matrix.length; i++) { this.matrix[i] = matrix[i]; } this.prevTime = prevTime; this.prevDuration = prevDuration; this.posterTime = posterTime; this.selectTime = selectTime; this.selectDuration = selectDuration; this.currTime = currTime; this.nextTrackId = nextTrackId; } public MvhdAtom(int size, int type, int creationTime, int timeScale, int duration) { this(size,type,(byte)0,new byte[0],creationTime,0,timeScale,duration, 0,(short)0,new byte[0], new int[0], 0,0,0,0,0,0,0); } /** * Returns the version of the movie header atom. * @return the version of the movie header atom */ public byte getVersion() { return version; } /** * Returns the creation date and time of the movie. * @return the creation date and time of the movie */ public ZonedDateTime getCreationTime() { return creationTime; } /** * Converts the date/time value of the file (in seconds since 1904-01-01 0:00) * into a java.util.Date. * @param time date/time value (in seconds since 1904-01-01 0:00) * @return the date/time value as java.util.Date */ private ZonedDateTime convertToDate(int time) { // Calendar cal = new GregorianCalendar(TimeZone.getTimeZone("UTC")); // cal.set(1904,0,1,0,0,0); // cal.set(Calendar.MILLISECOND,0); // long ct = ((1L << 32) + time) * 1000; // ct += cal.getTimeInMillis(); // cal.setTimeInMillis(ct); // //TODO 2 hours off // return cal.getTime(); ZonedDateTime dt = ZonedDateTime.of(1904,1,1,0,0,0,0,ZoneId.of("UTC")); long ct = ((1L << 32) + time); return dt.plusSeconds(ct); } /** * Returns the modification time. * @return the modification time */ public ZonedDateTime getModificationTime() { return modificationTime; } /** * Returns the time scale. * @return the time scale */ public int getTimeScale() { return timeScale; } /** * Returns the frames per seconds, derived from the time scale. 
* @return the frames per seconds */ public double getFps() { return timeScale / 1000.0; } /** * Returns the duration of the movie in time scale units. * @return the duration of the movie in time scale units */ public int getDuration() { return duration; } /** * Returns the duration of the movie in seconds. * @return the duration of the movie in seconds */ public int getDurationSec() { return duration / timeScale; } /** * Returns the preferred rate. * @return the preferred rate */ public int getRate() { return rate; } /** * Returns the preferred volume. * @return the preferred volume */ public short getVolume() { return volume; } /** * Returns an iterator over the matrix. * @return an iterator over the matrix */ public Iterator<Integer> getMatrix() { return new MatrixIterator(); } /** * Returns the start time of the preview. * @return the start time of the preview */ public int getPrevTime() { return prevTime; } /** * Returns the duration of the preview in time scale units. * @return the duration of the preview in time scale units */ public int getPrevDuration() { return prevDuration; } /** * Returns the time position of the movie poster. * @return the time position of the movie poster */ public int getPosterTime() { return posterTime; } /** * Returns the start time of the current selection. * @return the start time of the current selection */ public int getSelectTime() { return selectTime; } /** * Returns the duration of the current selection. * @return the duration of the current selection */ public int getSelectDuration() { return selectDuration; } /** * Returns the current time position. * @return the current time position */ public int getCurrTime() { return currTime; } /** * Returns the next track ID for added track. * @return the next track ID for added track */ public int getNextTrackId() { return nextTrackId; } /** * Iterator class over the matrix integers. */ private class MatrixIterator implements Iterator<Integer> { /** * The current index of the iterator. */ private int index = 0; @Override public boolean hasNext() { return index < matrix.length; } @Override public Integer next() { return matrix[index++]; } } }
MvhdAtom: changing Date types to new ZonedDateTime types
src/main/java/de/csmath/QT/MvhdAtom.java
MvhdAtom: changing Date types to new ZonedDateTime types
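The commit above swaps the Calendar-based conversion for java.time. QuickTime stores timestamps as an unsigned 32-bit count of seconds since 1904-01-01 00:00 UTC, which Java reads back as a (possibly negative) signed int. The sketch below shows that epoch conversion in isolation; it uses Integer.toUnsignedLong rather than the class's own offset arithmetic, and the sample timestamp is made up.

```java
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class QtEpochDemo {
    // QuickTime epoch: 1904-01-01 00:00:00 UTC.
    static final ZonedDateTime QT_EPOCH =
            ZonedDateTime.of(1904, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC"));

    // Treat the stored 32-bit value as unsigned seconds since the QT epoch.
    static ZonedDateTime toDateTime(int rawSeconds) {
        return QT_EPOCH.plusSeconds(Integer.toUnsignedLong(rawSeconds));
    }

    public static void main(String[] args) {
        // A timestamp from ~2014 exceeds 2^31, so it is negative as a signed int;
        // Integer.toUnsignedLong recovers the intended magnitude.
        int raw = (int) 3489660928L;
        System.out.println(toDateTime(raw)); // prints a date in 2014
    }
}
```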
Java
mpl-2.0
2b0186ccc204f5b642807fbd3639853f30a72add
0
jsdoc3/rhino,InstantWebP2P/rhino-android,Distrotech/rhino,Angelfirenze/rhino,tuchida/rhino,ashwinrayaprolu1984/rhino,tuchida/rhino,ashwinrayaprolu1984/rhino,tejassaoji/RhinoCoarseTainting,swannodette/rhino,sainaen/rhino,sainaen/rhino,lv7777/egit_test,tuchida/rhino,Pilarbrist/rhino,AlexTrotsenko/rhino,sainaen/rhino,lv7777/egit_test,Angelfirenze/rhino,tntim96/rhino-jscover,sainaen/rhino,Angelfirenze/rhino,tejassaoji/RhinoCoarseTainting,AlexTrotsenko/rhino,tejassaoji/RhinoCoarseTainting,qhanam/rhino,qhanam/rhino,lv7777/egit_test,lv7777/egit_test,Pilarbrist/rhino,ashwinrayaprolu1984/rhino,Pilarbrist/rhino,qhanam/rhino,AlexTrotsenko/rhino,ashwinrayaprolu1984/rhino,tntim96/rhino-apigee,tejassaoji/RhinoCoarseTainting,tntim96/htmlunit-rhino-fork,sainaen/rhino,swannodette/rhino,Pilarbrist/rhino,ashwinrayaprolu1984/rhino,tejassaoji/RhinoCoarseTainting,tntim96/rhino-jscover-repackaged,Angelfirenze/rhino,swannodette/rhino,lv7777/egit_test,AlexTrotsenko/rhino,jsdoc3/rhino,sam/htmlunit-rhino-fork,sam/htmlunit-rhino-fork,AlexTrotsenko/rhino,jsdoc3/rhino,tntim96/htmlunit-rhino-fork,AlexTrotsenko/rhino,Pilarbrist/rhino,tntim96/rhino-jscover-repackaged,tuchida/rhino,swannodette/rhino,sainaen/rhino,ashwinrayaprolu1984/rhino,tejassaoji/RhinoCoarseTainting,tuchida/rhino,rasmuserik/rhino,Pilarbrist/rhino,sam/htmlunit-rhino-fork,sainaen/rhino,tntim96/rhino-apigee,ashwinrayaprolu1984/rhino,tuchida/rhino,Angelfirenze/rhino,InstantWebP2P/rhino-android,lv7777/egit_test,sam/htmlunit-rhino-fork,sam/htmlunit-rhino-fork,Pilarbrist/rhino,qhanam/rhino,tntim96/rhino-jscover,rasmuserik/rhino,Angelfirenze/rhino,swannodette/rhino,sam/htmlunit-rhino-fork,Distrotech/rhino,swannodette/rhino,tntim96/rhino-apigee,lv7777/egit_test,tuchida/rhino,sam/htmlunit-rhino-fork,AlexTrotsenko/rhino,tejassaoji/RhinoCoarseTainting,swannodette/rhino,Angelfirenze/rhino
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is collection of utilities useful for Rhino code. * * The Initial Developer of the Original Code is * RUnit Software AS. * Portions created by the Initial Developer are Copyright (C) 2003 * the Initial Developer. All Rights Reserved. * * Contributor(s): Igor Bukanov, [email protected] * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. * * ***** END LICENSE BLOCK ***** */ package org.mozilla.javascript; import java.util.Hashtable; import java.io.IOException; import java.io.Reader; /** * Collection of utilities */ public class Kit { static Class classOrNull(String className) { try { return Class.forName(className); } catch (ClassNotFoundException ex) { } catch (SecurityException ex) { } catch (LinkageError ex) { } catch (IllegalArgumentException e) { // Can be thrown if name has characters that a class name // can not contain } return null; } static Class classOrNull(ClassLoader loader, String className) { try { return loader.loadClass(className); } catch (ClassNotFoundException ex) { } catch (SecurityException ex) { } catch (LinkageError ex) { } catch (IllegalArgumentException e) { // Can be thrown if name has characters that a class name // can not contain } return null; } static Object newInstanceOrNull(Class cl) { try { return cl.newInstance(); } catch (SecurityException x) { } catch (LinkageError ex) { } catch (InstantiationException x) { } catch (IllegalAccessException x) { } return null; } /** * Split string into array of strings using semicolon as string terminator * (; after the last string is required). */ public static String[] semicolonSplit(String s) { int count = 0; for (int cursor = 0; ;) { int next = s.indexOf(';', cursor) + 1; if (next <= 0) { // check for missing ; if (cursor + 1 < s.length()) throw new IllegalArgumentException(); break; } ++count; cursor = next + 1; } String[] array = new String[count]; count = 0; for (int cursor = 0; ;) { int next = s.indexOf(';', cursor); if (next < 0) { break; } array[count] = s.substring(cursor, next); ++count; cursor = next + 1; } return array; } /** * Add <i>listener</i> to <i>bag</i> of listeners. 
* The function does not modify <i>bag</i> and return a new collection * containing <i>listener</i> and all listeners from <i>bag</i>. * Bag without listeners always represented as the null value. * <p> * Usage example: * <pre> * private volatile Object changeListeners; * * public void addMyListener(PropertyChangeListener l) * { * synchronized (this) { * changeListeners = Kit.addListener(changeListeners, l); * } * } * * public void removeTextListener(PropertyChangeListener l) * { * synchronized (this) { * changeListeners = Kit.removeListener(changeListeners, l); * } * } * * public void fireChangeEvent(Object oldValue, Object newValue) * { * // Get immune local copy * Object listeners = changeListeners; * if (listeners != null) { * PropertyChangeEvent e = new PropertyChangeEvent( * this, "someProperty" oldValue, newValue); * for (int i = 0; ; ++i) { * Object l = Kit.getListener(listeners, i); * if (l == null) * break; * ((PropertyChangeListener)l).propertyChange(e); * } * } * } * </pre> * * @param listener Listener to add to <i>bag</i> * @param bag Current collection of listeners. * @return A new bag containing all listeners from <i>bag</i> and * <i>listener</i>. * @see #removeListener(Object bag, Object listener) * @see #getListener(Object bag, int index) */ public static Object addListener(Object bag, Object listener) { if (listener == null) throw new IllegalArgumentException(); if (listener instanceof Object[]) throw new IllegalArgumentException(); if (bag == null) { bag = listener; } else if (!(bag instanceof Object[])) { bag = new Object[] { bag, listener }; } else { Object[] array = (Object[])bag; int L = array.length; // bag has at least 2 elements if it is array if (L < 2) throw new IllegalArgumentException(); Object[] tmp = new Object[L + 1]; System.arraycopy(array, 0, tmp, 0, L); tmp[L] = listener; bag = tmp; } return bag; } /** * Remove <i>listener</i> from <i>bag</i> of listeners. * The function does not modify <i>bag</i> and return a new collection * containing all listeners from <i>bag</i> except <i>listener</i>. * If <i>bag</i> does not contain <i>listener</i>, the function returns * <i>bag</i>. * <p> * For usage example, see {@link addListener(Object bag, Object listener)}. * * @param listener Listener to remove from <i>bag</i> * @param bag Current collection of listeners. * @return A new bag containing all listeners from <i>bag</i> except * <i>listener</i>. * @see #addListener(Object bag, Object listener) * @see #getListener(Object bag, int index) */ public static Object removeListener(Object bag, Object listener) { if (listener == null) throw new IllegalArgumentException(); if (listener instanceof Object[]) throw new IllegalArgumentException(); if (bag == listener) { bag = null; } else if (bag instanceof Object[]) { Object[] array = (Object[])bag; int L = array.length; // bag has at least 2 elements if it is array if (L < 2) throw new IllegalArgumentException(); if (L == 2) { if (array[1] == listener) { bag = array[0]; } else if (array[0] == listener) { bag = array[1]; } } else { int i = L; do { --i; if (array[i] == listener) { Object[] tmp = new Object[L - 1]; System.arraycopy(array, 0, tmp, 0, i); System.arraycopy(array, i + 1, tmp, i, L - (i + 1)); bag = tmp; break; } } while (i != 0); } } return bag; } /** * Get listener at <i>index</i> position in <i>bag</i> or null if * <i>index</i> equals to number of listeners in <i>bag</i>. * <p> * For usage example, see {@link addListener(Object bag, Object listener)}. * * @param bag Current collection of listeners. 
* @param index Index of the listener to access. * @return Listener at the given index or null. * @see #addListener(Object bag, Object listener) * @see #removeListener(Object bag, Object listener) */ public static Object getListener(Object bag, int index) { if (index == 0) { if (bag == null) return null; if (!(bag instanceof Object[])) return bag; Object[] array = (Object[])bag; // bag has at least 2 elements if it is array if (array.length < 2) throw new IllegalArgumentException(); return array[0]; } else if (index == 1) { if (!(bag instanceof Object[])) { if (bag == null) throw new IllegalArgumentException(); return null; } Object[] array = (Object[])bag; // the array access will check for index on its own return array[1]; } else { // bag has to array Object[] array = (Object[])bag; int L = array.length; if (L < 2) throw new IllegalArgumentException(); if (index == L) return null; return array[index]; } } static Object initHash(Hashtable h, Object key, Object initialValue) { synchronized (h) { Object current = h.get(key); if (current == null) { h.put(key, initialValue); } else { initialValue = current; } } return initialValue; } private final static class ComplexKey { private Object key1; private Object key2; private int hash; ComplexKey(Object key1, Object key2) { this.key1 = key1; this.key2 = key2; } public boolean equals(Object anotherObj) { if (!(anotherObj instanceof ComplexKey)) return false; ComplexKey another = (ComplexKey)anotherObj; return key1.equals(another.key1) && key2.equals(another.key2); } public int hashCode() { if (hash == 0) { hash = key1.hashCode() ^ key2.hashCode(); } return hash; } } public static Object makeHashKeyFromPair(Object key1, Object key2) { if (key1 == null) throw new IllegalArgumentException(); if (key2 == null) throw new IllegalArgumentException(); return new ComplexKey(key1, key2); } public static String readReader(Reader r) throws IOException { char[] buffer = new char[512]; int cursor = 0; for (;;) { int n = r.read(buffer, cursor, buffer.length - cursor); if (n < 0) { break; } cursor += n; if (cursor == buffer.length) { char[] tmp = new char[buffer.length * 2]; System.arraycopy(buffer, 0, tmp, 0, cursor); buffer = tmp; } } return new String(buffer, 0, cursor); } /** * Throws RuntimeException to indicate failed assertion. * The function never returns and its return type is RuntimeException * only to be able to write <tt>throw Kit.codeBug()</tt> if plain * <tt>Kit.codeBug()</tt> triggers unreachable code error. */ public static RuntimeException codeBug() throws RuntimeException { throw new RuntimeException("FAILED ASSERTION"); } }
src/org/mozilla/javascript/Kit.java
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is collection of utilities useful for Rhino code. * * The Initial Developer of the Original Code is * RUnit Software AS. * Portions created by the Initial Developer are Copyright (C) 2003 * the Initial Developer. All Rights Reserved. * * Contributor(s): Igor Bukanov, [email protected] * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. * * ***** END LICENSE BLOCK ***** */ package org.mozilla.javascript; import java.util.Hashtable; import java.io.IOException; import java.io.Reader; /** * Collection of utilities */ public class Kit { static Class classOrNull(String className) { try { return Class.forName(className); } catch (ClassNotFoundException ex) { } catch (SecurityException ex) { } catch (LinkageError ex) { } catch (IllegalArgumentException e) { // Can be thrown if name has characters that a class name // can not contain } return null; } static Class classOrNull(ClassLoader loader, String className) { try { return loader.loadClass(className); } catch (ClassNotFoundException ex) { } catch (SecurityException ex) { } catch (LinkageError ex) { } catch (IllegalArgumentException e) { // Can be thrown if name has characters that a class name // can not contain } return null; } static Object newInstanceOrNull(Class cl) { try { return cl.newInstance(); } catch (SecurityException x) { } catch (LinkageError ex) { } catch (InstantiationException x) { } catch (IllegalAccessException x) { } return null; } /** * Split string into array of strings using semicolon as string terminator * (; after the last string is required). */ public static String[] semicolonSplit(String s) { int count = 0; for (int cursor = 0; ;) { int next = s.indexOf(';', cursor) + 1; if (next <= 0) { // check for missing ; if (cursor + 1 < s.length()) throw new IllegalArgumentException(); break; } ++count; cursor = next + 1; } String[] array = new String[count]; count = 0; for (int cursor = 0; ;) { int next = s.indexOf(';', cursor); if (next < 0) { break; } array[count] = s.substring(cursor, next); ++count; cursor = next + 1; } return array; } /** * Add <i>listener</i> to <i>bag</i> of listeners. 
* The function does not modify <i>bag</i> and return a new collection * containing <i>listener</i> and all listeners from <i>bag</i>. * Bag without listeners always represented as the null value. * <p> * Usage example: * <pre> * private volatile Object changeListeners; * * public void addMyListener(PropertyChangeListener l) * { * synchronized (this) { * changeListeners = Kit.addListener(changeListeners, l); * } * } * * public void removeTextListener(PropertyChangeListener l) * { * synchronized (this) { * changeListeners = Kit.removeListener(changeListeners, l); * } * } * * public void fireChangeEvent(Object oldValue, Object newValue) * { * // Get immune local copy * Object listeners = changeListeners; * if (listeners != null) { * PropertyChangeEvent e = new PropertyChangeEvent( * this, "someProperty" oldValue, newValue); * for (int i = 0; ; ++i) { * Object l = Kit.getListener(listeners, i); * if (l == null) * break; * ((PropertyChangeListener)l).propertyChange(e); * } * } * } * </pre> * * @param listener Listener to add to <i>bag</i> * @param bag Current collection of listeners. * @return A new bag containing all listeners from <i>bag</i> and * <i>listener</i>. * @see #removeListener(Object bag, Object listener) * @see #getListener(Object bag, int index) */ public static Object addListener(Object bag, Object listener) { if (listener == null) throw new IllegalArgumentException(); if (listener instanceof Object[]) throw new IllegalArgumentException(); if (bag == null) { bag = listener; } else if (!(bag instanceof Object[])) { bag = new Object[] { bag, listener }; } else { Object[] array = (Object[])bag; int L = array.length; // bag has at least 2 elements if it is array if (L < 2) throw new IllegalArgumentException(); Object[] tmp = new Object[L + 1]; System.arraycopy(array, 0, tmp, 0, L); tmp[L] = listener; bag = tmp; } return bag; } /** * Remove <i>listener</i> from <i>bag</i> of listeners. * The function does not modify <i>bag</i> and return a new collection * containing all listeners from <i>bag</i> except <i>listener</i>. * If <i>bag</i> does not contain <i>listener</i>, the function returns * <i>bag</i>. * <p> * For usage example, see {@link addListener(Object bag, Object listener)}. * * @param listener Listener to remove from <i>bag</i> * @param bag Current collection of listeners. * @return A new bag containing all listeners from <i>bag</i> except * <i>listener</i>. * @see #addListener(Object bag, Object listener) * @see #getListener(Object bag, int index) */ public static Object removeListener(Object bag, Object listener) { if (listener == null) throw new IllegalArgumentException(); if (listener instanceof Object[]) throw new IllegalArgumentException(); if (bag == listener) { bag = null; } else if (bag instanceof Object[]) { Object[] array = (Object[])bag; int L = array.length; // bag has at least 2 elements if it is array if (L < 2) throw new IllegalArgumentException(); if (L == 2) { if (array[1] == listener) { bag = array[0]; } else if (array[0] == listener) { bag = array[1]; } } else { int i = L; do { --i; if (array[i] == listener) { Object[] tmp = new Object[L - 1]; System.arraycopy(array, 0, tmp, 0, i); System.arraycopy(array, i + 1, tmp, i, L - (i + 1)); bag = tmp; break; } } while (i != 0); } } return bag; } /** * Get listener at <i>index</i> position in <i>bag</i> or null if * <i>index</i> equals to number of listeners in <i>bag</i>. * <p> * For usage example, see {@link addListener(Object bag, Object listener)}. * * @param bag Current collection of listeners. 
* @param index Index of the listener to access. * @return Listener at the given index or null. * @see #addListener(Object bag, Object listener) * @see #removeListener(Object bag, Object listener) */ public static Object getListener(Object bag, int index) { if (index == 0) { if (bag == null) return null; if (!(bag instanceof Object[])) return bag; Object[] array = (Object[])bag; // bag has at least 2 elements if it is array if (array.length < 2) throw new IllegalArgumentException(); return array[0]; } else if (index == 1) { if (!(bag instanceof Object[])) { if (bag == null) throw new IllegalArgumentException(); return null; } Object[] array = (Object[])bag; // the array access will check for index on its own return array[1]; } else { // bag has to array Object[] array = (Object[])bag; int L = array.length; if (L < 2) throw new IllegalArgumentException(); if (index == L) return null; return array[index]; } } static Object initHash(Hashtable h, Object key, Object initialValue) { synchronized (h) { Object current = h.get(key); if (current == null) { h.put(key, initialValue); } else { initialValue = current; } } return initialValue; } public static String readReader(Reader r) throws IOException { char[] buffer = new char[512]; int cursor = 0; for (;;) { int n = r.read(buffer, cursor, buffer.length - cursor); if (n < 0) { break; } cursor += n; if (cursor == buffer.length) { char[] tmp = new char[buffer.length * 2]; System.arraycopy(buffer, 0, tmp, 0, cursor); buffer = tmp; } } return new String(buffer, 0, cursor); } /** * Throws RuntimeException to indicate failed assertion. * The function never returns and its return type is RuntimeException * only to be able to write <tt>throw Kit.codeBug()</tt> if plain * <tt>Kit.codeBug()</tt> triggers unreachable code error. */ public static RuntimeException codeBug() throws RuntimeException { throw new RuntimeException("FAILED ASSERTION"); } }
Added Kit.makeHashKeyFromPair to make combined key for hashtables from 2 objects
src/org/mozilla/javascript/Kit.java
Added Kit.makeHashKeyFromPair to make combined key for hashtables from 2 objects
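Kit.makeHashKeyFromPair, added in this commit, wraps two non-null objects in a composite key whose equals() requires both parts to match pairwise and whose hashCode() is the XOR of the parts' hash codes, so a Hashtable can be indexed by an object pair. A small usage sketch follows; the key parts and cached value are hypothetical.

```java
import java.util.Hashtable;

import org.mozilla.javascript.Kit;

public class PairKeyDemo {
    public static void main(String[] args) {
        Hashtable<Object, Object> cache = new Hashtable<>();

        // Combine two objects into one composite key.
        Object key = Kit.makeHashKeyFromPair("java.lang.String", "length");
        cache.put(key, "some cached reflection data");

        // A key built from equal parts finds the same entry again.
        Object sameKey = Kit.makeHashKeyFromPair("java.lang.String", "length");
        System.out.println(cache.get(sameKey)); // prints the cached value
    }
}
```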
Java
lgpl-2.1
0145978bd7fc3c5e83d6f148c3bf96f68822dc45
0
tomck/intermine,drhee/toxoMine,justincc/intermine,joshkh/intermine,zebrafishmine/intermine,joshkh/intermine,justincc/intermine,joshkh/intermine,tomck/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,zebrafishmine/intermine,zebrafishmine/intermine,drhee/toxoMine,justincc/intermine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,zebrafishmine/intermine,JoeCarlson/intermine,joshkh/intermine,tomck/intermine,JoeCarlson/intermine,JoeCarlson/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,joshkh/intermine,elsiklab/intermine,justincc/intermine,elsiklab/intermine,JoeCarlson/intermine,elsiklab/intermine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,tomck/intermine,kimrutherford/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,justincc/intermine,JoeCarlson/intermine,drhee/toxoMine,tomck/intermine,zebrafishmine/intermine,tomck/intermine,zebrafishmine/intermine,joshkh/intermine,JoeCarlson/intermine,elsiklab/intermine,drhee/toxoMine,kimrutherford/intermine,kimrutherford/intermine,JoeCarlson/intermine,justincc/intermine,drhee/toxoMine,tomck/intermine,kimrutherford/intermine,zebrafishmine/intermine,justincc/intermine,justincc/intermine,JoeCarlson/intermine,kimrutherford/intermine,zebrafishmine/intermine,tomck/intermine,kimrutherford/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,elsiklab/intermine,elsiklab/intermine,justincc/intermine,kimrutherford/intermine
package org.intermine.bio.io.gff3; /* * Copyright (C) 2002-2005 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.LinkedHashMap; import java.util.List; import java.util.ArrayList; import java.util.StringTokenizer; import java.util.Iterator; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.net.URLEncoder; import org.intermine.util.StringUtil; /** * A class that represents one line of a GFF3 file. Some of this code is * derived from BioJava. * * @author Kim Rutherford */ public class GFF3Record { private String sequenceID; private String source; private String type; private int start; private int end; private Double score; private String strand; private String phase; private Map attributes = new LinkedHashMap(); private static Map replacements; /** * Create a GFF3Record from a line of a GFF3 file * @param line the String to parse * @throws IOException if there is an error during parsing the line */ public GFF3Record(String line) throws IOException { StringTokenizer st = new StringTokenizer(line, "\t", false); if (st.countTokens() < 8) { throw new IOException("GFF line too short (" + st.countTokens() + " fields): " + line); } sequenceID = fixEntityNames(URLDecoder.decode(st.nextToken(), "UTF-8")); source = st.nextToken(); if (source.equals("") || source.equals(".")) { source = null; } type = st.nextToken(); String startString = st.nextToken(); try { if (startString.equals(".")) { start = -1; } else { start = Integer.parseInt(startString); } } catch (NumberFormatException nfe) { throw new IOException("can not parse integer for start position: " + startString); } String endString = st.nextToken(); try { if (endString.equals(".")) { end = -1; } else { end = Integer.parseInt(endString); } } catch (NumberFormatException nfe) { throw new IOException("can not parse integer for end position: " + endString); } String scoreString = st.nextToken(); if (scoreString.equals("") || scoreString.equals(".")) { score = null; } else { try { score = new Double(scoreString); } catch (NumberFormatException nfe) { throw new IOException("can not parse score: " + scoreString); } } strand = st.nextToken(); if (strand.equals("") || strand.equals(".")) { strand = null; } phase = st.nextToken(); if (phase.equals("") || phase.equals(".")) { phase = null; } if (st.hasMoreTokens()) { parseAttribute(st.nextToken(), line); } } /** * Create a new GFF3Record * @param sequenceID the sequence name * @param source the source * @param type the feature type * @param start the start coordinate on the sequence given by sequenceID * @param end the end coordinate on the sequence * @param score the feature score or null if there is no score * @param strand the feature strand or null * @param phase the phase or null * @param attributes a Map from attribute name to a List of attribute values */ public GFF3Record(String sequenceID, String source, String type, int start, int end, Double score, String strand, String phase, Map attributes) { this.sequenceID = sequenceID; this.source = source; this.type = type; this.start = start; this.end = end; this.score = score; this.strand = strand; this.phase = phase; this.attributes = attributes; } private void parseAttribute(String 
attributeString, String line) throws IOException { StringTokenizer sTok = new StringTokenizer(attributeString, ";", false); while (sTok.hasMoreTokens()) { String attVal = sTok.nextToken().trim(); if (attVal.length() == 0) { continue; } String attName; List valList = new ArrayList(); int spaceIndx = attVal.indexOf("="); if (spaceIndx == -1) { throw new IOException("the attributes section must contain name=value pairs, " + "while parsing: " + line); } else { attName = attVal.substring(0, spaceIndx); attributeString = attVal.substring(spaceIndx + 1).trim(); while (attributeString.length() > 0) { if (attributeString.startsWith("\"")) { attributeString = attributeString.substring(1); int quoteIndx = attributeString.indexOf("\""); if (quoteIndx > 0) { valList.add(attributeString.substring(0, quoteIndx)); attributeString = attributeString.substring(quoteIndx + 1).trim(); if (attributeString.startsWith(",")) { attributeString = attributeString.substring(1).trim(); } } else { throw new IOException("unmatched quote in this line: " + line); } } else { int commaIndx = attributeString.indexOf(","); if (commaIndx == -1) { valList.add(attributeString); attributeString = ""; } else { valList.add(attributeString.substring(0, commaIndx)); attributeString = attributeString.substring(commaIndx + 1).trim(); } } } } // Decode values for (int i = 0; i < valList.size(); i++) { String value = (String) valList.get(i); if (!attName.equals("Target") && !attName.equals("Gap")) { value = URLDecoder.decode(value, "UTF-8"); } value = fixEntityNames(value); valList.set(i, value); } attributes.put(attName, valList); } } /** * Return the sequenceID field of this record. * @return the sequenceID field of this record */ public String getSequenceID () { return sequenceID; } /** * Return the source field of this record. * @return the source field of this record */ public String getSource () { return source; } /** * Return the type field of this record. * @return the type field of this record */ public String getType () { return type; } /** * Set the type of this record. * @param type the new type */ public void setType(String type) { this.type = type; } /** * Return the start field of this record. * @return the start field of this record */ public int getStart () { return start; } /** * Return the end field of this record. * @return the end field of this record */ public int getEnd () { return end; } /** * Return the score field of this record. * @return the score field of this record */ public Double getScore () { return score; } /** * Return the strand field of this record. * @return returns null if the strand is unset (ie. with an empty field or contained "." in the * original GFF3 file) */ public String getStrand () { return strand; } /** * Return the phase field of this record. * @return returns null if the phase is unset (ie. with an empty field or contained "." in the * original GFF3 file) */ public String getPhase () { return phase; } /** * Return the first value of the Id field from the attributes of this record. * @return the Id from the attributes of this record or null of there isn't a value */ public String getId () { if (getAttributes().containsKey("ID")) { return (String) ((List) getAttributes().get("ID")).get(0); } else { return null; } } /** * Set the Id of this GFF3Record. * @param id the new id */ public void setId(String id) { attributes.put("ID", Collections.singletonList(id)); } /** * Return the list of the Name field from the attributes of this record. 
* @return the Name from the attributes of this record or null of there isn't a value */ public List getNames() { if (getAttributes().containsKey("Name")) { return (List) getAttributes().get("Name"); } else { return null; } } /** * Return the first value of the Alias field from the attributes of this record. * @return the Alias from the attributes of this record or null of there isn't a value */ public String getAlias () { if (getAttributes().containsKey("Alias")) { return (String) ((List) getAttributes().get("Alias")).get(0); } else { return null; } } /** * Return the list of the Parent field from the attributes of this record. * @return the Parent from the attributes of this record or null of there isn't a value */ public List getParents () { if (getAttributes().containsKey("Parent")) { return (List) getAttributes().get("Parent"); } else { return null; } } /** * Return the first value of the Target field from the attributes of this record. * @return the Target from the attributes of this record or null of there isn't a value */ public String getTarget () { if (getAttributes().containsKey("Target")) { return (String) ((List) getAttributes().get("Target")).get(0); } else { return null; } } /** * Return the first value of the Gap field from the attributes of this record. * @return the Gap from the attributes of this record or null of there isn't a value */ public String getGap () { if (getAttributes().containsKey("Gap")) { return (String) ((List) getAttributes().get("Gap")).get(0); } else { return null; } } /** * Return the first value of the Note field from the attributes of this record. * @return the Note from the attributes of this record or null of there isn't a value */ public String getNote () { if (getAttributes().containsKey("Note")) { return (String) ((List) getAttributes().get("Note")).get(0); } else { return null; } } /** * Return the first value of the Dbxref field from the attributes of this record. * @return the Dbxref from the attributes of this record or null of there isn't a value */ public List getDbxrefs () { if (getAttributes().containsKey("Dbxref")) { return (List) getAttributes().get("Dbxref"); } else { return null; } } /** * Return the first value of the OntologyTerm field from the attributes of this record. * @return the OntologyTerm from the attributes of this record or null of there isn't a value */ public String getOntologyTerm () { if (getAttributes().containsKey("Ontology_term")) { return (String) ((List) getAttributes().get("Ontology_term")).get(0); } else { return null; } } /** * Return the attributes of this record as a Map from attribute key to Lists of attribute * values. * @return the attributes of this record */ public Map getAttributes () { return attributes; } /** * @see java.lang.Object#toString() */ public String toString() { return "<GFF3Record: sequenceID: " + sequenceID + " source: " + source + " type: " + type + " start: " + start + " end: " + end + " score: " + score + " strand: " + strand + " phase: " + phase + " attributes: " + attributes + ">"; } /** * Return this record in GFF format. The String is suitable for output to a GFF file. * @return a GFF line */ public String toGFF3() { try { return URLEncoder.encode(sequenceID, "UTF-8") + "\t" + ((source == null) ? "." : source) + "\t" + type + "\t" + start + "\t" + end + "\t" + ((score == null) ? "." : score.toString()) + "\t" + ((strand == null) ? "." : strand) + "\t" + ((phase == null) ? "." 
: phase) + "\t" + writeAttributes(); } catch (UnsupportedEncodingException e) { throw new RuntimeException("error while encoding: " + sequenceID, e); } } private String writeAttributes() { StringBuffer sb = new StringBuffer(); boolean first = true; Iterator iter = attributes.entrySet().iterator(); while (iter.hasNext()) { Map.Entry entry = (Map.Entry) iter.next(); if (!first) { sb.append(";"); } first = false; String listValue; if (entry.getValue() instanceof List) { List oldList = (List) entry.getValue(); List encodedList = new ArrayList(oldList); for (int i = 0; i < encodedList.size(); i++) { Object oldValue = encodedList.get(i); String newValue; try { newValue = URLEncoder.encode("" + oldValue, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new RuntimeException("error while encoding: " + oldValue, e); } encodedList.set(i, newValue); } listValue = StringUtil.join(encodedList, ","); } else { try { listValue = URLEncoder.encode("" + entry.getValue(), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new RuntimeException("error while encoding: " + entry.getValue(), e); } } sb.append(entry.getKey() + "=" + listValue); } return sb.toString(); } /** * Replace greek character entity names with entity names that work in HTML. * @param value input string * @return string with replacements */ protected static String fixEntityNames(String value) { synchronized (GFF3Record.class) { if (replacements == null) { replacements = new HashMap(); replacements.put("agr", "alpha"); replacements.put("Agr", "Alpha"); replacements.put("bgr", "beta"); replacements.put("Bgr", "Beta"); replacements.put("ggr", "gamma"); replacements.put("Ggr", "Gamma"); replacements.put("dgr", "delta"); replacements.put("Dgr", "Delta"); replacements.put("egr", "epsilon"); replacements.put("Egr", "Epsilon"); replacements.put("zgr", "zeta"); replacements.put("Zgr", "Zeta"); replacements.put("eegr", "eta"); replacements.put("EEgr", "Eta"); replacements.put("thgr", "theta"); replacements.put("THgr", "Theta"); replacements.put("igr", "iota"); replacements.put("Igr", "Iota"); replacements.put("kgr", "kappa"); replacements.put("Kgr", "Kappa"); replacements.put("lgr", "lambda"); replacements.put("Lgr", "Lambda"); replacements.put("mgr", "mu"); replacements.put("Mgr", "Mu"); replacements.put("ngr", "nu"); replacements.put("Ngr", "Nu"); replacements.put("xgr", "xi"); replacements.put("Xgr", "Xi"); replacements.put("ogr", "omicron"); replacements.put("Ogr", "Omicron"); replacements.put("pgr", "pi"); replacements.put("Pgr", "Pi"); replacements.put("rgr", "rho"); replacements.put("Rgr", "Rho"); replacements.put("sgr", "sigma"); replacements.put("Sgr", "Sigma"); replacements.put("sfgr", "sigmaf"); replacements.put("tgr", "tau"); replacements.put("Tgr", "Tau"); replacements.put("ugr", "upsilon"); replacements.put("Ugr", "Upsilon"); replacements.put("phgr", "phi"); replacements.put("PHgr", "Phi"); replacements.put("khgr", "chi"); replacements.put("KHgr", "Chi"); replacements.put("psgr", "psi"); replacements.put("PSgr", "Psi"); replacements.put("ohgr", "omega"); replacements.put("OHgr", "Omega"); } } for (Iterator iter = replacements.entrySet().iterator(); iter.hasNext(); ) { Map.Entry entry = (Map.Entry) iter.next(); if (value.indexOf('&') != -1) { value = value.replaceAll("&" + entry.getKey() + ";", "&" + entry.getValue() + ";"); } } return value; } }
bio/core/main/src/org/intermine/bio/io/gff3/GFF3Record.java
package org.intermine.bio.io.gff3; /* * Copyright (C) 2002-2005 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.LinkedHashMap; import java.util.List; import java.util.ArrayList; import java.util.StringTokenizer; import java.util.Iterator; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.net.URLEncoder; import org.intermine.util.StringUtil; /** * A class that represents one line of a GFF3 file. Some of this code is * derived from BioJava. * * @author Kim Rutherford */ public class GFF3Record { private String sequenceID; private String source; private String type; private int start; private int end; private Double score; private String strand; private String phase; private Map attributes = new LinkedHashMap(); private static Map replacements; /** * Create a GFF3Record from a line of a GFF3 file * @param line the String to parse * @throws IOException if there is an error during parsing the line */ public GFF3Record(String line) throws IOException { StringTokenizer st = new StringTokenizer(line, "\t", false); if (st.countTokens() < 8) { throw new IOException("GFF line too short (" + st.countTokens() + " fields): " + line); } sequenceID = fixEntityNames(URLDecoder.decode(st.nextToken(), "UTF-8")); source = st.nextToken(); if (source.equals("") || source.equals(".")) { source = null; } type = st.nextToken(); String startString = st.nextToken(); try { start = Integer.parseInt(startString); } catch (NumberFormatException nfe) { throw new IOException("can not parse integer for start position: " + startString); } String endString = st.nextToken(); try { end = Integer.parseInt(endString); } catch (NumberFormatException nfe) { throw new IOException("can not parse integer for end position: " + endString); } String scoreString = st.nextToken(); if (scoreString.equals("") || scoreString.equals(".")) { score = null; } else { try { score = new Double(scoreString); } catch (NumberFormatException nfe) { throw new IOException("can not parse score: " + scoreString); } } strand = st.nextToken(); if (strand.equals("") || strand.equals(".")) { strand = null; } phase = st.nextToken(); if (phase.equals("") || phase.equals(".")) { phase = null; } if (st.hasMoreTokens()) { parseAttribute(st.nextToken(), line); } } /** * Create a new GFF3Record * @param sequenceID the sequence name * @param source the source * @param type the feature type * @param start the start coordinate on the sequence given by sequenceID * @param end the end coordinate on the sequence * @param score the feature score or null if there is no score * @param strand the feature strand or null * @param phase the phase or null * @param attributes a Map from attribute name to a List of attribute values */ public GFF3Record(String sequenceID, String source, String type, int start, int end, Double score, String strand, String phase, Map attributes) { this.sequenceID = sequenceID; this.source = source; this.type = type; this.start = start; this.end = end; this.score = score; this.strand = strand; this.phase = phase; this.attributes = attributes; } private void parseAttribute(String attributeString, String line) throws IOException { StringTokenizer sTok = new StringTokenizer(attributeString, ";", 
false); while (sTok.hasMoreTokens()) { String attVal = sTok.nextToken().trim(); if (attVal.length() == 0) { continue; } String attName; List valList = new ArrayList(); int spaceIndx = attVal.indexOf("="); if (spaceIndx == -1) { throw new IOException("the attributes section must contain name=value pairs, " + "while parsing: " + line); } else { attName = attVal.substring(0, spaceIndx); attributeString = attVal.substring(spaceIndx + 1).trim(); while (attributeString.length() > 0) { if (attributeString.startsWith("\"")) { attributeString = attributeString.substring(1); int quoteIndx = attributeString.indexOf("\""); if (quoteIndx > 0) { valList.add(attributeString.substring(0, quoteIndx)); attributeString = attributeString.substring(quoteIndx + 1).trim(); if (attributeString.startsWith(",")) { attributeString = attributeString.substring(1).trim(); } } else { throw new IOException("unmatched quote in this line: " + line); } } else { int commaIndx = attributeString.indexOf(","); if (commaIndx == -1) { valList.add(attributeString); attributeString = ""; } else { valList.add(attributeString.substring(0, commaIndx)); attributeString = attributeString.substring(commaIndx + 1).trim(); } } } } // Decode values for (int i = 0; i < valList.size(); i++) { String value = (String) valList.get(i); if (!attName.equals("Target") && !attName.equals("Gap")) { value = URLDecoder.decode(value, "UTF-8"); } value = fixEntityNames(value); valList.set(i, value); } attributes.put(attName, valList); } } /** * Return the sequenceID field of this record. * @return the sequenceID field of this record */ public String getSequenceID () { return sequenceID; } /** * Return the source field of this record. * @return the source field of this record */ public String getSource () { return source; } /** * Return the type field of this record. * @return the type field of this record */ public String getType () { return type; } /** * Set the type of this record. * @param type the new type */ public void setType(String type) { this.type = type; } /** * Return the start field of this record. * @return the start field of this record */ public int getStart () { return start; } /** * Return the end field of this record. * @return the end field of this record */ public int getEnd () { return end; } /** * Return the score field of this record. * @return the score field of this record */ public Double getScore () { return score; } /** * Return the strand field of this record. * @return returns null if the strand is unset (ie. with an empty field or contained "." in the * original GFF3 file) */ public String getStrand () { return strand; } /** * Return the phase field of this record. * @return returns null if the phase is unset (ie. with an empty field or contained "." in the * original GFF3 file) */ public String getPhase () { return phase; } /** * Return the first value of the Id field from the attributes of this record. * @return the Id from the attributes of this record or null of there isn't a value */ public String getId () { if (getAttributes().containsKey("ID")) { return (String) ((List) getAttributes().get("ID")).get(0); } else { return null; } } /** * Set the Id of this GFF3Record. * @param id the new id */ public void setId(String id) { attributes.put("ID", Collections.singletonList(id)); } /** * Return the list of the Name field from the attributes of this record. 
* @return the Name from the attributes of this record or null of there isn't a value */ public List getNames() { if (getAttributes().containsKey("Name")) { return (List) getAttributes().get("Name"); } else { return null; } } /** * Return the first value of the Alias field from the attributes of this record. * @return the Alias from the attributes of this record or null of there isn't a value */ public String getAlias () { if (getAttributes().containsKey("Alias")) { return (String) ((List) getAttributes().get("Alias")).get(0); } else { return null; } } /** * Return the list of the Parent field from the attributes of this record. * @return the Parent from the attributes of this record or null of there isn't a value */ public List getParents () { if (getAttributes().containsKey("Parent")) { return (List) getAttributes().get("Parent"); } else { return null; } } /** * Return the first value of the Target field from the attributes of this record. * @return the Target from the attributes of this record or null of there isn't a value */ public String getTarget () { if (getAttributes().containsKey("Target")) { return (String) ((List) getAttributes().get("Target")).get(0); } else { return null; } } /** * Return the first value of the Gap field from the attributes of this record. * @return the Gap from the attributes of this record or null of there isn't a value */ public String getGap () { if (getAttributes().containsKey("Gap")) { return (String) ((List) getAttributes().get("Gap")).get(0); } else { return null; } } /** * Return the first value of the Note field from the attributes of this record. * @return the Note from the attributes of this record or null of there isn't a value */ public String getNote () { if (getAttributes().containsKey("Note")) { return (String) ((List) getAttributes().get("Note")).get(0); } else { return null; } } /** * Return the first value of the Dbxref field from the attributes of this record. * @return the Dbxref from the attributes of this record or null of there isn't a value */ public List getDbxrefs () { if (getAttributes().containsKey("Dbxref")) { return (List) getAttributes().get("Dbxref"); } else { return null; } } /** * Return the first value of the OntologyTerm field from the attributes of this record. * @return the OntologyTerm from the attributes of this record or null of there isn't a value */ public String getOntologyTerm () { if (getAttributes().containsKey("Ontology_term")) { return (String) ((List) getAttributes().get("Ontology_term")).get(0); } else { return null; } } /** * Return the attributes of this record as a Map from attribute key to Lists of attribute * values. * @return the attributes of this record */ public Map getAttributes () { return attributes; } /** * @see java.lang.Object#toString() */ public String toString() { return "<GFF3Record: sequenceID: " + sequenceID + " source: " + source + " type: " + type + " start: " + start + " end: " + end + " score: " + score + " strand: " + strand + " phase: " + phase + " attributes: " + attributes + ">"; } /** * Return this record in GFF format. The String is suitable for output to a GFF file. * @return a GFF line */ public String toGFF3() { try { return URLEncoder.encode(sequenceID, "UTF-8") + "\t" + ((source == null) ? "." : source) + "\t" + type + "\t" + start + "\t" + end + "\t" + ((score == null) ? "." : score.toString()) + "\t" + ((strand == null) ? "." : strand) + "\t" + ((phase == null) ? "." 
: phase) + "\t" + writeAttributes(); } catch (UnsupportedEncodingException e) { throw new RuntimeException("error while encoding: " + sequenceID, e); } } private String writeAttributes() { StringBuffer sb = new StringBuffer(); boolean first = true; Iterator iter = attributes.entrySet().iterator(); while (iter.hasNext()) { Map.Entry entry = (Map.Entry) iter.next(); if (!first) { sb.append(";"); } first = false; String listValue; if (entry.getValue() instanceof List) { List oldList = (List) entry.getValue(); List encodedList = new ArrayList(oldList); for (int i = 0; i < encodedList.size(); i++) { Object oldValue = encodedList.get(i); String newValue; try { newValue = URLEncoder.encode("" + oldValue, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new RuntimeException("error while encoding: " + oldValue, e); } encodedList.set(i, newValue); } listValue = StringUtil.join(encodedList, ","); } else { try { listValue = URLEncoder.encode("" + entry.getValue(), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new RuntimeException("error while encoding: " + entry.getValue(), e); } } sb.append(entry.getKey() + "=" + listValue); } return sb.toString(); } /** * Replace greek character entity names with entity names that work in HTML. * @param value input string * @return string with replacements */ protected static String fixEntityNames(String value) { synchronized (GFF3Record.class) { if (replacements == null) { replacements = new HashMap(); replacements.put("agr", "alpha"); replacements.put("Agr", "Alpha"); replacements.put("bgr", "beta"); replacements.put("Bgr", "Beta"); replacements.put("ggr", "gamma"); replacements.put("Ggr", "Gamma"); replacements.put("dgr", "delta"); replacements.put("Dgr", "Delta"); replacements.put("egr", "epsilon"); replacements.put("Egr", "Epsilon"); replacements.put("zgr", "zeta"); replacements.put("Zgr", "Zeta"); replacements.put("eegr", "eta"); replacements.put("EEgr", "Eta"); replacements.put("thgr", "theta"); replacements.put("THgr", "Theta"); replacements.put("igr", "iota"); replacements.put("Igr", "Iota"); replacements.put("kgr", "kappa"); replacements.put("Kgr", "Kappa"); replacements.put("lgr", "lambda"); replacements.put("Lgr", "Lambda"); replacements.put("mgr", "mu"); replacements.put("Mgr", "Mu"); replacements.put("ngr", "nu"); replacements.put("Ngr", "Nu"); replacements.put("xgr", "xi"); replacements.put("Xgr", "Xi"); replacements.put("ogr", "omicron"); replacements.put("Ogr", "Omicron"); replacements.put("pgr", "pi"); replacements.put("Pgr", "Pi"); replacements.put("rgr", "rho"); replacements.put("Rgr", "Rho"); replacements.put("sgr", "sigma"); replacements.put("Sgr", "Sigma"); replacements.put("sfgr", "sigmaf"); replacements.put("tgr", "tau"); replacements.put("Tgr", "Tau"); replacements.put("ugr", "upsilon"); replacements.put("Ugr", "Upsilon"); replacements.put("phgr", "phi"); replacements.put("PHgr", "Phi"); replacements.put("khgr", "chi"); replacements.put("KHgr", "Chi"); replacements.put("psgr", "psi"); replacements.put("PSgr", "Psi"); replacements.put("ohgr", "omega"); replacements.put("OHgr", "Omega"); } } for (Iterator iter = replacements.entrySet().iterator(); iter.hasNext(); ) { Map.Entry entry = (Map.Entry) iter.next(); if (value.indexOf('&') != -1) { value = value.replaceAll("&" + entry.getKey() + ";", "&" + entry.getValue() + ";"); } } return value; } }
Fixed GFF3 parser to cope with start or end set to "."
bio/core/main/src/org/intermine/bio/io/gff3/GFF3Record.java
Fixed GFF3 parser to cope with start or end set to "."
Java
lgpl-2.1
d9a2a9c20945f3dd533b1baa1fbb6d2d19ad9862
0
bjalon/nuxeo-features,bjalon/nuxeo-features,bjalon/nuxeo-features,deadcyclo/nuxeo-features,deadcyclo/nuxeo-features,bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,nuxeo-archives/nuxeo-features,bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features,nuxeo-archives/nuxeo-features,bjalon/nuxeo-features,deadcyclo/nuxeo-features,deadcyclo/nuxeo-features,deadcyclo/nuxeo-features,nuxeo-archives/nuxeo-features
/* * (C) Copyright 2006-2009 Nuxeo SAS (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * Nuxeo - initial API and implementation * * $Id$ */ package org.nuxeo.theme.webwidgets.ui; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.CacheControl; import javax.ws.rs.core.EntityTag; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import net.sf.json.JSONObject; import org.nuxeo.ecm.webengine.forms.FormData; import org.nuxeo.ecm.webengine.model.WebObject; import org.nuxeo.ecm.webengine.model.impl.ModuleRoot; import org.nuxeo.theme.webwidgets.Manager; import org.nuxeo.theme.webwidgets.WidgetData; import org.nuxeo.theme.webwidgets.WidgetType; @WebObject(type = "nxthemes-webwidgets") @Produces("text/html") public class Main extends ModuleRoot { @GET @Path("webWidgetFactory") public Object renderPerspectiveSelector( @QueryParam("org.nuxeo.theme.application.path") String path) { return getTemplate("webWidgetFactory.ftl").arg("widget_categories", getWidgetCategories()).arg("widget_types", getWidgetTypes()).arg( "selected_category", getSelectedWidgetCategory()); } @GET @Path("get_panel_data") public String getPanelData(@QueryParam("area") int area, @QueryParam("mode") String mode) { try { return Manager.getPanelData(area, mode); } catch (Exception e) { throw new WidgetEditorException(e.getMessage(), e); } } @POST @Path("add_widget") public void addWidget() { FormData form = ctx.getForm(); int area = Integer.valueOf(form.getString("area")); String widgetName = form.getString("widget_name"); int order = Integer.valueOf(form.getString("order")); Editor.addWidget(area, widgetName, order); } @POST @Path("move_widget") public String moveWidget() { FormData form = ctx.getForm(); int srcArea = Integer.valueOf(form.getString("src_area")); String srcUid = form.getString("src_uid"); int destArea = Integer.valueOf(form.getString("dest_area")); int destOrder = Integer.valueOf(form.getString("dest_order")); return Editor.moveWidget(srcArea, srcUid, destArea, destOrder); } @POST @Path("remove_widget") public void removeWidget() { FormData form = ctx.getForm(); String providerName = form.getString("provider"); String widgetUid = form.getString("widget_uid"); Editor.removeWidget(providerName, widgetUid); } @POST @Path("set_widget_state") public void setWidgetState() { FormData form = ctx.getForm(); String providerName = form.getString("provider"); String widgetUid = form.getString("widget_uid"); String state = form.getString("state"); Editor.setWidgetState(providerName, widgetUid, state); } @POST @Path("set_widget_category") public void setWidgetCategory() { FormData form = ctx.getForm(); String category = form.getString("category"); SessionManager.setWidgetCategory(category); } @GET @Path("get_widget_data_info") public String 
getWidgetDataInfo( @QueryParam("provider") String providerName, @QueryParam("widget_uid") String widgetUid, @QueryParam("name") String dataName) { try { return Manager.getWidgetDataInfo(providerName, widgetUid, dataName); } catch (Exception e) { throw new WidgetEditorException(e.getMessage(), e); } } @POST @Path("upload_file") public String uploadFile(@QueryParam("provider") String providerName, @QueryParam("widget_uid") String widgetUid, @QueryParam("data") String dataName) { HttpServletRequest req = ctx.getRequest(); String res = Editor.uploadFile(req, providerName, widgetUid, dataName); long timestamp = new Date().getTime(); String dataUrl = String.format("nxwebwidgets://data/%s/%s/%s/%s", providerName, widgetUid, dataName, timestamp); Editor.setWidgetPreference(providerName, widgetUid, dataName, dataUrl); return res; } @GET @Path("render_widget_data") public Response renderWidgetData( @QueryParam("widget_uid") String widgetUid, @QueryParam("data") String dataName, @QueryParam("provider") String providerName, @QueryParam("timestamp") String timestamp) { HttpServletRequest request = ctx.getRequest(); String etag = request.getHeader("If-None-Match"); if (timestamp.equals(etag)) { return Response.notModified().build(); } WidgetData data = null; try { data = Manager.getWidgetData(providerName, widgetUid, dataName); } catch (Exception e) { throw new WidgetEditorException(e.getMessage(), e); } ResponseBuilder builder = Response.ok(data.getContent()); builder.tag(timestamp); builder.type(data.getContentType()); return builder.build(); } @POST @Path("update_widget_preferences") @SuppressWarnings("unchecked") public void updateWidgetPreferences() { FormData form = ctx.getForm(); String providerName = form.getString("provider"); String widgetUid = form.getString("widget_uid"); String preferences_map = form.getString("preferences"); Map<String, String> preferencesMap = JSONObject.fromObject(preferences_map); Editor.updateWidgetPreferences(providerName, widgetUid, preferencesMap); } @GET @Path("render_widget_icon") public Response renderWidgetIcon(@QueryParam("name") String widgetTypeName) { byte[] content = Manager.getWidgetIconContent(widgetTypeName); ResponseBuilder builder = Response.ok(content); CacheControl cc = new CacheControl(); // Set a default max-age of 1 day. cc.setMaxAge(86400); builder.cacheControl(cc); // builder.type(???) return builder.build(); } /* API */ public static String getSelectedWidgetCategory() { String category = SessionManager.getWidgetCategory(); if (category == null) { category = ""; } return category; } public static Set<String> getWidgetCategories() { return Manager.getService().getWidgetCategories(); } public static List<WidgetType> getWidgetTypes() { String widgetCategory = getSelectedWidgetCategory(); return Manager.getService().getWidgetTypes(widgetCategory); } }
nuxeo-platform-webwidgets/nuxeo-theme-webwidgets-ui/src/main/java/org/nuxeo/theme/webwidgets/ui/Main.java
/* * (C) Copyright 2006-2009 Nuxeo SAS (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * Nuxeo - initial API and implementation * * $Id$ */ package org.nuxeo.theme.webwidgets.ui; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.CacheControl; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import net.sf.json.JSONObject; import org.nuxeo.ecm.webengine.forms.FormData; import org.nuxeo.ecm.webengine.model.WebObject; import org.nuxeo.ecm.webengine.model.impl.ModuleRoot; import org.nuxeo.theme.webwidgets.Manager; import org.nuxeo.theme.webwidgets.WidgetData; import org.nuxeo.theme.webwidgets.WidgetType; @WebObject(type = "nxthemes-webwidgets") @Produces("text/html") public class Main extends ModuleRoot { final private int CACHE_MAX_AGE = 600; @GET @Path("webWidgetFactory") public Object renderPerspectiveSelector( @QueryParam("org.nuxeo.theme.application.path") String path) { return getTemplate("webWidgetFactory.ftl").arg("widget_categories", getWidgetCategories()).arg("widget_types", getWidgetTypes()).arg( "selected_category", getSelectedWidgetCategory()); } @GET @Path("get_panel_data") public String getPanelData(@QueryParam("area") int area, @QueryParam("mode") String mode) { try { return Manager.getPanelData(area, mode); } catch (Exception e) { throw new WidgetEditorException(e.getMessage(), e); } } @POST @Path("add_widget") public void addWidget() { FormData form = ctx.getForm(); int area = Integer.valueOf(form.getString("area")); String widgetName = form.getString("widget_name"); int order = Integer.valueOf(form.getString("order")); Editor.addWidget(area, widgetName, order); } @POST @Path("move_widget") public String moveWidget() { FormData form = ctx.getForm(); int srcArea = Integer.valueOf(form.getString("src_area")); String srcUid = form.getString("src_uid"); int destArea = Integer.valueOf(form.getString("dest_area")); int destOrder = Integer.valueOf(form.getString("dest_order")); return Editor.moveWidget(srcArea, srcUid, destArea, destOrder); } @POST @Path("remove_widget") public void removeWidget() { FormData form = ctx.getForm(); String providerName = form.getString("provider"); String widgetUid = form.getString("widget_uid"); Editor.removeWidget(providerName, widgetUid); } @POST @Path("set_widget_state") public void setWidgetState() { FormData form = ctx.getForm(); String providerName = form.getString("provider"); String widgetUid = form.getString("widget_uid"); String state = form.getString("state"); Editor.setWidgetState(providerName, widgetUid, state); } @POST @Path("set_widget_category") public void setWidgetCategory() { FormData form = ctx.getForm(); String category = form.getString("category"); SessionManager.setWidgetCategory(category); } @GET @Path("get_widget_data_info") public String 
getWidgetDataInfo( @QueryParam("provider") String providerName, @QueryParam("widget_uid") String widgetUid, @QueryParam("name") String dataName) { try { return Manager.getWidgetDataInfo(providerName, widgetUid, dataName); } catch (Exception e) { throw new WidgetEditorException(e.getMessage(), e); } } @POST @Path("upload_file") public String uploadFile(@QueryParam("provider") String providerName, @QueryParam("widget_uid") String widgetUid, @QueryParam("data") String dataName) { HttpServletRequest req = ctx.getRequest(); String res = Editor.uploadFile(req, providerName, widgetUid, dataName); long timestamp = new Date().getTime(); String dataUrl = String.format("nxwebwidgets://data/%s/%s/%s/%s", providerName, widgetUid, dataName, timestamp); Editor.setWidgetPreference(providerName, widgetUid, dataName, dataUrl); return res; } @GET @Path("render_widget_data") public Response renderWidgetData( @QueryParam("widget_uid") String widgetUid, @QueryParam("data") String dataName, @QueryParam("provider") String providerName) { WidgetData data = null; try { data = Manager.getWidgetData(providerName, widgetUid, dataName); } catch (Exception e) { throw new WidgetEditorException(e.getMessage(), e); } ResponseBuilder builder = Response.ok(data.getContent()); builder.type(data.getContentType()); CacheControl cc = new CacheControl(); cc.setMaxAge(CACHE_MAX_AGE); builder.cacheControl(cc); return builder.build(); } @POST @Path("update_widget_preferences") @SuppressWarnings("unchecked") public void updateWidgetPreferences() { FormData form = ctx.getForm(); String providerName = form.getString("provider"); String widgetUid = form.getString("widget_uid"); String preferences_map = form.getString("preferences"); Map<String, String> preferencesMap = JSONObject.fromObject(preferences_map); Editor.updateWidgetPreferences(providerName, widgetUid, preferencesMap); } @GET @Path("render_widget_icon") public Response renderWidgetIcon(@QueryParam("name") String widgetTypeName) { byte[] content = Manager.getWidgetIconContent(widgetTypeName); ResponseBuilder builder = Response.ok(content); CacheControl cc = new CacheControl(); cc.setMaxAge(CACHE_MAX_AGE); builder.cacheControl(cc); // builder.type(???) return builder.build(); } /* API */ public static String getSelectedWidgetCategory() { String category = SessionManager.getWidgetCategory(); if (category == null) { category = ""; } return category; } public static Set<String> getWidgetCategories() { return Manager.getService().getWidgetCategories(); } public static List<WidgetType> getWidgetTypes() { String widgetCategory = getSelectedWidgetCategory(); return Manager.getService().getWidgetTypes(widgetCategory); } }
NXP-4744: use eTag for caching widget data
nuxeo-platform-webwidgets/nuxeo-theme-webwidgets-ui/src/main/java/org/nuxeo/theme/webwidgets/ui/Main.java
NXP-4744: use eTag for caching widget data
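The diff behind this message replaces the fixed Cache-Control max-age on render_widget_data with ETag revalidation: the widget data URL carries a timestamp that changes whenever upload_file stores new content, the browser echoes that value back in If-None-Match, and the resource answers 304 Not Modified when it still matches. A minimal standalone sketch of the same conditional GET pattern follows; the class name EtagSketch, the @Context injection and the loadContent stub are assumptions for illustration and not part of the committed code.

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;

@Path("render_widget_data")
public class EtagSketch {

    @GET
    public Response render(@QueryParam("timestamp") String timestamp,
                           @Context HttpServletRequest request) {
        // The previously issued ETag comes back in If-None-Match; if the widget
        // data has not been re-uploaded, the timestamp is unchanged and no body
        // needs to be sent.
        String etag = request.getHeader("If-None-Match");
        if (timestamp != null && timestamp.equals(etag)) {
            return Response.notModified().build();
        }
        byte[] content = loadContent(); // stand-in for Manager.getWidgetData(...)
        return Response.ok(content).tag(timestamp).build();
    }

    private byte[] loadContent() {
        return new byte[0]; // placeholder payload
    }
}

Because the timestamp is also embedded in the nxwebwidgets data URL written by upload_file, a fresh upload produces a new URL, so stale cached entries are simply never requested again.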
Java
apache-2.0
51d0eb144af6d45fcd8910cf1db8f74dbec742d4
0
prasos/bittiraha-walletd,prasos/bittiraha-walletd
package fi.bittiraha.walletd; import fi.bittiraha.walletd.JSONRPC2Handler; import fi.bittiraha.walletd.WalletAccountManager; import java.net.InetSocketAddress; import com.sun.net.httpserver.HttpServer; import java.text.*; import java.util.*; import java.math.BigDecimal; import com.thetransactioncompany.jsonrpc2.*; import com.thetransactioncompany.jsonrpc2.server.*; import net.minidev.json.*; import org.bitcoinj.core.*; import org.bitcoinj.store.*; import org.bitcoinj.params.MainNetParams; import org.bitcoinj.crypto.KeyCrypterException; import org.bitcoinj.utils.BriefLogFormatter; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.base.Joiner; import java.io.File; import static com.google.common.base.Preconditions.checkNotNull; public class WalletRPC extends Thread implements RequestHandler { private NetworkParameters params; private String filePrefix; private int port; private WalletAccountManager kit; private Map account; private JSONRPC2Handler server; private Coin paytxfee; public WalletRPC(int port, String filePrefix, NetworkParameters params) { this.filePrefix = filePrefix; this.params = params; this.port = port; this.paytxfee = Coin.parseCoin("0.00020011"); } public void run() { try { System.out.println(filePrefix + " wallet starting."); server = new JSONRPC2Handler(port, this); kit = new WalletAccountManager(params, new File("."), filePrefix); kit.startAsync(); kit.awaitRunning(); System.out.println(filePrefix + " wallet running."); } catch (Exception e) { e.printStackTrace(); } } public String[] handledRequests() { return new String[]{ "getinfo", "getnewaddress", "getaccountaddress", "getunconfirmedbalance", "getbalance", "sendtoaddress", "sendmany", "validateaddress"}; } private String getnewaddress() { return kit.wallet().freshReceiveKey().toAddress(params).toString(); } private String sendmany(Map<String,Object> paylist) throws InsufficientMoneyException, AddressFormatException { Transaction tx = new Transaction(params); Iterator<Map.Entry<String,Object>> entries = paylist.entrySet().iterator(); while (entries.hasNext()) { Map.Entry<String,Object> entry = entries.next(); Address target = new Address(params, entry.getKey()); Coin value = Coin.parseCoin(entry.getValue().toString()); tx.addOutput(value,target); } Wallet.SendRequest req = Wallet.SendRequest.forTx(tx); req.feePerKb = paytxfee; Wallet.SendResult result = kit.wallet().sendCoins(req); return result.tx.getHash().toString(); } private String sendtoaddress(String address, String amount) throws InsufficientMoneyException, AddressFormatException { Address target = new Address(params, address); Coin value = Coin.parseCoin(amount); Wallet.SendRequest req = Wallet.SendRequest.to(target,value); req.feePerKb = paytxfee; Wallet.SendResult result = kit.wallet().sendCoins(req); return result.tx.getHash().toString(); } private BigDecimal getbalance() { BigDecimal satoshis = new BigDecimal(kit.wallet().getBalance().value); return new BigDecimal("0.00000001").multiply(satoshis); } private BigDecimal getunconfirmedbalance() { BigDecimal satoshis = new BigDecimal(kit.wallet().getBalance(Wallet.BalanceType.ESTIMATED).value); return new BigDecimal("0.00000001").multiply(satoshis); } private Object validateaddress(String address) { JSONObject result = new JSONObject(); try { Address validated = new Address(params,address); result.put("isvalid",true); result.put("address",validated.toString()); 
List<Address> addresses = kit.wallet().getIssuedReceiveAddresses(); result.put("ismine",addresses.contains(validated)); } catch (AddressFormatException e) { result.put("isvalid",false); } return result; } private Object getinfo() throws BlockStoreException { JSONObject info = new JSONObject(); StoredBlock chainHead = kit.store().getChainHead(); // info.put("version",null); // info.put("protocolversion",null); // info.put("walletversion",null); info.put("balance",getbalance()); info.put("blocks",chainHead.getHeight()); // info.put("timeoffset",null); info.put("connections",kit.peerGroup().numConnectedPeers()); info.put("difficulty",chainHead.getHeader().getDifficultyTarget()); info.put("testnet",params != MainNetParams.get()); // info.put("keypoololdest",null); // info.put("keypoolsize",null); info.put("paytxfee",paytxfee.toPlainString()); // info.put("relayfee",null); info.put("errors",""); return info; } public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) { Object response = "dummy"; List<Object> requestParams = req.getPositionalParams(); String method = req.getMethod(); try { if (method.equals("getnewaddress")) { response = getnewaddress(); } else if (method.equals("getaccountaddress")) { response = getnewaddress(); } else if (method.equals("getbalance")) { response = getbalance(); } else if (method.equals("getunconfirmedbalance")) { response = getunconfirmedbalance(); } else if (method.equals("sendtoaddress")) { response = sendtoaddress((String)requestParams.get(0),requestParams.get(1).toString()); } else if (method.equals("sendmany")) { response = sendmany((JSONObject)JSONValue.parse((String)requestParams.get(0))); } else if (method.equals("sendfrom")) { } else if (method.equals("validateaddress")) { response = validateaddress((String)requestParams.get(0)); } else if (method.equals("getinfo")) { response = getinfo(); } else { response = JSONRPC2Error.METHOD_NOT_FOUND; } } catch (InsufficientMoneyException e) { JSONRPC2Error error = new JSONRPC2Error(-6,"Insufficient funds",e.getMessage()); return new JSONRPC2Response(error,req.getID()); } catch (AddressFormatException e) { JSONRPC2Error error = new JSONRPC2Error(-5,"Invalid Bitcoin address",e.getMessage()); return new JSONRPC2Response(error,req.getID()); } catch (Exception e) { e.printStackTrace(); JSONRPC2Error error = new JSONRPC2Error(-32602,"Invalid parameters",e.getMessage()); return new JSONRPC2Response(error,req.getID()); } return new JSONRPC2Response(response,req.getID()); } }
src/fi/bittiraha/walletd/WalletRPC.java
package fi.bittiraha.walletd; import fi.bittiraha.walletd.JSONRPC2Handler; import fi.bittiraha.walletd.WalletAccountManager; import java.net.InetSocketAddress; import com.sun.net.httpserver.HttpServer; import java.text.*; import java.util.*; import java.math.BigDecimal; import com.thetransactioncompany.jsonrpc2.*; import com.thetransactioncompany.jsonrpc2.server.*; import net.minidev.json.*; import org.bitcoinj.core.*; import org.bitcoinj.store.*; import org.bitcoinj.params.MainNetParams; import org.bitcoinj.crypto.KeyCrypterException; import org.bitcoinj.utils.BriefLogFormatter; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.base.Joiner; import java.io.File; import static com.google.common.base.Preconditions.checkNotNull; public class WalletRPC extends Thread implements RequestHandler { private NetworkParameters params; private String filePrefix; private int port; private WalletAccountManager kit; private Map account; private JSONRPC2Handler server; public WalletRPC(int port, String filePrefix, NetworkParameters params) { this.filePrefix = filePrefix; this.params = params; this.port = port; } public void run() { try { System.out.println(filePrefix + " wallet starting."); server = new JSONRPC2Handler(port, this); kit = new WalletAccountManager(params, new File("."), filePrefix); kit.startAsync(); kit.awaitRunning(); System.out.println(filePrefix + " wallet running."); } catch (Exception e) { e.printStackTrace(); } } public String[] handledRequests() { return new String[]{ "getinfo", "getnewaddress", "getaccountaddress", "getunconfirmedbalance", "getbalance", "sendtoaddress", "validateaddress"}; } private String getnewaddress() { return kit.wallet().freshReceiveKey().toAddress(params).toString(); } private String sendtoaddress(String address, String amount) throws InsufficientMoneyException, AddressFormatException { Address target = new Address(params, address); Coin value = Coin.parseCoin(amount); Wallet.SendResult result = kit.wallet().sendCoins(Wallet.SendRequest.to(target,value)); return result.tx.getHash().toString(); } private BigDecimal getbalance() { BigDecimal satoshis = new BigDecimal(kit.wallet().getBalance().value); return new BigDecimal("0.00000001").multiply(satoshis); } private BigDecimal getunconfirmedbalance() { BigDecimal satoshis = new BigDecimal(kit.wallet().getBalance(Wallet.BalanceType.ESTIMATED).value); return new BigDecimal("0.00000001").multiply(satoshis); } private Object validateaddress(String address) { JSONObject result = new JSONObject(); try { Address validated = new Address(params,address); result.put("isvalid",true); result.put("address",validated.toString()); List<Address> addresses = kit.wallet().getIssuedReceiveAddresses(); result.put("ismine",addresses.contains(validated)); } catch (AddressFormatException e) { result.put("isvalid",false); } return result; } private Object getinfo() throws BlockStoreException { JSONObject info = new JSONObject(); StoredBlock chainHead = kit.store().getChainHead(); // info.put("version",null); // info.put("protocolversion",null); // info.put("walletversion",null); info.put("balance",getbalance()); info.put("blocks",chainHead.getHeight()); // info.put("timeoffset",null); info.put("connections",kit.peerGroup().numConnectedPeers()); info.put("difficulty",chainHead.getHeader().getDifficultyTarget()); info.put("testnet",params != MainNetParams.get()); // info.put("keypoololdest",null); // 
info.put("keypoolsize",null); // info.put("paytxfee",BigDecimal('0.00020000')); // info.put("relayfee",null); info.put("errors",""); return info; } public JSONRPC2Response process(JSONRPC2Request req, MessageContext ctx) { Object response = "dummy"; List<Object> requestParams = req.getPositionalParams(); String method = req.getMethod(); try { if (method.equals("getnewaddress")) { response = getnewaddress(); } else if (method.equals("getaccountaddress")) { response = getnewaddress(); } else if (method.equals("getbalance")) { response = getbalance(); } else if (method.equals("getunconfirmedbalance")) { response = getunconfirmedbalance(); } else if (method.equals("sendtoaddress")) { response = sendtoaddress((String)requestParams.get(0),requestParams.get(1).toString()); } else if (method.equals("sendmany")) { } else if (method.equals("sendfrom")) { } else if (method.equals("validateaddress")) { response = validateaddress((String)requestParams.get(0)); } else if (method.equals("getinfo")) { response = getinfo(); } else { response = JSONRPC2Error.METHOD_NOT_FOUND; } } catch (InsufficientMoneyException e) { JSONRPC2Error error = new JSONRPC2Error(-6,"Insufficient funds",e.getMessage()); return new JSONRPC2Response(error,req.getID()); } catch (AddressFormatException e) { JSONRPC2Error error = new JSONRPC2Error(-5,"Invalid Bitcoin address",e.getMessage()); return new JSONRPC2Response(error,req.getID()); } catch (Exception e) { e.printStackTrace(); JSONRPC2Error error = new JSONRPC2Error(-32602,"Invalid parameters",e.getMessage()); return new JSONRPC2Response(error,req.getID()); } return new JSONRPC2Response(response,req.getID()); } }
- Bump txfee to 0.00020011 BTC
- add sendmany support
src/fi/bittiraha/walletd/WalletRPC.java
- Bump txfee to 0.00020011 BTC - add sendmany support
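The new WalletRPC above carries both changes named in the commit message: a paytxfee field applied through SendRequest.feePerKb, and a sendmany call that turns a name-to-amount map into a single transaction with multiple outputs. A condensed sketch of that flow against the same (older) bitcoinj API, where SendRequest is still a nested class of Wallet; the wrapper class and method signature are illustrative, not part of the committed file.

import java.util.Map;
import org.bitcoinj.core.Address;
import org.bitcoinj.core.AddressFormatException;
import org.bitcoinj.core.Coin;
import org.bitcoinj.core.InsufficientMoneyException;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.core.Transaction;
import org.bitcoinj.core.Wallet;

final class SendManySketch {
    static String sendMany(Wallet wallet, NetworkParameters params,
                           Map<String, Object> paylist)
            throws InsufficientMoneyException, AddressFormatException {
        Transaction tx = new Transaction(params);
        for (Map.Entry<String, Object> entry : paylist.entrySet()) {
            // One output per destination address; values are decimal BTC amounts.
            tx.addOutput(Coin.parseCoin(entry.getValue().toString()),
                         new Address(params, entry.getKey()));
        }
        Wallet.SendRequest req = Wallet.SendRequest.forTx(tx);
        req.feePerKb = Coin.parseCoin("0.00020011"); // fee rate introduced by this commit
        Wallet.SendResult result = wallet.sendCoins(req);
        return result.tx.getHash().toString();
    }
}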
Java
apache-2.0
18d0b0daae9fd305bc97917eef39be00582cc4cb
0
DmitriiEskov/deskov
package ru.job4j.tracker; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; /** * Test for StartUI class. * * @author Dmitrii Eskov ([email protected]) * @since 13.12.2018 * @version 1.0 */ public class StartUITest { /** * The default output to the console. */ private final PrintStream stdout = System.out; /** * The buffer for a result. */ private final ByteArrayOutputStream out = new ByteArrayOutputStream(); /** * Tests when a user wants to add an item. */ @Test public void whenUserAddItemThenTrackerHasNewItemWithSameName() { Tracker tracker = new Tracker(); Input input = new StubInput(new String[]{"0", "test name", "desc", "6"}); new StartUI(input, tracker).init(); assertThat(tracker.getAll()[0].getName(), is("test name")); } /** * Tests when a user wants to update an item. */ @Test public void whenUpdateThenTrackerHasUpdatedValue() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("test name", "desc")); Input input = new StubInput(new String[] {"2", item.getId(), "test replace", "the item was changed", "6"}); new StartUI(input, tracker).init(); assertThat(tracker.findById(item.getId()).getName(), is("test replace")); } /** * Tests when a user wants to delete an item. */ @Test public void whenDeleteThenTrackerHasDeletedValue() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("The testing item", "We want it to be deleted.")); Input input = new StubInput(new String[] {"3", item.getId(), "6"}); new StartUI(input, tracker).init(); Item result = null; assertThat(tracker.findById(item.getId()), is(result)); } /** * Tests when a user wants to find an item by an id. */ @Test public void whenFindItemByIDThenTrackerFindsIt() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("The testing item", "We want it to be found.")); Input input = new StubInput(new String[] {"4", item.getId(), "6"}); new StartUI(input, tracker).init(); assertThat(tracker.findById(item.getId()), is(item)); } /** * Tests when a user wants to find an item by a name. */ @Test public void whenFindItemByNameThenTrackerFindsIt() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("Testing name", "We want it to be found by name.")); Input input = new StubInput(new String[] {"5", item.getName(), "6"}); new StartUI(input, tracker).init(); Item[] result = new Item[1]; result[0] = item; assertThat(tracker.findByName(item.getName()), is(result)); } /** * Reassigns the output stream. */ @Before public void loadOutput() { System.setOut(new PrintStream(this.out)); } /** * Reassigns the output stream back. */ @After public void backOutput() { System.setOut(new PrintStream(this.stdout)); } /** * Tests when it needs all created items to be shown. */ @Test public void whenGetAllItems() { Tracker tracker = new Tracker(); Item first = tracker.add(new Item("First", "Testing description.")); Input input = new StubInput(new String[] {"1", "6"}); new StartUI(input, tracker).init(); assertThat(new String(this.out.toByteArray()), is( new StringBuilder() .append("Menu\r\n") .append("0. Add new Item.\r\n") .append("1. Show all items.\r\n") .append("2. Edit item.\r\n") .append("3. Delete item.\r\n") .append("4. Find item by an id.\r\n") .append("5. Find items by a name.\r\n") .append("6. 
Exit Programme.\r\n") .append("------------ Showing all created items --------------\r\n\r\n") .append("id: " + first.getId() + "\r\n") .append("Name: " + first.getName() + "\r\n") .append("Created: " + first.getCreate() + "\r\n") .append("Description: " + first.getDescription() + "\r\n" + "\r\n") .append("Menu" + "\r\n") .append("0. Add new Item." + "\r\n") .append("1. Show all items." + "\r\n") .append("2. Edit item." + "\r\n") .append("3. Delete item." + "\r\n") .append("4. Find item by an id." + "\r\n") .append("5. Find items by a name." + "\r\n") .append("6. Exit Programme." + "\r\n") .toString() ) ); } /** * Tests when it needs an item by name to be shown. */ @Test public void whenGetItemByName() { Tracker tracker = new Tracker(); Item first = tracker.add(new Item("Second", "Testing description.")); Input input = new StubInput(new String[] {"5", first.getName(), "6"}); new StartUI(input, tracker).init(); assertThat(new String(this.out.toByteArray()), is( new StringBuilder() .append("Menu\r\n") .append("0. Add new Item.\r\n") .append("1. Show all items.\r\n") .append("2. Edit item.\r\n") .append("3. Delete item.\r\n") .append("4. Find item by an id.\r\n") .append("5. Find items by a name.\r\n") .append("6. Exit Programme.\r\n") .append("------------ Success! --------------\r\n") .append("id: " + first.getId() + "\r\n") .append("Name: " + first.getName() + "\r\n") .append("Description: " + first.getDescription() + "\r\n") .append("Create: " + first.getCreate() + "\r\n" + "\r\n") .append("Menu" + "\r\n") .append("0. Add new Item." + "\r\n") .append("1. Show all items." + "\r\n") .append("2. Edit item." + "\r\n") .append("3. Delete item." + "\r\n") .append("4. Find item by an id." + "\r\n") .append("5. Find items by a name." + "\r\n") .append("6. Exit Programme." + "\r\n") .toString() ) ); } }
chapter_002/src/test/java/ru/job4j/tracker/StartUITest.java
package ru.job4j.tracker; import org.junit.Test; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; /** * Test for StartUI class. * * @author Dmitrii Eskov ([email protected]) * @since 13.12.2018 * @version 1.0 */ public class StartUITest { /** * Tests when a user wants to add an item. */ @Test public void whenUserAddItemThenTrackerHasNewItemWithSameName() { Tracker tracker = new Tracker(); Input input = new StubInput(new String[]{"0", "test name", "desc", "6"}); new StartUI(input, tracker).init(); assertThat(tracker.getAll()[0].getName(), is("test name")); } /** * Tests when a user wants to update an item. */ @Test public void whenUpdateThenTrackerHasUpdatedValue() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("test name", "desc")); Input input = new StubInput(new String[] {"2", item.getId(), "test replace", "the item was changed", "6"}); new StartUI(input, tracker).init(); assertThat(tracker.findById(item.getId()).getName(), is("test replace")); } /** * Tests when a user wants to delete an item. */ @Test public void whenDeleteThenTrackerHasDeletedValue() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("The testing item", "We want it to be deleted.")); Input input = new StubInput(new String[] {"3", item.getId(), "6"}); new StartUI(input, tracker).init(); Item result = null; assertThat(tracker.findById(item.getId()), is(result)); } /** * Tests when a user wants to find an item by an id. */ @Test public void whenFindItemByIDThenTrackerFindsIt() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("The testing item", "We want it to be found.")); Input input = new StubInput(new String[] {"4", item.getId(), "6"}); new StartUI(input, tracker).init(); assertThat(tracker.findById(item.getId()), is(item)); } /** * Tests when a user wants to find an item by a name. */ @Test public void whenFindItemByNameThenTrackerFindsIt() { Tracker tracker = new Tracker(); Item item = tracker.add(new Item("Testing name", "We want it to be found by name.")); Input input = new StubInput(new String[] {"5", item.getName(), "6"}); new StartUI(input, tracker).init(); Item[] result = new Item[1]; result[0] = item; assertThat(tracker.findByName(item.getName()), is(result)); } }
4.6. Added 2 tests for the "StartUITest".
1. A test for the "getAll" method.
2. A test for the "findByName" method.
chapter_002/src/test/java/ru/job4j/tracker/StartUITest.java
4.6. Added 2 tests for the "StartUITest". 1. A test for the "getAll" method. 2. A test for the "findByName" method.
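The two new tests in the file above assert on the console output of StartUI, which is why the commit also adds @Before/@After hooks that swap System.out for a buffer. The capture pattern on its own, as a small sketch; the class name and the captured() helper are illustrative.

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.After;
import org.junit.Before;

public class ConsoleCaptureSketch {
    // Keep a handle to the real console so it can be restored after each test.
    private final PrintStream stdout = System.out;
    private final ByteArrayOutputStream out = new ByteArrayOutputStream();

    @Before
    public void loadOutput() {
        System.setOut(new PrintStream(this.out)); // everything printed is buffered
    }

    @After
    public void backOutput() {
        System.setOut(this.stdout); // put the real console back
    }

    protected String captured() {
        return new String(this.out.toByteArray()); // assert menu text against this
    }
}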
Java
apache-2.0
ddc115f824565afe94bc46470182a115d85e853c
0
darranl/directory-shared
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.ldap.codec; import org.apache.directory.shared.asn1.AbstractAsn1Object; import org.apache.directory.shared.asn1.Asn1Object; import org.apache.directory.shared.asn1.ber.tlv.TLV; import org.apache.directory.shared.asn1.ber.tlv.UniversalTag; import org.apache.directory.shared.asn1.ber.tlv.Value; import org.apache.directory.shared.asn1.codec.EncoderException; import org.apache.directory.shared.ldap.codec.abandon.AbandonRequest; import org.apache.directory.shared.ldap.codec.add.AddRequest; import org.apache.directory.shared.ldap.codec.add.AddResponse; import org.apache.directory.shared.ldap.codec.bind.BindRequest; import org.apache.directory.shared.ldap.codec.bind.BindResponse; import org.apache.directory.shared.ldap.codec.compare.CompareRequest; import org.apache.directory.shared.ldap.codec.compare.CompareResponse; import org.apache.directory.shared.ldap.codec.del.DelRequest; import org.apache.directory.shared.ldap.codec.del.DelResponse; import org.apache.directory.shared.ldap.codec.extended.ExtendedRequest; import org.apache.directory.shared.ldap.codec.extended.ExtendedResponse; import org.apache.directory.shared.ldap.codec.modify.ModifyRequest; import org.apache.directory.shared.ldap.codec.modify.ModifyResponse; import org.apache.directory.shared.ldap.codec.modifyDn.ModifyDNRequest; import org.apache.directory.shared.ldap.codec.modifyDn.ModifyDNResponse; import org.apache.directory.shared.ldap.codec.search.SearchRequest; import org.apache.directory.shared.ldap.codec.search.SearchResultDone; import org.apache.directory.shared.ldap.codec.search.SearchResultEntry; import org.apache.directory.shared.ldap.codec.search.SearchResultReference; import org.apache.directory.shared.ldap.codec.unbind.UnBindRequest; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; /** * The main ldapObject : every Ldap Message are encapsulated in it. It contains * a message Id, a operation (protocolOp) and one ore more Controls. 
* * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class LdapMessage extends AbstractAsn1Object { // ~ Instance fields // ---------------------------------------------------------------------------- /** The message ID */ private int messageId; /** The request or response being carried by the message */ private Asn1Object protocolOp; /** The controls */ private List<Control> controls; /** The current control */ private Control currentControl; /** The LdapMessage length */ private int ldapMessageLength; /** The controls length */ private int controlsLength; /** The controls sequence length */ private int controlsSequenceLength; // ~ Constructors // ------------------------------------------------------------------------------- /** * Creates a new LdapMessage object. */ public LdapMessage() { // We should not create this kind of object directly } // ~ Methods // ------------------------------------------------------------------------------------ /** * Get the Control Object at a specific index * * @param i The index of the Control Object to get * @return The selected Control Object */ public Control getControls( int i ) { return controls.get( i ); } /** * Get the Control Objects * * @return The Control Objects */ public List<Control> getControls() { return controls; } /** * Get the current Control Object * * @return The current Control Object */ public Control getCurrentControl() { return currentControl; } /** * Add a control to the Controls array * * @param control The Control to add */ public void addControl( Control control ) { currentControl = control; if ( controls == null ) { controls = new ArrayList<Control>(); } controls.add( control ); } /** * Init the controls array */ public void initControls() { controls = new ArrayList<Control>(); } /** * Get the message ID * * @return The message ID */ public int getMessageId() { return messageId; } /** * Set the message ID * * @param messageId The message ID */ public void setMessageId( int messageId ) { this.messageId = messageId; } /** * Get the message type * * @return The message type */ public int getMessageType() { return ( ( LdapMessage ) protocolOp ).getMessageType(); } /** * Get the message type Name * * @return The message type name */ public String getMessageTypeName() { switch ( ( ( LdapMessage ) protocolOp ).getMessageType() ) { case LdapConstants.ABANDON_REQUEST: return "ABANDON_REQUEST"; case LdapConstants.ADD_REQUEST: return "ADD_REQUEST"; case LdapConstants.ADD_RESPONSE: return "ADD_RESPONSE"; case LdapConstants.BIND_REQUEST: return "BIND_REQUEST"; case LdapConstants.BIND_RESPONSE: return "BIND_RESPONSE"; case LdapConstants.COMPARE_REQUEST: return "COMPARE_REQUEST"; case LdapConstants.COMPARE_RESPONSE: return "COMPARE_RESPONSE"; case LdapConstants.DEL_REQUEST: return "DEL_REQUEST"; case LdapConstants.DEL_RESPONSE: return "DEL_RESPONSE"; case LdapConstants.EXTENDED_REQUEST: return "EXTENDED_REQUEST"; case LdapConstants.EXTENDED_RESPONSE: return "EXTENDED_RESPONSE"; case LdapConstants.MODIFYDN_REQUEST: return "MODIFYDN_REQUEST"; case LdapConstants.MODIFYDN_RESPONSE: return "MODIFYDN_RESPONSE"; case LdapConstants.MODIFY_REQUEST: return "MODIFY_REQUEST"; case LdapConstants.MODIFY_RESPONSE: return "MODIFY_RESPONSE"; case LdapConstants.SEARCH_REQUEST: return "SEARCH_REQUEST"; case LdapConstants.SEARCH_RESULT_DONE: return "SEARCH_RESULT_DONE"; case LdapConstants.SEARCH_RESULT_ENTRY: return "SEARCH_RESULT_ENTRY"; case LdapConstants.SEARCH_RESULT_REFERENCE: return "SEARCH_RESULT_REFERENCE"; case 
LdapConstants.UNBIND_REQUEST: return "UNBIND_REQUEST"; default: return "UNKNOWN"; } } /** * Get the encapsulated Ldap response. * * @return Returns the Ldap response. */ public LdapResponse getLdapResponse() { return ( LdapResponse ) protocolOp; } /** * Get a AbandonRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the AbandonRequest ldapObject. */ public AbandonRequest getAbandonRequest() { return ( AbandonRequest ) protocolOp; } /** * Get a AddRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the AddRequest ldapObject. */ public AddRequest getAddRequest() { return ( AddRequest ) protocolOp; } /** * Get a AddResponse ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the AddResponse ldapObject. */ public AddResponse getAddResponse() { return ( AddResponse ) protocolOp; } /** * Get a BindRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the BindRequest ldapObject. */ public BindRequest getBindRequest() { return ( BindRequest ) protocolOp; } /** * Get a BindResponse ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the BindResponse ldapObject. */ public BindResponse getBindResponse() { return ( BindResponse ) protocolOp; } /** * Get a CompareRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the CompareRequest ldapObject. */ public CompareRequest getCompareRequest() { return ( CompareRequest ) protocolOp; } /** * Get a CompareResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the CompareResponse ldapObject. */ public CompareResponse getCompareResponse() { return ( CompareResponse ) protocolOp; } /** * Get a DelRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the DelRequest ldapObject. */ public DelRequest getDelRequest() { return ( DelRequest ) protocolOp; } /** * Get a DelResponse ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the DelResponse ldapObject. */ public DelResponse getDelResponse() { return ( DelResponse ) protocolOp; } /** * Get a ExtendedRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ExtendedRequest ldapObject. */ public ExtendedRequest getExtendedRequest() { return ( ExtendedRequest ) protocolOp; } /** * Get a ExtendedResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ExtendedResponse ldapObject. */ public ExtendedResponse getExtendedResponse() { return ( ExtendedResponse ) protocolOp; } /** * Get a ModifyDNRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ModifyDNRequest ldapObject. */ public ModifyDNRequest getModifyDNRequest() { return ( ModifyDNRequest ) protocolOp; } /** * Get a ModifyDNResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ModifyDNResponse ldapObject. */ public ModifyDNResponse getModifyDNResponse() { return ( ModifyDNResponse ) protocolOp; } /** * Get a ModifyRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. 
* * @return Returns the ModifyRequest ldapObject. */ public ModifyRequest getModifyRequest() { return ( ModifyRequest ) protocolOp; } /** * Get a ModifyResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ModifyResponse ldapObject. */ public ModifyResponse getModifyResponse() { return ( ModifyResponse ) protocolOp; } /** * Get a SearchRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the SearchRequest ldapObject. */ public SearchRequest getSearchRequest() { return ( SearchRequest ) protocolOp; } /** * Get a SearchResultDone ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the SearchRequestDone ldapObject. */ public SearchResultDone getSearchResultDone() { return ( SearchResultDone ) protocolOp; } /** * Get a SearchResultEntry ldapObject, assuming that the caller knows that * it is the LdapMessage exact type. * * @return Returns the SearchResultEntry ldapObject. */ public SearchResultEntry getSearchResultEntry() { return ( SearchResultEntry ) protocolOp; } /** * Get a SearchResultReference ldapObject, assuming that the caller knows * that it is the LdapMessage exact type. * * @return Returns the SearchResultReference ldapObject. */ public SearchResultReference getSearchResultReference() { return ( SearchResultReference ) protocolOp; } /** * Get a UnBindRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the UnBindRequest ldapObject. */ public UnBindRequest getUnBindRequest() { return ( UnBindRequest ) protocolOp; } /** * Set the ProtocolOP * * @param protocolOp The protocolOp to set. */ public void setProtocolOP( Asn1Object protocolOp ) { this.protocolOp = protocolOp; } /** * Compute the LdapMessage length LdapMessage : * 0x30 L1 * | * +--> 0x02 0x0(1-4) [0..2^31-1] (MessageId) * +--> protocolOp * [+--> Controls] * * MessageId length = Length(0x02) + length(MessageId) + MessageId.length * L1 = length(ProtocolOp) * LdapMessage length = Length(0x30) + Length(L1) + MessageId length + L1 */ public int computeLength() { // The length of the MessageId. It's the sum of // - the tag (0x02), 1 byte // - the length of the Id length, 1 byte // - the Id length, 1 to 4 bytes ldapMessageLength = 1 + 1 + Value.getNbBytes( messageId ); // Get the protocolOp length int protocolOpLength = protocolOp.computeLength(); // Add the protocol length to the message length ldapMessageLength += protocolOpLength; // Do the same thing for Controls, if any. if ( controls != null ) { // Controls : // 0xA0 L3 // | // +--> 0x30 L4 // +--> 0x30 L5 // +--> ... // +--> 0x30 Li // +--> ... // +--> 0x30 Ln // // L3 = Length(0x30) + Length(L5) + L5 // + Length(0x30) + Length(L6) + L6 // + ... // + Length(0x30) + Length(Li) + Li // + ... // + Length(0x30) + Length(Ln) + Ln // // LdapMessageLength = LdapMessageLength + Length(0x90) // + Length(L3) + L3 controlsSequenceLength = 0; // We may have more than one control. ControlsLength is L4. 
for ( Control control:controls ) { controlsSequenceLength += control.computeLength(); } // Computes the controls length controlsLength = controlsSequenceLength; // 1 + Length.getNbBytes( // controlsSequenceLength // ) + controlsSequenceLength; // Now, add the tag and the length of the controls length ldapMessageLength += 1 + TLV.getNbBytes( controlsSequenceLength ) + controlsSequenceLength; } // finally, calculate the global message size : // length(Tag) + Length(length) + length return 1 + ldapMessageLength + TLV.getNbBytes( ldapMessageLength ); } /** * Generate the PDU which contains the encoded object. * * The generation is done in two phases : * - first, we compute the length of each part and the * global PDU length * - second, we produce the PDU. * * 0x30 L1 * | * +--> 0x02 L2 MessageId * +--> ProtocolOp * +--> Controls * * L2 = Length(MessageId) * L1 = Length(0x02) + Length(L2) + L2 + Length(ProtocolOp) + Length(Controls) * LdapMessageLength = Length(0x30) + Length(L1) + L1 * * @param buffer The encoded PDU * @return A ByteBuffer that contaons the PDU * @throws EncoderException If anything goes wrong. */ public ByteBuffer encode( ByteBuffer buffer ) throws EncoderException { // Allocate the bytes buffer. ByteBuffer bb = ByteBuffer.allocate( computeLength() ); try { // The LdapMessage Sequence bb.put( UniversalTag.SEQUENCE_TAG ); // The length has been calculated by the computeLength method bb.put( TLV.getBytes( ldapMessageLength ) ); } catch ( BufferOverflowException boe ) { throw new EncoderException( "The PDU buffer size is too small !" ); } // The message Id Value.encode( bb, messageId ); // Add the protocolOp part protocolOp.encode( bb ); // Do the same thing for Controls, if any. if ( controls != null ) { // Encode the controls bb.put( ( byte ) LdapConstants.CONTROLS_TAG ); bb.put( TLV.getBytes( controlsLength ) ); // Encode each control for ( Control control:controls ) { control.encode( bb ); } } return bb; } /** * Get a String representation of a LdapMessage * * @return A LdapMessage String */ public String toString() { StringBuffer sb = new StringBuffer(); sb.append( "LdapMessage\n" ); sb.append( " message Id : " ).append( messageId ).append( '\n' ); sb.append( protocolOp ); if ( controls != null ) { for ( Control control:controls ) { sb.append( control ); } } return sb.toString(); } }
ldap/src/main/java/org/apache/directory/shared/ldap/codec/LdapMessage.java
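The computeLength() and encode() methods above follow the usual two-phase BER encoding: first compute the length of every TLV, then emit tag, length octets, and value. The "Length(length)" bookkeeping in those comments (what TLV.getNbBytes() supplies in that code) comes from BER's definite-length rule. Below is a minimal, self-contained sketch of that rule, written independently of the Directory shared ASN.1 classes; the class and method names are illustrative only.

// Illustrative only: computes BER definite-form length octets for a value length,
// mirroring the "Length(length)" arithmetic done in computeLength()/encode() above.
public final class BerLengthSketch {
    /** Number of octets needed to encode 'length' in BER definite form. */
    static int nbLengthOctets(int length) {
        if (length < 0x80) {
            return 1;                        // short form: one octet holds the length itself
        }
        int n = 0;
        for (int v = length; v != 0; v >>>= 8) {
            n++;                             // count the bytes needed for the length value
        }
        return 1 + n;                        // long form: 0x8N marker octet + N value bytes
    }

    /** Encodes the length octets, short or long form. */
    static byte[] lengthOctets(int length) {
        if (length < 0x80) {
            return new byte[] { (byte) length };
        }
        int n = nbLengthOctets(length) - 1;
        byte[] out = new byte[1 + n];
        out[0] = (byte) (0x80 | n);
        for (int i = 0; i < n; i++) {
            out[1 + i] = (byte) (length >>> (8 * (n - 1 - i)));
        }
        return out;
    }

    public static void main(String[] args) {
        // A TLV with a 300-byte value needs 1 tag octet + 3 length octets (0x82 0x01 0x2C) + 300 value octets.
        System.out.println(nbLengthOctets(300));                                  // 3
        System.out.println(Integer.toHexString(lengthOctets(300)[0] & 0xFF));     // 82
    }
}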
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.ldap.codec; import org.apache.directory.shared.asn1.AbstractAsn1Object; import org.apache.directory.shared.asn1.Asn1Object; import org.apache.directory.shared.asn1.ber.tlv.TLV; import org.apache.directory.shared.asn1.ber.tlv.UniversalTag; import org.apache.directory.shared.asn1.ber.tlv.Value; import org.apache.directory.shared.asn1.codec.EncoderException; import org.apache.directory.shared.ldap.codec.abandon.AbandonRequest; import org.apache.directory.shared.ldap.codec.add.AddRequest; import org.apache.directory.shared.ldap.codec.add.AddResponse; import org.apache.directory.shared.ldap.codec.bind.BindRequest; import org.apache.directory.shared.ldap.codec.bind.BindResponse; import org.apache.directory.shared.ldap.codec.compare.CompareRequest; import org.apache.directory.shared.ldap.codec.compare.CompareResponse; import org.apache.directory.shared.ldap.codec.del.DelRequest; import org.apache.directory.shared.ldap.codec.del.DelResponse; import org.apache.directory.shared.ldap.codec.extended.ExtendedRequest; import org.apache.directory.shared.ldap.codec.extended.ExtendedResponse; import org.apache.directory.shared.ldap.codec.modify.ModifyRequest; import org.apache.directory.shared.ldap.codec.modify.ModifyResponse; import org.apache.directory.shared.ldap.codec.modifyDn.ModifyDNRequest; import org.apache.directory.shared.ldap.codec.modifyDn.ModifyDNResponse; import org.apache.directory.shared.ldap.codec.search.SearchRequest; import org.apache.directory.shared.ldap.codec.search.SearchResultDone; import org.apache.directory.shared.ldap.codec.search.SearchResultEntry; import org.apache.directory.shared.ldap.codec.search.SearchResultReference; import org.apache.directory.shared.ldap.codec.unbind.UnBindRequest; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; /** * The main ldapObject : every Ldap Message are encapsulated in it. It contains * a message Id, a operation (protocolOp) and one ore more Controls. 
* * @author <a href="mailto:[email protected]">Apache Directory Project</a> */ public class LdapMessage extends AbstractAsn1Object { // ~ Instance fields // ---------------------------------------------------------------------------- /** The message ID */ private int messageId; /** The request or response being carried by the message */ private Asn1Object protocolOp; /** The controls */ private List<Control> controls; /** The current control */ private Control currentControl; /** The LdapMessage length */ private int ldapMessageLength; /** The controls length */ private int controlsLength; /** The controls sequence length */ private int controlsSequenceLength; // ~ Constructors // ------------------------------------------------------------------------------- /** * Creates a new LdapMessage object. */ public LdapMessage() { // We should not create this kind of object directly } // ~ Methods // ------------------------------------------------------------------------------------ /** * Get the Control Object at a specific index * * @param i The index of the Control Object to get * @return The selected Control Object */ public Control getControls( int i ) { return controls.get( i ); } /** * Get the Control Objects * * @return The Control Objects */ public List<Control> getControls() { return controls; } /** * Get the current Control Object * * @return The current Control Object */ public Control getCurrentControl() { return currentControl; } /** * Add a control to the Controls array * * @param control The Control to add */ public void addControl( Control control ) { currentControl = control; if ( controls == null ) { controls = new ArrayList<Control>(); } controls.add( control ); } /** * Init the controls array */ public void initControls() { controls = new ArrayList<Control>(); } /** * Get the message ID * * @return The message ID */ public int getMessageId() { return messageId; } /** * Set the message ID * * @param messageId The message ID */ public void setMessageId( int messageId ) { this.messageId = messageId; } /** * Get the message type * * @return The message type */ public int getMessageType() { return ( ( LdapMessage ) protocolOp ).getMessageType(); } /** * Get the message type Name * * @return The message type name */ public String getMessageTypeName() { switch ( ( ( LdapMessage ) protocolOp ).getMessageType() ) { case LdapConstants.ABANDON_REQUEST: return "ABANDON_REQUEST"; case LdapConstants.ADD_REQUEST: return "ADD_REQUEST"; case LdapConstants.ADD_RESPONSE: return "ADD_RESPONSE"; case LdapConstants.BIND_REQUEST: return "BIND_REQUEST"; case LdapConstants.BIND_RESPONSE: return "BIND_RESPONSE"; case LdapConstants.COMPARE_REQUEST: return "COMPARE_REQUEST"; case LdapConstants.COMPARE_RESPONSE: return "COMPARE_REQUEST"; case LdapConstants.DEL_REQUEST: return "DEL_REQUEST"; case LdapConstants.DEL_RESPONSE: return "DEL_RESPONSE"; case LdapConstants.EXTENDED_REQUEST: return "EXTENDED_REQUEST"; case LdapConstants.EXTENDED_RESPONSE: return "EXTENDED_RESPONSE"; case LdapConstants.MODIFYDN_REQUEST: return "MODIFYDN_REQUEST"; case LdapConstants.MODIFYDN_RESPONSE: return "MODIFYDN_RESPONSE"; case LdapConstants.MODIFY_REQUEST: return "MODIFY_REQUEST"; case LdapConstants.MODIFY_RESPONSE: return "MODIFY_RESPONSE"; case LdapConstants.SEARCH_REQUEST: return "SEARCH_REQUEST"; case LdapConstants.SEARCH_RESULT_DONE: return "SEARCH_RESULT_DONE"; case LdapConstants.SEARCH_RESULT_ENTRY: return "SEARCH_RESULT_ENTRY"; case LdapConstants.SEARCH_RESULT_REFERENCE: return "SEARCH_RESULT_REFERENCE"; case 
LdapConstants.UNBIND_REQUEST: return "UNBIND_REQUEST"; default: return "UNKNOWN"; } } /** * Get the encapsulated Ldap response. * * @return Returns the Ldap response. */ public LdapResponse getLdapResponse() { return ( LdapResponse ) protocolOp; } /** * Get a AbandonRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the AbandonRequest ldapObject. */ public AbandonRequest getAbandonRequest() { return ( AbandonRequest ) protocolOp; } /** * Get a AddRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the AddRequest ldapObject. */ public AddRequest getAddRequest() { return ( AddRequest ) protocolOp; } /** * Get a AddResponse ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the AddResponse ldapObject. */ public AddResponse getAddResponse() { return ( AddResponse ) protocolOp; } /** * Get a BindRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the BindRequest ldapObject. */ public BindRequest getBindRequest() { return ( BindRequest ) protocolOp; } /** * Get a BindResponse ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the BindResponse ldapObject. */ public BindResponse getBindResponse() { return ( BindResponse ) protocolOp; } /** * Get a CompareRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the CompareRequest ldapObject. */ public CompareRequest getCompareRequest() { return ( CompareRequest ) protocolOp; } /** * Get a CompareResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the CompareResponse ldapObject. */ public CompareResponse getCompareResponse() { return ( CompareResponse ) protocolOp; } /** * Get a DelRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the DelRequest ldapObject. */ public DelRequest getDelRequest() { return ( DelRequest ) protocolOp; } /** * Get a DelResponse ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the DelResponse ldapObject. */ public DelResponse getDelResponse() { return ( DelResponse ) protocolOp; } /** * Get a ExtendedRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ExtendedRequest ldapObject. */ public ExtendedRequest getExtendedRequest() { return ( ExtendedRequest ) protocolOp; } /** * Get a ExtendedResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ExtendedResponse ldapObject. */ public ExtendedResponse getExtendedResponse() { return ( ExtendedResponse ) protocolOp; } /** * Get a ModifyDNRequest ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ModifyDNRequest ldapObject. */ public ModifyDNRequest getModifyDNRequest() { return ( ModifyDNRequest ) protocolOp; } /** * Get a ModifyDNResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ModifyDNResponse ldapObject. */ public ModifyDNResponse getModifyDNResponse() { return ( ModifyDNResponse ) protocolOp; } /** * Get a ModifyRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. 
* * @return Returns the ModifyRequest ldapObject. */ public ModifyRequest getModifyRequest() { return ( ModifyRequest ) protocolOp; } /** * Get a ModifyResponse ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the ModifyResponse ldapObject. */ public ModifyResponse getModifyResponse() { return ( ModifyResponse ) protocolOp; } /** * Get a SearchRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the SearchRequest ldapObject. */ public SearchRequest getSearchRequest() { return ( SearchRequest ) protocolOp; } /** * Get a SearchResultDone ldapObject, assuming that the caller knows that it * is the LdapMessage exact type. * * @return Returns the SearchRequestDone ldapObject. */ public SearchResultDone getSearchResultDone() { return ( SearchResultDone ) protocolOp; } /** * Get a SearchResultEntry ldapObject, assuming that the caller knows that * it is the LdapMessage exact type. * * @return Returns the SearchResultEntry ldapObject. */ public SearchResultEntry getSearchResultEntry() { return ( SearchResultEntry ) protocolOp; } /** * Get a SearchResultReference ldapObject, assuming that the caller knows * that it is the LdapMessage exact type. * * @return Returns the SearchResultReference ldapObject. */ public SearchResultReference getSearchResultReference() { return ( SearchResultReference ) protocolOp; } /** * Get a UnBindRequest ldapObject, assuming that the caller knows that it is * the LdapMessage exact type. * * @return Returns the UnBindRequest ldapObject. */ public UnBindRequest getUnBindRequest() { return ( UnBindRequest ) protocolOp; } /** * Set the ProtocolOP * * @param protocolOp The protocolOp to set. */ public void setProtocolOP( Asn1Object protocolOp ) { this.protocolOp = protocolOp; } /** * Compute the LdapMessage length LdapMessage : * 0x30 L1 * | * +--> 0x02 0x0(1-4) [0..2^31-1] (MessageId) * +--> protocolOp * [+--> Controls] * * MessageId length = Length(0x02) + length(MessageId) + MessageId.length * L1 = length(ProtocolOp) * LdapMessage length = Length(0x30) + Length(L1) + MessageId length + L1 */ public int computeLength() { // The length of the MessageId. It's the sum of // - the tag (0x02), 1 byte // - the length of the Id length, 1 byte // - the Id length, 1 to 4 bytes ldapMessageLength = 1 + 1 + Value.getNbBytes( messageId ); // Get the protocolOp length int protocolOpLength = protocolOp.computeLength(); // Add the protocol length to the message length ldapMessageLength += protocolOpLength; // Do the same thing for Controls, if any. if ( controls != null ) { // Controls : // 0xA0 L3 // | // +--> 0x30 L4 // +--> 0x30 L5 // +--> ... // +--> 0x30 Li // +--> ... // +--> 0x30 Ln // // L3 = Length(0x30) + Length(L5) + L5 // + Length(0x30) + Length(L6) + L6 // + ... // + Length(0x30) + Length(Li) + Li // + ... // + Length(0x30) + Length(Ln) + Ln // // LdapMessageLength = LdapMessageLength + Length(0x90) // + Length(L3) + L3 controlsSequenceLength = 0; // We may have more than one control. ControlsLength is L4. 
for ( Control control:controls ) { controlsSequenceLength += control.computeLength(); } // Computes the controls length controlsLength = controlsSequenceLength; // 1 + Length.getNbBytes( // controlsSequenceLength // ) + controlsSequenceLength; // Now, add the tag and the length of the controls length ldapMessageLength += 1 + TLV.getNbBytes( controlsSequenceLength ) + controlsSequenceLength; } // finally, calculate the global message size : // length(Tag) + Length(length) + length return 1 + ldapMessageLength + TLV.getNbBytes( ldapMessageLength ); } /** * Generate the PDU which contains the encoded object. * * The generation is done in two phases : * - first, we compute the length of each part and the * global PDU length * - second, we produce the PDU. * * 0x30 L1 * | * +--> 0x02 L2 MessageId * +--> ProtocolOp * +--> Controls * * L2 = Length(MessageId) * L1 = Length(0x02) + Length(L2) + L2 + Length(ProtocolOp) + Length(Controls) * LdapMessageLength = Length(0x30) + Length(L1) + L1 * * @param buffer The encoded PDU * @return A ByteBuffer that contaons the PDU * @throws EncoderException If anything goes wrong. */ public ByteBuffer encode( ByteBuffer buffer ) throws EncoderException { // Allocate the bytes buffer. ByteBuffer bb = ByteBuffer.allocate( computeLength() ); try { // The LdapMessage Sequence bb.put( UniversalTag.SEQUENCE_TAG ); // The length has been calculated by the computeLength method bb.put( TLV.getBytes( ldapMessageLength ) ); } catch ( BufferOverflowException boe ) { throw new EncoderException( "The PDU buffer size is too small !" ); } // The message Id Value.encode( bb, messageId ); // Add the protocolOp part protocolOp.encode( bb ); // Do the same thing for Controls, if any. if ( controls != null ) { // Encode the controls bb.put( ( byte ) LdapConstants.CONTROLS_TAG ); bb.put( TLV.getBytes( controlsLength ) ); // Encode each control for ( Control control:controls ) { control.encode( bb ); } } return bb; } /** * Get a String representation of a LdapMessage * * @return A LdapMessage String */ public String toString() { StringBuffer sb = new StringBuffer(); sb.append( "LdapMessage\n" ); sb.append( " message Id : " ).append( messageId ).append( '\n' ); sb.append( protocolOp ); if ( controls != null ) { for ( Control control:controls ) { sb.append( control ); } } return sb.toString(); } }
Fixing Jira DIRSERVER-819 (getMessageTypeName() of LdapMessage returns wrong name for CompareResponse). git-svn-id: 8669792a45dee92960b930f442bbe2d0561ce91c@494401 13f79535-47bb-0310-9956-ffa450edef68
ldap/src/main/java/org/apache/directory/shared/ldap/codec/LdapMessage.java
Fixing Jira DIRSERVER-819 (getMessageTypeName() of LdapMessage returns wrong name for CompareResponse).
Java
apache-2.0
8e1f3e687d4e9070609108646cdcb5b014dd3f7c
0
metaborg/nabl,metaborg/nabl,metaborg/nabl
package org.metaborg.meta.nabl2.terms;

import com.google.common.collect.ImmutableClassToInstanceMap;

public interface ITermVar extends ITerm, IListTerm {

    String getResource();

    String getName();

    ITermVar withAttachments(ImmutableClassToInstanceMap<Object> value);

    @Override ITermVar withLocked(boolean locked);

}
meta.nabl2.terms/src/main/java/org/metaborg/meta/nabl2/terms/ITermVar.java
package org.metaborg.meta.nabl2.terms;

import com.google.common.collect.ImmutableClassToInstanceMap;

public interface ITermVar extends ITerm, IListTerm {

    String getResource();

    String getName();

    ITermVar withAttachments(ImmutableClassToInstanceMap<Object> value);

}
Add override to refine return type.
meta.nabl2.terms/src/main/java/org/metaborg/meta/nabl2/terms/ITermVar.java
Add override to refine return type.
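The ITermVar change above ("Add override to refine return type") relies on Java's covariant return types: an overriding method may declare a narrower return type than the method it overrides, so callers that hold the subtype avoid a cast. A standalone sketch of the pattern follows; the Term/TermVar interfaces below are hypothetical stand-ins, not the actual NaBL2 types.

// Illustrative interfaces showing a covariant (refined) return type on an override.
interface Term {
    Term withLocked(boolean locked);        // supertype declares the broad return type
}

interface TermVar extends Term {
    @Override
    TermVar withLocked(boolean locked);     // override narrows the return type to the subtype
}

final class SimpleTermVar implements TermVar {
    private final boolean locked;

    SimpleTermVar(boolean locked) { this.locked = locked; }

    @Override
    public TermVar withLocked(boolean locked) {
        return new SimpleTermVar(locked);
    }

    @Override
    public String toString() { return "TermVar(locked=" + locked + ")"; }
}

class CovariantReturnDemo {
    public static void main(String[] args) {
        TermVar v = new SimpleTermVar(false);
        TermVar lockedVar = v.withLocked(true);  // no cast needed: the refined override returns TermVar
        System.out.println(lockedVar);           // TermVar(locked=true)
    }
}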
Java
apache-2.0
b7dec8f65e4277c3e949358338f16e297d674c54
0
windbender/moquette,windbender/moquette,surferboy87/MQTT-TSS-Moquette-Server,andsel/moquette,andsel/moquette,andsel/moquette,windbender/moquette,windbender/moquette,surferboy87/MQTT-TSS-Moquette-Server,andsel/moquette,surferboy87/MQTT-TSS-Moquette-Server,surferboy87/MQTT-TSS-Moquette-Server
/*
 * Copyright (c) 2012-2015 The original author or authors
 * ------------------------------------------------------
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Apache License v2.0 which accompanies this distribution.
 *
 * The Eclipse Public License is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * The Apache License v2.0 is available at
 * http://www.opensource.org/licenses/apache2.0.php
 *
 * You may elect to redistribute this code under either of these licenses.
 */
package io.moquette.parser.netty;

import io.netty.buffer.ByteBuf;
import io.netty.util.AttributeMap;

import java.util.List;

import io.moquette.parser.proto.messages.DisconnectMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 *
 * @author andrea
 */
class DisconnectDecoder extends DemuxDecoder {

    private static Logger LOG = LoggerFactory.getLogger(DisconnectDecoder.class);

    @Override
    void decode(AttributeMap ctx, ByteBuf in, List<Object> out) throws Exception {
        //Common decoding part
        in.resetReaderIndex();
        DisconnectMessage message = new DisconnectMessage();
        if (!decodeCommonHeader(message, 0x00, in)) {
            in.resetReaderIndex();
            return;
        }

        LOG.debug("Decoding disconnect");

        out.add(message);
    }
}
netty_parser/src/main/java/io/moquette/parser/netty/DisconnectDecoder.java
/*
 * Copyright (c) 2012-2015 The original author or authors
 * ------------------------------------------------------
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Apache License v2.0 which accompanies this distribution.
 *
 * The Eclipse Public License is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * The Apache License v2.0 is available at
 * http://www.opensource.org/licenses/apache2.0.php
 *
 * You may elect to redistribute this code under either of these licenses.
 */
package io.moquette.parser.netty;

import io.netty.buffer.ByteBuf;
import io.netty.util.AttributeMap;

import java.util.List;

import io.moquette.parser.proto.messages.DisconnectMessage;

/**
 *
 * @author andrea
 */
class DisconnectDecoder extends DemuxDecoder {

    @Override
    void decode(AttributeMap ctx, ByteBuf in, List<Object> out) throws Exception {
        //Common decoding part
        in.resetReaderIndex();
        DisconnectMessage message = new DisconnectMessage();
        if (!decodeCommonHeader(message, 0x00, in)) {
            in.resetReaderIndex();
            return;
        }

        out.add(message);
    }
}
Minor change, just log in disconnect decoder
netty_parser/src/main/java/io/moquette/parser/netty/DisconnectDecoder.java
Minor change, just log in disconnect decoder
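The DisconnectDecoder change above only introduces an SLF4J logger and a debug statement on the decode path. On hot decoding paths, SLF4J's parameterized messages (or an explicit isDebugEnabled() guard) keep logging essentially free when the level is disabled. A small sketch of both forms follows; it is not tied to the Moquette codebase, and the class and method names are illustrative.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class DecoderLoggingSketch {
    private static final Logger LOG = LoggerFactory.getLogger(DecoderLoggingSketch.class);

    void decodeSomething(int messageId) {
        // Parameterized form: the message is only formatted if DEBUG is enabled.
        LOG.debug("Decoding message {}", messageId);

        // Equivalent explicit guard, useful when building the argument itself is expensive.
        if (LOG.isDebugEnabled()) {
            LOG.debug("Decoding message " + messageId);
        }
    }
}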
Java
apache-2.0
75f59120671c0ba13aab856a3431d29124c30c5f
0
enioka/jqm,enioka/jqm,enioka/jqm,enioka/jqm,enioka/jqm
/** * Copyright © 2013 enioka. All rights reserved * Authors: Marc-Antoine GOUILLART ([email protected]) * Pierre COPPEE ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.enioka.jqm.tools; import java.lang.management.ManagementFactory; import java.util.Calendar; import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import javax.management.MBeanServer; import javax.management.ObjectName; import javax.persistence.EntityManager; import javax.persistence.EntityNotFoundException; import javax.persistence.LockModeType; import javax.persistence.PersistenceException; import org.apache.log4j.Logger; import org.hibernate.TransactionException; import org.hibernate.exception.JDBCConnectionException; import com.enioka.jqm.jpamodel.DeploymentParameter; import com.enioka.jqm.jpamodel.JobInstance; import com.enioka.jqm.jpamodel.Queue; import com.enioka.jqm.jpamodel.State; /** * A thread that polls a queue according to the parameters defined inside a {@link DeploymentParameter}. */ class QueuePoller implements Runnable, QueuePollerMBean { private static Logger jqmlogger = Logger.getLogger(QueuePoller.class); private DeploymentParameter dp = null; private Queue queue = null; private LibraryCache cache = null; JqmEngine engine; private boolean run = true; private AtomicInteger actualNbThread = new AtomicInteger(0); private boolean hasStopped = true; private Calendar lastLoop = null; private ObjectName name = null; private Thread localThread = null; private Semaphore loop; @Override public void stop() { run = false; if (localThread != null) { localThread.interrupt(); } } /** * Will make the thread ready to run once again after it has stopped. 
*/ void reset() { if (!hasStopped) { throw new IllegalStateException("cannot reset a non stopped queue poller"); } hasStopped = false; run = true; actualNbThread.set(0); lastLoop = null; loop = new Semaphore(0); } QueuePoller(DeploymentParameter dp, LibraryCache cache, JqmEngine engine) { jqmlogger.info("Engine " + engine.getNode().getName() + " will poll JobInstances on queue " + dp.getQueue().getName() + " every " + dp.getPollingInterval() / 1000 + "s with " + dp.getNbThread() + " threads for concurrent instances"); reset(); EntityManager em = Helpers.getNewEm(); this.dp = em .createQuery("SELECT dp FROM DeploymentParameter dp LEFT JOIN FETCH dp.queue LEFT JOIN FETCH dp.node WHERE dp.id = :l", DeploymentParameter.class).setParameter("l", dp.getId()).getSingleResult(); this.queue = dp.getQueue(); this.cache = cache; this.engine = engine; try { if (this.engine.loadJmxBeans) { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); name = new ObjectName("com.enioka.jqm:type=Node.Queue,Node=" + this.dp.getNode().getName() + ",name=" + this.dp.getQueue().getName()); mbs.registerMBean(this, name); } } catch (Exception e) { throw new JqmInitError("Could not create JMX beans", e); } finally { em.close(); } } protected JobInstance dequeue(EntityManager em) { // Free room? if (actualNbThread.get() >= dp.getNbThread()) { return null; } // Get the list of all jobInstance within the defined queue, ordered by position List<JobInstance> availableJobs = em .createQuery( "SELECT j FROM JobInstance j LEFT JOIN FETCH j.jd WHERE j.queue = :q AND j.state = :s ORDER BY j.internalPosition ASC", JobInstance.class).setParameter("q", queue).setParameter("s", State.SUBMITTED).setMaxResults(dp.getNbThread()) .getResultList(); em.getTransaction().begin(); for (JobInstance res : availableJobs) { // Lock is given when object is read, not during select... stupid. // So we must check if the object is still SUBMITTED. try { em.refresh(res, LockModeType.PESSIMISTIC_WRITE); } catch (EntityNotFoundException e) { // It has already been eaten and finished by another engine continue; } if (!res.getState().equals(State.SUBMITTED)) { // Already eaten by another engine, not yet done continue; } // Highlander? if (res.getJd().isHighlander() && !highlanderPollingMode(res, em)) { continue; } // Reserve the JI for this engine. Use a query rather than setter to avoid updating all fields (and locks when verifying FKs) em.createQuery( "UPDATE JobInstance j SET j.state = 'ATTRIBUTED', j.node = :n, j.attributionDate = current_timestamp() WHERE id=:i") .setParameter("i", res.getId()).setParameter("n", dp.getNode()).executeUpdate(); // Stop at the first suitable JI. Release the lock & update the JI which has been attributed to us. 
em.getTransaction().commit(); return res; } // If here, no suitable JI is available em.getTransaction().rollback(); return null; } /** * * @param jobToTest * @param em * @return true if job can be launched even if it is in highlander mode */ protected boolean highlanderPollingMode(JobInstance jobToTest, EntityManager em) { List<JobInstance> jobs = em .createQuery( "SELECT j FROM JobInstance j WHERE j IS NOT :refid AND j.jd = :jd AND (j.state = 'RUNNING' OR j.state = 'ATTRIBUTED')", JobInstance.class).setParameter("refid", jobToTest).setParameter("jd", jobToTest.getJd()).getResultList(); return jobs.isEmpty(); } @Override public void run() { this.localThread = Thread.currentThread(); this.localThread.setName("QUEUE_POLLER;polling;" + this.dp.getQueue().getName()); EntityManager em = null; while (true) { lastLoop = Calendar.getInstance(); try { // Get a JI to run em = Helpers.getNewEm(); JobInstance ji = dequeue(em); while (ji != null) { // We will run this JI! jqmlogger.trace("JI number " + ji.getId() + " will be run by this poller this loop (already " + actualNbThread + "/" + dp.getNbThread() + " on " + this.queue.getName() + ")"); actualNbThread.incrementAndGet(); // Run it if (!ji.getJd().isExternal()) { (new Thread(new Loader(ji, cache, this))).start(); } else { (new Thread(new LoaderExternal(em, ji, this))).start(); } // Check if there is another job to run (does nothing - no db query - if queue is full so this is not expensive) ji = dequeue(em); } } catch (PersistenceException e) { if (e.getCause() instanceof JDBCConnectionException || e.getCause() instanceof TransactionException) { jqmlogger.error("connection to database lost - stopping poller"); jqmlogger.trace("connection error was:", e.getCause()); this.engine.pollerRestartNeeded(this); break; } else { throw e; } } finally { // Reset the em on each loop. Helpers.closeQuietly(em); } // Wait according to the deploymentParameter try { loop.tryAcquire(dp.getPollingInterval(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { run = false; } // Exit if asked to if (!run) { break; } } if (!run) { // Run is true only if the loop has exited abnormally, in which case the engine should try to restart the poller // So only do the graceful shutdown procedure if normal shutdown. jqmlogger.info("Poller loop on queue " + this.queue.getName() + " is stopping [engine " + this.dp.getNode().getName() + "]"); waitForAllThreads(60 * 1000); jqmlogger.info("Poller on queue " + dp.getQueue().getName() + " has ended normally"); // Let the engine decide if it should stop completely this.hasStopped = true; // BEFORE check this.engine.checkEngineEnd(); } else { this.run = false; this.hasStopped = true; } // JMX if (this.engine.loadJmxBeans) { try { ManagementFactory.getPlatformMBeanServer().unregisterMBean(name); } catch (Exception e) { jqmlogger.error("Could not unregister JMX beans", e); } } } @Override public Integer getCurrentActiveThreadCount() { return actualNbThread.get(); } /** * Called when a payload thread has ended. This notifies the poller to free a slot and poll once again. 
*/ synchronized void decreaseNbThread() { this.actualNbThread.decrementAndGet(); loop.release(1); } public DeploymentParameter getDp() { return dp; } boolean isRunning() { return !this.hasStopped; } private void waitForAllThreads(long timeOutMs) { long timeWaitedMs = 0; long stepMs = 1000; while (timeWaitedMs <= timeOutMs) { jqmlogger.trace("Waiting the end of " + actualNbThread + " job(s)"); if (actualNbThread.get() == 0) { break; } if (timeWaitedMs == 0) { jqmlogger.info("Waiting for the end of " + actualNbThread + " jobs on queue " + this.dp.getQueue().getName() + " - timeout is " + timeOutMs + "ms"); } try { Thread.sleep(stepMs); } catch (InterruptedException e) { // Interruption => stop right now jqmlogger.warn("Some job instances did not finish in time - wait was interrupted"); return; } timeWaitedMs += stepMs; } if (timeWaitedMs > timeOutMs) { jqmlogger.warn("Some job instances did not finish in time - they will be killed for the poller to be able to stop"); } } // ////////////////////////////////////////////////////////// // JMX // ////////////////////////////////////////////////////////// @Override public long getCumulativeJobInstancesCount() { EntityManager em2 = Helpers.getNewEm(); Long nb = em2.createQuery("SELECT COUNT(i) From History i WHERE i.node = :n AND i.queue = :q", Long.class) .setParameter("n", this.dp.getNode()).setParameter("q", this.dp.getQueue()).getSingleResult(); em2.close(); return nb; } @Override public float getJobsFinishedPerSecondLastMinute() { EntityManager em2 = Helpers.getNewEm(); Calendar minusOneMinute = Calendar.getInstance(); minusOneMinute.add(Calendar.MINUTE, -1); Float nb = em2.createQuery("SELECT COUNT(i) From History i WHERE i.endDate >= :d and i.node = :n AND i.queue = :q", Long.class) .setParameter("d", minusOneMinute).setParameter("n", this.dp.getNode()).setParameter("q", this.dp.getQueue()) .getSingleResult() / 60f; em2.close(); return nb; } @Override public long getCurrentlyRunningJobCount() { EntityManager em2 = Helpers.getNewEm(); Long nb = em2.createQuery("SELECT COUNT(i) From JobInstance i WHERE i.node = :n AND i.queue = :q", Long.class) .setParameter("n", this.dp.getNode()).setParameter("q", this.dp.getQueue()).getSingleResult(); em2.close(); return nb; } @Override public Integer getPollingIntervalMilliseconds() { return this.dp.getPollingInterval(); } @Override public Integer getMaxConcurrentJobInstanceCount() { return this.dp.getNbThread(); } @Override public boolean isActuallyPolling() { // 100ms is a rough estimate of the time taken to do the actual poll. If it's more, there is a huge issue elsewhere. return (Calendar.getInstance().getTimeInMillis() - this.lastLoop.getTimeInMillis()) <= dp.getPollingInterval() + 100; } @Override public boolean isFull() { return this.actualNbThread.get() >= this.dp.getNbThread(); } }
jqm-all/jqm-engine/src/main/java/com/enioka/jqm/tools/QueuePoller.java
/** * Copyright © 2013 enioka. All rights reserved * Authors: Marc-Antoine GOUILLART ([email protected]) * Pierre COPPEE ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.enioka.jqm.tools; import java.lang.management.ManagementFactory; import java.util.Calendar; import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import javax.management.MBeanServer; import javax.management.ObjectName; import javax.persistence.EntityManager; import javax.persistence.EntityNotFoundException; import javax.persistence.LockModeType; import javax.persistence.PersistenceException; import org.apache.log4j.Logger; import org.hibernate.TransactionException; import org.hibernate.exception.JDBCConnectionException; import com.enioka.jqm.jpamodel.DeploymentParameter; import com.enioka.jqm.jpamodel.JobInstance; import com.enioka.jqm.jpamodel.Queue; import com.enioka.jqm.jpamodel.State; /** * A thread that polls a queue according to the parameters defined inside a {@link DeploymentParameter}. */ class QueuePoller implements Runnable, QueuePollerMBean { private static Logger jqmlogger = Logger.getLogger(QueuePoller.class); private DeploymentParameter dp = null; private Queue queue = null; private LibraryCache cache = null; JqmEngine engine; private boolean run = true; private Integer actualNbThread; private boolean hasStopped = true; private Calendar lastLoop = null; private ObjectName name = null; private Thread localThread = null; private Semaphore loop; @Override public void stop() { run = false; if (localThread != null) { localThread.interrupt(); } } /** * Will make the thread ready to run once again after it has stopped. */ void reset() { if (!hasStopped) { throw new IllegalStateException("cannot reset a non stopped queue poller"); } hasStopped = false; run = true; actualNbThread = 0; lastLoop = null; loop = new Semaphore(0); } QueuePoller(DeploymentParameter dp, LibraryCache cache, JqmEngine engine) { jqmlogger.info("Engine " + engine.getNode().getName() + " will poll JobInstances on queue " + dp.getQueue().getName() + " every " + dp.getPollingInterval() / 1000 + "s with " + dp.getNbThread() + " threads for concurrent instances"); reset(); EntityManager em = Helpers.getNewEm(); this.dp = em .createQuery("SELECT dp FROM DeploymentParameter dp LEFT JOIN FETCH dp.queue LEFT JOIN FETCH dp.node WHERE dp.id = :l", DeploymentParameter.class).setParameter("l", dp.getId()).getSingleResult(); this.queue = dp.getQueue(); this.cache = cache; this.engine = engine; try { if (this.engine.loadJmxBeans) { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); name = new ObjectName("com.enioka.jqm:type=Node.Queue,Node=" + this.dp.getNode().getName() + ",name=" + this.dp.getQueue().getName()); mbs.registerMBean(this, name); } } catch (Exception e) { throw new JqmInitError("Could not create JMX beans", e); } finally { em.close(); } } protected JobInstance dequeue(EntityManager em) { // Free room? 
if (actualNbThread >= dp.getNbThread()) { return null; } // Get the list of all jobInstance within the defined queue, ordered by position List<JobInstance> availableJobs = em .createQuery( "SELECT j FROM JobInstance j LEFT JOIN FETCH j.jd WHERE j.queue = :q AND j.state = :s ORDER BY j.internalPosition ASC", JobInstance.class).setParameter("q", queue).setParameter("s", State.SUBMITTED).setMaxResults(dp.getNbThread()) .getResultList(); em.getTransaction().begin(); for (JobInstance res : availableJobs) { // Lock is given when object is read, not during select... stupid. // So we must check if the object is still SUBMITTED. try { em.refresh(res, LockModeType.PESSIMISTIC_WRITE); } catch (EntityNotFoundException e) { // It has already been eaten and finished by another engine continue; } if (!res.getState().equals(State.SUBMITTED)) { // Already eaten by another engine, not yet done continue; } // Highlander? if (res.getJd().isHighlander() && !highlanderPollingMode(res, em)) { continue; } // Reserve the JI for this engine. Use a query rather than setter to avoid updating all fields (and locks when verifying FKs) em.createQuery( "UPDATE JobInstance j SET j.state = 'ATTRIBUTED', j.node = :n, j.attributionDate = current_timestamp() WHERE id=:i") .setParameter("i", res.getId()).setParameter("n", dp.getNode()).executeUpdate(); // Stop at the first suitable JI. Release the lock & update the JI which has been attributed to us. em.getTransaction().commit(); return res; } // If here, no suitable JI is available em.getTransaction().rollback(); return null; } /** * * @param jobToTest * @param em * @return true if job can be launched even if it is in highlander mode */ protected boolean highlanderPollingMode(JobInstance jobToTest, EntityManager em) { List<JobInstance> jobs = em .createQuery( "SELECT j FROM JobInstance j WHERE j IS NOT :refid AND j.jd = :jd AND (j.state = 'RUNNING' OR j.state = 'ATTRIBUTED')", JobInstance.class).setParameter("refid", jobToTest).setParameter("jd", jobToTest.getJd()).getResultList(); return jobs.isEmpty(); } @Override public void run() { this.localThread = Thread.currentThread(); this.localThread.setName("QUEUE_POLLER;polling;" + this.dp.getQueue().getName()); EntityManager em = null; while (true) { lastLoop = Calendar.getInstance(); try { // Get a JI to run em = Helpers.getNewEm(); JobInstance ji = dequeue(em); while (ji != null) { // We will run this JI! jqmlogger.trace("JI number " + ji.getId() + " will be run by this poller this loop (already " + actualNbThread + "/" + dp.getNbThread() + " on " + this.queue.getName() + ")"); actualNbThread++; // Run it if (!ji.getJd().isExternal()) { (new Thread(new Loader(ji, cache, this))).start(); } else { (new Thread(new LoaderExternal(em, ji, this))).start(); } // Check if there is another job to run (does nothing - no db query - if queue is full so this is not expensive) ji = dequeue(em); } } catch (PersistenceException e) { if (e.getCause() instanceof JDBCConnectionException || e.getCause() instanceof TransactionException) { jqmlogger.error("connection to database lost - stopping poller"); jqmlogger.trace("connection error was:", e.getCause()); this.engine.pollerRestartNeeded(this); break; } else { throw e; } } finally { // Reset the em on each loop. 
Helpers.closeQuietly(em); } // Wait according to the deploymentParameter try { loop.tryAcquire(dp.getPollingInterval(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { run = false; } // Exit if asked to if (!run) { break; } } if (!run) { // Run is true only if the loop has exited abnormally, in which case the engine should try to restart the poller // So only do the graceful shutdown procedure if normal shutdown. jqmlogger.info("Poller loop on queue " + this.queue.getName() + " is stopping [engine " + this.dp.getNode().getName() + "]"); waitForAllThreads(60 * 1000); jqmlogger.info("Poller on queue " + dp.getQueue().getName() + " has ended normally"); // Let the engine decide if it should stop completely this.hasStopped = true; // BEFORE check this.engine.checkEngineEnd(); } else { this.run = false; this.hasStopped = true; } // JMX if (this.engine.loadJmxBeans) { try { ManagementFactory.getPlatformMBeanServer().unregisterMBean(name); } catch (Exception e) { jqmlogger.error("Could not unregister JMX beans", e); } } } @Override public Integer getCurrentActiveThreadCount() { return actualNbThread; } /** * Called when a payload thread has ended. This notifies the poller to free a slot and poll once again. */ synchronized void decreaseNbThread() { this.actualNbThread--; loop.release(1); } public DeploymentParameter getDp() { return dp; } boolean isRunning() { return !this.hasStopped; } private void waitForAllThreads(long timeOutMs) { long timeWaitedMs = 0; long stepMs = 1000; while (timeWaitedMs <= timeOutMs) { jqmlogger.trace("Waiting the end of " + actualNbThread + " job(s)"); if (actualNbThread == 0) { break; } if (timeWaitedMs == 0) { jqmlogger.info("Waiting for the end of " + actualNbThread + " jobs on queue " + this.dp.getQueue().getName() + " - timeout is " + timeOutMs + "ms"); } try { Thread.sleep(stepMs); } catch (InterruptedException e) { // Interruption => stop right now jqmlogger.warn("Some job instances did not finish in time - wait was interrupted"); return; } timeWaitedMs += stepMs; } if (timeWaitedMs > timeOutMs) { jqmlogger.warn("Some job instances did not finish in time - they will be killed for the poller to be able to stop"); } } // ////////////////////////////////////////////////////////// // JMX // ////////////////////////////////////////////////////////// @Override public long getCumulativeJobInstancesCount() { EntityManager em2 = Helpers.getNewEm(); Long nb = em2.createQuery("SELECT COUNT(i) From History i WHERE i.node = :n AND i.queue = :q", Long.class) .setParameter("n", this.dp.getNode()).setParameter("q", this.dp.getQueue()).getSingleResult(); em2.close(); return nb; } @Override public float getJobsFinishedPerSecondLastMinute() { EntityManager em2 = Helpers.getNewEm(); Calendar minusOneMinute = Calendar.getInstance(); minusOneMinute.add(Calendar.MINUTE, -1); Float nb = em2.createQuery("SELECT COUNT(i) From History i WHERE i.endDate >= :d and i.node = :n AND i.queue = :q", Long.class) .setParameter("d", minusOneMinute).setParameter("n", this.dp.getNode()).setParameter("q", this.dp.getQueue()) .getSingleResult() / 60f; em2.close(); return nb; } @Override public long getCurrentlyRunningJobCount() { EntityManager em2 = Helpers.getNewEm(); Long nb = em2.createQuery("SELECT COUNT(i) From JobInstance i WHERE i.node = :n AND i.queue = :q", Long.class) .setParameter("n", this.dp.getNode()).setParameter("q", this.dp.getQueue()).getSingleResult(); em2.close(); return nb; } @Override public Integer getPollingIntervalMilliseconds() { return this.dp.getPollingInterval(); 
} @Override public Integer getMaxConcurrentJobInstanceCount() { return this.dp.getNbThread(); } @Override public boolean isActuallyPolling() { // 100ms is a rough estimate of the time taken to do the actual poll. If it's more, there is a huge issue elsewhere. return (Calendar.getInstance().getTimeInMillis() - this.lastLoop.getTimeInMillis()) <= dp.getPollingInterval() + 100; } @Override public boolean isFull() { return this.actualNbThread >= this.dp.getNbThread(); } }
Fix: number of running JI could very rarely become wrong. Because -- is not an atomic operator in Java...
jqm-all/jqm-engine/src/main/java/com/enioka/jqm/tools/QueuePoller.java
Fix: number of running JI could very rarely become wrong
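The QueuePoller fix above swaps a plain Integer counter for java.util.concurrent.atomic.AtomicInteger because ++ and -- compile to a separate read, modify, and write that concurrent threads can interleave, occasionally losing an update; synchronizing only the decrement (as the old decreaseNbThread() did) does not protect the unsynchronized increment in run(). A standalone sketch of the failure mode and the atomic alternative follows; thread and iteration counts are arbitrary.

import java.util.concurrent.atomic.AtomicInteger;

class CounterRaceSketch {
    static int plainCounter = 0;                              // racy: ++ is read, add, write
    static final AtomicInteger atomicCounter = new AtomicInteger(0);

    public static void main(String[] args) throws InterruptedException {
        Runnable work = () -> {
            for (int i = 0; i < 100_000; i++) {
                plainCounter++;                               // may lose increments under contention
                atomicCounter.incrementAndGet();              // single atomic read-modify-write
            }
        };
        Thread t1 = new Thread(work);
        Thread t2 = new Thread(work);
        t1.start(); t2.start();
        t1.join(); t2.join();
        // plainCounter is often below 200000; atomicCounter is always exactly 200000.
        System.out.println(plainCounter + " vs " + atomicCounter.get());
    }
}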
Java
apache-2.0
be09acb8c2e63b9059b81eae3b93df6c825225ed
0
kalaspuffar/pdfbox,apache/pdfbox,kalaspuffar/pdfbox,apache/pdfbox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.text; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.documentinterchange.markedcontent.PDMarkedContent; import org.apache.pdfbox.pdmodel.graphics.PDXObject; import org.apache.pdfbox.contentstream.operator.markedcontent.BeginMarkedContentSequence; import org.apache.pdfbox.contentstream.operator.markedcontent.BeginMarkedContentSequenceWithProperties; import org.apache.pdfbox.contentstream.operator.markedcontent.EndMarkedContentSequence; /** * This is an stream engine to extract the marked content of a pdf. * * @author Johannes Koch */ public class PDFMarkedContentExtractor extends PDFTextStreamEngine { private final boolean suppressDuplicateOverlappingText = true; private final List<PDMarkedContent> markedContents = new ArrayList<PDMarkedContent>(); private final Stack<PDMarkedContent> currentMarkedContents = new Stack<PDMarkedContent>(); private final Map<String, List<TextPosition>> characterListMapping = new HashMap<String, List<TextPosition>>(); /** * Instantiate a new PDFTextStripper object. */ public PDFMarkedContentExtractor() throws IOException { this(null); } /** * Constructor. Will apply encoding-specific conversions to the output text. * * @param encoding The encoding that the output will be written in. */ public PDFMarkedContentExtractor(String encoding) throws IOException { addOperator(new BeginMarkedContentSequenceWithProperties()); addOperator(new BeginMarkedContentSequence()); addOperator(new EndMarkedContentSequence()); // todo: DP - Marked Content Point // todo: MP - Marked Content Point with Properties } /** * This will determine of two floating point numbers are within a specified variance. * * @param first The first number to compare to. * @param second The second number to compare to. * @param variance The allowed variance. 
*/ private boolean within( float first, float second, float variance ) { return second > first - variance && second < first + variance; } public void beginMarkedContentSequence(COSName tag, COSDictionary properties) { PDMarkedContent markedContent = PDMarkedContent.create(tag, properties); if (this.currentMarkedContents.isEmpty()) { this.markedContents.add(markedContent); } else { PDMarkedContent currentMarkedContent = this.currentMarkedContents.peek(); if (currentMarkedContent != null) { currentMarkedContent.addMarkedContent(markedContent); } } this.currentMarkedContents.push(markedContent); } public void endMarkedContentSequence() { if (!this.currentMarkedContents.isEmpty()) { this.currentMarkedContents.pop(); } } public void xobject(PDXObject xobject) { if (!this.currentMarkedContents.isEmpty()) { this.currentMarkedContents.peek().addXObject(xobject); } } /** * This will process a TextPosition object and add the * text to the list of characters on a page. It takes care of * overlapping text. * * @param text The text to process. */ @Override protected void processTextPosition( TextPosition text ) { boolean showCharacter = true; if( this.suppressDuplicateOverlappingText ) { showCharacter = false; String textCharacter = text.getUnicode(); float textX = text.getX(); float textY = text.getY(); List<TextPosition> sameTextCharacters = this.characterListMapping.get( textCharacter ); if( sameTextCharacters == null ) { sameTextCharacters = new ArrayList<TextPosition>(); this.characterListMapping.put( textCharacter, sameTextCharacters ); } // RDD - Here we compute the value that represents the end of the rendered // text. This value is used to determine whether subsequent text rendered // on the same line overwrites the current text. // // We subtract any positive padding to handle cases where extreme amounts // of padding are applied, then backed off (not sure why this is done, but there // are cases where the padding is on the order of 10x the character width, and // the TJ just backs up to compensate after each character). Also, we subtract // an amount to allow for kerning (a percentage of the width of the last // character). // boolean suppressCharacter = false; float tolerance = (text.getWidth()/textCharacter.length())/3.0f; for (TextPosition sameTextCharacter : sameTextCharacters) { TextPosition character = sameTextCharacter; String charCharacter = character.getUnicode(); float charX = character.getX(); float charY = character.getY(); //only want to suppress if( charCharacter != null && //charCharacter.equals( textCharacter ) && within( charX, textX, tolerance ) && within( charY, textY, tolerance ) ) { suppressCharacter = true; break; } } if( !suppressCharacter ) { sameTextCharacters.add( text ); showCharacter = true; } } if( showCharacter ) { List<TextPosition> textList = new ArrayList<TextPosition>(); /* In the wild, some PDF encoded documents put diacritics (accents on * top of characters) into a separate Tj element. When displaying them * graphically, the two chunks get overlayed. With text output though, * we need to do the overlay. This code recombines the diacritic with * its associated character if the two are consecutive. */ if(textList.isEmpty()) { textList.add(text); } else { /* test if we overlap the previous entry. * Note that we are making an assumption that we need to only look back * one TextPosition to find what we are overlapping. * This may not always be true. 
*/ TextPosition previousTextPosition = textList.get(textList.size()-1); if(text.isDiacritic() && previousTextPosition.contains(text)) { previousTextPosition.mergeDiacritic(text); } /* If the previous TextPosition was the diacritic, merge it into this * one and remove it from the list. */ else if(previousTextPosition.isDiacritic() && text.contains(previousTextPosition)) { text.mergeDiacritic(previousTextPosition); textList.remove(textList.size()-1); textList.add(text); } else { textList.add(text); } } if (!this.currentMarkedContents.isEmpty()) { this.currentMarkedContents.peek().addText(text); } } } public List<PDMarkedContent> getMarkedContents() { return this.markedContents; } }
pdfbox/src/main/java/org/apache/pdfbox/text/PDFMarkedContentExtractor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.text; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Stack; import org.apache.pdfbox.cos.COSDictionary; import org.apache.pdfbox.cos.COSName; import org.apache.pdfbox.pdmodel.documentinterchange.markedcontent.PDMarkedContent; import org.apache.pdfbox.pdmodel.graphics.PDXObject; import org.apache.pdfbox.contentstream.operator.markedcontent.BeginMarkedContentSequence; import org.apache.pdfbox.contentstream.operator.markedcontent.BeginMarkedContentSequenceWithProperties; import org.apache.pdfbox.contentstream.operator.markedcontent.EndMarkedContentSequence; /** * This is an stream engine to extract the marked content of a pdf. * * @author Johannes Koch */ public class PDFMarkedContentExtractor extends PDFTextStreamEngine { private boolean suppressDuplicateOverlappingText = true; private List<PDMarkedContent> markedContents = new ArrayList<PDMarkedContent>(); private Stack<PDMarkedContent> currentMarkedContents = new Stack<PDMarkedContent>(); private Map<String, List<TextPosition>> characterListMapping = new HashMap<String, List<TextPosition>>(); /** * Instantiate a new PDFTextStripper object. */ public PDFMarkedContentExtractor() throws IOException { this(null); } /** * Constructor. Will apply encoding-specific conversions to the output text. * * @param encoding The encoding that the output will be written in. */ public PDFMarkedContentExtractor(String encoding) throws IOException { addOperator(new BeginMarkedContentSequenceWithProperties()); addOperator(new BeginMarkedContentSequence()); addOperator(new EndMarkedContentSequence()); // todo: DP - Marked Content Point // todo: MP - Marked Content Point with Properties } /** * This will determine of two floating point numbers are within a specified variance. * * @param first The first number to compare to. * @param second The second number to compare to. * @param variance The allowed variance. 
*/ private boolean within( float first, float second, float variance ) { return second > first - variance && second < first + variance; } public void beginMarkedContentSequence(COSName tag, COSDictionary properties) { PDMarkedContent markedContent = PDMarkedContent.create(tag, properties); if (this.currentMarkedContents.isEmpty()) { this.markedContents.add(markedContent); } else { PDMarkedContent currentMarkedContent = this.currentMarkedContents.peek(); if (currentMarkedContent != null) { currentMarkedContent.addMarkedContent(markedContent); } } this.currentMarkedContents.push(markedContent); } public void endMarkedContentSequence() { if (!this.currentMarkedContents.isEmpty()) { this.currentMarkedContents.pop(); } } public void xobject(PDXObject xobject) { if (!this.currentMarkedContents.isEmpty()) { this.currentMarkedContents.peek().addXObject(xobject); } } /** * This will process a TextPosition object and add the * text to the list of characters on a page. It takes care of * overlapping text. * * @param text The text to process. */ @Override protected void processTextPosition( TextPosition text ) { boolean showCharacter = true; if( this.suppressDuplicateOverlappingText ) { showCharacter = false; String textCharacter = text.getUnicode(); float textX = text.getX(); float textY = text.getY(); List<TextPosition> sameTextCharacters = this.characterListMapping.get( textCharacter ); if( sameTextCharacters == null ) { sameTextCharacters = new ArrayList<TextPosition>(); this.characterListMapping.put( textCharacter, sameTextCharacters ); } // RDD - Here we compute the value that represents the end of the rendered // text. This value is used to determine whether subsequent text rendered // on the same line overwrites the current text. // // We subtract any positive padding to handle cases where extreme amounts // of padding are applied, then backed off (not sure why this is done, but there // are cases where the padding is on the order of 10x the character width, and // the TJ just backs up to compensate after each character). Also, we subtract // an amount to allow for kerning (a percentage of the width of the last // character). // boolean suppressCharacter = false; float tolerance = (text.getWidth()/textCharacter.length())/3.0f; for (TextPosition sameTextCharacter : sameTextCharacters) { TextPosition character = (TextPosition) sameTextCharacter; String charCharacter = character.getUnicode(); float charX = character.getX(); float charY = character.getY(); //only want to suppress if( charCharacter != null && //charCharacter.equals( textCharacter ) && within( charX, textX, tolerance ) && within( charY, textY, tolerance ) ) { suppressCharacter = true; break; } } if( !suppressCharacter ) { sameTextCharacters.add( text ); showCharacter = true; } } if( showCharacter ) { List<TextPosition> textList = new ArrayList<TextPosition>(); /* In the wild, some PDF encoded documents put diacritics (accents on * top of characters) into a separate Tj element. When displaying them * graphically, the two chunks get overlayed. With text output though, * we need to do the overlay. This code recombines the diacritic with * its associated character if the two are consecutive. */ if(textList.isEmpty()) { textList.add(text); } else { /* test if we overlap the previous entry. * Note that we are making an assumption that we need to only look back * one TextPosition to find what we are overlapping. * This may not always be true. 
*/ TextPosition previousTextPosition = (TextPosition)textList.get(textList.size()-1); if(text.isDiacritic() && previousTextPosition.contains(text)) { previousTextPosition.mergeDiacritic(text); } /* If the previous TextPosition was the diacritic, merge it into this * one and remove it from the list. */ else if(previousTextPosition.isDiacritic() && text.contains(previousTextPosition)) { text.mergeDiacritic(previousTextPosition); textList.remove(textList.size()-1); textList.add(text); } else { textList.add(text); } } if (!this.currentMarkedContents.isEmpty()) { this.currentMarkedContents.peek().addText(text); } } } public List<PDMarkedContent> getMarkedContents() { return this.markedContents; } }
PDFBOX-2852: make fields final, remove unneeded casts git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1708242 13f79535-47bb-0310-9956-ffa450edef68
pdfbox/src/main/java/org/apache/pdfbox/text/PDFMarkedContentExtractor.java
PDFBOX-2852: make fields final, remove unneeded casts
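A minimal usage sketch for the PDFMarkedContentExtractor shown in this record, assuming PDFBox 2.x: the extractor is driven through processPage(), after which getMarkedContents() returns the collected marked-content tree. The input path "example.pdf" and the demo class name are placeholders, not part of the record.

import java.io.File;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.documentinterchange.markedcontent.PDMarkedContent;
import org.apache.pdfbox.text.PDFMarkedContentExtractor;

public class MarkedContentDemo {
    public static void main(String[] args) throws Exception {
        // placeholder input path; replace with a real tagged PDF
        try (PDDocument document = PDDocument.load(new File("example.pdf"))) {
            PDFMarkedContentExtractor extractor = new PDFMarkedContentExtractor();
            // drives the BMC/BDC/EMC operators registered in the extractor's constructor
            extractor.processPage(document.getPage(0));
            for (PDMarkedContent content : extractor.getMarkedContents()) {
                System.out.println(content.getTag() + " -> " + content.getContents().size() + " item(s)");
            }
        }
    }
}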
Java
apache-2.0
344fbc8c2731db84575b390d9e683dd37cb2ca35
0
sajithatharaka/Gson
package projectgson; import gson.DateDeserializerAdapter; import gson.GsonManager; import gson.ModuleDeserializerAdapter; import java.lang.reflect.Field; import java.lang.reflect.Type; import java.util.Date; import java.util.HashMap; import java.util.Map; import sample.pojo.Module; import sample.pojo.Student; /* * @Type : Class * @Dependencies : N/A * @Data : N/A * @Description : N/A */ public class ProjectGson { public static void main(String[] args) { //============= Map of Composite Objects ============================================================ Map<Type, Object> compositeObjectAdapterMap = new HashMap<Type, Object>(); compositeObjectAdapterMap.put(Date.class, new DateDeserializerAdapter()); compositeObjectAdapterMap.put(Module.class, new ModuleDeserializerAdapter()); //============= Instead of Json file, Json String is passed ========================================= String s = "{name:'Sajitha',id:'ST-001',date:'2016-07-09',age:24,subjects:['Maths','English'],module:{id:'MD00-CS',name:'Secure Systems'}}"; Student student = (Student) GsonManager.getContext(compositeObjectAdapterMap, s, Student.class); //============= Used Reflection - Not Important printTheFields(student); } public static void printTheFields(Student student) { try { Field[] fields = Student.class.getDeclaredFields(); for (Field field : fields) { if (field.getName().equals("module")) { Object newObj = field.get(student); Module module = (Module) newObj; System.out.println(field.getName()+".id" + " :" + module.getId()); System.out.println(field.getName()+".name" + " :" + module.getName()); } else { field.setAccessible(true); Object newObj = field.get(student); System.out.println(field.getName() + " :" + newObj); } } } catch (Exception e) { System.out.println("Exception : " + e.getMessage()); } } }
src/projectgson/ProjectGson.java
package projectgson; import gson.DateDeserializerAdapter; import gson.GsonManager; import gson.ModuleDeserializerAdapter; import java.lang.reflect.Field; import java.lang.reflect.Type; import java.util.Date; import java.util.HashMap; import java.util.Map; import sample.pojo.Module; import sample.pojo.Student; /* * @Type : Class * @Dependencies : N/A * @Data : N/A * @Description : N/A */ public class ProjectGson { public static void main(String[] args) { //============= Map of Composite Objects ============================================================ Map<Type, Object> compositeObjectAdapterMap = new HashMap<Type, Object>(); compositeObjectAdapterMap.put(Date.class, new DateDeserializerAdapter()); compositeObjectAdapterMap.put(Module.class, new ModuleDeserializerAdapter()); //============= Instead of Json file, Json String is passed ========================================= String s = "{name:'Sajitha',id:'ST-001',date:'2016-07-09',age:24,subjects:['Maths','English'],module:{id:'MD00-CS',name:'Secure Systems'}}"; Student student = (Student) GsonManager.getContext(compositeObjectAdapterMap, s, Student.class); printTheFields(student); } public static void printTheFields(Student student) { try { Field[] fields = Student.class.getDeclaredFields(); for (Field field : fields) { field.setAccessible(true); Object newObj = field.get(student); System.out.println(field.getName() + " :" + newObj); } } catch (Exception e) { System.out.println("Exception : " + e.getMessage()); } } }
Updated printTheFields(...) method with module object
src/projectgson/ProjectGson.java
Updated printTheFields(...) method with module object
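The GsonManager, DateDeserializerAdapter and ModuleDeserializerAdapter used in this record are project classes whose internals are not shown here. A minimal sketch of the same idea built directly on the public Gson API, assuming the "yyyy-MM-dd" layout used in the sample JSON string; class and variable names are illustrative only.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonParseException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class DateAdapterSketch {
    public static void main(String[] args) {
        // illustrative stand-in for the project's DateDeserializerAdapter
        JsonDeserializer<Date> dateAdapter = (json, typeOfT, context) -> {
            try {
                return new SimpleDateFormat("yyyy-MM-dd").parse(json.getAsString());
            } catch (ParseException e) {
                throw new JsonParseException(e);
            }
        };
        Gson gson = new GsonBuilder()
                .registerTypeAdapter(Date.class, dateAdapter)
                .create();
        // same date literal as in the sample JSON passed to GsonManager above
        Date date = gson.fromJson("\"2016-07-09\"", Date.class);
        System.out.println(date);
    }
}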
Java
apache-2.0
e1d4cfd9b31b6a2fa66e0a4ae483d28d237a0aeb
0
sruehl/camel-example-rcode,sruehl/camel-example-rcode
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.apacheextras.camel.examples.rcode.builder; import org.apacheextras.camel.examples.rcode.aggregator.CalendarAgregationStrategy; import org.apacheextras.camel.examples.rcode.aggregator.ConcatenateAggregationStrategy; import org.apacheextras.camel.examples.rcode.aggregator.EnrichServiceResponseAggregationStrategy; import org.apache.camel.Exchange; import org.apache.camel.LoggingLevel; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.dataformat.csv.CsvDataFormat; import java.io.File; import java.util.Date; import java.util.List; import org.apache.camel.model.dataformat.JsonLibrary; /** * @author cemmersb, Sebastian Rühl */ public class RCodeRouteBuilder extends RouteBuilder { private final static String DEVICE_COMMAND = "jpeg('${exchangeId}.jpg',quality=90);"; private final static String PLOT_COMMAND = "plot(quantity, type=\"l\");"; private final static String RETRIEVE_PLOT_COMMAND = "r=readBin('${exchangeId}.jpg','raw',1024*1024); unlink('${exchangeId}.jpg'); r"; private final static String FINAL_COMMAND = DEVICE_COMMAND + PLOT_COMMAND + "dev.off();" + RETRIEVE_PLOT_COMMAND; private final static String HTTP4_RS_CAL_ENDPOINT = "http4://kayaposoft.com/enrico/json/v1.0/"; private File basePath; public RCodeRouteBuilder(File basePath) { this.basePath = basePath; } @Override public void configure() throws Exception { configureCsvRoute(); configureRestCalendarRoute(); configureRCodeRoute(); configureGraphRoute(); wireRoutes(); } /** * Takes an input as bytes and writes it as an jpeg file. */ private void configureGraphRoute() { from("direct:graph") .setHeader(Exchange.FILE_NAME, simple("graph${exchangeId}.jpeg")) .to("file://" + basePath.getParent() + "/output") .log("Generated graph file: ${header.CamelFileNameProduced}"); } /** * Takes an incoming string argument containing monthly quantities and * generates an output graph. */ private void configureRCodeRoute() { from("direct:rcode") //.setBody(simple("calendar <- c(${});\n")) Das muss sowieso wo anders passieren .setBody(simple("quantity <- c(${body});\n" + FINAL_COMMAND)) .to("log://command?level=DEBUG") .to("rcode://localhost:6311/parse_and_eval?bufferSize=4194304") .to("log://r_output?level=INFO") .setBody(simple("${body.asBytes}")); } /** * Configures a CSV route that reads the quantity values from the route and * sends the result to the RCode route. 
*/ private void configureCsvRoute() { // Configure CSV data format with ';' as separator and skipping of the header final CsvDataFormat csv = new CsvDataFormat(); csv.setDelimiter(";"); csv.setSkipFirstLine(true); // Route takes a CSV file, splits the body and reads the actual values from(basePath.toURI() + "?noop=TRUE") .log("Unmarshalling CSV file.") .unmarshal(csv) .to("log://CSV?level=DEBUG") .setHeader("id", simple("${exchangeId}")) .split().body() .to("log://CSV?level=DEBUG") // TODO: Create monthly based output instead of taking the yearly figures .setBody(simple("${body[1]}")) .to("log://CSV?level=DEBUG") // Now we aggregate the retrived contents in a big string .aggregate(header("id"), new ConcatenateAggregationStrategy()).completionTimeout(3000) .log(LoggingLevel.INFO, "Finished the unmarshaling") .to("direct:CSV_sink"); } private void configureRestCalendarRoute() { from("direct:REST_CALENDAR") // Configure Query Parameters .setHeader(Exchange.HTTP_QUERY, constant("action=getPublicHolidaysForYear&year=2012&country=ger&region=Bavaria")) .to(HTTP4_RS_CAL_ENDPOINT) .convertBodyTo(String.class) .to("log://rest_calendar?level=INFO") .unmarshal().json(JsonLibrary.Gson, List.class) .split().body() .setBody(simple("${body.date.year}/${body.date.month}/${body.date.day}")) .convertBodyTo(Date.class) .aggregate(header("id"), new CalendarAgregationStrategy()).completionTimeout(3000) .to("log://date_calendar?level=INFO") .end(); } /** * Wires together the routes. */ private void wireRoutes() { from("direct:CSV_sink") .enrich("direct:REST_CALENDAR", new EnrichServiceResponseAggregationStrategy()) .to("direct:rcode") .to("direct:graph"); } }
src/main/java/org/apacheextras/camel/examples/rcode/builder/RCodeRouteBuilder.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.apacheextras.camel.examples.rcode.builder; import org.apacheextras.camel.examples.rcode.aggregator.CalendarAgregationStrategy; import org.apacheextras.camel.examples.rcode.aggregator.ConcatenateAggregationStrategy; import org.apacheextras.camel.examples.rcode.aggregator.EnrichServiceResponseAggregationStrategy; import org.apache.camel.Exchange; import org.apache.camel.LoggingLevel; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.dataformat.csv.CsvDataFormat; import java.io.File; import java.util.Date; import org.apache.camel.model.dataformat.JsonLibrary; /** * @author cemmersb, Sebastian Rühl */ public class RCodeRouteBuilder extends RouteBuilder { private final static String DEVICE_COMMAND = "jpeg('${exchangeId}.jpg',quality=90);"; private final static String PLOT_COMMAND = "plot(quantity, type=\"l\");"; private final static String RETRIEVE_PLOT_COMMAND = "r=readBin('${exchangeId}.jpg','raw',1024*1024); unlink('${exchangeId}.jpg'); r"; private final static String FINAL_COMMAND = DEVICE_COMMAND + PLOT_COMMAND + "dev.off();" + RETRIEVE_PLOT_COMMAND; private final static String HTTP4_RS_CAL_ENDPOINT = "http4://kayaposoft.com/enrico/json/v1.0/"; private File basePath; public RCodeRouteBuilder(File basePath) { this.basePath = basePath; } @Override public void configure() throws Exception { configureCsvRoute(); configureRestCalendarRoute(); configureRCodeRoute(); configureGraphRoute(); wireRoutes(); } /** * Takes an input as bytes and writes it as an jpeg file. */ private void configureGraphRoute() { from("direct:graph") .setHeader(Exchange.FILE_NAME, simple("graph${exchangeId}.jpeg")) .to("file://" + basePath.getParent() + "/output") .log("Generated graph file: ${header.CamelFileNameProduced}"); } /** * Takes an incoming string argument containing monthly quantities and * generates an output graph. */ private void configureRCodeRoute() { from("direct:rcode") //.setBody(simple("calendar <- c(${});\n")) Das muss sowieso wo anders passieren .setBody(simple("quantity <- c(${body});\n" + FINAL_COMMAND)) .to("log://command?level=DEBUG") .to("rcode://localhost:6311/parse_and_eval?bufferSize=4194304") .to("log://r_output?level=INFO") .setBody(simple("${body.asBytes}")); } /** * Configures a CSV route that reads the quantity values from the route and * sends the result to the RCode route. 
*/ private void configureCsvRoute() { // Configure CSV data format with ';' as separator and skipping of the header final CsvDataFormat csv = new CsvDataFormat(); csv.setDelimiter(";"); csv.setSkipFirstLine(true); // Route takes a CSV file, splits the body and reads the actual values from(basePath.toURI() + "?noop=TRUE") .log("Unmarshalling CSV file.") .unmarshal(csv) .to("log://CSV?level=DEBUG") .setHeader("id", simple("${exchangeId}")) .split().body() .to("log://CSV?level=DEBUG") // TODO: Create monthly based output instead of taking the yearly figures .setBody(simple("${body[1]}")) .to("log://CSV?level=DEBUG") // Now we aggregate the retrived contents in a big string .aggregate(header("id"), new ConcatenateAggregationStrategy()).completionTimeout(3000) .log(LoggingLevel.INFO, "Finished the unmarshaling") .to("direct:CSV_sink"); } private void configureRestCalendarRoute() { from("direct:REST_CALENDAR") // Configure Query Parameters .setHeader(Exchange.HTTP_QUERY, constant("action=getPublicHolidaysForYear&year=2012&country=ger&region=Bavaria")) .to(HTTP4_RS_CAL_ENDPOINT) .convertBodyTo(String.class) .to("log://rest_calendar?level=INFO") .convertBodyTo(String[].class) .split().body() .unmarshal().json(JsonLibrary.Gson) .convertBodyTo(Date.class) .aggregate(header("id"), new CalendarAgregationStrategy()).completionTimeout(3000) .to("log://date_calendar?level=INFO") .end(); } /** * Wires together the routes. */ private void wireRoutes() { from("direct:CSV_sink") .enrich("direct:REST_CALENDAR", new EnrichServiceResponseAggregationStrategy()) .to("direct:rcode") .to("direct:graph"); } }
fix gson
src/main/java/org/apacheextras/camel/examples/rcode/builder/RCodeRouteBuilder.java
fix gson
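The "fix gson" change above makes the REST calendar route unmarshal the whole response with the Gson data format (unmarshal().json(JsonLibrary.Gson, List.class)) before splitting it into individual entries. A stripped-down sketch of that pattern, using an illustrative direct: endpoint instead of the real HTTP call; it assumes camel-gson is on the classpath.

import java.util.List;

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.dataformat.JsonLibrary;

// Illustrative route: unmarshal a JSON array into a List with Gson, then split it.
public class JsonSplitRouteSketch extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        from("direct:holidays")                               // placeholder endpoint
            .unmarshal().json(JsonLibrary.Gson, List.class)   // same data format call as the fixed route
            .split().body()
            .log("holiday entry: ${body}");
    }
}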
Java
apache-2.0
3da7c64df907979e5b1eeb7179b5c2a758e2e658
0
Horizon-Engineering/Android_application_ILS
package com.example.hesolutions.horizon; import android.app.Activity; import android.app.DatePickerDialog; import android.app.ProgressDialog; import android.app.TimePickerDialog; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.text.InputFilter; import android.text.Spanned; import android.util.DisplayMetrics; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.InputMethodManager; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.DatePicker; import android.widget.EditText; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.SimpleAdapter; import android.widget.Switch; import android.widget.TextView; import android.widget.TimePicker; import android.widget.Toast; import com.mylibrary.WeekView; import com.mylibrary.WeekViewEvent; import com.google.common.collect.BiMap; import com.homa.hls.database.DatabaseManager; import com.homa.hls.database.Device; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; public class CalendarTask extends Activity { TextView startdate; TextView starttime; TextView finishdate; TextView finishtime; Button Apply; Button cancelTOcalendar; Button delete; Switch switch1; EditText weeknumber; TextView textView4,textView5; Integer weeks; RelativeLayout layout1; CheckBox Sunday, Monday, Tuesday, Wednesday, Thursday, Friday, Saturday; Integer day; ListView sectorlistView; MyCustomAdapter deviceAdapter = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_calendar_task); startdate = (TextView)findViewById(R.id.startdate); starttime = (TextView)findViewById(R.id.starttime); finishdate = (TextView)findViewById(R.id.finishdate); finishtime = (TextView)findViewById(R.id.finishtime); Apply = (Button)findViewById(R.id.Apply); cancelTOcalendar = (Button)findViewById(R.id.cancelTOcalendar); delete = (Button)findViewById(R.id.delete); switch1 =(Switch)findViewById(R.id.switch1); weeknumber = (EditText)findViewById(R.id.weeknumber); textView4 = (TextView)findViewById(R.id.textView4); textView5 = (TextView)findViewById(R.id.textView5); layout1 = (RelativeLayout)findViewById(R.id.layout1); Sunday = (CheckBox)findViewById(R.id.Sunday); Monday = (CheckBox)findViewById(R.id.Monday); Tuesday = (CheckBox)findViewById(R.id.Tuesday); Wednesday = (CheckBox)findViewById(R.id.Wednesday); Thursday = (CheckBox)findViewById(R.id.Thursday); Friday = (CheckBox)findViewById(R.id.Friday); Saturday = (CheckBox)findViewById(R.id.Saturday); sectorlistView = (ListView)findViewById(R.id.sectorlistView); final SimpleDateFormat ddf = new SimpleDateFormat("MMM dd, yyyy"); final SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss"); String currentdate = ddf.format(new java.util.Date()); startdate.setText(currentdate); finishdate.setText(currentdate); String currenttime = sdf.format(new java.util.Date()); starttime.setText(currenttime); finishtime.setText(currenttime); final Calendar startTime = Calendar.getInstance(); final Calendar finishTime = Calendar.getInstance(); day 
=startTime.get(Calendar.DAY_OF_WEEK)-1; //========================================Loading the sector info BiMap<String, BiMap> sector = DataManager.getInstance().getsector(); String username = DataManager.getInstance().getUsername(); ArrayList<Group> arrayList = new ArrayList<Group>(); if (sector.get(username)==null) {} else { BiMap<String, ArrayList> sectordetails = sector.get(username); for (Map.Entry<String, ArrayList> entry : sectordetails.entrySet()) { String key = entry.getKey(); ArrayList value = entry.getValue(); Group group = new Group(key, value, false); arrayList.add(group); } deviceAdapter = new MyCustomAdapter(this, R.layout.devicelist, arrayList); sectorlistView.setAdapter(deviceAdapter); } //=======================================start date and time=============================================== startdate.setOnClickListener(new View.OnClickListener() { DatePickerDialog.OnDateSetListener date = new DatePickerDialog.OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { startTime.set(Calendar.YEAR, year); startTime.set(Calendar.MONTH, monthOfYear); startTime.set(Calendar.DAY_OF_MONTH, dayOfMonth); startdate.setText(ddf.format(startTime.getTime())); day =startTime.get(Calendar.DAY_OF_WEEK)-1; finishTime.set(Calendar.YEAR, year); finishTime.set(Calendar.MONTH, monthOfYear); finishTime.set(Calendar.DAY_OF_MONTH, dayOfMonth); finishdate.setText(ddf.format(startTime.getTime())); } }; @Override public void onClick(View v) { new DatePickerDialog(CalendarTask.this, date, startTime .get(Calendar.YEAR), startTime.get(Calendar.MONTH), startTime.get(Calendar.DAY_OF_MONTH)).show(); } }); starttime.setOnClickListener(new View.OnClickListener() { TimePickerDialog.OnTimeSetListener time = new TimePickerDialog.OnTimeSetListener() { @Override public void onTimeSet(TimePicker view, int Hour, int Minute) { startTime.set(Calendar.HOUR_OF_DAY, Hour); startTime.set(Calendar.MINUTE, Minute); starttime.setText(sdf.format(startTime.getTime())); finishTime.set(Calendar.HOUR_OF_DAY, Hour); finishTime.set(Calendar.MINUTE, Minute); finishtime.setText(sdf.format(startTime.getTime())); } }; @Override public void onClick(View v) { new TimePickerDialog(CalendarTask.this, time, startTime.get(Calendar.HOUR_OF_DAY), startTime.get(Calendar.MINUTE), true).show(); } }); //=================================finish date time============================== finishdate.setOnClickListener(new View.OnClickListener() { DatePickerDialog.OnDateSetListener date = new DatePickerDialog.OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { finishTime.set(Calendar.YEAR, year); finishTime.set(Calendar.MONTH, monthOfYear); finishTime.set(Calendar.DAY_OF_MONTH, dayOfMonth); finishdate.setText(ddf.format(finishTime.getTime())); } }; @Override public void onClick(View v) { new DatePickerDialog(CalendarTask.this, date, finishTime .get(Calendar.YEAR), finishTime.get(Calendar.MONTH), finishTime.get(Calendar.DAY_OF_MONTH)).show(); } }); finishtime.setOnClickListener(new View.OnClickListener() { TimePickerDialog.OnTimeSetListener time = new TimePickerDialog.OnTimeSetListener() { @Override public void onTimeSet(TimePicker view, int Hour, int Minute) { finishTime.set(Calendar.HOUR_OF_DAY, Hour); finishTime.set(Calendar.MINUTE, Minute); finishtime.setText(sdf.format(finishTime.getTime())); } }; @Override public void onClick(View v) { new TimePickerDialog(CalendarTask.this, time, 
finishTime.get(Calendar.HOUR_OF_DAY),finishTime.get(Calendar.MINUTE),true).show(); } }); //=================================================================================================== cancelTOcalendar.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent1 = new Intent(v.getContext(), GlobalCalendar.class); startActivity(intent1); } }); Apply.setOnClickListener(new View.OnClickListener() { String cname = DataManager.getInstance().getUsername(); String colorname = DataManager.getInstance().getcolorname(); int colorName = Color.parseColor(colorname); long id, oldid; final List<WeekViewEvent> list = DataManager.getInstance().getevents(); final List<Long> IDlist = DataManager.getInstance().getEventID(); final List<List<Long>> grouplist = DataManager.getInstance().getGroupID(); @Override public void onClick(final View v) { final ArrayList<Device> choosedevice = new ArrayList<Device>(); ArrayList<Group> choosegrouplist = deviceAdapter.arrayList; for (int i = 0; i < choosegrouplist.size(); i++) { Group group = choosegrouplist.get(i); if (group.getChecked() == true) { ArrayList<Device> devicelist = group.getList(); for (int j = 0; j < devicelist.size(); j++) { Device device = devicelist.get(j); choosedevice.add(device); } } } if (choosedevice.isEmpty()) { Toast.makeText(CalendarTask.this, "At least one group should be selected", Toast.LENGTH_SHORT).show(); } else { new Thread(new Runnable() { @Override public void run() { final Intent intent = new Intent(v.getContext(), GlobalCalendar.class); // Repetition if (switch1.isChecked()) { if (!weeknumber.getText().toString().isEmpty()) { weeks = Integer.parseInt(weeknumber.getText().toString()); final List<Long> groupedlist = new ArrayList<Long>(); if (weeks > 0) { if ((finishTime.after(startTime))) { // repeat i weeks new Thread(new Runnable() { @Override public void run() { if (!IDlist.isEmpty()) { oldid = IDlist.get((IDlist.size() - 1)); } final AtomicLong counter = new AtomicLong(oldid); for (int i = 0; i < weeks; i++) { final int j = i; new Thread(new Runnable() { @Override public void run() { if (Monday.isChecked()) { id = counter.incrementAndGet(); Calendar MonSt = Calendar.getInstance(), MonFi = Calendar.getInstance(); MonSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); MonSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); MonSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); MonSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); MonSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); MonFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); MonFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); MonFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); MonFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); MonFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 1 - day; MonSt.add(Calendar.DAY_OF_MONTH, date); MonFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, MonSt, MonFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Tuesday.isChecked()) { id = counter.incrementAndGet(); Calendar TueSt = Calendar.getInstance(), TusFi = Calendar.getInstance(); TueSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); TueSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); TueSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) 
+ 7 * j); TueSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); TueSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); TusFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); TusFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); TusFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); TusFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); TusFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 2 - day; TueSt.add(Calendar.DAY_OF_MONTH, date); TusFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, TueSt, TusFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Wednesday.isChecked()) { id = counter.incrementAndGet(); Calendar WedSt = Calendar.getInstance(), WedFi = Calendar.getInstance(); WedSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); WedSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); WedSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); WedSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); WedSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); WedFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); WedFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); WedFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); WedFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); WedFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 3 - day; WedSt.add(Calendar.DAY_OF_MONTH, date); WedFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, WedSt, WedFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Thursday.isChecked()) { id = counter.incrementAndGet(); Calendar ThuSt = Calendar.getInstance(), ThuFi = Calendar.getInstance(); ThuSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); ThuSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); ThuSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); ThuSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); ThuSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); ThuFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); ThuFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); ThuFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); ThuFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); ThuFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 4 - day; ThuSt.add(Calendar.DAY_OF_MONTH, date); ThuFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, ThuSt, ThuFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Friday.isChecked()) { id = counter.incrementAndGet(); Calendar FriSt = Calendar.getInstance(), FriFi = Calendar.getInstance(); FriSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); FriSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); FriSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); FriSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); FriSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); FriFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); 
FriFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); FriFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); FriFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); FriFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 5 - day; FriSt.add(Calendar.DAY_OF_MONTH, date); FriFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, FriSt, FriFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Saturday.isChecked()) { id = counter.incrementAndGet(); Calendar SatSt = Calendar.getInstance(), SatFi = Calendar.getInstance(); SatSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); SatSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); SatSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SatSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); SatSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); SatFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); SatFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); SatFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SatFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); SatFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 6 - day; SatSt.add(Calendar.DAY_OF_MONTH, date); SatFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, SatSt, SatFi, colorName, choosedevice); list.add(event); IDlist.add(id); groupedlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Sunday.isChecked()) { id = counter.incrementAndGet(); Calendar SunSt = Calendar.getInstance(), SunFi = Calendar.getInstance(); SunSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); SunSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); SunSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SunSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); SunSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); SunFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); SunFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); SunFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SunFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); SunFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 0 - day; SunSt.add(Calendar.DAY_OF_MONTH, date); SunFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, SunSt, SunFi, colorName, choosedevice); list.add(event); IDlist.add(id); groupedlist.add(id); } } }).start(); } grouplist.add(groupedlist); DataManager.getInstance().setGroupID(grouplist); DataManager.getInstance().setEventID(IDlist); DataManager.getInstance().setevents(list); startActivity(intent); } }).start(); } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Unvaild time", Toast.LENGTH_LONG).show(); } }); } } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Enter a valid week number (at least 1)", Toast.LENGTH_LONG).show(); } }); } } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Please enter a number", Toast.LENGTH_LONG).show(); } }); } } // not repetition if (switch1.isChecked() == false) { if ((finishTime.after(startTime))) { if 
(!IDlist.isEmpty()) { id = IDlist.get((IDlist.size() - 1)) + 1; } WeekViewEvent event = new WeekViewEvent(id, cname, startTime, finishTime, colorName, choosedevice); list.add(event); IDlist.add(id); DataManager.getInstance().setevents(list); DataManager.getInstance().setEventID(IDlist); startActivity(intent); } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Unvalid Time", Toast.LENGTH_LONG).show(); } }); } } } }).start(); } } }); switch1.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked == true) { startdate.setEnabled(false); finishdate.setEnabled(false); starttime.setEnabled(false); finishtime.setEnabled(false); for (int i = 0; i < layout1.getChildCount(); i++) { View child = layout1.getChildAt(i); child.setEnabled(true); } if (day.equals(1)) { Monday.setChecked(true); Monday.setEnabled(false); } else if (day.equals(2)) { Tuesday.setChecked(true); Tuesday.setEnabled(false); } else if (day.equals(3)) { Wednesday.setChecked(true); Wednesday.setEnabled(false); } else if (day.equals(4)) { Thursday.setChecked(true); Thursday.setEnabled(false); } else if (day.equals(5)) { Friday.setChecked(true); Friday.setEnabled(false); } else if (day.equals(6)) { Saturday.setChecked(true); Saturday.setEnabled(false); } else { Sunday.setChecked(true); Sunday.setEnabled(false); } } else { for (int i = 0; i < layout1.getChildCount(); i++) { View child = layout1.getChildAt(i); child.setEnabled(false); } startdate.setEnabled(true); finishdate.setEnabled(true); starttime.setEnabled(true); finishtime.setEnabled(true); Monday.setChecked(false); Tuesday.setChecked(false); Wednesday.setChecked(false); Thursday.setChecked(false); Friday.setChecked(false); Saturday.setChecked(false); Sunday.setChecked(false); } } }); } private class MyCustomAdapter extends ArrayAdapter<Group> { ArrayList<Group> arrayList; public MyCustomAdapter(Context context, int textViewResourceId, ArrayList<Group> arrayList) { super(context, textViewResourceId, arrayList); this.arrayList = new ArrayList<Group>(); this.arrayList.addAll(arrayList); } @Override public View getView(int position, View convertView, ViewGroup parent) { if (convertView == null) { LayoutInflater vi = (LayoutInflater) getSystemService( Context.LAYOUT_INFLATER_SERVICE); convertView = vi.inflate(R.layout.devicelist, null); } Group group = arrayList.get(position); TextView name = (TextView) convertView.findViewById(R.id.name); final CheckBox checked = (CheckBox) convertView.findViewById(R.id.checked); checked.setTag(group); name.setText(group.getName()); checked.setText(""); checked.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Group group = (Group) v.getTag(); if (checked.isChecked()) { group.setChecked(true); } else group.setChecked(false); } }); return convertView; } } public class Group { String name; ArrayList<Device> devicelist; boolean ischecked; public Group(String name, ArrayList devicelist, boolean ischecked) { this.name = name; this.devicelist = devicelist; this.ischecked = ischecked; } public boolean getChecked() { return ischecked; } public void setChecked(boolean ischecked) { this.ischecked = ischecked; } public String getName() { return name; } public void setName(String name) { this.name = name; } public ArrayList getList() { return devicelist; } public void setList(ArrayList devicelist) { this.devicelist = devicelist; } } }
Horizon/app/src/main/java/com/example/hesolutions/horizon/CalendarTask.java
package com.example.hesolutions.horizon; import android.app.Activity; import android.app.DatePickerDialog; import android.app.ProgressDialog; import android.app.TimePickerDialog; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.text.InputFilter; import android.text.Spanned; import android.util.DisplayMetrics; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.InputMethodManager; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.DatePicker; import android.widget.EditText; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.SimpleAdapter; import android.widget.Switch; import android.widget.TextView; import android.widget.TimePicker; import android.widget.Toast; import com.mylibrary.WeekView; import com.mylibrary.WeekViewEvent; import com.google.common.collect.BiMap; import com.homa.hls.database.DatabaseManager; import com.homa.hls.database.Device; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; public class CalendarTask extends Activity { TextView startdate; TextView starttime; TextView finishdate; TextView finishtime; Button Apply; Button cancelTOcalendar; Button delete; Switch switch1; EditText weeknumber; TextView textView4,textView5; Integer weeks; RelativeLayout layout1; CheckBox Sunday, Monday, Tuesday, Wednesday, Thursday, Friday, Saturday; Integer day; ListView sectorlistView; MyCustomAdapter deviceAdapter = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_calendar_task); startdate = (TextView)findViewById(R.id.startdate); starttime = (TextView)findViewById(R.id.starttime); finishdate = (TextView)findViewById(R.id.finishdate); finishtime = (TextView)findViewById(R.id.finishtime); Apply = (Button)findViewById(R.id.Apply); cancelTOcalendar = (Button)findViewById(R.id.cancelTOcalendar); delete = (Button)findViewById(R.id.delete); switch1 =(Switch)findViewById(R.id.switch1); weeknumber = (EditText)findViewById(R.id.weeknumber); textView4 = (TextView)findViewById(R.id.textView4); textView5 = (TextView)findViewById(R.id.textView5); layout1 = (RelativeLayout)findViewById(R.id.layout1); Sunday = (CheckBox)findViewById(R.id.Sunday); Monday = (CheckBox)findViewById(R.id.Monday); Tuesday = (CheckBox)findViewById(R.id.Tuesday); Wednesday = (CheckBox)findViewById(R.id.Wednesday); Thursday = (CheckBox)findViewById(R.id.Thursday); Friday = (CheckBox)findViewById(R.id.Friday); Saturday = (CheckBox)findViewById(R.id.Saturday); sectorlistView = (ListView)findViewById(R.id.sectorlistView); final SimpleDateFormat ddf = new SimpleDateFormat("MMM dd, yyyy"); final SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss"); String currentdate = ddf.format(new java.util.Date()); startdate.setText(currentdate); finishdate.setText(currentdate); String currenttime = sdf.format(new java.util.Date()); starttime.setText(currenttime); finishtime.setText(currenttime); final Calendar startTime = Calendar.getInstance(); final Calendar finishTime = Calendar.getInstance(); day 
=startTime.get(Calendar.DAY_OF_WEEK)-1; //========================================Loading the sector info BiMap<String, BiMap> sector = DataManager.getInstance().getsector(); String username = DataManager.getInstance().getUsername(); ArrayList<Group> arrayList = new ArrayList<Group>(); BiMap<String,ArrayList> sectordetails = sector.get(username); for (Map.Entry<String, ArrayList> entry : sectordetails.entrySet()) { String key = entry.getKey(); ArrayList value = entry.getValue(); Group group = new Group(key,value,false); arrayList.add(group); } deviceAdapter = new MyCustomAdapter(this, R.layout.devicelist, arrayList); sectorlistView.setAdapter(deviceAdapter); //=======================================start date and time=============================================== startdate.setOnClickListener(new View.OnClickListener() { DatePickerDialog.OnDateSetListener date = new DatePickerDialog.OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { startTime.set(Calendar.YEAR, year); startTime.set(Calendar.MONTH, monthOfYear); startTime.set(Calendar.DAY_OF_MONTH, dayOfMonth); startdate.setText(ddf.format(startTime.getTime())); day =startTime.get(Calendar.DAY_OF_WEEK)-1; finishTime.set(Calendar.YEAR, year); finishTime.set(Calendar.MONTH, monthOfYear); finishTime.set(Calendar.DAY_OF_MONTH, dayOfMonth); finishdate.setText(ddf.format(startTime.getTime())); } }; @Override public void onClick(View v) { new DatePickerDialog(CalendarTask.this, date, startTime .get(Calendar.YEAR), startTime.get(Calendar.MONTH), startTime.get(Calendar.DAY_OF_MONTH)).show(); } }); starttime.setOnClickListener(new View.OnClickListener() { TimePickerDialog.OnTimeSetListener time = new TimePickerDialog.OnTimeSetListener() { @Override public void onTimeSet(TimePicker view, int Hour, int Minute) { startTime.set(Calendar.HOUR_OF_DAY, Hour); startTime.set(Calendar.MINUTE, Minute); starttime.setText(sdf.format(startTime.getTime())); finishTime.set(Calendar.HOUR_OF_DAY, Hour); finishTime.set(Calendar.MINUTE, Minute); finishtime.setText(sdf.format(startTime.getTime())); } }; @Override public void onClick(View v) { new TimePickerDialog(CalendarTask.this, time, startTime.get(Calendar.HOUR_OF_DAY), startTime.get(Calendar.MINUTE), true).show(); } }); //=================================finish date time============================== finishdate.setOnClickListener(new View.OnClickListener() { DatePickerDialog.OnDateSetListener date = new DatePickerDialog.OnDateSetListener() { @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { finishTime.set(Calendar.YEAR, year); finishTime.set(Calendar.MONTH, monthOfYear); finishTime.set(Calendar.DAY_OF_MONTH, dayOfMonth); finishdate.setText(ddf.format(finishTime.getTime())); } }; @Override public void onClick(View v) { new DatePickerDialog(CalendarTask.this, date, finishTime .get(Calendar.YEAR), finishTime.get(Calendar.MONTH), finishTime.get(Calendar.DAY_OF_MONTH)).show(); } }); finishtime.setOnClickListener(new View.OnClickListener() { TimePickerDialog.OnTimeSetListener time = new TimePickerDialog.OnTimeSetListener() { @Override public void onTimeSet(TimePicker view, int Hour, int Minute) { finishTime.set(Calendar.HOUR_OF_DAY, Hour); finishTime.set(Calendar.MINUTE, Minute); finishtime.setText(sdf.format(finishTime.getTime())); } }; @Override public void onClick(View v) { new TimePickerDialog(CalendarTask.this, time, finishTime.get(Calendar.HOUR_OF_DAY),finishTime.get(Calendar.MINUTE),true).show(); } }); 
//=================================================================================================== cancelTOcalendar.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent1 = new Intent(v.getContext(), GlobalCalendar.class); startActivity(intent1); } }); Apply.setOnClickListener(new View.OnClickListener() { String cname = DataManager.getInstance().getUsername(); String colorname = DataManager.getInstance().getcolorname(); int colorName = Color.parseColor(colorname); long id, oldid; final List<WeekViewEvent> list = DataManager.getInstance().getevents(); final List<Long> IDlist = DataManager.getInstance().getEventID(); final List<List<Long>> grouplist = DataManager.getInstance().getGroupID(); @Override public void onClick(final View v) { final ArrayList<Device> choosedevice = new ArrayList<Device>(); ArrayList<Group> choosegrouplist = deviceAdapter.arrayList; for (int i = 0; i < choosegrouplist.size(); i++) { Group group = choosegrouplist.get(i); if (group.getChecked() == true) { ArrayList<Device> devicelist = group.getList(); for (int j = 0; j < devicelist.size(); j++) { Device device = devicelist.get(j); choosedevice.add(device); } } } if (choosedevice.isEmpty()) { Toast.makeText(CalendarTask.this, "At least one group should be selected", Toast.LENGTH_SHORT).show(); } else { new Thread(new Runnable() { @Override public void run() { final Intent intent = new Intent(v.getContext(), GlobalCalendar.class); // Repetition if (switch1.isChecked()) { if (!weeknumber.getText().toString().isEmpty()) { weeks = Integer.parseInt(weeknumber.getText().toString()); final List<Long> groupedlist = new ArrayList<Long>(); if (weeks > 0) { if ((finishTime.after(startTime))) { // repeat i weeks new Thread(new Runnable() { @Override public void run() { if (!IDlist.isEmpty()) { oldid = IDlist.get((IDlist.size() - 1)); } final AtomicLong counter = new AtomicLong(oldid); for (int i = 0; i < weeks; i++) { final int j = i; new Thread(new Runnable() { @Override public void run() { if (Monday.isChecked()) { id = counter.incrementAndGet(); Calendar MonSt = Calendar.getInstance(), MonFi = Calendar.getInstance(); MonSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); MonSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); MonSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); MonSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); MonSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); MonFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); MonFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); MonFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); MonFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); MonFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 1 - day; MonSt.add(Calendar.DAY_OF_MONTH, date); MonFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, MonSt, MonFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Tuesday.isChecked()) { id = counter.incrementAndGet(); Calendar TueSt = Calendar.getInstance(), TusFi = Calendar.getInstance(); TueSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); TueSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); TueSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); TueSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); 
TueSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); TusFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); TusFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); TusFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); TusFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); TusFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 2 - day; TueSt.add(Calendar.DAY_OF_MONTH, date); TusFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, TueSt, TusFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Wednesday.isChecked()) { id = counter.incrementAndGet(); Calendar WedSt = Calendar.getInstance(), WedFi = Calendar.getInstance(); WedSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); WedSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); WedSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); WedSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); WedSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); WedFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); WedFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); WedFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); WedFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); WedFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 3 - day; WedSt.add(Calendar.DAY_OF_MONTH, date); WedFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, WedSt, WedFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Thursday.isChecked()) { id = counter.incrementAndGet(); Calendar ThuSt = Calendar.getInstance(), ThuFi = Calendar.getInstance(); ThuSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); ThuSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); ThuSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); ThuSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); ThuSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); ThuFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); ThuFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); ThuFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); ThuFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); ThuFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 4 - day; ThuSt.add(Calendar.DAY_OF_MONTH, date); ThuFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, ThuSt, ThuFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Friday.isChecked()) { id = counter.incrementAndGet(); Calendar FriSt = Calendar.getInstance(), FriFi = Calendar.getInstance(); FriSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); FriSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); FriSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); FriSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); FriSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); FriFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); FriFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); 
FriFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); FriFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); FriFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 5 - day; FriSt.add(Calendar.DAY_OF_MONTH, date); FriFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, FriSt, FriFi, colorName, choosedevice); list.add(event); groupedlist.add(id); IDlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Saturday.isChecked()) { id = counter.incrementAndGet(); Calendar SatSt = Calendar.getInstance(), SatFi = Calendar.getInstance(); SatSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); SatSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); SatSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SatSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); SatSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); SatFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); SatFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); SatFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SatFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); SatFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 6 - day; SatSt.add(Calendar.DAY_OF_MONTH, date); SatFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, SatSt, SatFi, colorName, choosedevice); list.add(event); IDlist.add(id); groupedlist.add(id); } } }).start(); new Thread(new Runnable() { @Override public void run() { if (Sunday.isChecked()) { id = counter.incrementAndGet(); Calendar SunSt = Calendar.getInstance(), SunFi = Calendar.getInstance(); SunSt.set(Calendar.YEAR, startTime.get(Calendar.YEAR)); SunSt.set(Calendar.MONTH, startTime.get(Calendar.MONTH)); SunSt.set(Calendar.DAY_OF_MONTH, startTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SunSt.set(Calendar.HOUR_OF_DAY, startTime.get(Calendar.HOUR_OF_DAY)); SunSt.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE)); SunFi.set(Calendar.YEAR, finishTime.get(Calendar.YEAR)); SunFi.set(Calendar.MONTH, finishTime.get(Calendar.MONTH)); SunFi.set(Calendar.DAY_OF_MONTH, finishTime.get(Calendar.DAY_OF_MONTH) + 7 * j); SunFi.set(Calendar.HOUR_OF_DAY, finishTime.get(Calendar.HOUR_OF_DAY)); SunFi.set(Calendar.MINUTE, finishTime.get(Calendar.MINUTE)); Integer date = 0 - day; SunSt.add(Calendar.DAY_OF_MONTH, date); SunFi.add(Calendar.DAY_OF_MONTH, date); WeekViewEvent event; event = new WeekViewEvent(id, cname, SunSt, SunFi, colorName, choosedevice); list.add(event); IDlist.add(id); groupedlist.add(id); } } }).start(); } grouplist.add(groupedlist); DataManager.getInstance().setGroupID(grouplist); DataManager.getInstance().setEventID(IDlist); DataManager.getInstance().setevents(list); startActivity(intent); } }).start(); } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Unvaild time", Toast.LENGTH_LONG).show(); } }); } } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Enter a valid week number (at least 1)", Toast.LENGTH_LONG).show(); } }); } } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Please enter a number", Toast.LENGTH_LONG).show(); } }); } } // not repetition if (switch1.isChecked() == false) { if ((finishTime.after(startTime))) { if (!IDlist.isEmpty()) { id = IDlist.get((IDlist.size() - 1)) + 1; } 
WeekViewEvent event = new WeekViewEvent(id, cname, startTime, finishTime, colorName, choosedevice); list.add(event); IDlist.add(id); DataManager.getInstance().setevents(list); DataManager.getInstance().setEventID(IDlist); startActivity(intent); } else { runOnUiThread(new Runnable() { public void run() { Toast.makeText(CalendarTask.this, "Unvalid Time", Toast.LENGTH_LONG).show(); } }); } } } }).start(); } } }); switch1.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked == true) { startdate.setEnabled(false); finishdate.setEnabled(false); starttime.setEnabled(false); finishtime.setEnabled(false); for (int i = 0; i < layout1.getChildCount(); i++) { View child = layout1.getChildAt(i); child.setEnabled(true); } if (day.equals(1)) { Monday.setChecked(true); Monday.setEnabled(false); } else if (day.equals(2)) { Tuesday.setChecked(true); Tuesday.setEnabled(false); } else if (day.equals(3)) { Wednesday.setChecked(true); Wednesday.setEnabled(false); } else if (day.equals(4)) { Thursday.setChecked(true); Thursday.setEnabled(false); } else if (day.equals(5)) { Friday.setChecked(true); Friday.setEnabled(false); } else if (day.equals(6)) { Saturday.setChecked(true); Saturday.setEnabled(false); } else { Sunday.setChecked(true); Sunday.setEnabled(false); } } else { for (int i = 0; i < layout1.getChildCount(); i++) { View child = layout1.getChildAt(i); child.setEnabled(false); } startdate.setEnabled(true); finishdate.setEnabled(true); starttime.setEnabled(true); finishtime.setEnabled(true); Monday.setChecked(false); Tuesday.setChecked(false); Wednesday.setChecked(false); Thursday.setChecked(false); Friday.setChecked(false); Saturday.setChecked(false); Sunday.setChecked(false); } } }); } private class MyCustomAdapter extends ArrayAdapter<Group> { ArrayList<Group> arrayList; public MyCustomAdapter(Context context, int textViewResourceId, ArrayList<Group> arrayList) { super(context, textViewResourceId, arrayList); this.arrayList = new ArrayList<Group>(); this.arrayList.addAll(arrayList); } @Override public View getView(int position, View convertView, ViewGroup parent) { if (convertView == null) { LayoutInflater vi = (LayoutInflater) getSystemService( Context.LAYOUT_INFLATER_SERVICE); convertView = vi.inflate(R.layout.devicelist, null); } Group group = arrayList.get(position); TextView name = (TextView) convertView.findViewById(R.id.name); final CheckBox checked = (CheckBox) convertView.findViewById(R.id.checked); checked.setTag(group); name.setText(group.getName()); checked.setText(""); checked.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Group group = (Group) v.getTag(); if (checked.isChecked()) { group.setChecked(true); } else group.setChecked(false); } }); return convertView; } } public class Group { String name; ArrayList<Device> devicelist; boolean ischecked; public Group(String name, ArrayList devicelist, boolean ischecked) { this.name = name; this.devicelist = devicelist; this.ischecked = ischecked; } public boolean getChecked() { return ischecked; } public void setChecked(boolean ischecked) { this.ischecked = ischecked; } public String getName() { return name; } public void setName(String name) { this.name = name; } public ArrayList getList() { return devicelist; } public void setList(ArrayList devicelist) { this.devicelist = devicelist; } } }
demo version, bug fixed
Horizon/app/src/main/java/com/example/hesolutions/horizon/CalendarTask.java
demo version, bug fixed
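The CalendarTask.java contents above spawn one thread per checked weekday, and every branch repeats the same Calendar arithmetic: copy the picked start/finish fields, move forward by 7 * j days for the j-th repeated week, then slide by (targetDay - day) to land on the chosen weekday (the dump uses Mon=1 .. Sat=6, Sun=0). Below is only an illustrative sketch of that shift, not part of the committed code; the class and method names are made up, and cloning the Calendar stands in for the field-by-field copy the dumped code performs.

```java
import java.util.Calendar;

// Hypothetical sketch: the per-weekday blocks in CalendarTask all shift the picked
// start/finish times by whole weeks plus a weekday offset. One helper captures that.
public class WeekShiftSketch {

    // pickedDay/targetDay follow the dumped code's convention (Mon=1 .. Sat=6, Sun=0).
    static Calendar shiftToWeekday(Calendar picked, int pickedDay, int targetDay, int weekOffset) {
        Calendar shifted = (Calendar) picked.clone();   // keep the picker's value intact
        // 7 * weekOffset selects the repeated week; (targetDay - pickedDay) matches
        // the "date = n - day" adjustment used in each checkbox branch of the dump
        shifted.add(Calendar.DAY_OF_MONTH, 7 * weekOffset + (targetDay - pickedDay));
        return shifted;
    }

    public static void main(String[] args) {
        Calendar picked = Calendar.getInstance();            // e.g. the chosen start time
        int pickedDay = 2;                                    // suppose the user picked a Tuesday
        Calendar friday = shiftToWeekday(picked, pickedDay, 5, 1); // same time, Friday of next week
        System.out.println(friday.getTime());
    }
}
```

With a helper like this, each checkbox branch would reduce to two shift calls plus the same WeekViewEvent constructor already used in the dumped code.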
Java
apache-2.0
51fb45514ad8ef841d3290998f4a812a4a20e909
0
surya-janani/sakai,kwedoff1/sakai,rodriguezdevera/sakai,introp-software/sakai,colczr/sakai,conder/sakai,whumph/sakai,buckett/sakai-gitflow,introp-software/sakai,buckett/sakai-gitflow,hackbuteer59/sakai,rodriguezdevera/sakai,noondaysun/sakai,willkara/sakai,liubo404/sakai,OpenCollabZA/sakai,surya-janani/sakai,OpenCollabZA/sakai,kwedoff1/sakai,whumph/sakai,udayg/sakai,puramshetty/sakai,bkirschn/sakai,tl-its-umich-edu/sakai,tl-its-umich-edu/sakai,bzhouduke123/sakai,ouit0408/sakai,bkirschn/sakai,pushyamig/sakai,tl-its-umich-edu/sakai,zqian/sakai,joserabal/sakai,surya-janani/sakai,clhedrick/sakai,udayg/sakai,rodriguezdevera/sakai,bzhouduke123/sakai,rodriguezdevera/sakai,ktakacs/sakai,lorenamgUMU/sakai,duke-compsci290-spring2016/sakai,noondaysun/sakai,rodriguezdevera/sakai,hackbuteer59/sakai,bzhouduke123/sakai,bzhouduke123/sakai,ouit0408/sakai,lorenamgUMU/sakai,conder/sakai,lorenamgUMU/sakai,udayg/sakai,kingmook/sakai,colczr/sakai,Fudan-University/sakai,buckett/sakai-gitflow,duke-compsci290-spring2016/sakai,liubo404/sakai,puramshetty/sakai,noondaysun/sakai,Fudan-University/sakai,duke-compsci290-spring2016/sakai,frasese/sakai,kingmook/sakai,joserabal/sakai,ktakacs/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,noondaysun/sakai,Fudan-University/sakai,hackbuteer59/sakai,zqian/sakai,kingmook/sakai,hackbuteer59/sakai,conder/sakai,ouit0408/sakai,buckett/sakai-gitflow,pushyamig/sakai,willkara/sakai,pushyamig/sakai,conder/sakai,liubo404/sakai,clhedrick/sakai,ktakacs/sakai,willkara/sakai,kwedoff1/sakai,colczr/sakai,joserabal/sakai,pushyamig/sakai,noondaysun/sakai,kingmook/sakai,wfuedu/sakai,kingmook/sakai,liubo404/sakai,ouit0408/sakai,kingmook/sakai,willkara/sakai,ktakacs/sakai,surya-janani/sakai,conder/sakai,frasese/sakai,whumph/sakai,buckett/sakai-gitflow,colczr/sakai,bkirschn/sakai,Fudan-University/sakai,noondaysun/sakai,pushyamig/sakai,udayg/sakai,kwedoff1/sakai,buckett/sakai-gitflow,udayg/sakai,tl-its-umich-edu/sakai,frasese/sakai,OpenCollabZA/sakai,wfuedu/sakai,ktakacs/sakai,duke-compsci290-spring2016/sakai,ktakacs/sakai,OpenCollabZA/sakai,ouit0408/sakai,joserabal/sakai,wfuedu/sakai,wfuedu/sakai,noondaysun/sakai,Fudan-University/sakai,puramshetty/sakai,liubo404/sakai,introp-software/sakai,buckett/sakai-gitflow,liubo404/sakai,Fudan-University/sakai,kwedoff1/sakai,surya-janani/sakai,OpenCollabZA/sakai,hackbuteer59/sakai,Fudan-University/sakai,conder/sakai,bkirschn/sakai,puramshetty/sakai,hackbuteer59/sakai,colczr/sakai,OpenCollabZA/sakai,surya-janani/sakai,lorenamgUMU/sakai,bkirschn/sakai,joserabal/sakai,conder/sakai,conder/sakai,puramshetty/sakai,bzhouduke123/sakai,duke-compsci290-spring2016/sakai,pushyamig/sakai,clhedrick/sakai,zqian/sakai,wfuedu/sakai,whumph/sakai,liubo404/sakai,whumph/sakai,OpenCollabZA/sakai,clhedrick/sakai,kwedoff1/sakai,zqian/sakai,bkirschn/sakai,kingmook/sakai,lorenamgUMU/sakai,bzhouduke123/sakai,hackbuteer59/sakai,whumph/sakai,clhedrick/sakai,tl-its-umich-edu/sakai,joserabal/sakai,colczr/sakai,surya-janani/sakai,udayg/sakai,frasese/sakai,bzhouduke123/sakai,pushyamig/sakai,introp-software/sakai,joserabal/sakai,whumph/sakai,pushyamig/sakai,zqian/sakai,frasese/sakai,liubo404/sakai,udayg/sakai,clhedrick/sakai,willkara/sakai,bkirschn/sakai,rodriguezdevera/sakai,surya-janani/sakai,OpenCollabZA/sakai,tl-its-umich-edu/sakai,joserabal/sakai,bkirschn/sakai,lorenamgUMU/sakai,tl-its-umich-edu/sakai,frasese/sakai,lorenamgUMU/sakai,puramshetty/sakai,ouit0408/sakai,whumph/sakai,noondaysun/sakai,zqian/sakai,clhedrick/sakai,ktakacs/sakai,frasese/sakai,zqian/sakai,ouit0408/sakai,k
wedoff1/sakai,tl-its-umich-edu/sakai,rodriguezdevera/sakai,colczr/sakai,zqian/sakai,udayg/sakai,puramshetty/sakai,Fudan-University/sakai,clhedrick/sakai,kwedoff1/sakai,lorenamgUMU/sakai,rodriguezdevera/sakai,colczr/sakai,puramshetty/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,introp-software/sakai,ktakacs/sakai,ouit0408/sakai,introp-software/sakai,willkara/sakai,bzhouduke123/sakai,buckett/sakai-gitflow,wfuedu/sakai,wfuedu/sakai,hackbuteer59/sakai,wfuedu/sakai,duke-compsci290-spring2016/sakai,frasese/sakai,willkara/sakai,kingmook/sakai,willkara/sakai
/********************************************************************************** * $HeadURL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004-2005 The Regents of the University of Michigan, Trustees of Indiana University, * Board of Trustees of the Leland Stanford, Jr., University, and The MIT Corporation * * Licensed under the Educational Community License Version 1.0 (the "License"); * By obtaining, using and/or copying this Original Work, you agree that you have read, * understand, and will comply with the terms and conditions of the Educational Community License. * You may obtain a copy of the License at: * * http://cvs.sakaiproject.org/licenses/license_1_0.html * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.listener.author; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import javax.faces.event.AbortProcessingException; import javax.faces.event.ActionEvent; import javax.faces.event.ActionListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.tool.assessment.data.dao.assessment.Answer; import org.sakaiproject.tool.assessment.data.dao.assessment.AnswerFeedback; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemMetaData; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemText; import org.sakaiproject.tool.assessment.data.ifc.assessment.AnswerFeedbackIfc; import org.sakaiproject.tool.assessment.facade.AgentFacade; import org.sakaiproject.tool.assessment.facade.AssessmentFacade; import org.sakaiproject.tool.assessment.facade.ItemFacade; import org.sakaiproject.tool.assessment.facade.SectionFacade; import org.sakaiproject.tool.assessment.facade.TypeFacade; import org.sakaiproject.tool.assessment.services.ItemService; import org.sakaiproject.tool.assessment.services.QuestionPoolService; import org.sakaiproject.tool.assessment.services.assessment.AssessmentService; import org.sakaiproject.tool.assessment.ui.bean.author.AnswerBean; import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentBean; import org.sakaiproject.tool.assessment.ui.bean.author.ItemAuthorBean; import org.sakaiproject.tool.assessment.ui.bean.author.ItemBean; import org.sakaiproject.tool.assessment.ui.bean.author.MatchItemBean; import org.sakaiproject.tool.assessment.ui.bean.questionpool.QuestionPoolBean; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; /** * <p>Title: Samigo</p> * <p>Description: Sakai Assessment Manager</p> * <p>Copyright: Copyright (c) 2004 Sakai Project</p> * <p>Organization: Sakai Project</p> * @version $Id$ */ public class ItemAddListener implements ActionListener { private static Log log = LogFactory.getLog(ItemAddListener.class); private static ContextUtil cu; private String scalename; // used for multiple choice Survey /** * Standard process action method. 
* @param ae ActionEvent * @throws AbortProcessingException */ public void processAction(ActionEvent ae) throws AbortProcessingException { boolean correct=false; log.info("ItemAdd LISTENER."); System.out.println("lydiatest BEGIN Add item"); ItemAuthorBean itemauthorbean = (ItemAuthorBean) cu.lookupBean("itemauthor"); ItemBean item =itemauthorbean.getCurrentItem(); String answer=item.getCorrAnswer(); String iType=item.getItemType(); //if((!iType.equals("1"))&&(!iType.equals("2"))){ // only check this for Single Correct MC questions if(!iType.equals(TypeFacade.MULTIPLE_CHOICE.toString())){ if (!saveItem(itemauthorbean)) { throw new RuntimeException("failed to saveItem."); } } else if(iType.equals(TypeFacade.MULTIPLE_CHOICE.toString())) { if(!answer.equals("")&& answer!=null) { if (!saveItem(itemauthorbean)) { throw new RuntimeException("failed to saveItem."); } } } /* // do not need to check for mcmc else //Multiple choice,multiple correct { Iterator iter = item.getMultipleChoiceAnswers().iterator(); if(item.getMultipleChoiceAnswers()!=null){ while (iter.hasNext()) { AnswerBean answerbean = (AnswerBean) iter.next(); if (isCorrectChoice(item, answerbean.getLabel().trim())) { correct=true; break; } } if(correct) { if (!saveItem(itemauthorbean)) { throw new RuntimeException("failed to saveItem."); } } } } */ } public boolean saveItem(ItemAuthorBean itemauthor) { //System.out.println("lydiatest in saveItem() "); boolean update = false; try { ItemBean bean = itemauthor.getCurrentItem(); ItemService delegate = new ItemService(); ItemFacade item; // update not working yet, delete, then add if ( (bean.getItemId() != null) && (!bean.getItemId().equals("0"))) { update = true; //System.out.println("lydiatest in saveItem() this is for MODIFY "); // if modify ,itemid shouldn't be null , or 0. 
Long oldId = new Long(bean.getItemId()); delegate.deleteItemContent(oldId, AgentFacade.getAgentString()); item = delegate.getItem(oldId,AgentFacade.getAgentString()); } else{ item = new ItemFacade(); } item.setScore(new Float(bean.getItemScore())); //System.out.println("lydiatest " + item.getScore()); item.setStatus(ItemFacade.ACTIVE_STATUS); //System.out.println("lydiatest in saveItem() should be 1 === " + // bean.getItemType()); item.setTypeId(new Long(bean.getItemType())); //System.out.println("lydiatest set typeid () " + item.getTypeId()); item.setCreatedBy(AgentFacade.getAgentString()); item.setCreatedDate(new Date()); item.setLastModifiedBy(AgentFacade.getAgentString()); item.setLastModifiedDate(new Date()); if (bean.getInstruction() != null) { // for matching item.setInstruction(bean.getInstruction()); } // update hasRationale if (bean.getRationale() != null) { item.setHasRationale(new Boolean(bean.getRationale())); } else { item.setHasRationale(Boolean.FALSE); } // update maxNumAttempts for audio if (bean.getNumAttempts() != null) { //System.out.println("lydiatest numattempt " + bean.getNumAttempts()); item.setTriesAllowed(new Integer(bean.getNumAttempts())); } // save timeallowed for audio recording if (bean.getTimeAllowed() != null) { //System.out.println("lydiatest audio timeallowed " + bean.getTimeAllowed()); item.setDuration(new Integer(bean.getTimeAllowed())); } if (update) { // reset item contents for modify //System.out.println("lydiatest getitemid not 0 ,it is " + item.getItemId()); item.setItemTextSet(new HashSet()); item.setItemMetaDataSet(new HashSet()); //System.out.println( // "lydiatest getitemid not 0 ,setting itemtext metadata to be new hashset "); } // prepare itemText, including answers if (!item.getTypeId().equals(TypeFacade.MATCHING)) { //System.out.println("lydiatest item is not Matching " + item.getTypeId()); item.setItemTextSet(prepareText(item, bean, itemauthor)); } else { //System.out.println("lydiatest item IS Matching " + item.getTypeId()); item.setItemTextSet(prepareTextForMatching(item, bean, itemauthor)); } // prepare MetaData item.setItemMetaDataSet(prepareMetaData(item, bean)); /////////////////////////////////////////////// // FEEDBACK /////////////////////////////////////////////// /* System.out.println("**** FEEDBACK ****"); System.out.println("**** bean.getCorrFeedback()=[" + bean.getCorrFeedback() + "] ***"); System.out.println("**** bean.getIncorrFeedback()=[" + bean.getIncorrFeedback() + "] ***"); System.out.println("**** bean.getGeneralFeedback()=[" + bean.getGeneralFeedback() + "] ***"); */ /////////////////////////////////////////////// // prepare feedback, only store if feedbacks are not empty if ( (bean.getCorrFeedback() != null) && (!bean.getCorrFeedback().equals(""))) { item.setCorrectItemFeedback(stripPtags(bean.getCorrFeedback())); } if ( (bean.getIncorrFeedback() != null) && (!bean.getIncorrFeedback().equals(""))) { item.setInCorrectItemFeedback(stripPtags(bean.getIncorrFeedback())); } if ( (bean.getGeneralFeedback() != null) && (!bean.getGeneralFeedback().equals(""))) { item.setGeneralItemFeedback(stripPtags(bean.getGeneralFeedback())); } /////////////////////////////////////////////// // FEEDBACK /////////////////////////////////////////////// /* System.out.println("**** FEEDBACK ****"); System.out.println("**** item.getCorrectItemFeedback()=[" + item.getCorrectItemFeedback() + "] ***"); System.out.println("**** item.getInCorrectItemFeedback()=[" + item.getInCorrectItemFeedback() + "] ***"); System.out.println("**** 
item.getGeneralItemFeedback()=[" + item.getGeneralItemFeedback() + "] ***"); */ /////////////////////////////////////////////// //ItemFacade savedItem = null; if ( (itemauthor.getTarget() != null) && (itemauthor.getTarget().equals(itemauthor.FROM_QUESTIONPOOL))) { // Came from Pool manager delegate.saveItem(item); //System.out.println( // "lydiatest target=questionpool adding item to qpool "); QuestionPoolService qpdelegate = new QuestionPoolService(); //System.out.println("lydiatest adding item to qpool : " + // itemauthor.getQpoolId()); if (!qpdelegate.hasItem(item.getItemIdString(), new Long(itemauthor.getQpoolId()))) { qpdelegate.addItemToPool(item.getItemIdString(), new Long(itemauthor.getQpoolId())); } QuestionPoolBean qpoolbean = (QuestionPoolBean) cu.lookupBean( "questionpool"); qpoolbean.buildTree(); /* // Reset question pool bean QuestionPoolFacade thepool= qpdelegate.getPool(new Long(itemauthor.getQpoolId()), AgentFacade.getAgentString()); qpoolbean.getCurrentPool().setNumberOfQuestions(thepool.getQuestionSize().toString()); */ qpoolbean.startEditPoolAgain(itemauthor.getQpoolId()); // return to edit pool itemauthor.setOutcome("editPool"); } else { // Came from Assessment Authoring AssessmentService assessdelegate = new AssessmentService(); //System.out.println("lydiatest target=assessment "); // add the item to the specified part, otherwise add to default if (bean.getSelectedSection() != null) { //System.out.println("lydiatest section " + bean.getSelectedSection()); SectionFacade section = assessdelegate.getSection(bean. getSelectedSection()); item.setSection(section); if (update) { // if Modify, need to reorder if assgned to different section ' if ( (bean.getOrigSection() != null) && (!bean.getOrigSection().equals(bean.getSelectedSection()))) { //System.out.println("lydiatest modified, assigned to new section " + bean.getOrigSection() + " to new section = " + bean.getSelectedSection() ); // if reassigned to different section Integer oldSeq = item.getSequence(); item.setSequence(new Integer(section.getItemSet().size() + 1)); //System.out.println("lydiatest new sequence is " + item.getSequence()); // reorder the sequences of items in the OrigSection SectionFacade origsect= assessdelegate.getSection(bean.getOrigSection()); //System.out.println("lydiatest reorder old items sequence in origSection "); shiftItemsInOrigSection(origsect, oldSeq); //System.out.println("lydiatest DONE reorder old items sequence in origSection "); } else { // no action needed } } if (!update) { //System.out.println("lydiatest getInsertPositon() = :" + // itemauthor.getInsertPosition() + "."); if ( (itemauthor.getInsertPosition() == null) || ("".equals(itemauthor.getInsertPosition()))) { //System.out.println("lydiatest add at the end " + // itemauthor.getInsertPosition() + "."); // if adding to the end item.setSequence(new Integer(section.getItemSet().size() + 1)); } else { //System.out.println("lydiatest insert,needs shifting " + // itemauthor.getInsertPosition() + "."); // if inserting or a question String insertPos = itemauthor.getInsertPosition(); shiftSequences(section, new Integer(insertPos)); int insertPosInt = (new Integer(insertPos)).intValue() + 1; item.setSequence(new Integer(insertPosInt)); // reset InsertPosition itemauthor.setInsertPosition(""); } } delegate.saveItem(item); /* section.addItem(item); assessdelegate.saveOrUpdateSection(section); */ } QuestionPoolService qpdelegate = new QuestionPoolService(); // removed the old pool-item mappings if ( (bean.getOrigPool() != null) && 
(!bean.getOrigPool().equals(""))) { qpdelegate.removeQuestionFromPool(item.getItemIdString(), new Long(bean.getOrigPool())); } // if assign to pool, add the item to the pool if ( (!bean.getSelectedPool().equals("")) && (bean.getSelectedPool() != null)) { //System.out.println("lydiatest poolid " + bean.getSelectedPool()); qpdelegate.addItemToPool(item.getItemIdString(), new Long(bean.getSelectedPool())); } // #1a - goto editAssessment.jsp, so reset assessmentBean AssessmentBean assessmentBean = (AssessmentBean) cu.lookupBean( "assessmentBean"); AssessmentFacade assessment = assessdelegate.getAssessment( assessmentBean.getAssessmentId()); assessmentBean.setAssessment(assessment); itemauthor.setOutcome("editAssessment"); } //System.out.println( // "lydiatest SUCCESSFULLY saved Item !!!!!!!!!!!!!!!!!!!!!!!!!"); return true; } catch (Exception e) { e.printStackTrace(); return false; } } private HashSet prepareTextForMatching(ItemFacade item, ItemBean bean, ItemAuthorBean itemauthor) { // looping through matchItemBean //System.out.println("lydiatest prepareTextForMatching: BEGIN>>>>> "); ArrayList matchItemBeanList = bean.getMatchItemBeanList(); HashSet textSet = new HashSet(); Iterator choiceiter = matchItemBeanList.iterator(); while (choiceiter.hasNext()) { MatchItemBean choicebean = (MatchItemBean) choiceiter.next(); ItemText choicetext = new ItemText(); choicetext.setItem(item.getData()); // all set to the same ItemFacade choicetext.setSequence(choicebean.getSequence()); //System.out.println( // "lydiatest prepareTextForMatching: choicetext.setSequence " + // choicetext.getSequence()); //System.out.println("lydiatest getItemtext() " + choicebean.getChoice()); choicetext.setText(stripPtags(choicebean.getChoice())); //System.out.println( // "lydiatest prepareTextForMatching: this is first itemtext choicetext.setText" + // choicetext.getText()); // need to loop through matches for in matchItemBean list // and add all possible matches to this choice //System.out.println( // "lydiatest prepareTextForMatching: now loop through all answers for " + // choicetext.getText()); Iterator answeriter = matchItemBeanList.iterator(); HashSet answerSet = new HashSet(); Answer answer = null; while (answeriter.hasNext()) { MatchItemBean answerbean = (MatchItemBean) answeriter.next(); if (answerbean.getSequence().equals(choicebean.getSequence())) { // System.out.println( // "lydiatest prepareTextForMatching: CORRECT answer : " + // answerbean.getMatch()); answer = new Answer(choicetext, stripPtags(answerbean.getMatch()), answerbean.getSequence(), AnswerBean.choiceLabels[answerbean.getSequence().intValue()-1], Boolean.TRUE, null, new Float(bean.getItemScore())); // only add feedback for correct pairs HashSet answerFeedbackSet = new HashSet(); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. CORRECT_FEEDBACK, stripPtags(answerbean.getCorrMatchFeedback()))); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. 
INCORRECT_FEEDBACK, stripPtags(answerbean.getIncorrMatchFeedback()))); /* System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getCorrMatchFeedback()); System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getIncorrMatchFeedback()); */ answer.setAnswerFeedbackSet(answerFeedbackSet); } else { //System.out.println( // "lydiatest prepareTextForMatching: WRONG answer : " + // answerbean.getMatch()); answer = new Answer(choicetext, stripPtags(answerbean.getMatch()), answerbean.getSequence(), AnswerBean.choiceLabels[answerbean.getSequence().intValue()-1], Boolean.FALSE, null, new Float(bean.getItemScore())); } // record answers for all combination of pairs HashSet answerFeedbackSet = new HashSet(); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. CORRECT_FEEDBACK, stripPtags(answerbean.getCorrMatchFeedback()))); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. INCORRECT_FEEDBACK, stripPtags(answerbean.getIncorrMatchFeedback()))); System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getCorrMatchFeedback()); System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getIncorrMatchFeedback()); answer.setAnswerFeedbackSet(answerFeedbackSet); // answerSet.add(answer); } choicetext.setAnswerSet(answerSet); textSet.add(choicetext); } //System.out.println("lydiatest prepareTextForMatching: END >>>>> "); return textSet; } private HashSet prepareText(ItemFacade item, ItemBean bean, ItemAuthorBean itemauthor) { HashSet textSet = new HashSet(); HashSet answerSet1 = new HashSet(); ///////////////////////////////////////////////////////////// // 1. save Question Text for items with single Question Text // (except matching) ///////////////////////////////////////////////////////////// ItemText text1 = new ItemText(); text1.setItem(item.getData()); text1.setSequence(new Long(1)); //System.out.println("lydiatest getItemtext() " + bean.getItemText()); text1.setText(bean.getItemText()); ///////////////////////////////////////////////////////////// // // 2. 
save Answers // ///////////////////////////////////////////////////////////// //System.out.println("lydiatest getItemType() " + itemauthor.getItemType()); if (item.getTypeId().equals(TypeFacade.TRUE_FALSE)) { //System.out.println("lydiatest setting answers for true and false"); // find correct answer Answer newanswer = null; for (int i = 0; i < bean.getAnswers().length; i++) { String theanswer = bean.getAnswers()[i]; String thelabel = bean.getAnswerLabels()[i]; // store thelabel as the answer text if (theanswer.equals(bean.getCorrAnswer())) { // label is null because we don't use labels in true/false questions // labels are like a, b, c, or i, ii, iii, in multiple choice type newanswer = new Answer(text1, theanswer, new Long(i + 1), null, Boolean.TRUE, null, new Float(bean.getItemScore())); } else { newanswer = new Answer(text1, theanswer, new Long(i + 1), null, Boolean.FALSE, null, new Float(bean.getItemScore())); } answerSet1.add(newanswer); } text1.setAnswerSet(answerSet1); textSet.add(text1); } else if (item.getTypeId().equals(TypeFacade.ESSAY_QUESTION)) { //System.out.println("lydiatest setting answers for short Answer item"); // Storing the model answer essay as an Answer, and feedback in the Answerfeedback String theanswer = bean.getCorrAnswer(); if (theanswer == null) { theanswer = ""; // can be empty } // label is null because we don't use labels in essay questions //theanswer is the model answer used as a sample for student Answer modelanswer = new Answer(text1, theanswer, new Long(1), null, Boolean.TRUE, null, new Float(bean.getItemScore())); HashSet answerFeedbackSet1 = new HashSet(); answerFeedbackSet1.add(new AnswerFeedback(modelanswer, "modelanswer", stripPtags(bean.getCorrFeedback()))); modelanswer.setAnswerFeedbackSet(answerFeedbackSet1); answerSet1.add(modelanswer); text1.setAnswerSet(answerSet1); textSet.add(text1); } else if (item.getTypeId().equals(TypeFacade.MULTIPLE_CHOICE_SURVEY)) { //System.out.println("lydiatest setting answers for survey"); /* TODO: need to use property file for the survey choices, to be able to internationalize. 
Properties p = null; // get properties file try{ p = ContextUtil.getProperties(filename); if (p == null) { throw new Error("Could not find properties file: " + filename); } } catch (Exception e){ e.printStackTrace(); } String noprop = p.getProperty("no"); String yesprop = p.getProperty("yes"); String agreeprop = p.getProperty("agree"); String disagreeprop = p.getProperty("disagree"); */ String scalename = bean.getScaleName(); String[] choices = new String[2]; // label is null because we don't use labels in survey if (scalename.equals("YESNO")) { choices = new String[2]; choices[0] = "No"; choices[1] = "Yes"; } if (scalename.equals("AGREE")) { choices = new String[2]; choices[0] = "Disagree"; choices[1] = "Agree"; } if (scalename.equals("UNDECIDED")) { choices = new String[3]; choices[0] = "Disagree"; choices[1] = "Undecided"; choices[2] = "Agree"; } if (scalename.equals("AVERAGE")) { choices = new String[3]; choices[0] = "Below Average"; choices[1] = "Average"; choices[2] = "Above Average"; } if (scalename.equals("STRONGLY_AGREE")) { choices = new String[5]; choices[0] = "Strongly Disagree"; choices[1] = "Disagree"; choices[2] = "Undecided"; choices[3] = "Agree"; choices[4] = "Strongly Agree"; } if (scalename.equals("EXCELLENT")) { choices = new String[5]; choices[0] = "Unacceptable"; choices[1] = "Below Average"; choices[2] = "Average"; choices[3] = "Above Average"; choices[4] = "Excellent"; } if (scalename.equals("SCALEFIVE")) { choices = new String[5]; choices[0] = "1"; choices[1] = "2"; choices[2] = "3"; choices[3] = "4"; choices[4] = "5"; } if (scalename.equals("SCALETEN")) { choices = new String[10]; choices[0] = "1"; choices[1] = "2"; choices[2] = "3"; choices[3] = "4"; choices[4] = "5"; choices[5] = "6"; choices[6] = "7"; choices[7] = "8"; choices[8] = "9"; choices[9] = "10"; } for (int i = 0; i < choices.length; i++) { Answer answer1 = new Answer(text1, choices[i], new Long(i + 1), null, null, null, new Float(bean.getItemScore())); answerSet1.add(answer1); } text1.setAnswerSet(answerSet1); textSet.add(text1); } // not doing parsing in authoring else if (item.getTypeId().equals(TypeFacade.FILL_IN_BLANK)) { // this is for fill in blank String entiretext = bean.getItemText(); String fibtext = entiretext.replaceAll("[\\{][^\\}]*[\\}]", "{}"); text1.setText(fibtext); //System.out.println(" new text without answer is = " + fibtext); Object[] fibanswers = getFIBanswers(entiretext).toArray(); for (int i = 0; i < fibanswers.length; i++) { String oneanswer = (String) fibanswers[i]; Answer answer1 = new Answer(text1, oneanswer, new Long(i + 1), null, Boolean.TRUE, null, new Float(bean.getItemScore())); answerSet1.add(answer1); } text1.setAnswerSet(answerSet1); textSet.add(text1); } else if ( (item.getTypeId().equals(TypeFacade.MULTIPLE_CHOICE)) || (item.getTypeId().equals(TypeFacade.MULTIPLE_CORRECT))) { // this is for both single/multiple correct multiple choice types //System.out.println("lydiatest multiple choice, "); // for single choice //String theanswer=bean.getCorrAnswer(); Iterator iter = bean.getMultipleChoiceAnswers().iterator(); Answer answer = null; while (iter.hasNext()) { AnswerBean answerbean = (AnswerBean) iter.next(); //System.out.println("lydiatest multiple choice, answerbean.gettext " + // answerbean.getText()); if (isCorrectChoice(bean, answerbean.getLabel().trim())) { answer = new Answer(text1, stripPtags(answerbean.getText()), answerbean.getSequence(), answerbean.getLabel(), Boolean.TRUE, null, new Float(bean.getItemScore())); } else { answer = new Answer(text1, 
stripPtags(answerbean.getText()), answerbean.getSequence(), answerbean.getLabel(), Boolean.FALSE, null, new Float(bean.getItemScore())); } HashSet answerFeedbackSet1 = new HashSet(); answerFeedbackSet1.add(new AnswerFeedback(answer, AnswerFeedbackIfc. GENERAL_FEEDBACK, stripPtags(answerbean.getFeedback()))); answer.setAnswerFeedbackSet(answerFeedbackSet1); answerSet1.add(answer); } text1.setAnswerSet(answerSet1); textSet.add(text1); } // for file Upload and audio recording else { // no answers need to be added textSet.add(text1); } ///////////////////////////////////////////////////////////// // END ///////////////////////////////////////////////////////////// return textSet; } private HashSet prepareMetaData(ItemFacade item, ItemBean bean) { //System.out.println("lydiatest in prepareMetaData() "); HashSet set = new HashSet(); if (bean.getKeyword() != null) { //System.out.println("lydiatest keyword() " + bean.getKeyword()); set.add(new ItemMetaData(item.getData(), ItemMetaData.KEYWORD, bean.getKeyword())); } if (bean.getRubric() != null) { //System.out.println("lydiatest rubric() " + bean.getRubric()); set.add(new ItemMetaData(item.getData(), ItemMetaData.RUBRIC, bean.getRubric())); } if (bean.getObjective() != null) { System.out.println("lydiatest obj() " + bean.getObjective()); set.add(new ItemMetaData(item.getData(), ItemMetaData.OBJECTIVE, bean.getObjective())); } // Randomize property got left out, added in metadata if (bean.getRandomized() != null) { //System.out.println("lydiatest randomize() " + bean.getRandomized()); set.add(new ItemMetaData(item.getData(), ItemMetaData.RANDOMIZE, bean.getRandomized())); } // save ScaleName for survey if it's a survey item if (bean.getScaleName() != null) { //System.out.println("lydiatest scalename() " + bean.getScaleName()); set.add(new ItemMetaData(item.getData(), ItemMetaData.SCALENAME, bean.getScaleName())); } // save part id if (bean.getSelectedSection() != null) { //System.out.println("lydiatest section " + bean.getSelectedSection()); set.add(new ItemMetaData(item.getData(), ItemMetaData.PARTID, bean.getSelectedSection())); } // save pool id if (bean.getSelectedPool() != null) { //System.out.println("lydiatest poolid " + bean.getSelectedPool()); set.add(new ItemMetaData(item.getData(), ItemMetaData.POOLID, bean.getSelectedPool())); } // save timeallowed for audio recording /* // save them in ItemFacade if (bean.getTimeAllowed()!=null){ System.out.println("lydiatest poolid "+ bean.getTimeAllowed() ); set.add(new ItemMetaData(item.getData(), ItemMetaData.TIMEALLOWED, bean.getTimeAllowed())); } */ // save timeallowed for audio recording /* // save them in ItemFacade if (bean.getNumAttempts()!=null){ System.out.println("lydiatest poolid "+ bean.getNumAttempts() ); set.add(new ItemMetaData(item.getData(), ItemMetaData.NUMATTEMPTS, bean.getNumAttempts())); } */ return set; } private static ArrayList getFIBanswers(String entiretext) { String[] tokens = entiretext.split("[\\}][^\\{]*[\\{]"); ArrayList list = new ArrayList(); //System.out.println("lydiatest token.length " + tokens.length); if (tokens.length==1) { String[] afteropen= tokens[0].split("\\{"); if (afteropen.length>1) { // must have text in between {} String[] lastpart = afteropen[1].split("\\}"); list.add(lastpart[0]); } } else { for (int i = 0; i < tokens.length; i++) { if (i == 0) { String[] firstpart = tokens[i].split("\\{"); if (firstpart.length>1) { list.add(firstpart[1]); } } else if (i == (tokens.length - 1)) { String[] lastpart = tokens[i].split("\\}"); list.add(lastpart[0]); } 
else { list.add(tokens[i]); } } } // token.length>1 return list; } /** ** returns if the multile choice label is the correct choice, ** bean.getCorrAnswers() returns a string[] of labels ** bean.getCorrAnswer() returns a string of label **/ public boolean isCorrectChoice(ItemBean bean, String label) { boolean returnvalue = false; if (!bean.getMultipleCorrect()) { //System.out.println( // "lydiatest saving answers : bean.geMultipleCorrect() " + // bean.getMultipleCorrect()); //System.out.println("lydiatest saving answers : label " + label); String corranswer = ContextUtil.lookupParam("itemForm:selectedRadioBtn"); if (corranswer.equals(label)) { returnvalue = true; } else { returnvalue = false; } } else { ArrayList corranswersList = ContextUtil.paramArrayValueLike( "mccheckboxes"); Iterator iter = corranswersList.iterator(); while (iter.hasNext()) { String currentcorrect = (String) iter.next(); if (currentcorrect.trim().equals(label)) { returnvalue = true; break; } else { returnvalue = false; } } } return returnvalue; } /** ** shift sequence number down when inserting or reordering **/ public void shiftSequences(SectionFacade sectfacade, Integer currSeq) { ItemService delegate = new ItemService(); Set itemset = sectfacade.getItemFacadeSet(); //System.out.println("lydiatest item itemset size is " + itemset.size()); Iterator iter = itemset.iterator(); while (iter.hasNext()) { ItemFacade itemfacade = (ItemFacade) iter.next(); Integer itemfacadeseq = itemfacade.getSequence(); //System.out.println("lydiatest shifting orig seq = " + itemfacadeseq); if (itemfacadeseq.compareTo(currSeq) > 0) { itemfacade.setSequence(new Integer(itemfacadeseq.intValue() + 1)); //System.out.println("lydiatest after the deleted item , shift to = " + // itemfacade.getSequence()); delegate.saveItem(itemfacade); } } } public void shiftItemsInOrigSection(SectionFacade sectfacade, Integer currSeq){ ItemService delegate = new ItemService(); Set itemset = sectfacade.getItemFacadeSet(); //System.out.println("lydiatest in shiftItemsInOrigSection item itemset size is " + itemset.size()); // should be size-1 now. Iterator iter = itemset.iterator(); while (iter.hasNext()) { ItemFacade itemfacade = (ItemFacade) iter.next(); Integer itemfacadeseq = itemfacade.getSequence(); //System.out.println("lydiatest shifting orig seq = " + itemfacadeseq); if (itemfacadeseq.compareTo(currSeq) > 0 ){ itemfacade.setSequence(new Integer(itemfacadeseq.intValue()-1) ); //System.out.println("lydiatest after the deleted item , shift to = " + itemfacade.getSequence()); delegate.saveItem(itemfacade); } } } private String stripPtags(String origtext) { // interim solution for the wywisyg bug. This will strip off the first <p> and last </p> if both exists. String newanswer = origtext; if ((origtext!= null)&& (origtext.startsWith("<p")) && (origtext.endsWith("</p>")) ){ newanswer = origtext.substring(origtext.indexOf(">") + 1, origtext.lastIndexOf("</p>")); return newanswer.trim(); } else { return newanswer; } } }
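The getFIBanswers helper in the new ItemAddListener contents above recovers the {...} spans of a fill-in-the-blank prompt by splitting on brace patterns and special-casing the single-token case, while prepareText blanks the same spans with entiretext.replaceAll("[\\{][^\\}]*[\\}]", "{}"). A minimal, self-contained sketch of the same extraction using a regex Matcher follows; the class and method names are hypothetical and this is not the project's code.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative sketch: pull every answer written between { and } out of a
// fill-in-the-blank prompt, the same result getFIBanswers builds with split().
public class FibAnswerSketch {

    static List<String> extractAnswers(String entireText) {
        List<String> answers = new ArrayList<>();
        Matcher m = Pattern.compile("\\{([^}]*)\\}").matcher(entireText);
        while (m.find()) {
            answers.add(m.group(1));   // text between one { and its closing }
        }
        return answers;
    }

    public static void main(String[] args) {
        // "Roses are {red} and violets are {blue}." -> [red, blue]
        System.out.println(extractAnswers("Roses are {red} and violets are {blue}."));
    }
}
```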
samigo/src/org/sakaiproject/tool/assessment/ui/listener/author/ItemAddListener.java
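The listener's stripPtags method is described in the source as an interim fix for the WYSIWYG editor wrapping every value in a single <p>...</p> pair. A standalone sketch of that trimming logic, under a hypothetical class name and added here only for illustration:

```java
// Sketch of the stripPtags idea from the dumped listener: drop one wrapping
// <p ...>...</p> pair if present, otherwise return the text unchanged.
public class StripPtagsSketch {

    static String stripPtags(String origText) {
        if (origText != null && origText.startsWith("<p") && origText.endsWith("</p>")) {
            // keep only what sits between the first ">" and the final "</p>"
            return origText.substring(origText.indexOf('>') + 1, origText.lastIndexOf("</p>")).trim();
        }
        return origText;
    }

    public static void main(String[] args) {
        System.out.println(stripPtags("<p>Answer text</p>"));   // -> "Answer text"
        System.out.println(stripPtags("plain text"));            // unchanged
    }
}
```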
/********************************************************************************** * $HeadURL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004-2005 The Regents of the University of Michigan, Trustees of Indiana University, * Board of Trustees of the Leland Stanford, Jr., University, and The MIT Corporation * * Licensed under the Educational Community License Version 1.0 (the "License"); * By obtaining, using and/or copying this Original Work, you agree that you have read, * understand, and will comply with the terms and conditions of the Educational Community License. * You may obtain a copy of the License at: * * http://cvs.sakaiproject.org/licenses/license_1_0.html * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.listener.author; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import javax.faces.event.AbortProcessingException; import javax.faces.event.ActionEvent; import javax.faces.event.ActionListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.tool.assessment.data.dao.assessment.Answer; import org.sakaiproject.tool.assessment.data.dao.assessment.AnswerFeedback; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemMetaData; import org.sakaiproject.tool.assessment.data.dao.assessment.ItemText; import org.sakaiproject.tool.assessment.data.ifc.assessment.AnswerFeedbackIfc; import org.sakaiproject.tool.assessment.facade.AgentFacade; import org.sakaiproject.tool.assessment.facade.AssessmentFacade; import org.sakaiproject.tool.assessment.facade.ItemFacade; import org.sakaiproject.tool.assessment.facade.SectionFacade; import org.sakaiproject.tool.assessment.facade.TypeFacade; import org.sakaiproject.tool.assessment.services.ItemService; import org.sakaiproject.tool.assessment.services.QuestionPoolService; import org.sakaiproject.tool.assessment.services.assessment.AssessmentService; import org.sakaiproject.tool.assessment.ui.bean.author.AnswerBean; import org.sakaiproject.tool.assessment.ui.bean.author.AssessmentBean; import org.sakaiproject.tool.assessment.ui.bean.author.ItemAuthorBean; import org.sakaiproject.tool.assessment.ui.bean.author.ItemBean; import org.sakaiproject.tool.assessment.ui.bean.author.MatchItemBean; import org.sakaiproject.tool.assessment.ui.bean.questionpool.QuestionPoolBean; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; /** * <p>Title: Samigo</p> * <p>Description: Sakai Assessment Manager</p> * <p>Copyright: Copyright (c) 2004 Sakai Project</p> * <p>Organization: Sakai Project</p> * @version $Id$ */ public class ItemAddListener implements ActionListener { private static Log log = LogFactory.getLog(ItemAddListener.class); private static ContextUtil cu; private String scalename; // used for multiple choice Survey /** * Standard process action method. 
* @param ae ActionEvent * @throws AbortProcessingException */ public void processAction(ActionEvent ae) throws AbortProcessingException { boolean correct=false; log.info("ItemAdd LISTENER."); System.out.println("lydiatest BEGIN Add item"); ItemAuthorBean itemauthorbean = (ItemAuthorBean) cu.lookupBean("itemauthor"); ItemBean item =itemauthorbean.getCurrentItem(); String answer=item.getCorrAnswer(); String iType=item.getItemType(); //if((!iType.equals("1"))&&(!iType.equals("2"))){ // only check this for Single Correct MC questions if(!iType.equals(TypeFacade.MULTIPLE_CHOICE.toString())){ if (!saveItem(itemauthorbean)) { throw new RuntimeException("failed to saveItem."); } } else if(iType.equals(TypeFacade.MULTIPLE_CHOICE.toString())) { if(!answer.equals("")&& answer!=null) { if (!saveItem(itemauthorbean)) { throw new RuntimeException("failed to saveItem."); } } } else //Multiple choice,multiple correct { Iterator iter = item.getMultipleChoiceAnswers().iterator(); if(item.getMultipleChoiceAnswers()!=null){ while (iter.hasNext()) { AnswerBean answerbean = (AnswerBean) iter.next(); if (isCorrectChoice(item, answerbean.getLabel().trim())) { correct=true; break; } } if(correct) { if (!saveItem(itemauthorbean)) { throw new RuntimeException("failed to saveItem."); } } } } } public boolean saveItem(ItemAuthorBean itemauthor) { //System.out.println("lydiatest in saveItem() "); boolean update = false; try { ItemBean bean = itemauthor.getCurrentItem(); ItemService delegate = new ItemService(); ItemFacade item; // update not working yet, delete, then add if ( (bean.getItemId() != null) && (!bean.getItemId().equals("0"))) { update = true; //System.out.println("lydiatest in saveItem() this is for MODIFY "); // if modify ,itemid shouldn't be null , or 0. Long oldId = new Long(bean.getItemId()); delegate.deleteItemContent(oldId, AgentFacade.getAgentString()); item = delegate.getItem(oldId,AgentFacade.getAgentString()); } else{ item = new ItemFacade(); } item.setScore(new Float(bean.getItemScore())); //System.out.println("lydiatest " + item.getScore()); item.setStatus(ItemFacade.ACTIVE_STATUS); //System.out.println("lydiatest in saveItem() should be 1 === " + // bean.getItemType()); item.setTypeId(new Long(bean.getItemType())); //System.out.println("lydiatest set typeid () " + item.getTypeId()); item.setCreatedBy(AgentFacade.getAgentString()); item.setCreatedDate(new Date()); item.setLastModifiedBy(AgentFacade.getAgentString()); item.setLastModifiedDate(new Date()); if (bean.getInstruction() != null) { // for matching item.setInstruction(bean.getInstruction()); } // update hasRationale if (bean.getRationale() != null) { item.setHasRationale(new Boolean(bean.getRationale())); } else { item.setHasRationale(Boolean.FALSE); } // update maxNumAttempts for audio if (bean.getNumAttempts() != null) { //System.out.println("lydiatest numattempt " + bean.getNumAttempts()); item.setTriesAllowed(new Integer(bean.getNumAttempts())); } // save timeallowed for audio recording if (bean.getTimeAllowed() != null) { //System.out.println("lydiatest audio timeallowed " + bean.getTimeAllowed()); item.setDuration(new Integer(bean.getTimeAllowed())); } if (update) { // reset item contents for modify //System.out.println("lydiatest getitemid not 0 ,it is " + item.getItemId()); item.setItemTextSet(new HashSet()); item.setItemMetaDataSet(new HashSet()); //System.out.println( // "lydiatest getitemid not 0 ,setting itemtext metadata to be new hashset "); } // prepare itemText, including answers if 
(!item.getTypeId().equals(TypeFacade.MATCHING)) { //System.out.println("lydiatest item is not Matching " + item.getTypeId()); item.setItemTextSet(prepareText(item, bean, itemauthor)); } else { //System.out.println("lydiatest item IS Matching " + item.getTypeId()); item.setItemTextSet(prepareTextForMatching(item, bean, itemauthor)); } // prepare MetaData item.setItemMetaDataSet(prepareMetaData(item, bean)); /////////////////////////////////////////////// // FEEDBACK /////////////////////////////////////////////// /* System.out.println("**** FEEDBACK ****"); System.out.println("**** bean.getCorrFeedback()=[" + bean.getCorrFeedback() + "] ***"); System.out.println("**** bean.getIncorrFeedback()=[" + bean.getIncorrFeedback() + "] ***"); System.out.println("**** bean.getGeneralFeedback()=[" + bean.getGeneralFeedback() + "] ***"); */ /////////////////////////////////////////////// // prepare feedback, only store if feedbacks are not empty if ( (bean.getCorrFeedback() != null) && (!bean.getCorrFeedback().equals(""))) { item.setCorrectItemFeedback(stripPtags(bean.getCorrFeedback())); } if ( (bean.getIncorrFeedback() != null) && (!bean.getIncorrFeedback().equals(""))) { item.setInCorrectItemFeedback(stripPtags(bean.getIncorrFeedback())); } if ( (bean.getGeneralFeedback() != null) && (!bean.getGeneralFeedback().equals(""))) { item.setGeneralItemFeedback(stripPtags(bean.getGeneralFeedback())); } /////////////////////////////////////////////// // FEEDBACK /////////////////////////////////////////////// /* System.out.println("**** FEEDBACK ****"); System.out.println("**** item.getCorrectItemFeedback()=[" + item.getCorrectItemFeedback() + "] ***"); System.out.println("**** item.getInCorrectItemFeedback()=[" + item.getInCorrectItemFeedback() + "] ***"); System.out.println("**** item.getGeneralItemFeedback()=[" + item.getGeneralItemFeedback() + "] ***"); */ /////////////////////////////////////////////// //ItemFacade savedItem = null; if ( (itemauthor.getTarget() != null) && (itemauthor.getTarget().equals(itemauthor.FROM_QUESTIONPOOL))) { // Came from Pool manager delegate.saveItem(item); //System.out.println( // "lydiatest target=questionpool adding item to qpool "); QuestionPoolService qpdelegate = new QuestionPoolService(); //System.out.println("lydiatest adding item to qpool : " + // itemauthor.getQpoolId()); if (!qpdelegate.hasItem(item.getItemIdString(), new Long(itemauthor.getQpoolId()))) { qpdelegate.addItemToPool(item.getItemIdString(), new Long(itemauthor.getQpoolId())); } QuestionPoolBean qpoolbean = (QuestionPoolBean) cu.lookupBean( "questionpool"); qpoolbean.buildTree(); /* // Reset question pool bean QuestionPoolFacade thepool= qpdelegate.getPool(new Long(itemauthor.getQpoolId()), AgentFacade.getAgentString()); qpoolbean.getCurrentPool().setNumberOfQuestions(thepool.getQuestionSize().toString()); */ qpoolbean.startEditPoolAgain(itemauthor.getQpoolId()); // return to edit pool itemauthor.setOutcome("editPool"); } else { // Came from Assessment Authoring AssessmentService assessdelegate = new AssessmentService(); //System.out.println("lydiatest target=assessment "); // add the item to the specified part, otherwise add to default if (bean.getSelectedSection() != null) { //System.out.println("lydiatest section " + bean.getSelectedSection()); SectionFacade section = assessdelegate.getSection(bean. 
getSelectedSection()); item.setSection(section); if (update) { // if Modify, need to reorder if assgned to different section ' if ( (bean.getOrigSection() != null) && (!bean.getOrigSection().equals(bean.getSelectedSection()))) { //System.out.println("lydiatest modified, assigned to new section " + bean.getOrigSection() + " to new section = " + bean.getSelectedSection() ); // if reassigned to different section Integer oldSeq = item.getSequence(); item.setSequence(new Integer(section.getItemSet().size() + 1)); //System.out.println("lydiatest new sequence is " + item.getSequence()); // reorder the sequences of items in the OrigSection SectionFacade origsect= assessdelegate.getSection(bean.getOrigSection()); //System.out.println("lydiatest reorder old items sequence in origSection "); shiftItemsInOrigSection(origsect, oldSeq); //System.out.println("lydiatest DONE reorder old items sequence in origSection "); } else { // no action needed } } if (!update) { //System.out.println("lydiatest getInsertPositon() = :" + // itemauthor.getInsertPosition() + "."); if ( (itemauthor.getInsertPosition() == null) || ("".equals(itemauthor.getInsertPosition()))) { //System.out.println("lydiatest add at the end " + // itemauthor.getInsertPosition() + "."); // if adding to the end item.setSequence(new Integer(section.getItemSet().size() + 1)); } else { //System.out.println("lydiatest insert,needs shifting " + // itemauthor.getInsertPosition() + "."); // if inserting or a question String insertPos = itemauthor.getInsertPosition(); shiftSequences(section, new Integer(insertPos)); int insertPosInt = (new Integer(insertPos)).intValue() + 1; item.setSequence(new Integer(insertPosInt)); // reset InsertPosition itemauthor.setInsertPosition(""); } } delegate.saveItem(item); /* section.addItem(item); assessdelegate.saveOrUpdateSection(section); */ } QuestionPoolService qpdelegate = new QuestionPoolService(); // removed the old pool-item mappings if ( (bean.getOrigPool() != null) && (!bean.getOrigPool().equals(""))) { qpdelegate.removeQuestionFromPool(item.getItemIdString(), new Long(bean.getOrigPool())); } // if assign to pool, add the item to the pool if ( (!bean.getSelectedPool().equals("")) && (bean.getSelectedPool() != null)) { //System.out.println("lydiatest poolid " + bean.getSelectedPool()); qpdelegate.addItemToPool(item.getItemIdString(), new Long(bean.getSelectedPool())); } // #1a - goto editAssessment.jsp, so reset assessmentBean AssessmentBean assessmentBean = (AssessmentBean) cu.lookupBean( "assessmentBean"); AssessmentFacade assessment = assessdelegate.getAssessment( assessmentBean.getAssessmentId()); assessmentBean.setAssessment(assessment); itemauthor.setOutcome("editAssessment"); } //System.out.println( // "lydiatest SUCCESSFULLY saved Item !!!!!!!!!!!!!!!!!!!!!!!!!"); return true; } catch (Exception e) { e.printStackTrace(); return false; } } private HashSet prepareTextForMatching(ItemFacade item, ItemBean bean, ItemAuthorBean itemauthor) { // looping through matchItemBean //System.out.println("lydiatest prepareTextForMatching: BEGIN>>>>> "); ArrayList matchItemBeanList = bean.getMatchItemBeanList(); HashSet textSet = new HashSet(); Iterator choiceiter = matchItemBeanList.iterator(); while (choiceiter.hasNext()) { MatchItemBean choicebean = (MatchItemBean) choiceiter.next(); ItemText choicetext = new ItemText(); choicetext.setItem(item.getData()); // all set to the same ItemFacade choicetext.setSequence(choicebean.getSequence()); //System.out.println( // "lydiatest prepareTextForMatching: 
choicetext.setSequence " + // choicetext.getSequence()); //System.out.println("lydiatest getItemtext() " + choicebean.getChoice()); choicetext.setText(stripPtags(choicebean.getChoice())); //System.out.println( // "lydiatest prepareTextForMatching: this is first itemtext choicetext.setText" + // choicetext.getText()); // need to loop through matches for in matchItemBean list // and add all possible matches to this choice //System.out.println( // "lydiatest prepareTextForMatching: now loop through all answers for " + // choicetext.getText()); Iterator answeriter = matchItemBeanList.iterator(); HashSet answerSet = new HashSet(); Answer answer = null; while (answeriter.hasNext()) { MatchItemBean answerbean = (MatchItemBean) answeriter.next(); if (answerbean.getSequence().equals(choicebean.getSequence())) { // System.out.println( // "lydiatest prepareTextForMatching: CORRECT answer : " + // answerbean.getMatch()); answer = new Answer(choicetext, stripPtags(answerbean.getMatch()), answerbean.getSequence(), AnswerBean.choiceLabels[answerbean.getSequence().intValue()-1], Boolean.TRUE, null, new Float(bean.getItemScore())); // only add feedback for correct pairs HashSet answerFeedbackSet = new HashSet(); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. CORRECT_FEEDBACK, stripPtags(answerbean.getCorrMatchFeedback()))); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. INCORRECT_FEEDBACK, stripPtags(answerbean.getIncorrMatchFeedback()))); /* System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getCorrMatchFeedback()); System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getIncorrMatchFeedback()); */ answer.setAnswerFeedbackSet(answerFeedbackSet); } else { //System.out.println( // "lydiatest prepareTextForMatching: WRONG answer : " + // answerbean.getMatch()); answer = new Answer(choicetext, stripPtags(answerbean.getMatch()), answerbean.getSequence(), AnswerBean.choiceLabels[answerbean.getSequence().intValue()-1], Boolean.FALSE, null, new Float(bean.getItemScore())); } // record answers for all combination of pairs HashSet answerFeedbackSet = new HashSet(); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. CORRECT_FEEDBACK, stripPtags(answerbean.getCorrMatchFeedback()))); answerFeedbackSet.add(new AnswerFeedback(answer, AnswerFeedbackIfc. INCORRECT_FEEDBACK, stripPtags(answerbean.getIncorrMatchFeedback()))); System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getCorrMatchFeedback()); System.out.println("lydiatest prepareTextForMatching: feedback : " + answerbean.getIncorrMatchFeedback()); answer.setAnswerFeedbackSet(answerFeedbackSet); // answerSet.add(answer); } choicetext.setAnswerSet(answerSet); textSet.add(choicetext); } //System.out.println("lydiatest prepareTextForMatching: END >>>>> "); return textSet; } private HashSet prepareText(ItemFacade item, ItemBean bean, ItemAuthorBean itemauthor) { HashSet textSet = new HashSet(); HashSet answerSet1 = new HashSet(); ///////////////////////////////////////////////////////////// // 1. save Question Text for items with single Question Text // (except matching) ///////////////////////////////////////////////////////////// ItemText text1 = new ItemText(); text1.setItem(item.getData()); text1.setSequence(new Long(1)); //System.out.println("lydiatest getItemtext() " + bean.getItemText()); text1.setText(bean.getItemText()); ///////////////////////////////////////////////////////////// // // 2. 
save Answers // ///////////////////////////////////////////////////////////// //System.out.println("lydiatest getItemType() " + itemauthor.getItemType()); if (item.getTypeId().equals(TypeFacade.TRUE_FALSE)) { //System.out.println("lydiatest setting answers for true and false"); // find correct answer Answer newanswer = null; for (int i = 0; i < bean.getAnswers().length; i++) { String theanswer = bean.getAnswers()[i]; String thelabel = bean.getAnswerLabels()[i]; // store thelabel as the answer text if (theanswer.equals(bean.getCorrAnswer())) { // label is null because we don't use labels in true/false questions // labels are like a, b, c, or i, ii, iii, in multiple choice type newanswer = new Answer(text1, theanswer, new Long(i + 1), null, Boolean.TRUE, null, new Float(bean.getItemScore())); } else { newanswer = new Answer(text1, theanswer, new Long(i + 1), null, Boolean.FALSE, null, new Float(bean.getItemScore())); } answerSet1.add(newanswer); } text1.setAnswerSet(answerSet1); textSet.add(text1); } else if (item.getTypeId().equals(TypeFacade.ESSAY_QUESTION)) { //System.out.println("lydiatest setting answers for short Answer item"); // Storing the model answer essay as an Answer, and feedback in the Answerfeedback String theanswer = bean.getCorrAnswer(); if (theanswer == null) { theanswer = ""; // can be empty } // label is null because we don't use labels in essay questions //theanswer is the model answer used as a sample for student Answer modelanswer = new Answer(text1, theanswer, new Long(1), null, Boolean.TRUE, null, new Float(bean.getItemScore())); HashSet answerFeedbackSet1 = new HashSet(); answerFeedbackSet1.add(new AnswerFeedback(modelanswer, "modelanswer", stripPtags(bean.getCorrFeedback()))); modelanswer.setAnswerFeedbackSet(answerFeedbackSet1); answerSet1.add(modelanswer); text1.setAnswerSet(answerSet1); textSet.add(text1); } else if (item.getTypeId().equals(TypeFacade.MULTIPLE_CHOICE_SURVEY)) { //System.out.println("lydiatest setting answers for survey"); /* TODO: need to use property file for the survey choices, to be able to internationalize. 
Properties p = null; // get properties file try{ p = ContextUtil.getProperties(filename); if (p == null) { throw new Error("Could not find properties file: " + filename); } } catch (Exception e){ e.printStackTrace(); } String noprop = p.getProperty("no"); String yesprop = p.getProperty("yes"); String agreeprop = p.getProperty("agree"); String disagreeprop = p.getProperty("disagree"); */ String scalename = bean.getScaleName(); String[] choices = new String[2]; // label is null because we don't use labels in survey if (scalename.equals("YESNO")) { choices = new String[2]; choices[0] = "No"; choices[1] = "Yes"; } if (scalename.equals("AGREE")) { choices = new String[2]; choices[0] = "Disagree"; choices[1] = "Agree"; } if (scalename.equals("UNDECIDED")) { choices = new String[3]; choices[0] = "Disagree"; choices[1] = "Undecided"; choices[2] = "Agree"; } if (scalename.equals("AVERAGE")) { choices = new String[3]; choices[0] = "Below Average"; choices[1] = "Average"; choices[2] = "Above Average"; } if (scalename.equals("STRONGLY_AGREE")) { choices = new String[5]; choices[0] = "Strongly Disagree"; choices[1] = "Disagree"; choices[2] = "Undecided"; choices[3] = "Agree"; choices[4] = "Strongly Agree"; } if (scalename.equals("EXCELLENT")) { choices = new String[5]; choices[0] = "Unacceptable"; choices[1] = "Below Average"; choices[2] = "Average"; choices[3] = "Above Average"; choices[4] = "Excellent"; } if (scalename.equals("SCALEFIVE")) { choices = new String[5]; choices[0] = "1"; choices[1] = "2"; choices[2] = "3"; choices[3] = "4"; choices[4] = "5"; } if (scalename.equals("SCALETEN")) { choices = new String[10]; choices[0] = "1"; choices[1] = "2"; choices[2] = "3"; choices[3] = "4"; choices[4] = "5"; choices[5] = "6"; choices[6] = "7"; choices[7] = "8"; choices[8] = "9"; choices[9] = "10"; } for (int i = 0; i < choices.length; i++) { Answer answer1 = new Answer(text1, choices[i], new Long(i + 1), null, null, null, new Float(bean.getItemScore())); answerSet1.add(answer1); } text1.setAnswerSet(answerSet1); textSet.add(text1); } // not doing parsing in authoring else if (item.getTypeId().equals(TypeFacade.FILL_IN_BLANK)) { // this is for fill in blank String entiretext = bean.getItemText(); String fibtext = entiretext.replaceAll("[\\{][^\\}]*[\\}]", "{}"); text1.setText(fibtext); //System.out.println(" new text without answer is = " + fibtext); Object[] fibanswers = getFIBanswers(entiretext).toArray(); for (int i = 0; i < fibanswers.length; i++) { String oneanswer = (String) fibanswers[i]; Answer answer1 = new Answer(text1, oneanswer, new Long(i + 1), null, Boolean.TRUE, null, new Float(bean.getItemScore())); answerSet1.add(answer1); } text1.setAnswerSet(answerSet1); textSet.add(text1); } else if ( (item.getTypeId().equals(TypeFacade.MULTIPLE_CHOICE)) || (item.getTypeId().equals(TypeFacade.MULTIPLE_CORRECT))) { // this is for both single/multiple correct multiple choice types //System.out.println("lydiatest multiple choice, "); // for single choice //String theanswer=bean.getCorrAnswer(); Iterator iter = bean.getMultipleChoiceAnswers().iterator(); Answer answer = null; while (iter.hasNext()) { AnswerBean answerbean = (AnswerBean) iter.next(); //System.out.println("lydiatest multiple choice, answerbean.gettext " + // answerbean.getText()); if (isCorrectChoice(bean, answerbean.getLabel().trim())) { answer = new Answer(text1, stripPtags(answerbean.getText()), answerbean.getSequence(), answerbean.getLabel(), Boolean.TRUE, null, new Float(bean.getItemScore())); } else { answer = new Answer(text1, 
stripPtags(answerbean.getText()), answerbean.getSequence(), answerbean.getLabel(), Boolean.FALSE, null, new Float(bean.getItemScore())); } HashSet answerFeedbackSet1 = new HashSet(); answerFeedbackSet1.add(new AnswerFeedback(answer, AnswerFeedbackIfc. GENERAL_FEEDBACK, stripPtags(answerbean.getFeedback()))); answer.setAnswerFeedbackSet(answerFeedbackSet1); answerSet1.add(answer); } text1.setAnswerSet(answerSet1); textSet.add(text1); } // for file Upload and audio recording else { // no answers need to be added textSet.add(text1); } ///////////////////////////////////////////////////////////// // END ///////////////////////////////////////////////////////////// return textSet; } private HashSet prepareMetaData(ItemFacade item, ItemBean bean) { //System.out.println("lydiatest in prepareMetaData() "); HashSet set = new HashSet(); if (bean.getKeyword() != null) { //System.out.println("lydiatest keyword() " + bean.getKeyword()); set.add(new ItemMetaData(item.getData(), ItemMetaData.KEYWORD, bean.getKeyword())); } if (bean.getRubric() != null) { //System.out.println("lydiatest rubric() " + bean.getRubric()); set.add(new ItemMetaData(item.getData(), ItemMetaData.RUBRIC, bean.getRubric())); } if (bean.getObjective() != null) { System.out.println("lydiatest obj() " + bean.getObjective()); set.add(new ItemMetaData(item.getData(), ItemMetaData.OBJECTIVE, bean.getObjective())); } // Randomize property got left out, added in metadata if (bean.getRandomized() != null) { //System.out.println("lydiatest randomize() " + bean.getRandomized()); set.add(new ItemMetaData(item.getData(), ItemMetaData.RANDOMIZE, bean.getRandomized())); } // save ScaleName for survey if it's a survey item if (bean.getScaleName() != null) { //System.out.println("lydiatest scalename() " + bean.getScaleName()); set.add(new ItemMetaData(item.getData(), ItemMetaData.SCALENAME, bean.getScaleName())); } // save part id if (bean.getSelectedSection() != null) { //System.out.println("lydiatest section " + bean.getSelectedSection()); set.add(new ItemMetaData(item.getData(), ItemMetaData.PARTID, bean.getSelectedSection())); } // save pool id if (bean.getSelectedPool() != null) { //System.out.println("lydiatest poolid " + bean.getSelectedPool()); set.add(new ItemMetaData(item.getData(), ItemMetaData.POOLID, bean.getSelectedPool())); } // save timeallowed for audio recording /* // save them in ItemFacade if (bean.getTimeAllowed()!=null){ System.out.println("lydiatest poolid "+ bean.getTimeAllowed() ); set.add(new ItemMetaData(item.getData(), ItemMetaData.TIMEALLOWED, bean.getTimeAllowed())); } */ // save timeallowed for audio recording /* // save them in ItemFacade if (bean.getNumAttempts()!=null){ System.out.println("lydiatest poolid "+ bean.getNumAttempts() ); set.add(new ItemMetaData(item.getData(), ItemMetaData.NUMATTEMPTS, bean.getNumAttempts())); } */ return set; } private static ArrayList getFIBanswers(String entiretext) { String[] tokens = entiretext.split("[\\}][^\\{]*[\\{]"); ArrayList list = new ArrayList(); //System.out.println("lydiatest token.length " + tokens.length); if (tokens.length==1) { String[] afteropen= tokens[0].split("\\{"); if (afteropen.length>1) { // must have text in between {} String[] lastpart = afteropen[1].split("\\}"); list.add(lastpart[0]); } } else { for (int i = 0; i < tokens.length; i++) { if (i == 0) { String[] firstpart = tokens[i].split("\\{"); if (firstpart.length>1) { list.add(firstpart[1]); } } else if (i == (tokens.length - 1)) { String[] lastpart = tokens[i].split("\\}"); list.add(lastpart[0]); } 
else { list.add(tokens[i]); } } } // token.length>1 return list; } /** ** returns if the multile choice label is the correct choice, ** bean.getCorrAnswers() returns a string[] of labels ** bean.getCorrAnswer() returns a string of label **/ public boolean isCorrectChoice(ItemBean bean, String label) { boolean returnvalue = false; if (!bean.getMultipleCorrect()) { //System.out.println( // "lydiatest saving answers : bean.geMultipleCorrect() " + // bean.getMultipleCorrect()); //System.out.println("lydiatest saving answers : label " + label); String corranswer = ContextUtil.lookupParam("itemForm:selectedRadioBtn"); if (corranswer.equals(label)) { returnvalue = true; } else { returnvalue = false; } } else { ArrayList corranswersList = ContextUtil.paramArrayValueLike( "mccheckboxes"); Iterator iter = corranswersList.iterator(); while (iter.hasNext()) { String currentcorrect = (String) iter.next(); if (currentcorrect.trim().equals(label)) { returnvalue = true; break; } else { returnvalue = false; } } } return returnvalue; } /** ** shift sequence number down when inserting or reordering **/ public void shiftSequences(SectionFacade sectfacade, Integer currSeq) { ItemService delegate = new ItemService(); Set itemset = sectfacade.getItemFacadeSet(); //System.out.println("lydiatest item itemset size is " + itemset.size()); Iterator iter = itemset.iterator(); while (iter.hasNext()) { ItemFacade itemfacade = (ItemFacade) iter.next(); Integer itemfacadeseq = itemfacade.getSequence(); //System.out.println("lydiatest shifting orig seq = " + itemfacadeseq); if (itemfacadeseq.compareTo(currSeq) > 0) { itemfacade.setSequence(new Integer(itemfacadeseq.intValue() + 1)); //System.out.println("lydiatest after the deleted item , shift to = " + // itemfacade.getSequence()); delegate.saveItem(itemfacade); } } } public void shiftItemsInOrigSection(SectionFacade sectfacade, Integer currSeq){ ItemService delegate = new ItemService(); Set itemset = sectfacade.getItemFacadeSet(); //System.out.println("lydiatest in shiftItemsInOrigSection item itemset size is " + itemset.size()); // should be size-1 now. Iterator iter = itemset.iterator(); while (iter.hasNext()) { ItemFacade itemfacade = (ItemFacade) iter.next(); Integer itemfacadeseq = itemfacade.getSequence(); //System.out.println("lydiatest shifting orig seq = " + itemfacadeseq); if (itemfacadeseq.compareTo(currSeq) > 0 ){ itemfacade.setSequence(new Integer(itemfacadeseq.intValue()-1) ); //System.out.println("lydiatest after the deleted item , shift to = " + itemfacade.getSequence()); delegate.saveItem(itemfacade); } } } private String stripPtags(String origtext) { // interim solution for the wywisyg bug. This will strip off the first <p> and last </p> if both exists. String newanswer = origtext; if ((origtext!= null)&& (origtext.startsWith("<p")) && (origtext.endsWith("</p>")) ){ newanswer = origtext.substring(origtext.indexOf(">") + 1, origtext.lastIndexOf("</p>")); return newanswer.trim(); } else { return newanswer; } } }
SAM-336 comment out unused code git-svn-id: 840349cbf2a7f44860f6cfd02374235b623e9bf9@460 66ffb92e-73f9-0310-93c1-f5514f145a0a
samigo/src/org/sakaiproject/tool/assessment/ui/listener/author/ItemAddListener.java
SAM-336 comment out unused code
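The ItemAddListener record above leans on a small stripPtags(...) helper to trim the wysiwyg editor's wrapping <p>...</p> from answer text. The sketch below replays that behaviour in isolation; the wrapper class, main method and sample strings are scaffolding added here for illustration and are not part of the original listener.

public class StripPtagsSketch {

    // Mirrors the interim wysiwyg workaround in the record: strip one leading <p ...>
    // and one trailing </p>, then trim; anything else passes through unchanged.
    static String stripPtags(String origtext) {
        if (origtext != null && origtext.startsWith("<p") && origtext.endsWith("</p>")) {
            return origtext.substring(origtext.indexOf(">") + 1, origtext.lastIndexOf("</p>")).trim();
        }
        return origtext;
    }

    public static void main(String[] args) {
        System.out.println(stripPtags("<p align=\"left\">Paris</p>")); // prints: Paris
        System.out.println(stripPtags("Paris"));                       // prints: Paris (unchanged)
    }
}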
Java
apache-2.0
1b1b65a1c778d6cad0566f6bf797f40ea0a81315
0
tomasulo/docker-compose-rule,palantir/docker-compose-rule,palantir/docker-compose-rule
package com.palantir.docker.compose.execution; import com.palantir.docker.compose.connection.ContainerNames; import org.junit.Before; import org.junit.Test; import java.io.IOException; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class RetryingDockerComposeShould { private final DockerCompose dockerCompose = mock(DockerCompose.class); private final Retryer retryer = mock(Retryer.class); private final RetryingDockerCompose retryingDockerCompose = new RetryingDockerCompose(retryer, dockerCompose); private final ContainerNames someContainerNames = new ContainerNames("hey"); @Before public void before() throws IOException, InterruptedException { retryerJustCallsOperation(); } private void retryerJustCallsOperation() throws IOException, InterruptedException { when(retryer.runWithRetries(any(Retryer.RetryableDockerComposeOperation.class))).thenAnswer(invocation -> { Retryer.RetryableDockerComposeOperation operation = (Retryer.RetryableDockerComposeOperation) invocation.getArguments()[0]; return operation.call(); }); } @Test public void calls_up_on_the_underlying_docker_compose() throws IOException, InterruptedException { retryingDockerCompose.up(); verify(dockerCompose).up(); verifyNoMoreInteractions(dockerCompose); } @Test public void call_ps_on_the_underlying_docker_compose_and_returns_the_same_value() throws IOException, InterruptedException { when(dockerCompose.ps()).thenReturn(someContainerNames); assertThat(retryingDockerCompose.ps(), is(someContainerNames)); verify(dockerCompose).ps(); verifyNoMoreInteractions(dockerCompose); } }
src/test/java/com/palantir/docker/compose/execution/RetryingDockerComposeShould.java
package com.palantir.docker.compose.execution; import com.palantir.docker.compose.connection.ContainerNames; import com.palantir.docker.compose.utils.MockitoMultiAnswer; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import java.io.IOException; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class RetryingDockerComposeShould { private static final Object SUCCESS = null; private final DockerCompose dockerCompose = mock(DockerCompose.class); private final RetryingDockerCompose retryingDockerCompose = new RetryingDockerCompose(2, dockerCompose); private final ContainerNames someContainerNames = new ContainerNames("hey"); @Test public void not_retry_if_the_up_command_was_successful() throws IOException, InterruptedException { retryingDockerCompose.up(); verify(dockerCompose).up(); verifyNoMoreInteractions(dockerCompose); } @Test public void retry_up_if_the_command_failed_once() throws IOException, InterruptedException { doAnswer(new MockitoMultiAnswer() { @Override protected Object firstCall(InvocationOnMock invocation) throws Exception { throw new DockerComposeExecutionException(); } @Override protected Object secondCall(InvocationOnMock invocation) throws Exception { return SUCCESS; } }).when(dockerCompose).up(); retryingDockerCompose.up(); verify(dockerCompose, times(2)).up(); verifyNoMoreInteractions(dockerCompose); } @Test public void throw_the_last_exception_when_up_fails_more_times_than_the_specified_attempts() throws IOException, InterruptedException { DockerComposeExecutionException finalException = new DockerComposeExecutionException(); doAnswer(new MockitoMultiAnswer() { @Override protected Object firstCall(InvocationOnMock invocation) throws Exception { throw new DockerComposeExecutionException(); } @Override protected Object secondCall(InvocationOnMock invocation) throws Exception { throw finalException; } }).when(dockerCompose).up(); try { retryingDockerCompose.up(); fail("Should have caught exception"); } catch (DockerComposeExecutionException actualException) { assertThat(actualException, is(finalException)); } verify(dockerCompose, times(2)).up(); verifyNoMoreInteractions(dockerCompose); } @Test public void not_retry_if_the_ps_command_was_successful_and_return_the_correct_container_names() throws IOException, InterruptedException { when(dockerCompose.ps()).thenReturn(someContainerNames); assertThat(retryingDockerCompose.ps(), is(someContainerNames)); verify(dockerCompose).ps(); verifyNoMoreInteractions(dockerCompose); } @Test public void retry_ps_if_the_command_failed_once_and_return_the_last_container_names() throws IOException, InterruptedException { when(dockerCompose.ps()).thenAnswer(new MockitoMultiAnswer() { @Override protected Object firstCall(InvocationOnMock invocation) throws Exception { throw new DockerComposeExecutionException(); } @Override protected Object secondCall(InvocationOnMock invocation) throws Exception { return someContainerNames; } }); assertThat(retryingDockerCompose.ps(), is(someContainerNames)); verify(dockerCompose, times(2)).ps(); verifyNoMoreInteractions(dockerCompose); } @Test public void throw_the_last_exception_when_ps_fails_more_times_than_the_specified_attempts() throws 
IOException, InterruptedException { DockerComposeExecutionException finalException = new DockerComposeExecutionException(); when(dockerCompose.ps()).thenAnswer(new MockitoMultiAnswer() { @Override protected Object firstCall(InvocationOnMock invocation) throws Exception { throw new DockerComposeExecutionException(); } @Override protected Object secondCall(InvocationOnMock invocation) throws Exception { throw finalException; } }); try { retryingDockerCompose.ps(); fail("Should have caught exception"); } catch (DockerComposeExecutionException actualException) { assertThat(actualException, is(finalException)); } verify(dockerCompose, times(2)).ps(); verifyNoMoreInteractions(dockerCompose); } }
Cut down tests on RetryingDockerComposeShould
src/test/java/com/palantir/docker/compose/execution/RetryingDockerComposeShould.java
Cut down tests on RetryingDockerComposeShould
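The trimmed-down test above stubs Retryer.runWithRetries(...) so that it simply invokes the supplied operation, whereas the older version it replaces exercised real retry behaviour (retry once, then rethrow the last DockerComposeExecutionException). The record does not include the Retryer class itself; the sketch below is only an assumed minimal shape consistent with how the test uses it, not the project's actual implementation.

import java.io.IOException;

public class RetryerSketch {

    // Same nested-interface shape the test stubs via Retryer.RetryableDockerComposeOperation.
    public interface RetryableDockerComposeOperation<T> {
        T call() throws IOException, InterruptedException;
    }

    private final int attempts;

    public RetryerSketch(int attempts) {
        this.attempts = attempts; // assumed to be >= 1
    }

    // Run the operation, retrying on runtime failures and rethrowing the last failure
    // once the configured number of attempts is exhausted.
    public <T> T runWithRetries(RetryableDockerComposeOperation<T> operation)
            throws IOException, InterruptedException {
        RuntimeException lastFailure = null;
        for (int i = 0; i < attempts; i++) {
            try {
                return operation.call();
            } catch (RuntimeException e) {
                lastFailure = e; // remember and try again
            }
        }
        throw lastFailure;
    }
}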
Java
apache-2.0
a43700d40a854e98121b8bcb9c4cb5ddde68ff6b
0
chinmaykolhatkar/incubator-apex-core,simplifi-it/otterx,tushargosavi/apex-core,deepak-narkhede/apex-core,PramodSSImmaneni/apex-core,tweise/apex-core,sandeshh/incubator-apex-core,sandeshh/incubator-apex-core,sandeshh/apex-core,vrozov/apex-core,sandeshh/apex-core,klynchDS/incubator-apex-core,mt0803/incubator-apex-core,vrozov/incubator-apex-core,tweise/incubator-apex-core,apache/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,tushargosavi/apex-core,mt0803/incubator-apex-core,devtagare/incubator-apex-core,aniruddhas/incubator-apex-core,chinmaykolhatkar/incubator-apex-core,tushargosavi/incubator-apex-core,tweise/apex-core,deepak-narkhede/apex-core,andyperlitch/incubator-apex-core,brightchen/apex-core,MalharJenkins/incubator-apex-core,ishark/incubator-apex-core,tweise/incubator-apex-core,brightchen/apex-core,chinmaykolhatkar/incubator-apex-core,ishark/incubator-apex-core,devtagare/incubator-apex-core,tushargosavi/incubator-apex-core,PramodSSImmaneni/apex-core,ishark/incubator-apex-core,vrozov/apex-core,sandeshh/apex-core,vrozov/incubator-apex-core,andyperlitch/incubator-apex-core,mattqzhang/apex-core,PramodSSImmaneni/apex-core,vrozov/apex-core,simplifi-it/otterx,tushargosavi/apex-core,mattqzhang/apex-core,brightchen/apex-core,apache/incubator-apex-core,deepak-narkhede/apex-core,mattqzhang/apex-core,amberarrow/incubator-apex-core,vrozov/incubator-apex-core,tushargosavi/incubator-apex-core,MalharJenkins/incubator-apex-core,amberarrow/incubator-apex-core,simplifi-it/otterx,aniruddhas/incubator-apex-core,sandeshh/incubator-apex-core,devtagare/incubator-apex-core,apache/incubator-apex-core,brightchen/incubator-apex-core,PramodSSImmaneni/incubator-apex-core,tweise/incubator-apex-core,brightchen/incubator-apex-core,klynchDS/incubator-apex-core,tweise/apex-core
/* * Copyright (c) 2012 Malhar, Inc. * All Rights Reserved. */ package com.malhartech.bufferserver.internal; import com.malhartech.bufferserver.packet.MessageType; import com.malhartech.bufferserver.storage.Storage; import com.malhartech.bufferserver.util.SerializedData; import java.util.Iterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author chetan */ class DataListIterator implements Iterator<SerializedData> { private static final Logger logger = LoggerFactory.getLogger(DataListIterator.class); Block da; SerializedData previous = null; SerializedData current = new SerializedData(); private final Storage storage; /** * * @param da */ DataListIterator(Block da, Storage storage) { da.acquire(storage, true); this.da = da; this.storage = storage; current.bytes = da.data; current.offset = da.readingOffset; } /** * * @return boolean */ @Override public synchronized boolean hasNext() { while (true) { da.getNextData(current); switch (current.size) { case -1: return false; case 0: if (da.next == null) { return false; } da.release(storage, false); da.next.acquire(storage, true); da = da.next; current.bytes = da.data; current.offset = da.readingOffset; break; default: if (current.bytes[current.dataOffset] != MessageType.NO_MESSAGE_VALUE) { return true; } current.offset += current.size; break; } } } @Override @SuppressWarnings("FinalizeDeclaration") protected void finalize() throws Throwable { da.release(storage, false); super.finalize(); } /** * * @return {@link com.malhartech.bufferserver.util.SerializedData} */ @Override public SerializedData next() { previous = current; current = new SerializedData(); current.offset = previous.offset + previous.size; current.bytes = previous.bytes; return previous; } /** * Removes from the underlying collection the last element returned by the iterator (optional operation). This method can be called only once per call to * next. The behavior of an iterator is unspecified if the underlying collection is modified while the iteration is in progress in any way other than by * calling this method. */ @Override public void remove() { if (previous == null) { throw new IllegalStateException("Nothing to remove"); } previous.bytes[previous.dataOffset] = MessageType.NO_MESSAGE_VALUE; } void rewind(int processingOffset) { current.offset = processingOffset; } }
bufferserver/src/main/java/com/malhartech/bufferserver/internal/DataListIterator.java
/* * Copyright (c) 2012 Malhar, Inc. * All Rights Reserved. */ package com.malhartech.bufferserver.internal; import com.malhartech.bufferserver.packet.MessageType; import com.malhartech.bufferserver.util.SerializedData; import java.util.Iterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author chetan */ class DataListIterator implements Iterator<SerializedData> { private static final Logger logger = LoggerFactory.getLogger(DataListIterator.class); Block da; SerializedData previous = null; SerializedData current = new SerializedData(); /** * * @param da */ DataListIterator(Block da) { da.acquire(true); this.da = da; current.bytes = da.data; current.offset = da.readingOffset; } /** * * @return boolean */ @Override public synchronized boolean hasNext() { while (true) { da.getNextData(current); switch (current.size) { case -1: return false; case 0: if (da.next == null) { return false; } da.release(false); da.next.acquire(true); da = da.next; current.bytes = da.data; current.offset = da.readingOffset; break; default: if (current.bytes[current.dataOffset] != MessageType.NO_MESSAGE_VALUE) { return true; } current.offset += current.size; break; } } } @Override @SuppressWarnings("FinalizeDeclaration") protected void finalize() throws Throwable { da.release(false); super.finalize(); } /** * * @return {@link com.malhartech.bufferserver.util.SerializedData} */ @Override public SerializedData next() { previous = current; current = new SerializedData(); current.offset = previous.offset + previous.size; current.bytes = previous.bytes; return previous; } /** * Removes from the underlying collection the last element returned by the iterator (optional operation). This method can be called only once per call to * next. The behavior of an iterator is unspecified if the underlying collection is modified while the iteration is in progress in any way other than by * calling this method. */ @Override public void remove() { if (previous == null) { throw new IllegalStateException("Nothing to remove"); } previous.bytes[previous.dataOffset] = MessageType.NO_MESSAGE_VALUE; } void rewind(int processingOffset) { current.offset = processingOffset; } }
integrate with storage.
bufferserver/src/main/java/com/malhartech/bufferserver/internal/DataListIterator.java
integrate with storage.
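The change above threads a Storage instance through Block.acquire/release so that blocks can be spooled in and out while being iterated. For orientation, the fragment below shows the consumption pattern the iterator is written for; Block and Storage construction is buffer-server internal and not part of this record, so the parameters here are placeholders.

// Placeholder consumer: drives DataListIterator the way its hasNext()/next()/remove()
// contract above expects. How firstBlock and storage are obtained is not shown in the record.
static void drainBlocks(Block firstBlock, Storage storage) {
    DataListIterator it = new DataListIterator(firstBlock, storage);
    while (it.hasNext()) {                 // acquires the next block (against storage) when crossing block boundaries
        SerializedData data = it.next();   // hands out the current message and advances the cursor
        // consume 'data' here
        // it.remove();                    // optional: voids the message in the underlying block
    }
}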
Java
apache-2.0
7e1375b22b549c5a99ef90410c62a2642d5734cf
0
yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer,yuri0x7c1/ofbiz-explorer
package com.github.yuri0x7c1.ofbiz.explorer.generator.util; import java.io.File; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.StringUtils; import org.jboss.forge.roaster.Roaster; import org.jboss.forge.roaster.model.source.FieldSource; import org.jboss.forge.roaster.model.source.JavaClassSource; import org.jboss.forge.roaster.model.source.MethodSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.env.Environment; import org.springframework.stereotype.Component; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.Entity; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.Field; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.FieldType; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.Relation; import com.github.yuri0x7c1.ofbiz.explorer.util.OfbizInstance; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @Slf4j @Component public class EntityGenerator { public static final String GENERIC_VALUE_CLASS_NAME = "org.apache.ofbiz.entity.GenericValue"; @Autowired private OfbizInstance ofbizInstance; @Autowired private Environment env; public String getRelationFieldName(Relation relation) { StringBuilder name = new StringBuilder(); if (!StringUtils.isBlank(relation.getTitle())) { name.append(relation.getTitle()) .append(relation.getRelEntityName()); } else { name.append(relation.getRelEntityName()); } name.setCharAt(0, Character.toLowerCase(name.charAt(0))); if (relation.getKeyMap().size() == 1) { if (name.toString().equals(relation.getKeyMap().get(0).getFieldName())) { name.append("Relation"); } } return StringUtils.uncapitalize(name.toString()); } public String getPackageName(Entity entity) { String packageName = entity.getPackageName().replace("return", "_return").replace("enum", "_enum"); String basePackage = env.getProperty("generator.base_package"); if (basePackage != null && !basePackage.equals("org.apache.ofbiz")) { packageName = packageName.replace("org.apache.ofbiz", basePackage); } String entityPackage = env.getProperty("generator.entity.package"); if (entityPackage != null) { packageName += "." 
+ entityPackage; } return packageName; } /** * Create entity class * @param entity * @return */ private JavaClassSource createEntityClass(Entity entity) { // create entity class final JavaClassSource entityClass = Roaster.create(JavaClassSource.class); entityClass.setPackage(getPackageName(entity)) .setName(entity.getEntityName()); // comment entityClass.getJavaDoc().setFullText(GeneratorUtil.createCaptionFromCamelCase(entity.getEntityName())); // add serialization stuff entityClass.addInterface(Serializable.class); entityClass.addField() .setName("serialVersionUID") .setType(long.class) .setLiteralInitializer(String.valueOf(RandomUtils.nextLong(0, Long.MAX_VALUE-1)) + "L") .setPublic() .setStatic(true) .setFinal(true); // add static entity name field entityClass.addField() .setName("NAME") .setType(String.class) .setStringInitializer(entity.getEntityName()) .setPublic() .setStatic(true) .setFinal(true); // create columns for (Field field : entity.getField()) { Class<?> fieldJavaType = FieldType.find(field).getJavaType(); FieldSource<JavaClassSource> entityField = entityClass.addField() .setName(field.getName()) .setType(fieldJavaType) .setPrivate(); // add comment entityField.getJavaDoc().setFullText(GeneratorUtil.createCaptionFromCamelCase(field.getName())); // add lombok getters an setters entityField.addAnnotation(Getter.class); entityField.addAnnotation(Setter.class); } return entityClass; } /** * Create constructor with GenericValue entity parameter * @param entity * @param entityClass */ private void createConstructorWithGenericValueParameter(Entity entity, JavaClassSource entityClass) { // create columns StringBuilder constructorBody = new StringBuilder(); // constructor body for (Field field : entity.getField()) { // append param to "fromValue()" body constructorBody.append(String.format("%s = %s value.get(\"%s\");", field.getName(), "(" + FieldType.find(field).getJavaType().getSimpleName() + ")", field.getName())); } MethodSource<JavaClassSource> constructor = entityClass.addMethod() .setConstructor(true) .setPublic() .setBody(constructorBody.toString()); constructor.addParameter(GENERIC_VALUE_CLASS_NAME, "value"); } /** * Create fromValue static method * @param entity * @param entityClass */ private void createFromValueStaticMethod(Entity entity, JavaClassSource entityClass) { String fromValueMethodBody = "return new " + entityClass.getName() + "(value);"; MethodSource<JavaClassSource> fromValueMethod = entityClass.addMethod() .setName("fromValue") .setPublic() .setStatic(true) .setReturnType(entityClass) .setBody(fromValueMethodBody); fromValueMethod.addParameter(GENERIC_VALUE_CLASS_NAME, "value"); } /** * Create fromValues static method * @param entity * @param entityClass */ private void createFromValuesStaticMethod(Entity entity, JavaClassSource entityClass) { entityClass.addImport(List.class); entityClass.addImport(ArrayList.class); String fromValuesMethodBody = String.format("List<%s> entities = new ArrayList<>();" + "for (GenericValue value : values) {" + " entities.add(new %s(value));" + "}" + "return entities;", entityClass.getName(), entityClass.getName()); MethodSource<JavaClassSource> fromValuesMethod = entityClass.addMethod() .setName("fromValues") .setPublic() .setStatic(true) .setReturnType("List<" + entityClass.getName() + ">") .setBody(fromValuesMethodBody); fromValuesMethod.addParameter("List<GenericValue>", "values"); } /** * Generate code * @param entity * @return * @throws Exception */ public String generate(Entity entity) throws Exception { 
log.info("Generate entity: {}", entity.getEntityName()); // create entity class final JavaClassSource entityClass = createEntityClass(entity); // create constructor createConstructorWithGenericValueParameter(entity, entityClass); // create fromValue static method createFromValueStaticMethod(entity, entityClass); // create fromValues static method createFromValuesStaticMethod(entity, entityClass); String destinationPath = env.getProperty("generator.destination_path"); File src = new File(FilenameUtils.concat(destinationPath, GeneratorUtil.packageNameToPath(getPackageName(entity))), entity.getEntityName() + ".java"); FileUtils.writeStringToFile(src, entityClass.toString()); return entityClass.toString(); } }
src/main/java/com/github/yuri0x7c1/ofbiz/explorer/generator/util/EntityGenerator.java
package com.github.yuri0x7c1.ofbiz.explorer.generator.util; import java.io.File; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.StringUtils; import org.jboss.forge.roaster.Roaster; import org.jboss.forge.roaster.model.source.FieldSource; import org.jboss.forge.roaster.model.source.JavaClassSource; import org.jboss.forge.roaster.model.source.MethodSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.env.Environment; import org.springframework.stereotype.Component; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.Entity; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.Field; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.FieldType; import com.github.yuri0x7c1.ofbiz.explorer.entity.xml.Relation; import com.github.yuri0x7c1.ofbiz.explorer.util.OfbizInstance; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @Slf4j @Component public class EntityGenerator { public static final String GENERIC_VALUE_CLASS_NAME = "org.apache.ofbiz.entity.GenericValue"; @Autowired private OfbizInstance ofbizInstance; @Autowired private Environment env; public String getRelationFieldName(Relation relation) { StringBuilder name = new StringBuilder(); if (!StringUtils.isBlank(relation.getTitle())) { name.append(relation.getTitle()) .append(relation.getRelEntityName()); } else { name.append(relation.getRelEntityName()); } name.setCharAt(0, Character.toLowerCase(name.charAt(0))); if (relation.getKeyMap().size() == 1) { if (name.toString().equals(relation.getKeyMap().get(0).getFieldName())) { name.append("Relation"); } } return StringUtils.uncapitalize(name.toString()); } public String getPackageName(Entity entity) { String packageName = entity.getPackageName().replace("return", "_return").replace("enum", "_enum"); String basePackage = env.getProperty("generator.base_package"); if (basePackage != null && !basePackage.equals("org.apache.ofbiz")) { packageName = packageName.replace("org.apache.ofbiz", basePackage); } String entityPackage = env.getProperty("generator.entity.package"); if (entityPackage != null) { packageName += "." 
+ entityPackage; } return packageName; } /** * Create entity class * @param entity * @return */ private JavaClassSource createEntityClass(Entity entity) { // create entity class final JavaClassSource entityClass = Roaster.create(JavaClassSource.class); entityClass.setPackage(getPackageName(entity)) .setName(entity.getEntityName()); // comment entityClass.getJavaDoc().setFullText(GeneratorUtil.createCaptionFromCamelCase(entity.getEntityName())); // add serialization stuff entityClass.addInterface(Serializable.class); entityClass.addField() .setName("serialVersionUID") .setType(long.class) .setLiteralInitializer(String.valueOf(RandomUtils.nextLong(0, Long.MAX_VALUE-1)) + "L") .setPublic() .setStatic(true) .setFinal(true); // create columns for (Field field : entity.getField()) { Class<?> fieldJavaType = FieldType.find(field).getJavaType(); FieldSource<JavaClassSource> entityField = entityClass.addField() .setName(field.getName()) .setType(fieldJavaType) .setPrivate(); // add comment entityField.getJavaDoc().setFullText(GeneratorUtil.createCaptionFromCamelCase(field.getName())); // add lombok getters an setters entityField.addAnnotation(Getter.class); entityField.addAnnotation(Setter.class); } return entityClass; } /** * Create constructor with GenericValue entity parameter * @param entity * @param entityClass */ private void createConstructorWithGenericValueParameter(Entity entity, JavaClassSource entityClass) { // create columns StringBuilder constructorBody = new StringBuilder(); // constructor body for (Field field : entity.getField()) { // append param to "fromValue()" body constructorBody.append(String.format("%s = %s value.get(\"%s\");", field.getName(), "(" + FieldType.find(field).getJavaType().getSimpleName() + ")", field.getName())); } MethodSource<JavaClassSource> constructor = entityClass.addMethod() .setConstructor(true) .setPublic() .setBody(constructorBody.toString()); constructor.addParameter(GENERIC_VALUE_CLASS_NAME, "value"); } /** * Create fromValue static method * @param entity * @param entityClass */ private void createFromValueStaticMethod(Entity entity, JavaClassSource entityClass) { String fromValueMethodBody = "return new " + entityClass.getName() + "(value);"; MethodSource<JavaClassSource> fromValueMethod = entityClass.addMethod() .setName("fromValue") .setPublic() .setStatic(true) .setReturnType(entityClass) .setBody(fromValueMethodBody); fromValueMethod.addParameter(GENERIC_VALUE_CLASS_NAME, "value"); } /** * Create fromValues static method * @param entity * @param entityClass */ private void createFromValuesStaticMethod(Entity entity, JavaClassSource entityClass) { entityClass.addImport(List.class); entityClass.addImport(ArrayList.class); String fromValuesMethodBody = String.format("List<%s> entities = new ArrayList<>();" + "for (GenericValue value : values) {" + " entities.add(new %s(value));" + "}" + "return entities;", entityClass.getName(), entityClass.getName()); MethodSource<JavaClassSource> fromValuesMethod = entityClass.addMethod() .setName("fromValues") .setPublic() .setStatic(true) .setReturnType("List<" + entityClass.getName() + ">") .setBody(fromValuesMethodBody); fromValuesMethod.addParameter("List<GenericValue>", "values"); } /** * Generate code * @param entity * @return * @throws Exception */ public String generate(Entity entity) throws Exception { log.info("Generate entity: {}", entity.getEntityName()); // create entity class final JavaClassSource entityClass = createEntityClass(entity); // create constructor 
createConstructorWithGenericValueParameter(entity, entityClass); // create fromValue static method createFromValueStaticMethod(entity, entityClass); // create fromValues static method createFromValuesStaticMethod(entity, entityClass); String destinationPath = env.getProperty("generator.destination_path"); File src = new File(FilenameUtils.concat(destinationPath, GeneratorUtil.packageNameToPath(getPackageName(entity))), entity.getEntityName() + ".java"); FileUtils.writeStringToFile(src, entityClass.toString()); return entityClass.toString(); } }
Add entity name constant to entity pojo
src/main/java/com/github/yuri0x7c1/ofbiz/explorer/generator/util/EntityGenerator.java
Add entity name constant to entity pojo
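For reference, this is roughly what the generator above emits after this commit, using a hypothetical "Party" entity with a single partyId field. The package name, field list and serialVersionUID value are invented for illustration; only the overall shape (Serializable, NAME constant, Lombok accessors, GenericValue constructor, fromValue/fromValues) follows the generator code in the record.

package com.example.ofbiz.party.entity;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

import org.apache.ofbiz.entity.GenericValue;

import lombok.Getter;
import lombok.Setter;

/**
 * Party
 */
public class Party implements Serializable {

    public static final long serialVersionUID = 1234567890L;

    /** Entity name constant introduced by this commit. */
    public static final String NAME = "Party";

    /** Party Id */
    @Getter
    @Setter
    private String partyId;

    public Party(GenericValue value) {
        partyId = (String) value.get("partyId");
    }

    public static Party fromValue(GenericValue value) {
        return new Party(value);
    }

    public static List<Party> fromValues(List<GenericValue> values) {
        List<Party> entities = new ArrayList<>();
        for (GenericValue value : values) {
            entities.add(new Party(value));
        }
        return entities;
    }
}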
Java
apache-2.0
4ffb593af97cb27f962390e324dc5a234b6bf079
0
aleksandr-m/strutsclipse
/* * Copyright 2015-2017 Aleksandr Mashchenko. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amashchenko.eclipse.strutsclipse.taglib; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IResource; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.IRegion; import org.eclipse.jface.text.ITextViewer; import org.eclipse.jface.text.hyperlink.IHyperlink; import com.amashchenko.eclipse.strutsclipse.AbstractStrutsHyperlinkDetector; import com.amashchenko.eclipse.strutsclipse.JarEntryStorage; import com.amashchenko.eclipse.strutsclipse.ParseUtil; import com.amashchenko.eclipse.strutsclipse.ProjectUtil; import com.amashchenko.eclipse.strutsclipse.PropertiesParser; import com.amashchenko.eclipse.strutsclipse.ResourceDocument; import com.amashchenko.eclipse.strutsclipse.strutsxml.StrutsXmlConstants; import com.amashchenko.eclipse.strutsclipse.strutsxml.StrutsXmlParser; import com.amashchenko.eclipse.strutsclipse.xmlparser.TagRegion; public class StrutsTaglibHyperlinkDetector extends AbstractStrutsHyperlinkDetector implements StrutsTaglibLocations { private final StrutsTaglibParser strutsTaglibParser; private final StrutsXmlParser strutsXmlParser; private final PropertiesParser propertiesParser; public StrutsTaglibHyperlinkDetector() { strutsTaglibParser = new StrutsTaglibParser(); strutsXmlParser = new StrutsXmlParser(); propertiesParser = new PropertiesParser(); } @Override public IHyperlink[] detectHyperlinks(ITextViewer textViewer, IRegion region, boolean canShowMultipleHyperlinks) { IDocument document = textViewer.getDocument(); List<IHyperlink> linksList = new ArrayList<IHyperlink>(); final TagRegion tagRegion = strutsTaglibParser.getTagRegion(document, region.getOffset()); if (tagRegion != null && tagRegion.getCurrentElement() != null) { final IRegion elementRegion = tagRegion.getCurrentElement() .getValueRegion(); final String elementValue = tagRegion.getCurrentElement() .getValue(); final String key = tagRegion.getName() + tagRegion.getCurrentElement().getName(); switch (key) { case URL_ACTION: case FORM_ACTION: case LINK_ACTION: case ACTION_NAME: case SUBMIT_ACTION: linksList.addAll(createActionLinks(document, elementValue, elementRegion, tagRegion.getAttrValue( StrutsTaglibConstants.NAMESPACE_ATTR, null))); break; case TEXT_NAME: linksList.addAll(createPropertiesKeysLinks(document, elementValue, elementRegion)); break; } } // getText final TagRegion getTextRegion = strutsTaglibParser.getGetTextRegion( document, region.getOffset()); if (getTextRegion != null && getTextRegion.getCurrentElement() != null) { linksList.addAll(createPropertiesKeysLinks(document, getTextRegion .getCurrentElement().getValue(), getTextRegion .getCurrentElement().getValueRegion())); } return linksListToArray(linksList); } private List<IHyperlink> createActionLinks(final IDocument document, final String elementValue, final IRegion elementRegion, final 
String namespaceParamValue) { final List<IHyperlink> links = new ArrayList<IHyperlink>(); final Set<String> namespaces = new HashSet<String>(); if (namespaceParamValue != null) { namespaces.add(namespaceParamValue); } // find struts resources List<ResourceDocument> resources = ProjectUtil .findStrutsResources(document); if (namespaceParamValue == null) { for (ResourceDocument rd : resources) { List<IRegion> regions = strutsXmlParser.getActionRegions( rd.getDocument(), elementValue); if (regions != null) { for (IRegion region : regions) { if (rd.getResource().getType() == IResource.FILE && rd.getResource().exists()) { links.add(new FileHyperlink(elementRegion, (IFile) rd.getResource(), region)); } } } } } else { for (ResourceDocument rd : resources) { IRegion region = strutsXmlParser.getActionRegion( rd.getDocument(), namespaces, elementValue); if (region != null) { if (rd.getResource().getType() == IResource.FILE && rd.getResource().exists()) { links.add(new FileHyperlink(elementRegion, (IFile) rd .getResource(), region)); } } } } return links; } private List<IHyperlink> createPropertiesKeysLinks( final IDocument document, final String elementValue, final IRegion elementRegion) { final List<IHyperlink> links = new ArrayList<IHyperlink>(); // get bundle names Set<String> bundleNames = new HashSet<String>(); List<ResourceDocument> strutsResources = ProjectUtil .findStrutsResources(document); for (ResourceDocument rd : strutsResources) { Map<String, String> constants = strutsXmlParser.getConstantsMap(rd .getDocument()); bundleNames .addAll(ParseUtil.delimitedStringToSet(constants .get(StrutsXmlConstants.CONSTANT_CUSTOM_RESOURCES), StrutsXmlConstants.MULTI_VALUE_SEPARATOR)); } // local List<ResourceDocument> resources = ProjectUtil.findPropertiesResources( document, bundleNames); for (ResourceDocument rd : resources) { IRegion keyRegion = propertiesParser.getKeyRegion(rd.getDocument(), elementValue); if (keyRegion != null) { if (rd.getResource().getType() == IResource.FILE && rd.getResource().exists()) { links.add(new FileHyperlink(elementRegion, (IFile) rd .getResource(), keyRegion)); } } } // jars List<JarEntryStorage> jarStorages = ProjectUtil .findJarEntryPropertyResources(document, bundleNames); for (JarEntryStorage jarStorage : jarStorages) { IRegion keyRegion = propertiesParser.getKeyRegion( jarStorage.toDocument(), elementValue); if (keyRegion != null) { links.add(new StorageHyperlink(elementRegion, jarStorage, keyRegion)); } } return links; } }
strutsclipse-plugin/src/com/amashchenko/eclipse/strutsclipse/taglib/StrutsTaglibHyperlinkDetector.java
/* * Copyright 2015-2017 Aleksandr Mashchenko. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amashchenko.eclipse.strutsclipse.taglib; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IResource; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.IRegion; import org.eclipse.jface.text.ITextViewer; import org.eclipse.jface.text.hyperlink.IHyperlink; import com.amashchenko.eclipse.strutsclipse.AbstractStrutsHyperlinkDetector; import com.amashchenko.eclipse.strutsclipse.JarEntryStorage; import com.amashchenko.eclipse.strutsclipse.ParseUtil; import com.amashchenko.eclipse.strutsclipse.ProjectUtil; import com.amashchenko.eclipse.strutsclipse.PropertiesParser; import com.amashchenko.eclipse.strutsclipse.ResourceDocument; import com.amashchenko.eclipse.strutsclipse.strutsxml.StrutsXmlConstants; import com.amashchenko.eclipse.strutsclipse.strutsxml.StrutsXmlParser; import com.amashchenko.eclipse.strutsclipse.xmlparser.TagRegion; public class StrutsTaglibHyperlinkDetector extends AbstractStrutsHyperlinkDetector implements StrutsTaglibLocations { private final StrutsTaglibParser strutsTaglibParser; private final StrutsXmlParser strutsXmlParser; private final PropertiesParser propertiesParser; public StrutsTaglibHyperlinkDetector() { strutsTaglibParser = new StrutsTaglibParser(); strutsXmlParser = new StrutsXmlParser(); propertiesParser = new PropertiesParser(); } @Override public IHyperlink[] detectHyperlinks(ITextViewer textViewer, IRegion region, boolean canShowMultipleHyperlinks) { IDocument document = textViewer.getDocument(); List<IHyperlink> linksList = new ArrayList<IHyperlink>(); final TagRegion tagRegion = strutsTaglibParser.getTagRegion(document, region.getOffset()); if (tagRegion != null && tagRegion.getCurrentElement() != null) { final IRegion elementRegion = tagRegion.getCurrentElement() .getValueRegion(); final String elementValue = tagRegion.getCurrentElement() .getValue(); final String key = tagRegion.getName() + tagRegion.getCurrentElement().getName(); switch (key) { case URL_ACTION: case FORM_ACTION: case LINK_ACTION: case ACTION_NAME: case SUBMIT_ACTION: linksList.addAll(createActionLinks(document, elementValue, elementRegion, tagRegion.getAttrValue( StrutsTaglibConstants.NAMESPACE_ATTR, null))); break; case TEXT_NAME: linksList.addAll(createPropertiesKeysLinks(document, elementValue, elementRegion)); break; } } return linksListToArray(linksList); } private List<IHyperlink> createActionLinks(final IDocument document, final String elementValue, final IRegion elementRegion, final String namespaceParamValue) { final List<IHyperlink> links = new ArrayList<IHyperlink>(); final Set<String> namespaces = new HashSet<String>(); if (namespaceParamValue != null) { namespaces.add(namespaceParamValue); } // find struts resources List<ResourceDocument> resources = ProjectUtil .findStrutsResources(document); if (namespaceParamValue 
== null) { for (ResourceDocument rd : resources) { List<IRegion> regions = strutsXmlParser.getActionRegions( rd.getDocument(), elementValue); if (regions != null) { for (IRegion region : regions) { if (rd.getResource().getType() == IResource.FILE && rd.getResource().exists()) { links.add(new FileHyperlink(elementRegion, (IFile) rd.getResource(), region)); } } } } } else { for (ResourceDocument rd : resources) { IRegion region = strutsXmlParser.getActionRegion( rd.getDocument(), namespaces, elementValue); if (region != null) { if (rd.getResource().getType() == IResource.FILE && rd.getResource().exists()) { links.add(new FileHyperlink(elementRegion, (IFile) rd .getResource(), region)); } } } } return links; } private List<IHyperlink> createPropertiesKeysLinks( final IDocument document, final String elementValue, final IRegion elementRegion) { final List<IHyperlink> links = new ArrayList<IHyperlink>(); // get bundle names Set<String> bundleNames = new HashSet<String>(); List<ResourceDocument> strutsResources = ProjectUtil .findStrutsResources(document); for (ResourceDocument rd : strutsResources) { Map<String, String> constants = strutsXmlParser.getConstantsMap(rd .getDocument()); bundleNames .addAll(ParseUtil.delimitedStringToSet(constants .get(StrutsXmlConstants.CONSTANT_CUSTOM_RESOURCES), StrutsXmlConstants.MULTI_VALUE_SEPARATOR)); } // local List<ResourceDocument> resources = ProjectUtil.findPropertiesResources( document, bundleNames); for (ResourceDocument rd : resources) { IRegion keyRegion = propertiesParser.getKeyRegion(rd.getDocument(), elementValue); if (keyRegion != null) { if (rd.getResource().getType() == IResource.FILE && rd.getResource().exists()) { links.add(new FileHyperlink(elementRegion, (IFile) rd .getResource(), keyRegion)); } } } // jars List<JarEntryStorage> jarStorages = ProjectUtil .findJarEntryPropertyResources(document, bundleNames); for (JarEntryStorage jarStorage : jarStorages) { IRegion keyRegion = propertiesParser.getKeyRegion( jarStorage.toDocument(), elementValue); if (keyRegion != null) { links.add(new StorageHyperlink(elementRegion, jarStorage, keyRegion)); } } return links; } }
add hyperlinks for getText in JSP
strutsclipse-plugin/src/com/amashchenko/eclipse/strutsclipse/taglib/StrutsTaglibHyperlinkDetector.java
add hyperlinks for getText in JSP
Java
apache-2.0
6e6d757d154d511f7bb541780f6aa7646d8d55df
0
edwardmlyte/maven-plugins,jdcasey/maven-plugins-fixes,omnidavesz/maven-plugins,johnmccabe/maven-plugins,zigarn/maven-plugins,Orange-OpenSource/maven-plugins,apache/maven-plugins,lennartj/maven-plugins,rkorpachyov/maven-plugins,hazendaz/maven-plugins,hazendaz/maven-plugins,mcculls/maven-plugins,mikkokar/maven-plugins,kidaa/maven-plugins,HubSpot/maven-plugins,HubSpot/maven-plugins,edwardmlyte/maven-plugins,apache/maven-plugins,apache/maven-plugins,kidaa/maven-plugins,criteo-forks/maven-plugins,Orange-OpenSource/maven-plugins,krosenvold/maven-plugins,PressAssociation/maven-plugins,sonatype/maven-plugins,criteo-forks/maven-plugins,sonatype/maven-plugins,mcculls/maven-plugins,apache/maven-plugins,lennartj/maven-plugins,criteo-forks/maven-plugins,jdcasey/maven-plugins-fixes,kidaa/maven-plugins,hazendaz/maven-plugins,mikkokar/maven-plugins,restlet/maven-plugins,restlet/maven-plugins,ptahchiev/maven-plugins,restlet/maven-plugins,edwardmlyte/maven-plugins,johnmccabe/maven-plugins,omnidavesz/maven-plugins,mikkokar/maven-plugins,johnmccabe/maven-plugins,hgschmie/apache-maven-plugins,lennartj/maven-plugins,apache/maven-plugins,lennartj/maven-plugins,HubSpot/maven-plugins,mikkokar/maven-plugins,HubSpot/maven-plugins,rkorpachyov/maven-plugins,restlet/maven-plugins,omnidavesz/maven-plugins,ptahchiev/maven-plugins,zigarn/maven-plugins,edwardmlyte/maven-plugins,sonatype/maven-plugins,Orange-OpenSource/maven-plugins,hgschmie/apache-maven-plugins,hgschmie/apache-maven-plugins,hazendaz/maven-plugins,krosenvold/maven-plugins,sonatype/maven-plugins,kidaa/maven-plugins,ptahchiev/maven-plugins,PressAssociation/maven-plugins,PressAssociation/maven-plugins,Orange-OpenSource/maven-plugins,johnmccabe/maven-plugins,kikinteractive/maven-plugins,zigarn/maven-plugins,zigarn/maven-plugins,jdcasey/maven-plugins-fixes,rkorpachyov/maven-plugins,hgschmie/apache-maven-plugins,omnidavesz/maven-plugins,mcculls/maven-plugins,rkorpachyov/maven-plugins,rkorpachyov/maven-plugins,krosenvold/maven-plugins,PressAssociation/maven-plugins,ptahchiev/maven-plugins,krosenvold/maven-plugins,kikinteractive/maven-plugins,mcculls/maven-plugins,hazendaz/maven-plugins,criteo-forks/maven-plugins,kikinteractive/maven-plugins
package org.apache.maven.plugin.rar; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.model.Resource; /** * @author Olivier Lamy * @since 2.3 */ public class RarResource extends Resource { // no op }
maven-rar-plugin/src/main/java/org/apache/maven/plugin/rar/RarResource.java
package org.apache.maven.plugin.rar; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.model.Resource; /** * @author Olivier Lamy * @since 2.3 */ public class RarResource extends Resource { // no op }
checkstyle issue git-svn-id: 6038db50b076e48c7926ed71fd94f8e91be2fbc9@1411443 13f79535-47bb-0310-9956-ffa450edef68
maven-rar-plugin/src/main/java/org/apache/maven/plugin/rar/RarResource.java
checkstyle issue
Java
apache-2.0
d4fe77d843ee2c18ceeef32090e366f44a84ee7c
0
pnarayanan/ambry,linkedin/ambry,vgkholla/ambry,daniellitoc/ambry-Research,nsivabalan/ambry,vgkholla/ambry,linkedin/ambry,pnarayanan/ambry,cgtz/ambry,cgtz/ambry,xiahome/ambry,linkedin/ambry,cgtz/ambry,linkedin/ambry,xiahome/ambry,cgtz/ambry,nsivabalan/ambry,daniellitoc/ambry-Research
package com.github.ambry.utils; import java.nio.ByteBuffer; import java.nio.channels.ClosedChannelException; import java.nio.channels.WritableByteChannel; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; /** * A {@link WritableByteChannel} that stores the bytes written into it in a {@link ByteBuffer}. */ public class ByteBufferChannel implements WritableByteChannel { private final AtomicBoolean channelOpen = new AtomicBoolean(true); private final ReentrantLock bufferLock = new ReentrantLock(); private final ByteBuffer buffer; /** * Gets the {@link ByteBuffer} that is being used to receive writes. * @return the {@link ByteBuffer} that is receives writes to this channel. */ public ByteBuffer getBuffer() { return buffer; } /** * This object needs to be instantiated with a {@link ByteBuffer} that is provided by the caller. The maximum * number of bytes that can be written into the {@code buffer} is determined by {@code buffer.remaining()}. * @param buffer the buffer that can be used to recieve writes. */ public ByteBufferChannel(ByteBuffer buffer) { this.buffer = buffer; } /** * {@inheritDoc} * <p/> * Copies bytes from {@code src} into the {@link ByteBuffer} ({@code buffer}) backing this channel. The number of * bytes copied is the minimum of {@code src.remaining()} and {@code buffer.remaining()}. * @param src the source {@link ByteBuffer} to copy bytes from. * @return the number of bytes copied. * @throws ClosedChannelException if the channel is closed when this function was called. */ @Override public int write(ByteBuffer src) throws ClosedChannelException { if (!isOpen()) { throw new ClosedChannelException(); } int bytesWritten = 0; bufferLock.lock(); try { int bytesToWrite = Math.min(src.remaining(), buffer.remaining()); for (; bytesWritten < bytesToWrite; bytesWritten++) { buffer.put(src.get()); } } finally { bufferLock.unlock(); } return bytesWritten; } @Override public boolean isOpen() { return channelOpen.get(); } @Override public void close() { channelOpen.set(false); } }
ambry-utils/src/main/java/com.github.ambry.utils/ByteBufferChannel.java
package com.github.ambry.utils; import java.nio.ByteBuffer; import java.nio.channels.ClosedChannelException; import java.nio.channels.WritableByteChannel; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantLock; /** * A {@link WritableByteChannel} that stores the bytes written into it in a {@link ByteBuffer}. */ public class ByteBufferChannel implements WritableByteChannel { private final AtomicBoolean channelOpen = new AtomicBoolean(true); private final ReentrantLock bufferLock = new ReentrantLock(); private final ByteBuffer buffer; /** * Gets the {@link ByteBuffer} that is being used to receive writes. * @return the {@link ByteBuffer} that is receives writes to this channel. */ public ByteBuffer getBuffer() { return buffer; } /** * This object needs to be instantiated with a {@link ByteBuffer} that is provided by the caller. The maximum * number of bytes that can be written into the {@code buffer} is determined by {@code buffer.remaining()}. * @param buffer the buffer that can be used to recieve writes. */ public ByteBufferChannel(ByteBuffer buffer) { this.buffer = buffer; } /** * {@inheritDoc} * <p/> * Copies bytes from {@code src} into the {@link ByteBuffer} ({@code buffer}) backing this channel. The number of * bytes copied is the minimum of {@code src.remaining()} and {@code buffer.remaining()}. * @param src the source {@link ByteBuffer} to copy bytes from. * @return the number of bytes copied. * @throws ClosedChannelException if the channel is closed when this function was called. */ @Override public int write(ByteBuffer src) throws ClosedChannelException { if (!isOpen()) { throw new ClosedChannelException(); } int bytesWritten = 0; try { bufferLock.lock(); int bytesToWrite = Math.min(src.remaining(), buffer.remaining()); for (; bytesWritten < bytesToWrite; bytesWritten++) { buffer.put(src.get()); } } finally { bufferLock.unlock(); } return bytesWritten; } @Override public boolean isOpen() { return channelOpen.get(); } @Override public void close() { channelOpen.set(false); } }
One more lock relocation
ambry-utils/src/main/java/com.github.ambry.utils/ByteBufferChannel.java
One more lock relocation
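For illustration only, a minimal usage sketch of the ByteBufferChannel class shown in this record. The demo class name and the sample buffer sizes are assumptions; the constructor, write() and close() behaviour come from the code above, which is assumed to be on the classpath.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Hypothetical demo class; ByteBufferChannel is the class defined in the record above.
public class ByteBufferChannelDemo {
    public static void main(String[] args) throws Exception {
        ByteBuffer destination = ByteBuffer.allocate(8);
        ByteBufferChannel channel = new ByteBufferChannel(destination);

        ByteBuffer source = ByteBuffer.wrap("hello world".getBytes(StandardCharsets.UTF_8));
        // write() copies min(source.remaining(), destination.remaining()) bytes, here 8.
        int written = channel.write(source);
        System.out.println("bytes written: " + written);                   // 8
        System.out.println("bytes left in source: " + source.remaining()); // 3

        channel.close();
        // Any further channel.write(...) now throws ClosedChannelException.
    }
}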
Java
apache-2.0
1eecfc27d1bc487718ac2649c264303bbf4985a9
0
SpineEventEngine/core-java,SpineEventEngine/core-java,SpineEventEngine/core-java
/* * Copyright 2021, TeamDev. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.integration; import com.google.common.collect.ImmutableSet; import io.spine.core.BoundedContextName; import io.spine.server.transport.Publisher; import static com.google.common.base.Preconditions.checkNotNull; import static io.spine.base.Identifier.newUuid; import static io.spine.base.Identifier.pack; /** * Notifies other Bounded Contexts that this Bounded Context now requests some set of external * domain events for consumption. */ final class BroadcastWantedEvents { private final BoundedContextName context; private final Publisher publisher; private ImmutableSet<ExternalEventType> wantedEvents = ImmutableSet.of(); /** * Creates a new instance of this broadcast. * * @param context * the name of the Bounded Context from which the broadcast is performed * @param channel * the channel for broadcasting */ BroadcastWantedEvents(BoundedContextName context, Publisher channel) { this.context = checkNotNull(context); this.publisher = checkNotNull(channel); } /** * Notifies other Bounded Contexts about a change in the types of wanted events. * * <p>If the given {@code newTypes} are the same as those known to this instance previously, * the notification is not sent. * * @param newTypes * types of external events that are consumed by the bounded context */ synchronized void onEventsChanged(ImmutableSet<ExternalEventType> newTypes) { checkNotNull(newTypes); if (wantedEvents.equals(newTypes)) { return; } wantedEvents = newTypes; send(); } /** * Notifies other Bounded Contexts about the domain events for which * it has {@code external} subscribers. */ synchronized void send() { ExternalEventsWanted request = ExternalEventsWanted .newBuilder() .addAllType(wantedEvents) .vBuild(); ExternalMessage wrapped = ExternalMessages.of(request, context); publisher.publish(wrapped.getId(), wrapped); } }
server/src/main/java/io/spine/server/integration/BroadcastWantedEvents.java
/* * Copyright 2021, TeamDev. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.integration; import com.google.common.collect.ImmutableSet; import io.spine.core.BoundedContextName; import io.spine.server.transport.Publisher; import static com.google.common.base.Preconditions.checkNotNull; import static io.spine.base.Identifier.newUuid; import static io.spine.base.Identifier.pack; /** * Notifies other Bounded Contexts that this Bounded Context now requests some set of external * domain events for consumption. */ final class BroadcastWantedEvents { private final BoundedContextName context; private final Publisher publisher; private ImmutableSet<ExternalEventType> wantedEvents = ImmutableSet.of(); /** * Creates a new instance of this broadcast. * * @param context * the name of the Bounded Context from which the broadcast is performed * @param channel * the channel for broadcasting */ BroadcastWantedEvents(BoundedContextName context, Publisher channel) { this.context = checkNotNull(context); this.publisher = checkNotNull(channel); } /** * Notifies other Bounded Contexts about a change in the types of wanted events. * * <p>If the given {@code newTypes} are the same as those known to this instance previously, * the notification is not sent. * * @param newTypes * types of external events that are consumed by the bounded context */ synchronized void onEventsChanged(ImmutableSet<ExternalEventType> newTypes) { checkNotNull(newTypes); if (wantedEvents.equals(newTypes)) { return; } wantedEvents = newTypes; send(); } /** * Notifies other Bounded Contexts about the domain events for which * it has {@code external} subscribers. */ synchronized void send() { ExternalEventsWanted request = ExternalEventsWanted .newBuilder() .addAllType(wantedEvents) .vBuild(); ExternalMessage externalMessage = ExternalMessages.of(request, context); publisher.publish(pack(newUuid()), externalMessage); } }
Rename the variable.
server/src/main/java/io/spine/server/integration/BroadcastWantedEvents.java
Rename the variable.
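The BroadcastWantedEvents class in this record only publishes when the set of wanted event types actually changes. Below is a standalone sketch of that change-detection pattern using plain JDK types; the ChangeBroadcast and Listener names are made up for the example, and Spine's Publisher/ExternalMessage machinery is deliberately left out.

import java.util.Set;

// Hypothetical, framework-free sketch of the "notify only on change" pattern.
final class ChangeBroadcast<T> {
    interface Listener<T> {
        void onChanged(Set<T> current);
    }

    private final Listener<T> listener;
    private Set<T> current = Set.of();

    ChangeBroadcast(Listener<T> listener) {
        this.listener = listener;
    }

    synchronized void update(Set<T> newValues) {
        if (current.equals(newValues)) {
            return; // unchanged set: skip the broadcast, as onEventsChanged() does above
        }
        current = Set.copyOf(newValues);
        listener.onChanged(current);
    }
}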
Java
apache-2.0
2b4ace94ece7412a6a9fbdf2b1f6ffd3b4fea43b
0
jackwakefield/TorrentStream-Android,se-bastiaan/TorrentStream-Android
/* * Copyright (C) 2015-2016 Sébastiaan (github.com/se-bastiaan) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.se_bastiaan.torrentstream; import com.frostwire.jlibtorrent.AlertListener; import com.frostwire.jlibtorrent.FileStorage; import com.frostwire.jlibtorrent.Priority; import com.frostwire.jlibtorrent.TorrentHandle; import com.frostwire.jlibtorrent.TorrentInfo; import com.frostwire.jlibtorrent.TorrentStatus; import com.frostwire.jlibtorrent.alerts.Alert; import com.frostwire.jlibtorrent.alerts.AlertType; import com.frostwire.jlibtorrent.alerts.BlockFinishedAlert; import com.frostwire.jlibtorrent.alerts.PieceFinishedAlert; import com.github.se_bastiaan.torrentstream.listeners.TorrentListener; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; public class Torrent implements AlertListener { private final static Integer MAX_PREPARE_COUNT = 20; private final static Integer MIN_PREPARE_COUNT = 2; private final static Integer DEFAULT_PREPARE_COUNT = 5; private final static Integer SEQUENTIAL_CONCURRENT_PIECES_COUNT = 5; public enum State {UNKNOWN, RETRIEVING_META, STARTING, STREAMING} private Integer piecesToPrepare; private Integer lastPieceIndex; private Integer firstPieceIndex; private Integer selectedFileIndex = -1; private Integer interestedPieceIndex = 0; private Double prepareProgress = 0d; private Double progressStep = 0d; private List<Integer> preparePieces; private Boolean[] hasPieces; private State state = State.RETRIEVING_META; private final TorrentHandle torrentHandle; private final TorrentListener listener; private final Long prepareSize; /** * The constructor for a new Torrent * <p/> * First the largest file in the download is selected as the file for playback * <p/> * After setting this priority, the first and last index of the pieces that make up this file are determined. 
* And last: amount of pieces that are needed for playback are calculated (needed for playback means: make up 10 megabyte of the file) * * @param torrentHandle jlibtorrent TorrentHandle */ public Torrent(TorrentHandle torrentHandle, TorrentListener listener, Long prepareSize) { this.torrentHandle = torrentHandle; this.listener = listener; this.prepareSize = prepareSize; torrentHandle.setPriority(Priority.NORMAL.swig()); if (selectedFileIndex == -1) setLargestFile(); if (this.listener != null) this.listener.onStreamPrepared(this); } /** * Reset piece priorities of selected file to normal */ private void resetPriorities() { Priority[] priorities = torrentHandle.getPiecePriorities(); for (int i = 0; i < priorities.length; i++) { if (i >= firstPieceIndex && i <= lastPieceIndex) { torrentHandle.piecePriority(i, Priority.NORMAL); } else { torrentHandle.piecePriority(i, Priority.IGNORE); } } } /** * Get LibTorrent torrent handle of this torrent * * @return {@link TorrentHandle} */ public TorrentHandle getTorrentHandle() { return torrentHandle; } public File getVideoFile() { return new File(torrentHandle.getSavePath() + "/" + torrentHandle.getTorrentInfo().files().filePath(selectedFileIndex)); } /** * Get the location of the file that is being downloaded * * @return {@link File} The file location */ public File getSaveLocation() { return new File(torrentHandle.getSavePath() + "/" + torrentHandle.getName()); } /** * Resume the torrent download */ public void resume() { torrentHandle.resume(); } /** * Pause the torrent download */ public void pause() { torrentHandle.pause(); } /** * Set the selected file index to the largest file in the torrent */ public void setLargestFile() { setSelectedFileIndex(-1); } /** * Set the index of the file that should be downloaded * If the given index is -1, then the largest file is chosen * * @param selectedFileIndex {@link Integer} Index of the file */ public void setSelectedFileIndex(Integer selectedFileIndex) { TorrentInfo torrentInfo = torrentHandle.getTorrentInfo(); FileStorage fileStorage = torrentInfo.files(); if (selectedFileIndex == -1) { long highestFileSize = 0; int selectedFile = -1; for (int i = 0; i < fileStorage.numFiles(); i++) { long fileSize = fileStorage.fileSize(i); if (highestFileSize < fileSize) { highestFileSize = fileSize; torrentHandle.setFilePriority(selectedFile, Priority.IGNORE); selectedFile = i; torrentHandle.setFilePriority(i, Priority.NORMAL); } else { torrentHandle.setFilePriority(i, Priority.IGNORE); } } selectedFileIndex = selectedFile; } else { for (int i = 0; i < fileStorage.numFiles(); i++) { if (i == selectedFileIndex) { torrentHandle.setFilePriority(i, Priority.NORMAL); } else { torrentHandle.setFilePriority(i, Priority.IGNORE); } } } this.selectedFileIndex = selectedFileIndex; Priority[] piecePriorities = torrentHandle.getPiecePriorities(); int firstPieceIndexLocal = -1; int lastPieceIndexLocal = -1; for (int i = 0; i < piecePriorities.length; i++) { if (piecePriorities[i] != Priority.IGNORE) { if (firstPieceIndexLocal == -1) { firstPieceIndexLocal = i; } piecePriorities[i] = Priority.IGNORE; } else { if (firstPieceIndexLocal != -1 && lastPieceIndexLocal == -1) { lastPieceIndexLocal = i - 1; } } } if (lastPieceIndexLocal == -1) { lastPieceIndexLocal = piecePriorities.length - 1; } int pieceCount = lastPieceIndexLocal - firstPieceIndexLocal + 1; int pieceLength = torrentHandle.getTorrentInfo().pieceLength(); int activePieceCount; if (pieceLength > 0) { activePieceCount = (int) (prepareSize / pieceLength); if (activePieceCount < 
MIN_PREPARE_COUNT) { activePieceCount = MIN_PREPARE_COUNT; } else if (activePieceCount > MAX_PREPARE_COUNT) { activePieceCount = MAX_PREPARE_COUNT; } } else { activePieceCount = DEFAULT_PREPARE_COUNT; } if (pieceCount < activePieceCount) { activePieceCount = pieceCount / 2; } this.firstPieceIndex = firstPieceIndexLocal; this.interestedPieceIndex = this.firstPieceIndex; this.lastPieceIndex = lastPieceIndexLocal; piecesToPrepare = activePieceCount; } /** * Get the filenames of the files in the torrent * * @return {@link String[]} */ public String[] getFileNames() { FileStorage fileStorage = torrentHandle.getTorrentInfo().files(); String[] fileNames = new String[fileStorage.numFiles()]; for (int i = 0; i < fileStorage.numFiles(); i++) { fileNames[i] = fileStorage.fileName(i); } return fileNames; } /** * Prepare torrent for playback. Prioritize the first {@code piecesToPrepare} pieces and the last {@code piecesToPrepare} pieces * from {@code firstPieceIndex} and {@code lastPieceIndex}. Ignore all other pieces. */ public void startDownload() { if (state == State.STREAMING) return; state = State.STARTING; torrentHandle.setPriority(Priority.NORMAL.swig()); List<Integer> indices = new ArrayList<>(); Priority[] priorities = torrentHandle.getPiecePriorities(); for (int i = 0; i < priorities.length; i++) { if (priorities[i] != Priority.IGNORE) { torrentHandle.piecePriority(i, Priority.NORMAL); } } for (int i = 0; i < piecesToPrepare; i++) { indices.add(lastPieceIndex - i); torrentHandle.piecePriority(lastPieceIndex - i, Priority.SEVEN); torrentHandle.setPieceDeadline(lastPieceIndex - i, 1000); } for (int i = 0; i < piecesToPrepare; i++) { indices.add(firstPieceIndex + i); torrentHandle.piecePriority(firstPieceIndex + i, Priority.SEVEN); torrentHandle.setPieceDeadline(firstPieceIndex + i, 1000); } preparePieces = indices; hasPieces = new Boolean[lastPieceIndex - firstPieceIndex + 1]; Arrays.fill(hasPieces, false); TorrentInfo torrentInfo = torrentHandle.getTorrentInfo(); TorrentStatus status = torrentHandle.getStatus(); double blockCount = indices.size() * torrentInfo.pieceLength() / status.getBlockSize(); progressStep = 100 / blockCount; torrentHandle.resume(); listener.onStreamStarted(this); } /** * Check if the piece that contains the specified bytes were downloaded already * * @param bytes The bytes you're interested in * @return {@code true} if downloaded, {@code false} if not */ public boolean hasBytes(long bytes) { if (hasPieces == null) { return false; } int pieceIndex = (int) (bytes / torrentHandle.getTorrentInfo().pieceLength()); return hasPieces[pieceIndex]; } /** * Set the bytes of the selected file that you're interested in * The piece of that specific offset is selected and that piece plus the 1 preceding and the 3 after it. 
* These pieces will then be prioritised, which results in continuing the sequential download after that piece * * @param bytes The bytes you're interested in */ public void setInterestedBytes(long bytes) { if (hasPieces == null && bytes >= 0) { return; } int pieceIndex = (int) (bytes / torrentHandle.getTorrentInfo().pieceLength()); interestedPieceIndex = pieceIndex; if (!hasPieces[pieceIndex] && torrentHandle.piecePriority(pieceIndex + firstPieceIndex) != Priority.SEVEN) { interestedPieceIndex = pieceIndex; int pieces = 5; for (int i = pieceIndex; i < hasPieces.length; i++) { // Set full priority to first found piece that is not confirmed finished if (!hasPieces[i]) { torrentHandle.piecePriority(i + firstPieceIndex, Priority.SEVEN); torrentHandle.setPieceDeadline(i + firstPieceIndex, 1000); pieces--; if (pieces == 0) { break; } } } } } /** * Checks if the interesting pieces are downloaded already * * @return {@code true} if the 5 pieces that were selected using `setInterestedBytes` are all reported complete including the `nextPieces`, {@code false} if not */ public boolean hasInterestedBytes(int nextPieces) { for (int i = 0; i < 5 + nextPieces; i++) { int index = interestedPieceIndex + i; if (hasPieces.length <= index || index < 0) { continue; } if (!hasPieces[interestedPieceIndex + i]) { return false; } } return true; } /** * Checks if the interesting pieces are downloaded already * * @return {@code true} if the 5 pieces that were selected using `setInterestedBytes` are all reported complete, {@code false} if not */ public boolean hasInterestedBytes() { return hasInterestedBytes(5); } /** * Get the index of the piece we're currently interested in * @return Interested piece index */ public int getInterestedPieceIndex() { return interestedPieceIndex; } /** * Get amount of pieces to prepare * @return Amount of pieces to prepare */ public Integer getPiecesToPrepare() { return piecesToPrepare; } /** * Start sequential mode downloading */ private void startSequentialMode() { resetPriorities(); if (hasPieces == null) { torrentHandle.setSequentialDownload(true); } else { for (int i = firstPieceIndex + piecesToPrepare; i < firstPieceIndex + piecesToPrepare + SEQUENTIAL_CONCURRENT_PIECES_COUNT; i++) { torrentHandle.piecePriority(i, Priority.SEVEN); torrentHandle.setPieceDeadline(i, 1000); } } } /** * Get current torrent state * * @return {@link State} */ public State getState() { return state; } /** * Piece finished * * @param alert */ private void pieceFinished(PieceFinishedAlert alert) { if (state == State.STREAMING && hasPieces != null) { int pieceIndex = alert.pieceIndex() - firstPieceIndex; hasPieces[pieceIndex] = true; if (pieceIndex >= interestedPieceIndex) { for (int i = pieceIndex; i < hasPieces.length; i++) { // Set full priority to first found piece that is not confirmed finished if (!hasPieces[i]) { torrentHandle.piecePriority(i + firstPieceIndex, Priority.SEVEN); torrentHandle.setPieceDeadline(i + firstPieceIndex, 1000); break; } } } } else { Iterator<Integer> piecesIterator = preparePieces.iterator(); while (piecesIterator.hasNext()) { int index = piecesIterator.next(); if (index == alert.pieceIndex()) { piecesIterator.remove(); } } if (hasPieces != null) { hasPieces[alert.pieceIndex() - firstPieceIndex] = true; } if (preparePieces.size() == 0) { startSequentialMode(); prepareProgress = 100d; sendStreamProgress(); state = State.STREAMING; if (listener != null) { listener.onStreamReady(this); } } } } private void blockFinished(BlockFinishedAlert alert) { for (Integer index : 
preparePieces) { if (index == alert.getPieceIndex()) { prepareProgress += progressStep; break; } } sendStreamProgress(); } private void sendStreamProgress() { TorrentStatus status = torrentHandle.getStatus(); float progress = status.getProgress() * 100; int seeds = status.getNumSeeds(); int downloadSpeed = status.getDownloadPayloadRate(); if (listener != null && prepareProgress >= 1) { listener.onStreamProgress(this, new StreamStatus(progress, prepareProgress.intValue(), seeds, downloadSpeed)); } } @Override public int[] types() { return new int[]{ AlertType.PIECE_FINISHED.swig(), AlertType.BLOCK_FINISHED.swig() }; } @Override public void alert(Alert<?> alert) { switch (alert.type()) { case PIECE_FINISHED: pieceFinished((PieceFinishedAlert) alert); break; case BLOCK_FINISHED: blockFinished((BlockFinishedAlert) alert); break; default: break; } } }
library/src/main/java/com/github/se_bastiaan/torrentstream/Torrent.java
/* * Copyright (C) 2015-2016 Sébastiaan (github.com/se-bastiaan) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.se_bastiaan.torrentstream; import com.frostwire.jlibtorrent.AlertListener; import com.frostwire.jlibtorrent.FileStorage; import com.frostwire.jlibtorrent.Priority; import com.frostwire.jlibtorrent.TorrentHandle; import com.frostwire.jlibtorrent.TorrentInfo; import com.frostwire.jlibtorrent.TorrentStatus; import com.frostwire.jlibtorrent.alerts.Alert; import com.frostwire.jlibtorrent.alerts.AlertType; import com.frostwire.jlibtorrent.alerts.BlockFinishedAlert; import com.frostwire.jlibtorrent.alerts.PieceFinishedAlert; import com.github.se_bastiaan.torrentstream.listeners.TorrentListener; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; public class Torrent implements AlertListener { private final static Integer MAX_PREPARE_COUNT = 20; private final static Integer MIN_PREPARE_COUNT = 2; private final static Integer DEFAULT_PREPARE_COUNT = 5; private final static Integer SEQUENTIAL_CONCURRENT_PIECES_COUNT = 5; public enum State {UNKNOWN, RETRIEVING_META, STARTING, STREAMING} private Integer piecesToPrepare; private Integer lastPieceIndex; private Integer firstPieceIndex; private Integer selectedFileIndex = -1; private Integer interestedPieceIndex = 0; private Double prepareProgress = 0d; private Double progressStep = 0d; private List<Integer> preparePieces; private Boolean[] hasPieces; private State state = State.RETRIEVING_META; private final TorrentHandle torrentHandle; private final TorrentListener listener; private final Long prepareSize; /** * The constructor for a new Torrent * <p/> * First the largest file in the download is selected as the file for playback * <p/> * After setting this priority, the first and last index of the pieces that make up this file are determined. 
* And last: amount of pieces that are needed for playback are calculated (needed for playback means: make up 10 megabyte of the file) * * @param torrentHandle jlibtorrent TorrentHandle */ public Torrent(TorrentHandle torrentHandle, TorrentListener listener, Long prepareSize) { this.torrentHandle = torrentHandle; this.listener = listener; this.prepareSize = prepareSize; torrentHandle.setPriority(Priority.NORMAL.swig()); if (selectedFileIndex == -1) setLargestFile(); if (this.listener != null) this.listener.onStreamPrepared(this); } /** * Reset piece priorities of selected file to normal */ private void resetPriorities() { Priority[] priorities = torrentHandle.getPiecePriorities(); for (int i = 0; i < priorities.length; i++) { if (i >= firstPieceIndex && i <= lastPieceIndex) { torrentHandle.piecePriority(i, Priority.NORMAL); } else { torrentHandle.piecePriority(i, Priority.IGNORE); } } } /** * Get LibTorrent torrent handle of this torrent * * @return {@link TorrentHandle} */ public TorrentHandle getTorrentHandle() { return torrentHandle; } public File getVideoFile() { return new File(torrentHandle.getSavePath() + "/" + torrentHandle.getTorrentInfo().files().filePath(selectedFileIndex)); } /** * Get the location of the file that is being downloaded * * @return {@link File} The file location */ public File getSaveLocation() { return new File(torrentHandle.getSavePath() + "/" + torrentHandle.getName()); } /** * Resume the torrent download */ public void resume() { torrentHandle.resume(); } /** * Pause the torrent download */ public void pause() { torrentHandle.pause(); } /** * Set the selected file index to the largest file in the torrent */ public void setLargestFile() { setSelectedFileIndex(-1); } /** * Set the index of the file that should be downloaded * If the given index is -1, then the largest file is chosen * * @param selectedFileIndex {@link Integer} Index of the file */ public void setSelectedFileIndex(Integer selectedFileIndex) { TorrentInfo torrentInfo = torrentHandle.getTorrentInfo(); FileStorage fileStorage = torrentInfo.files(); if (selectedFileIndex == -1) { long highestFileSize = 0; int selectedFile = -1; for (int i = 0; i < fileStorage.numFiles(); i++) { long fileSize = fileStorage.fileSize(i); if (highestFileSize < fileSize) { highestFileSize = fileSize; torrentHandle.setFilePriority(selectedFile, Priority.IGNORE); selectedFile = i; torrentHandle.setFilePriority(i, Priority.NORMAL); } else { torrentHandle.setFilePriority(i, Priority.IGNORE); } } selectedFileIndex = selectedFile; } else { for (int i = 0; i < fileStorage.numFiles(); i++) { if (i == selectedFileIndex) { torrentHandle.setFilePriority(i, Priority.NORMAL); } else { torrentHandle.setFilePriority(i, Priority.IGNORE); } } } this.selectedFileIndex = selectedFileIndex; Priority[] piecePriorities = torrentHandle.getPiecePriorities(); int firstPieceIndexLocal = -1; int lastPieceIndexLocal = -1; for (int i = 0; i < piecePriorities.length; i++) { if (piecePriorities[i] != Priority.IGNORE) { if (firstPieceIndexLocal == -1) { firstPieceIndexLocal = i; } piecePriorities[i] = Priority.IGNORE; } else { if (firstPieceIndexLocal != -1 && lastPieceIndexLocal == -1) { lastPieceIndexLocal = i - 1; } } } if (lastPieceIndexLocal == -1) { lastPieceIndexLocal = piecePriorities.length - 1; } int pieceCount = lastPieceIndexLocal - firstPieceIndexLocal + 1; int pieceLength = torrentHandle.getTorrentInfo().pieceLength(); int activePieceCount; if (pieceLength > 0) { activePieceCount = (int) (prepareSize / pieceLength); if (activePieceCount < 
MIN_PREPARE_COUNT) { activePieceCount = MIN_PREPARE_COUNT; } else if (activePieceCount > MAX_PREPARE_COUNT) { activePieceCount = MAX_PREPARE_COUNT; } } else { activePieceCount = DEFAULT_PREPARE_COUNT; } if (pieceCount < activePieceCount) { activePieceCount = pieceCount / 2; } this.firstPieceIndex = firstPieceIndexLocal; this.interestedPieceIndex = this.firstPieceIndex; this.lastPieceIndex = lastPieceIndexLocal; piecesToPrepare = activePieceCount; } /** * Get the filenames of the files in the torrent * * @return {@link String[]} */ public String[] getFileNames() { FileStorage fileStorage = torrentHandle.getTorrentInfo().files(); String[] fileNames = new String[fileStorage.numFiles()]; for (int i = 0; i < fileStorage.numFiles(); i++) { fileNames[i] = fileStorage.fileName(i); } return fileNames; } /** * Prepare torrent for playback. Prioritize the first {@code piecesToPrepare} pieces and the last {@code piecesToPrepare} pieces * from {@code firstPieceIndex} and {@code lastPieceIndex}. Ignore all other pieces. */ public void startDownload() { if (state == State.STREAMING) return; state = State.STARTING; torrentHandle.setPriority(Priority.NORMAL.swig()); List<Integer> indices = new ArrayList<>(); Priority[] priorities = torrentHandle.getPiecePriorities(); for (int i = 0; i < priorities.length; i++) { if (priorities[i] != Priority.IGNORE) { torrentHandle.piecePriority(i, Priority.NORMAL); } } for (int i = 0; i < piecesToPrepare; i++) { indices.add(lastPieceIndex - i); torrentHandle.piecePriority(lastPieceIndex - i, Priority.SEVEN); torrentHandle.setPieceDeadline(lastPieceIndex - i, 1000); } for (int i = 0; i < piecesToPrepare; i++) { indices.add(firstPieceIndex + i); torrentHandle.piecePriority(firstPieceIndex + i, Priority.SEVEN); torrentHandle.setPieceDeadline(firstPieceIndex + i, 1000); } preparePieces = indices; hasPieces = new Boolean[lastPieceIndex - firstPieceIndex + 1]; Arrays.fill(hasPieces, false); TorrentInfo torrentInfo = torrentHandle.getTorrentInfo(); TorrentStatus status = torrentHandle.getStatus(); double blockCount = indices.size() * torrentInfo.pieceLength() / status.getBlockSize(); progressStep = 100 / blockCount; torrentHandle.resume(); listener.onStreamStarted(this); } /** * Check if the piece that contains the specified bytes were downloaded already * * @param bytes The bytes you're interested in * @return {@code true} if downloaded, {@code false} if not */ public boolean hasBytes(long bytes) { if (hasPieces == null) { return false; } int pieceIndex = (int) (bytes / torrentHandle.getTorrentInfo().pieceLength()); return hasPieces[pieceIndex]; } /** * Set the bytes of the selected file that you're interested in * The piece of that specific offset is selected and that piece plus the 1 preceding and the 3 after it. 
* These pieces will then be prioritised, which results in continuing the sequential download after that piece * * @param bytes The bytes you're interested in */ public void setInterestedBytes(long bytes) { if (hasPieces == null && bytes >= 0) { return; } int pieceIndex = (int) (bytes / torrentHandle.getTorrentInfo().pieceLength()); interestedPieceIndex = pieceIndex; if (!hasPieces[pieceIndex] && torrentHandle.piecePriority(pieceIndex + firstPieceIndex) != Priority.SEVEN) { interestedPieceIndex = pieceIndex; int pieces = 5; for (int i = pieceIndex; i < hasPieces.length; i++) { // Set full priority to first found piece that is not confirmed finished if (!hasPieces[i]) { torrentHandle.piecePriority(i + firstPieceIndex, Priority.SEVEN); torrentHandle.setPieceDeadline(i + firstPieceIndex, 1000); pieces--; if (pieces == 0) { break; } } } } } /** * Checks if the interesting pieces are downloaded already * * @return {@code true} if the 5 pieces that were selected using `setInterestedBytes` are all reported complete including the `nextPieces`, {@code false} if not */ public boolean hasInterestedBytes(int nextPieces) { for (int i = 0; i < 5 + nextPieces; i++) { int index = interestedPieceIndex + i; if (hasPieces.length <= index || index < 0) { continue; } if (!hasPieces[interestedPieceIndex + i]) { return false; } } return true; } /** * Checks if the interesting pieces are downloaded already * * @return {@code true} if the 5 pieces that were selected using `setInterestedBytes` are all reported complete, {@code false} if not */ public boolean hasInterestedBytes() { return hasInterestedBytes(0); } /** * Get the index of the piece we're currently interested in * @return Interested piece index */ public int getInterestedPieceIndex() { return interestedPieceIndex; } /** * Start sequential mode downloading */ private void startSequentialMode() { resetPriorities(); if (hasPieces == null) { torrentHandle.setSequentialDownload(true); } else { for (int i = firstPieceIndex + piecesToPrepare; i < firstPieceIndex + piecesToPrepare + SEQUENTIAL_CONCURRENT_PIECES_COUNT; i++) { torrentHandle.piecePriority(i, Priority.SEVEN); torrentHandle.setPieceDeadline(i, 1000); } } } /** * Get current torrent state * * @return {@link State} */ public State getState() { return state; } /** * Piece finished * * @param alert */ private void pieceFinished(PieceFinishedAlert alert) { if (state == State.STREAMING && hasPieces != null) { int pieceIndex = alert.pieceIndex() - firstPieceIndex; hasPieces[pieceIndex] = true; if (pieceIndex >= interestedPieceIndex) { for (int i = pieceIndex; i < hasPieces.length; i++) { // Set full priority to first found piece that is not confirmed finished if (!hasPieces[i]) { torrentHandle.piecePriority(i + firstPieceIndex, Priority.SEVEN); torrentHandle.setPieceDeadline(i + firstPieceIndex, 1000); break; } } } } else { Iterator<Integer> piecesIterator = preparePieces.iterator(); while (piecesIterator.hasNext()) { int index = piecesIterator.next(); if (index == alert.pieceIndex()) { piecesIterator.remove(); } } if (hasPieces != null) { hasPieces[alert.pieceIndex() - firstPieceIndex] = true; } if (preparePieces.size() == 0) { startSequentialMode(); prepareProgress = 100d; sendStreamProgress(); state = State.STREAMING; if (listener != null) { listener.onStreamReady(this); } } } } private void blockFinished(BlockFinishedAlert alert) { for (Integer index : preparePieces) { if (index == alert.getPieceIndex()) { prepareProgress += progressStep; break; } } sendStreamProgress(); } private void 
sendStreamProgress() { TorrentStatus status = torrentHandle.getStatus(); float progress = status.getProgress() * 100; int seeds = status.getNumSeeds(); int downloadSpeed = status.getDownloadPayloadRate(); if (listener != null && prepareProgress >= 1) { listener.onStreamProgress(this, new StreamStatus(progress, prepareProgress.intValue(), seeds, downloadSpeed)); } } @Override public int[] types() { return new int[]{ AlertType.PIECE_FINISHED.swig(), AlertType.BLOCK_FINISHED.swig() }; } @Override public void alert(Alert<?> alert) { switch (alert.type()) { case PIECE_FINISHED: pieceFinished((PieceFinishedAlert) alert); break; case BLOCK_FINISHED: blockFinished((BlockFinishedAlert) alert); break; default: break; } } }
Add getter for pieces to prepare
library/src/main/java/com/github/se_bastiaan/torrentstream/Torrent.java
Add getter for pieces to prepare
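The setSelectedFileIndex() method in the Torrent class above derives the number of pieces to prepare from the prepare size, the piece length, and the piece count. The sketch below lifts just that calculation into a standalone method so it can be run in isolation; the class name and the sample sizes in main() are assumptions, while the constants and the clamping logic mirror the record's code.

// Hypothetical standalone re-statement of the pieces-to-prepare calculation.
final class PrepareCountSketch {
    static final int MAX_PREPARE_COUNT = 20;
    static final int MIN_PREPARE_COUNT = 2;
    static final int DEFAULT_PREPARE_COUNT = 5;

    static int piecesToPrepare(long prepareSize, int pieceLength, int pieceCount) {
        int activePieceCount;
        if (pieceLength > 0) {
            // Enough pieces to cover prepareSize, clamped to [MIN, MAX].
            activePieceCount = (int) (prepareSize / pieceLength);
            if (activePieceCount < MIN_PREPARE_COUNT) {
                activePieceCount = MIN_PREPARE_COUNT;
            } else if (activePieceCount > MAX_PREPARE_COUNT) {
                activePieceCount = MAX_PREPARE_COUNT;
            }
        } else {
            activePieceCount = DEFAULT_PREPARE_COUNT;
        }
        if (pieceCount < activePieceCount) {
            // Small torrents: prepare only half of the available pieces.
            activePieceCount = pieceCount / 2;
        }
        return activePieceCount;
    }

    public static void main(String[] args) {
        // 10 MiB prepare size, 2 MiB pieces, 500 pieces in the file -> 5 pieces to prepare.
        System.out.println(piecesToPrepare(10L * 1024 * 1024, 2 * 1024 * 1024, 500));
    }
}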
Java
apache-2.0
c105d976a3e136a7b715c64fb2900ec76615844d
0
cpollet/thorium
/* * Copyright 2015 Christophe Pollet * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ch.pollet.thorium.evaluation; import ch.pollet.thorium.antlr.ThoriumBaseVisitor; import ch.pollet.thorium.antlr.ThoriumParser; import ch.pollet.thorium.semantic.exception.SymbolNotFoundException; import ch.pollet.thorium.values.Constant; import ch.pollet.thorium.values.DirectValue; import ch.pollet.thorium.values.Symbol; import ch.pollet.thorium.values.Value; import ch.pollet.thorium.values.Variable; /** * @author Christophe Pollet */ public class VisitorEvaluator extends ThoriumBaseVisitor<Void> { private EvaluationContext context; public VisitorEvaluator(EvaluationContext context) { this.context = context; } //region Statements @Override public Void visitUnconditionalStatement(ThoriumParser.UnconditionalStatementContext ctx) { super.visitUnconditionalStatement(ctx); context.lastStatementValue = context.popStack(); return null; } @Override public Void visitStatementsBlock(ThoriumParser.StatementsBlockContext ctx) { visitStatementsInNestedContext(ctx.statements()); return null; } private void visitStatementsInNestedContext(ThoriumParser.StatementsContext ctx) { context = context.createChild(); visitStatements(ctx); context = context.destroyAndRestoreParent(); } @Override public Void visitConditionalIfStatement(ThoriumParser.ConditionalIfStatementContext ctx) { // TODO SEM add check that potential assigned variable is already defined in symbol table if (isExpressionTrue(ctx.expression(1))) { visit(ctx.expression(0)); context.lastStatementValue = context.popStack(); } else { context.lastStatementValue = DirectValue.build(); } return null; } private boolean isExpressionTrue(ThoriumParser.ExpressionContext expression) { visit(expression); Value condition = context.popStack(); return condition.value().equals(DirectValue.build(true)); } @Override public Void visitConditionalUnlessStatement(ThoriumParser.ConditionalUnlessStatementContext ctx) { // TODO SEM add check that potential assigned variable is already defined in symbol table if (!isExpressionTrue(ctx.expression(1))) { visit(ctx.expression(0)); context.lastStatementValue = context.popStack(); } else { context.lastStatementValue = DirectValue.build(); } return null; } //endregion //region Expressions @Override public Void visitMultiplicationExpression(ThoriumParser.MultiplicationExpressionContext ctx) { super.visitMultiplicationExpression(ctx); evalOperator("*"); return null; } @Override public Void visitAdditionExpression(ThoriumParser.AdditionExpressionContext ctx) { super.visitAdditionExpression(ctx); evalOperator("+"); return null; } private void evalOperator(String operator) { Value right = context.popStack(); Value left = context.popStack(); Method method = left.type().lookupMethod(new MethodMatcher(operator, right.type())); context.pushStack(method.apply(left, right)); } @Override public Void visitAssignmentExpression(ThoriumParser.AssignmentExpressionContext ctx) { super.visitAssignmentExpression(ctx); Value right = context.popStack(); Value left = 
context.popStack(); Symbol symbol = context.lookupSymbol(left.getName()); symbol.setValue(right.value()); symbol.setType(right.type()); context.pushStack(right.value()); return null; } @Override public Void visitBlockExpression(ThoriumParser.BlockExpressionContext ctx) { visitBlock(ctx.block()); context.pushStack(context.lastStatementValue); return null; } //endregion //region If Statement @Override public Void visitIfStatement(ThoriumParser.IfStatementContext ctx) { context = context.createChild(); visitNestedIfStatement(ctx); context = context.destroyAndRestoreParent(); return null; } private void visitNestedIfStatement(ThoriumParser.IfStatementContext ctx) { if (isExpressionTrue(ctx.expression())) { visitStatements(ctx.statements()); } else if (ctx.elseStatement() != null) { visitElseStatement(ctx.elseStatement()); } else { context.lastStatementValue = DirectValue.build(); } } @Override public Void visitElseStatement(ThoriumParser.ElseStatementContext ctx) { if (ctx.statements() != null) { visitStatements(ctx.statements()); } else if (ctx.ifStatement() != null) { visitNestedIfStatement(ctx.ifStatement()); } else { throw new IllegalStateException(); } return null; } //endregion //region Values @Override public Void visitIntegerLiteral(ThoriumParser.IntegerLiteralContext ctx) { context.pushStack(DirectValue.build(Long.valueOf(ctx.IntegerLiteral().getText()))); return null; } @Override public Void visitFloatLiteral(ThoriumParser.FloatLiteralContext ctx) { context.pushStack(DirectValue.build(Double.valueOf(ctx.FloatLiteral().getText()))); return null; } @Override public Void visitBooleanLiteral(ThoriumParser.BooleanLiteralContext ctx) { context.pushStack(DirectValue.build(Boolean.valueOf(ctx.BooleanLiteral().getText()))); return null; } @Override public Void visitVariableName(ThoriumParser.VariableNameContext ctx) { Symbol symbol; try { symbol = context.lookupSymbol(ctx.getText()); } catch (SymbolNotFoundException e) { symbol = new Variable(ctx.getText()); // TODO EVAL: should be symbol reference instead? context.insertSymbol(symbol); } context.pushStack(symbol); return null; } @Override public Void visitConstantName(ThoriumParser.ConstantNameContext ctx) { Symbol symbol; try { symbol = context.lookupSymbol(ctx.getText()); } catch (SymbolNotFoundException e) { symbol = new Constant(ctx.getText()); // TODO EVAL: should be symbol reference instead? context.insertSymbol(symbol); } context.pushStack(symbol); return null; } //endregion }
lang/src/main/java/ch/pollet/thorium/evaluation/VisitorEvaluator.java
/* * Copyright 2015 Christophe Pollet * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ch.pollet.thorium.evaluation; import ch.pollet.thorium.antlr.ThoriumBaseVisitor; import ch.pollet.thorium.antlr.ThoriumParser; import ch.pollet.thorium.semantic.exception.SymbolNotFoundException; import ch.pollet.thorium.values.Constant; import ch.pollet.thorium.values.DirectValue; import ch.pollet.thorium.values.Symbol; import ch.pollet.thorium.values.Value; import ch.pollet.thorium.values.Variable; /** * @author Christophe Pollet */ public class VisitorEvaluator extends ThoriumBaseVisitor<Void> { private EvaluationContext context; public VisitorEvaluator(EvaluationContext context) { this.context = context; } //region Statements @Override public Void visitUnconditionalStatement(ThoriumParser.UnconditionalStatementContext ctx) { super.visitUnconditionalStatement(ctx); context.lastStatementValue = context.popStack(); return null; } @Override public Void visitStatementsBlock(ThoriumParser.StatementsBlockContext ctx) { visitStatementsInNestedContext(ctx.statements()); return null; } private void visitStatementsInNestedContext(ThoriumParser.StatementsContext ctx) { context = context.createChild(); visitStatements(ctx); context = context.destroyAndRestoreParent(); } @Override public Void visitConditionalIfStatement(ThoriumParser.ConditionalIfStatementContext ctx) { // TODO SEM add check that potential assigned variable is already defined in symbol table if (isExpressionTrue(ctx.expression(1))) { visit(ctx.expression(0)); context.lastStatementValue = context.popStack(); } else { context.lastStatementValue = DirectValue.build(); } return null; } private boolean isExpressionTrue(ThoriumParser.ExpressionContext expression) { visit(expression); Value condition = context.popStack(); return condition.value().equals(DirectValue.build(true)); } @Override public Void visitConditionalUnlessStatement(ThoriumParser.ConditionalUnlessStatementContext ctx) { // TODO SEM add check that potential assigned variable is already defined in symbol table if (!isExpressionTrue(ctx.expression(1))) { visit(ctx.expression(0)); context.lastStatementValue = context.popStack(); } else { context.lastStatementValue = DirectValue.build(); } return null; } //endregion //region Expressions @Override public Void visitMultiplicationExpression(ThoriumParser.MultiplicationExpressionContext ctx) { super.visitMultiplicationExpression(ctx); evalOperator("*"); return null; } @Override public Void visitAdditionExpression(ThoriumParser.AdditionExpressionContext ctx) { super.visitAdditionExpression(ctx); evalOperator("+"); return null; } private void evalOperator(String operator) { Value right = context.popStack(); Value left = context.popStack(); Method method = left.type().lookupMethod(new MethodMatcher(operator, right.type())); context.pushStack(method.apply(left, right)); } @Override public Void visitAssignmentExpression(ThoriumParser.AssignmentExpressionContext ctx) { super.visitAssignmentExpression(ctx); Value right = context.popStack(); Value left = 
context.popStack(); // TODO SEM: move this if (!left.isWritable()) { throw new IllegalStateException("Cannot assign " + right.toString() + " to " + left.toString()); } Symbol symbol = context.lookupSymbol(left.getName()); symbol.setValue(right.value()); symbol.setType(right.type()); context.pushStack(right.value()); return null; } @Override public Void visitBlockExpression(ThoriumParser.BlockExpressionContext ctx) { visitBlock(ctx.block()); context.pushStack(context.lastStatementValue); return null; } //endregion //region If Statement @Override public Void visitIfStatement(ThoriumParser.IfStatementContext ctx) { context = context.createChild(); visitNestedIfStatement(ctx); context = context.destroyAndRestoreParent(); return null; } private void visitNestedIfStatement(ThoriumParser.IfStatementContext ctx) { if (isExpressionTrue(ctx.expression())) { visitStatements(ctx.statements()); } else if (ctx.elseStatement() != null) { visitElseStatement(ctx.elseStatement()); } else { context.lastStatementValue = DirectValue.build(); } } @Override public Void visitElseStatement(ThoriumParser.ElseStatementContext ctx) { if (ctx.statements() != null) { visitStatements(ctx.statements()); } else if (ctx.ifStatement() != null) { visitNestedIfStatement(ctx.ifStatement()); } else { throw new IllegalStateException(); } return null; } //endregion //region Values @Override public Void visitIntegerLiteral(ThoriumParser.IntegerLiteralContext ctx) { context.pushStack(DirectValue.build(Long.valueOf(ctx.IntegerLiteral().getText()))); return null; } @Override public Void visitFloatLiteral(ThoriumParser.FloatLiteralContext ctx) { context.pushStack(DirectValue.build(Double.valueOf(ctx.FloatLiteral().getText()))); return null; } @Override public Void visitBooleanLiteral(ThoriumParser.BooleanLiteralContext ctx) { context.pushStack(DirectValue.build(Boolean.valueOf(ctx.BooleanLiteral().getText()))); return null; } @Override public Void visitVariableName(ThoriumParser.VariableNameContext ctx) { Symbol symbol; try { symbol = context.lookupSymbol(ctx.getText()); } catch (SymbolNotFoundException e) { symbol = new Variable(ctx.getText()); // TODO EVAL: should be symbol reference instead? context.insertSymbol(symbol); } context.pushStack(symbol); return null; } @Override public Void visitConstantName(ThoriumParser.ConstantNameContext ctx) { Symbol symbol; try { symbol = context.lookupSymbol(ctx.getText()); } catch (SymbolNotFoundException e) { symbol = new Constant(ctx.getText()); // TODO EVAL: should be symbol reference instead? context.insertSymbol(symbol); } context.pushStack(symbol); return null; } //endregion }
cleaning
lang/src/main/java/ch/pollet/thorium/evaluation/VisitorEvaluator.java
cleaning
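The evalOperator() helper in the VisitorEvaluator record above pops the right operand first and the left operand second, because operands are pushed onto the evaluation stack left to right. A tiny standalone sketch of that stack discipline follows; the class name and literal values are made up for the example.

import java.util.ArrayDeque;
import java.util.Deque;

// Hypothetical illustration of the pop-right-then-left order used by evalOperator.
final class OperatorStackSketch {
    public static void main(String[] args) {
        Deque<Long> stack = new ArrayDeque<>();
        stack.push(6L); // left operand, pushed first
        stack.push(7L); // right operand, pushed second

        long right = stack.pop(); // popped first
        long left = stack.pop();
        stack.push(left * right); // apply the "*" operator and push the result

        System.out.println(stack.peek()); // 42
    }
}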
Java
apache-2.0
78260e2021b1064e4cb3485b8660add98d97cfb8
0
amzn/exoplayer-amazon-port,androidx/media,google/ExoPlayer,amzn/exoplayer-amazon-port,ened/ExoPlayer,androidx/media,ened/ExoPlayer,androidx/media,amzn/exoplayer-amazon-port,ened/ExoPlayer,google/ExoPlayer,google/ExoPlayer
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.mediacodec; import android.annotation.SuppressLint; import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecProfileLevel; import android.media.MediaCodecList; import android.text.TextUtils; import android.util.Pair; import androidx.annotation.CheckResult; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.ColorInfo; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; /** * A utility class for querying the available codecs. */ @SuppressLint("InlinedApi") public final class MediaCodecUtil { /** * Thrown when an error occurs querying the device for its underlying media capabilities. * <p> * Such failures are not expected in normal operation and are normally temporary (e.g. if the * mediaserver process has crashed and is yet to restart). */ public static class DecoderQueryException extends Exception { private DecoderQueryException(Throwable cause) { super("Failed to query underlying media codecs", cause); } } private static final String TAG = "MediaCodecUtil"; private static final Pattern PROFILE_PATTERN = Pattern.compile("^\\D?(\\d+)$"); private static final HashMap<CodecKey, List<MediaCodecInfo>> decoderInfosCache = new HashMap<>(); // Codecs to constant mappings. // AVC. private static final String CODEC_ID_AVC1 = "avc1"; private static final String CODEC_ID_AVC2 = "avc2"; // VP9 private static final String CODEC_ID_VP09 = "vp09"; // HEVC. private static final String CODEC_ID_HEV1 = "hev1"; private static final String CODEC_ID_HVC1 = "hvc1"; // AV1. private static final String CODEC_ID_AV01 = "av01"; // MP4A AAC. private static final String CODEC_ID_MP4A = "mp4a"; // Lazily initialized. private static int maxH264DecodableFrameSize = -1; private MediaCodecUtil() {} /** * Optional call to warm the codec cache for a given mime type. * * <p>Calling this method may speed up subsequent calls to {@link #getDecoderInfo(String, boolean, * boolean)} and {@link #getDecoderInfos(String, boolean, boolean)}. * * @param mimeType The mime type. * @param secure Whether the decoder is required to support secure decryption. Always pass false * unless secure decryption really is required. * @param tunneling Whether the decoder is required to support tunneling. Always pass false unless * tunneling really is required. 
*/ public static void warmDecoderInfoCache(String mimeType, boolean secure, boolean tunneling) { try { getDecoderInfos(mimeType, secure, tunneling); } catch (DecoderQueryException e) { // Codec warming is best effort, so we can swallow the exception. Log.e(TAG, "Codec warming failed", e); } } /** * Returns information about a decoder suitable for audio passthrough. * * @return A {@link MediaCodecInfo} describing the decoder, or null if no suitable decoder exists. * @throws DecoderQueryException If there was an error querying the available decoders. */ @Nullable public static MediaCodecInfo getPassthroughDecoderInfo() throws DecoderQueryException { return getDecoderInfo(MimeTypes.AUDIO_RAW, /* secure= */ false, /* tunneling= */ false); } /** * Returns information about the preferred decoder for a given mime type. * * @param mimeType The MIME type. * @param secure Whether the decoder is required to support secure decryption. Always pass false * unless secure decryption really is required. * @param tunneling Whether the decoder is required to support tunneling. Always pass false unless * tunneling really is required. * @return A {@link MediaCodecInfo} describing the decoder, or null if no suitable decoder exists. * @throws DecoderQueryException If there was an error querying the available decoders. */ @Nullable public static MediaCodecInfo getDecoderInfo(String mimeType, boolean secure, boolean tunneling) throws DecoderQueryException { List<MediaCodecInfo> decoderInfos = getDecoderInfos(mimeType, secure, tunneling); return decoderInfos.isEmpty() ? null : decoderInfos.get(0); } /** * Returns all {@link MediaCodecInfo}s for the given mime type, in the order given by {@link * MediaCodecList}. * * @param mimeType The MIME type. * @param secure Whether the decoder is required to support secure decryption. Always pass false * unless secure decryption really is required. * @param tunneling Whether the decoder is required to support tunneling. Always pass false unless * tunneling really is required. * @return An unmodifiable list of all {@link MediaCodecInfo}s for the given mime type, in the * order given by {@link MediaCodecList}. * @throws DecoderQueryException If there was an error querying the available decoders. */ public static synchronized List<MediaCodecInfo> getDecoderInfos( String mimeType, boolean secure, boolean tunneling) throws DecoderQueryException { CodecKey key = new CodecKey(mimeType, secure, tunneling); @Nullable List<MediaCodecInfo> cachedDecoderInfos = decoderInfosCache.get(key); if (cachedDecoderInfos != null) { return cachedDecoderInfos; } MediaCodecListCompat mediaCodecList = Util.SDK_INT >= 21 ? new MediaCodecListCompatV21(secure, tunneling) : new MediaCodecListCompatV16(); ArrayList<MediaCodecInfo> decoderInfos = getDecoderInfosInternal(key, mediaCodecList); if (secure && decoderInfos.isEmpty() && 21 <= Util.SDK_INT && Util.SDK_INT <= 23) { // Some devices don't list secure decoders on API level 21 [Internal: b/18678462]. Try the // legacy path. We also try this path on API levels 22 and 23 as a defensive measure. mediaCodecList = new MediaCodecListCompatV16(); decoderInfos = getDecoderInfosInternal(key, mediaCodecList); if (!decoderInfos.isEmpty()) { Log.w(TAG, "MediaCodecList API didn't list secure decoder for: " + mimeType + ". 
Assuming: " + decoderInfos.get(0).name); } } applyWorkarounds(mimeType, decoderInfos); List<MediaCodecInfo> unmodifiableDecoderInfos = Collections.unmodifiableList(decoderInfos); decoderInfosCache.put(key, unmodifiableDecoderInfos); return unmodifiableDecoderInfos; } /** * Returns a copy of the provided decoder list sorted such that decoders with format support are * listed first. The returned list is modifiable for convenience. */ @CheckResult public static List<MediaCodecInfo> getDecoderInfosSortedByFormatSupport( List<MediaCodecInfo> decoderInfos, Format format) { decoderInfos = new ArrayList<>(decoderInfos); sortByScore( decoderInfos, decoderInfo -> { try { return decoderInfo.isFormatSupported(format) ? 1 : 0; } catch (DecoderQueryException e) { return -1; } }); return decoderInfos; } /** * Returns the maximum frame size supported by the default H264 decoder. * * @return The maximum frame size for an H264 stream that can be decoded on the device. */ public static int maxH264DecodableFrameSize() throws DecoderQueryException { if (maxH264DecodableFrameSize == -1) { int result = 0; @Nullable MediaCodecInfo decoderInfo = getDecoderInfo(MimeTypes.VIDEO_H264, /* secure= */ false, /* tunneling= */ false); if (decoderInfo != null) { for (CodecProfileLevel profileLevel : decoderInfo.getProfileLevels()) { result = Math.max(avcLevelToMaxFrameSize(profileLevel.level), result); } // We assume support for at least 480p (SDK_INT >= 21) or 360p (SDK_INT < 21), which are // the levels mandated by the Android CDD. result = Math.max(result, Util.SDK_INT >= 21 ? (720 * 480) : (480 * 360)); } maxH264DecodableFrameSize = result; } return maxH264DecodableFrameSize; } /** * Returns profile and level (as defined by {@link CodecProfileLevel}) corresponding to the codec * description string (as defined by RFC 6381) of the given format. * * @param format Media format with a codec description string, as defined by RFC 6381. * @return A pair (profile constant, level constant) if the codec of the {@code format} is * well-formed and recognized, or null otherwise. */ @Nullable public static Pair<Integer, Integer> getCodecProfileAndLevel(Format format) { if (format.codecs == null) { return null; } String[] parts = format.codecs.split("\\."); // Dolby Vision can use DV, AVC or HEVC codec IDs, so check the MIME type first. if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) { return getDolbyVisionProfileAndLevel(format.codecs, parts); } switch (parts[0]) { case CODEC_ID_AVC1: case CODEC_ID_AVC2: return getAvcProfileAndLevel(format.codecs, parts); case CODEC_ID_VP09: return getVp9ProfileAndLevel(format.codecs, parts); case CODEC_ID_HEV1: case CODEC_ID_HVC1: return getHevcProfileAndLevel(format.codecs, parts); case CODEC_ID_AV01: return getAv1ProfileAndLevel(format.codecs, parts, format.colorInfo); case CODEC_ID_MP4A: return getAacCodecProfileAndLevel(format.codecs, parts); default: return null; } } // Internal methods. /** * Returns {@link MediaCodecInfo}s for the given codec {@link CodecKey} in the order given by * {@code mediaCodecList}. * * @param key The codec key. * @param mediaCodecList The codec list. * @return The codec information for usable codecs matching the specified key. * @throws DecoderQueryException If there was an error querying the available decoders. 
*/ private static ArrayList<MediaCodecInfo> getDecoderInfosInternal( CodecKey key, MediaCodecListCompat mediaCodecList) throws DecoderQueryException { try { ArrayList<MediaCodecInfo> decoderInfos = new ArrayList<>(); String mimeType = key.mimeType; int numberOfCodecs = mediaCodecList.getCodecCount(); boolean secureDecodersExplicit = mediaCodecList.secureDecodersExplicit(); // Note: MediaCodecList is sorted by the framework such that the best decoders come first. for (int i = 0; i < numberOfCodecs; i++) { android.media.MediaCodecInfo codecInfo = mediaCodecList.getCodecInfoAt(i); if (isAlias(codecInfo)) { // Skip aliases of other codecs, since they will also be listed under their canonical // names. continue; } String name = codecInfo.getName(); if (!isCodecUsableDecoder(codecInfo, name, secureDecodersExplicit, mimeType)) { continue; } @Nullable String codecMimeType = getCodecMimeType(codecInfo, name, mimeType); if (codecMimeType == null) { continue; } try { CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(codecMimeType); boolean tunnelingSupported = mediaCodecList.isFeatureSupported( CodecCapabilities.FEATURE_TunneledPlayback, codecMimeType, capabilities); boolean tunnelingRequired = mediaCodecList.isFeatureRequired( CodecCapabilities.FEATURE_TunneledPlayback, codecMimeType, capabilities); if ((!key.tunneling && tunnelingRequired) || (key.tunneling && !tunnelingSupported)) { continue; } boolean secureSupported = mediaCodecList.isFeatureSupported( CodecCapabilities.FEATURE_SecurePlayback, codecMimeType, capabilities); boolean secureRequired = mediaCodecList.isFeatureRequired( CodecCapabilities.FEATURE_SecurePlayback, codecMimeType, capabilities); if ((!key.secure && secureRequired) || (key.secure && !secureSupported)) { continue; } boolean hardwareAccelerated = isHardwareAccelerated(codecInfo); boolean softwareOnly = isSoftwareOnly(codecInfo); boolean vendor = isVendor(codecInfo); boolean forceDisableAdaptive = codecNeedsDisableAdaptationWorkaround(name); if ((secureDecodersExplicit && key.secure == secureSupported) || (!secureDecodersExplicit && !key.secure)) { decoderInfos.add( MediaCodecInfo.newInstance( name, mimeType, codecMimeType, capabilities, hardwareAccelerated, softwareOnly, vendor, forceDisableAdaptive, /* forceSecure= */ false)); } else if (!secureDecodersExplicit && secureSupported) { decoderInfos.add( MediaCodecInfo.newInstance( name + ".secure", mimeType, codecMimeType, capabilities, hardwareAccelerated, softwareOnly, vendor, forceDisableAdaptive, /* forceSecure= */ true)); // It only makes sense to have one synthesized secure decoder, return immediately. return decoderInfos; } } catch (Exception e) { if (Util.SDK_INT <= 23 && !decoderInfos.isEmpty()) { // Suppress error querying secondary codec capabilities up to API level 23. Log.e(TAG, "Skipping codec " + name + " (failed to query capabilities)"); } else { // Rethrow error querying primary codec capabilities, or secondary codec // capabilities if API level is greater than 23. Log.e(TAG, "Failed to query codec " + name + " (" + codecMimeType + ")"); throw e; } } } return decoderInfos; } catch (Exception e) { // If the underlying mediaserver is in a bad state, we may catch an IllegalStateException // or an IllegalArgumentException here. throw new DecoderQueryException(e); } } /** * Returns the codec's supported MIME type for media of type {@code mimeType}, or {@code null} if * the codec can't be used. * * @param info The codec information. * @param name The name of the codec * @param mimeType The MIME type. 
* @return The codec's supported MIME type for media of type {@code mimeType}, or {@code null} if * the codec can't be used. If non-null, the returned type will be equal to {@code mimeType} * except in cases where the codec is known to use a non-standard MIME type alias. */ @Nullable private static String getCodecMimeType( android.media.MediaCodecInfo info, String name, String mimeType) { String[] supportedTypes = info.getSupportedTypes(); for (String supportedType : supportedTypes) { if (supportedType.equalsIgnoreCase(mimeType)) { return supportedType; } } if (mimeType.equals(MimeTypes.VIDEO_DOLBY_VISION)) { // Handle decoders that declare support for DV via MIME types that aren't // video/dolby-vision. if ("OMX.MS.HEVCDV.Decoder".equals(name)) { return "video/hevcdv"; } else if ("OMX.RTK.video.decoder".equals(name) || "OMX.realtek.video.decoder.tunneled".equals(name)) { return "video/dv_hevc"; } } else if (mimeType.equals(MimeTypes.AUDIO_ALAC) && "OMX.lge.alac.decoder".equals(name)) { return "audio/x-lg-alac"; } else if (mimeType.equals(MimeTypes.AUDIO_FLAC) && "OMX.lge.flac.decoder".equals(name)) { return "audio/x-lg-flac"; } return null; } /** * Returns whether the specified codec is usable for decoding on the current device. * * @param info The codec information. * @param name The name of the codec * @param secureDecodersExplicit Whether secure decoders were explicitly listed, if present. * @param mimeType The MIME type. * @return Whether the specified codec is usable for decoding on the current device. */ private static boolean isCodecUsableDecoder( android.media.MediaCodecInfo info, String name, boolean secureDecodersExplicit, String mimeType) { if (info.isEncoder() || (!secureDecodersExplicit && name.endsWith(".secure"))) { return false; } // Work around broken audio decoders. if (Util.SDK_INT < 21 && ("CIPAACDecoder".equals(name) || "CIPMP3Decoder".equals(name) || "CIPVorbisDecoder".equals(name) || "CIPAMRNBDecoder".equals(name) || "AACDecoder".equals(name) || "MP3Decoder".equals(name))) { return false; } // Work around https://github.com/google/ExoPlayer/issues/1528 and // https://github.com/google/ExoPlayer/issues/3171. if (Util.SDK_INT < 18 && "OMX.MTK.AUDIO.DECODER.AAC".equals(name) && ("a70".equals(Util.DEVICE) || ("Xiaomi".equals(Util.MANUFACTURER) && Util.DEVICE.startsWith("HM")))) { return false; } // Work around an issue where querying/creating a particular MP3 decoder on some devices on // platform API version 16 fails. if (Util.SDK_INT == 16 && "OMX.qcom.audio.decoder.mp3".equals(name) && ("dlxu".equals(Util.DEVICE) // HTC Butterfly || "protou".equals(Util.DEVICE) // HTC Desire X || "ville".equals(Util.DEVICE) // HTC One S || "villeplus".equals(Util.DEVICE) || "villec2".equals(Util.DEVICE) || Util.DEVICE.startsWith("gee") // LGE Optimus G || "C6602".equals(Util.DEVICE) // Sony Xperia Z || "C6603".equals(Util.DEVICE) || "C6606".equals(Util.DEVICE) || "C6616".equals(Util.DEVICE) || "L36h".equals(Util.DEVICE) || "SO-02E".equals(Util.DEVICE))) { return false; } // Work around an issue where large timestamps are not propagated correctly. if (Util.SDK_INT == 16 && "OMX.qcom.audio.decoder.aac".equals(name) && ("C1504".equals(Util.DEVICE) // Sony Xperia E || "C1505".equals(Util.DEVICE) || "C1604".equals(Util.DEVICE) // Sony Xperia E dual || "C1605".equals(Util.DEVICE))) { return false; } // Work around https://github.com/google/ExoPlayer/issues/3249. 
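    // The affected decoders are OMX.SEC.aac.dec and OMX.Exynos.AAC.Decoder on the Samsung Galaxy S6
    // family devices listed below; rejecting them lets a working decoder be selected instead.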
if (Util.SDK_INT < 24 && ("OMX.SEC.aac.dec".equals(name) || "OMX.Exynos.AAC.Decoder".equals(name)) && "samsung".equals(Util.MANUFACTURER) && (Util.DEVICE.startsWith("zeroflte") // Galaxy S6 || Util.DEVICE.startsWith("zerolte") // Galaxy S6 Edge || Util.DEVICE.startsWith("zenlte") // Galaxy S6 Edge+ || "SC-05G".equals(Util.DEVICE) // Galaxy S6 || "marinelteatt".equals(Util.DEVICE) // Galaxy S6 Active || "404SC".equals(Util.DEVICE) // Galaxy S6 Edge || "SC-04G".equals(Util.DEVICE) || "SCV31".equals(Util.DEVICE))) { return false; } // Work around https://github.com/google/ExoPlayer/issues/548. // VP8 decoder on Samsung Galaxy S3/S4/S4 Mini/Tab 3/Note 2 does not render video. if (Util.SDK_INT <= 19 && "OMX.SEC.vp8.dec".equals(name) && "samsung".equals(Util.MANUFACTURER) && (Util.DEVICE.startsWith("d2") || Util.DEVICE.startsWith("serrano") || Util.DEVICE.startsWith("jflte") || Util.DEVICE.startsWith("santos") || Util.DEVICE.startsWith("t0"))) { return false; } // VP8 decoder on Samsung Galaxy S4 cannot be queried. if (Util.SDK_INT <= 19 && Util.DEVICE.startsWith("jflte") && "OMX.qcom.video.decoder.vp8".equals(name)) { return false; } // MTK E-AC3 decoder doesn't support decoding JOC streams in 2-D. See [Internal: b/69400041]. if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType) && "OMX.MTK.AUDIO.DECODER.DSPAC3".equals(name)) { return false; } return true; } /** * Modifies a list of {@link MediaCodecInfo}s to apply workarounds where we know better than the * platform. * * @param mimeType The MIME type of input media. * @param decoderInfos The list to modify. */ private static void applyWorkarounds(String mimeType, List<MediaCodecInfo> decoderInfos) { if (MimeTypes.AUDIO_RAW.equals(mimeType)) { if (Util.SDK_INT < 26 && Util.DEVICE.equals("R9") && decoderInfos.size() == 1 && decoderInfos.get(0).name.equals("OMX.MTK.AUDIO.DECODER.RAW")) { // This device does not list a generic raw audio decoder, yet it can be instantiated by // name. See <a href="https://github.com/google/ExoPlayer/issues/5782">Issue #5782</a>. decoderInfos.add( MediaCodecInfo.newInstance( /* name= */ "OMX.google.raw.decoder", /* mimeType= */ MimeTypes.AUDIO_RAW, /* codecMimeType= */ MimeTypes.AUDIO_RAW, /* capabilities= */ null, /* hardwareAccelerated= */ false, /* softwareOnly= */ true, /* vendor= */ false, /* forceDisableAdaptive= */ false, /* forceSecure= */ false)); } // Work around inconsistent raw audio decoding behavior across different devices. sortByScore( decoderInfos, decoderInfo -> { String name = decoderInfo.name; if (name.startsWith("OMX.google") || name.startsWith("c2.android")) { // Prefer generic decoders over ones provided by the device. return 1; } if (Util.SDK_INT < 26 && name.equals("OMX.MTK.AUDIO.DECODER.RAW")) { // This decoder may modify the audio, so any other compatible decoders take // precedence. See [Internal: b/62337687]. return -1; } return 0; }); } if (Util.SDK_INT < 21 && decoderInfos.size() > 1) { String firstCodecName = decoderInfos.get(0).name; if ("OMX.SEC.mp3.dec".equals(firstCodecName) || "OMX.SEC.MP3.Decoder".equals(firstCodecName) || "OMX.brcm.audio.mp3.decoder".equals(firstCodecName)) { // Prefer OMX.google codecs over OMX.SEC.mp3.dec, OMX.SEC.MP3.Decoder and // OMX.brcm.audio.mp3.decoder on older devices. See: // https://github.com/google/ExoPlayer/issues/398 and // https://github.com/google/ExoPlayer/issues/4519. sortByScore(decoderInfos, decoderInfo -> decoderInfo.name.startsWith("OMX.google") ? 
1 : 0); } } if (Util.SDK_INT < 30 && decoderInfos.size() > 1) { String firstCodecName = decoderInfos.get(0).name; // Prefer anything other than OMX.qti.audio.decoder.flac on older devices. See [Internal // ref: b/147278539] and [Internal ref: b/147354613]. if ("OMX.qti.audio.decoder.flac".equals(firstCodecName)) { decoderInfos.add(decoderInfos.remove(0)); } } } private static boolean isAlias(android.media.MediaCodecInfo info) { return Util.SDK_INT >= 29 && isAliasV29(info); } @RequiresApi(29) private static boolean isAliasV29(android.media.MediaCodecInfo info) { return info.isAlias(); } /** * The result of {@link android.media.MediaCodecInfo#isHardwareAccelerated()} for API levels 29+, * or a best-effort approximation for lower levels. */ private static boolean isHardwareAccelerated(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isHardwareAcceleratedV29(codecInfo); } // codecInfo.isHardwareAccelerated() != codecInfo.isSoftwareOnly() is not necessarily true. // However, we assume this to be true as an approximation. return !isSoftwareOnly(codecInfo); } @RequiresApi(29) private static boolean isHardwareAcceleratedV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isHardwareAccelerated(); } /** * The result of {@link android.media.MediaCodecInfo#isSoftwareOnly()} for API levels 29+, or a * best-effort approximation for lower levels. */ private static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isSoftwareOnlyV29(codecInfo); } String codecName = Util.toLowerInvariant(codecInfo.getName()); if (codecName.startsWith("arc.")) { // App Runtime for Chrome (ARC) codecs return false; } return codecName.startsWith("omx.google.") || codecName.startsWith("omx.ffmpeg.") || (codecName.startsWith("omx.sec.") && codecName.contains(".sw.")) || codecName.equals("omx.qcom.video.decoder.hevcswvdec") || codecName.startsWith("c2.android.") || codecName.startsWith("c2.google.") || (!codecName.startsWith("omx.") && !codecName.startsWith("c2.")); } @RequiresApi(29) private static boolean isSoftwareOnlyV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isSoftwareOnly(); } /** * The result of {@link android.media.MediaCodecInfo#isVendor()} for API levels 29+, or a * best-effort approximation for lower levels. */ private static boolean isVendor(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isVendorV29(codecInfo); } String codecName = Util.toLowerInvariant(codecInfo.getName()); return !codecName.startsWith("omx.google.") && !codecName.startsWith("c2.android.") && !codecName.startsWith("c2.google."); } @RequiresApi(29) private static boolean isVendorV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isVendor(); } /** * Returns whether the decoder is known to fail when adapting, despite advertising itself as an * adaptive decoder. * * @param name The decoder name. * @return True if the decoder is known to fail when adapting. */ private static boolean codecNeedsDisableAdaptationWorkaround(String name) { return Util.SDK_INT <= 22 && ("ODROID-XU3".equals(Util.MODEL) || "Nexus 10".equals(Util.MODEL)) && ("OMX.Exynos.AVC.Decoder".equals(name) || "OMX.Exynos.AVC.Decoder.secure".equals(name)); } @Nullable private static Pair<Integer, Integer> getDolbyVisionProfileAndLevel( String codec, String[] parts) { if (parts.length < 3) { // The codec has fewer parts than required by the Dolby Vision codec string format. 
Log.w(TAG, "Ignoring malformed Dolby Vision codec string: " + codec); return null; } // The profile_space gets ignored. Matcher matcher = PROFILE_PATTERN.matcher(parts[1]); if (!matcher.matches()) { Log.w(TAG, "Ignoring malformed Dolby Vision codec string: " + codec); return null; } @Nullable String profileString = matcher.group(1); @Nullable Integer profile = dolbyVisionStringToProfile(profileString); if (profile == null) { Log.w(TAG, "Unknown Dolby Vision profile string: " + profileString); return null; } String levelString = parts[2]; @Nullable Integer level = dolbyVisionStringToLevel(levelString); if (level == null) { Log.w(TAG, "Unknown Dolby Vision level string: " + levelString); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getHevcProfileAndLevel(String codec, String[] parts) { if (parts.length < 4) { // The codec has fewer parts than required by the HEVC codec string format. Log.w(TAG, "Ignoring malformed HEVC codec string: " + codec); return null; } // The profile_space gets ignored. Matcher matcher = PROFILE_PATTERN.matcher(parts[1]); if (!matcher.matches()) { Log.w(TAG, "Ignoring malformed HEVC codec string: " + codec); return null; } @Nullable String profileString = matcher.group(1); int profile; if ("1".equals(profileString)) { profile = CodecProfileLevel.HEVCProfileMain; } else if ("2".equals(profileString)) { profile = CodecProfileLevel.HEVCProfileMain10; } else { Log.w(TAG, "Unknown HEVC profile string: " + profileString); return null; } @Nullable String levelString = parts[3]; @Nullable Integer level = hevcCodecStringToProfileLevel(levelString); if (level == null) { Log.w(TAG, "Unknown HEVC level string: " + levelString); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getAvcProfileAndLevel(String codec, String[] parts) { if (parts.length < 2) { // The codec has fewer parts than required by the AVC codec string format. Log.w(TAG, "Ignoring malformed AVC codec string: " + codec); return null; } int profileInteger; int levelInteger; try { if (parts[1].length() == 6) { // Format: avc1.xxccyy, where xx is profile and yy level, both hexadecimal. profileInteger = Integer.parseInt(parts[1].substring(0, 2), 16); levelInteger = Integer.parseInt(parts[1].substring(4), 16); } else if (parts.length >= 3) { // Format: avc1.xx.[y]yy where xx is profile and [y]yy level, both decimal. profileInteger = Integer.parseInt(parts[1]); levelInteger = Integer.parseInt(parts[2]); } else { // We don't recognize the format. 
Log.w(TAG, "Ignoring malformed AVC codec string: " + codec); return null; } } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed AVC codec string: " + codec); return null; } int profile = avcProfileNumberToConst(profileInteger); if (profile == -1) { Log.w(TAG, "Unknown AVC profile: " + profileInteger); return null; } int level = avcLevelNumberToConst(levelInteger); if (level == -1) { Log.w(TAG, "Unknown AVC level: " + levelInteger); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getVp9ProfileAndLevel(String codec, String[] parts) { if (parts.length < 3) { Log.w(TAG, "Ignoring malformed VP9 codec string: " + codec); return null; } int profileInteger; int levelInteger; try { profileInteger = Integer.parseInt(parts[1]); levelInteger = Integer.parseInt(parts[2]); } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed VP9 codec string: " + codec); return null; } int profile = vp9ProfileNumberToConst(profileInteger); if (profile == -1) { Log.w(TAG, "Unknown VP9 profile: " + profileInteger); return null; } int level = vp9LevelNumberToConst(levelInteger); if (level == -1) { Log.w(TAG, "Unknown VP9 level: " + levelInteger); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getAv1ProfileAndLevel( String codec, String[] parts, @Nullable ColorInfo colorInfo) { if (parts.length < 4) { Log.w(TAG, "Ignoring malformed AV1 codec string: " + codec); return null; } int profileInteger; int levelInteger; int bitDepthInteger; try { profileInteger = Integer.parseInt(parts[1]); levelInteger = Integer.parseInt(parts[2].substring(0, 2)); bitDepthInteger = Integer.parseInt(parts[3]); } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed AV1 codec string: " + codec); return null; } if (profileInteger != 0) { Log.w(TAG, "Unknown AV1 profile: " + profileInteger); return null; } if (bitDepthInteger != 8 && bitDepthInteger != 10) { Log.w(TAG, "Unknown AV1 bit depth: " + bitDepthInteger); return null; } int profile; if (bitDepthInteger == 8) { profile = CodecProfileLevel.AV1ProfileMain8; } else if (colorInfo != null && (colorInfo.hdrStaticInfo != null || colorInfo.colorTransfer == C.COLOR_TRANSFER_HLG || colorInfo.colorTransfer == C.COLOR_TRANSFER_ST2084)) { profile = CodecProfileLevel.AV1ProfileMain10HDR10; } else { profile = CodecProfileLevel.AV1ProfileMain10; } int level = av1LevelNumberToConst(levelInteger); if (level == -1) { Log.w(TAG, "Unknown AV1 level: " + levelInteger); return null; } return new Pair<>(profile, level); } /** * Conversion values taken from ISO 14496-10 Table A-1. * * @param avcLevel one of CodecProfileLevel.AVCLevel* constants. 
* @return maximum frame size that can be decoded by a decoder with the specified avc level * (or {@code -1} if the level is not recognized) */ private static int avcLevelToMaxFrameSize(int avcLevel) { switch (avcLevel) { case CodecProfileLevel.AVCLevel1: case CodecProfileLevel.AVCLevel1b: return 99 * 16 * 16; case CodecProfileLevel.AVCLevel12: case CodecProfileLevel.AVCLevel13: case CodecProfileLevel.AVCLevel2: return 396 * 16 * 16; case CodecProfileLevel.AVCLevel21: return 792 * 16 * 16; case CodecProfileLevel.AVCLevel22: case CodecProfileLevel.AVCLevel3: return 1620 * 16 * 16; case CodecProfileLevel.AVCLevel31: return 3600 * 16 * 16; case CodecProfileLevel.AVCLevel32: return 5120 * 16 * 16; case CodecProfileLevel.AVCLevel4: case CodecProfileLevel.AVCLevel41: return 8192 * 16 * 16; case CodecProfileLevel.AVCLevel42: return 8704 * 16 * 16; case CodecProfileLevel.AVCLevel5: return 22080 * 16 * 16; case CodecProfileLevel.AVCLevel51: case CodecProfileLevel.AVCLevel52: return 36864 * 16 * 16; default: return -1; } } @Nullable private static Pair<Integer, Integer> getAacCodecProfileAndLevel(String codec, String[] parts) { if (parts.length != 3) { Log.w(TAG, "Ignoring malformed MP4A codec string: " + codec); return null; } try { // Get the object type indication, which is a hexadecimal value (see RFC 6381/ISO 14496-1). int objectTypeIndication = Integer.parseInt(parts[1], 16); @Nullable String mimeType = MimeTypes.getMimeTypeFromMp4ObjectType(objectTypeIndication); if (MimeTypes.AUDIO_AAC.equals(mimeType)) { // For MPEG-4 audio this is followed by an audio object type indication as a decimal number. int audioObjectTypeIndication = Integer.parseInt(parts[2]); int profile = mp4aAudioObjectTypeToProfile(audioObjectTypeIndication); if (profile != -1) { // Level is set to zero in AAC decoder CodecProfileLevels. return new Pair<>(profile, 0); } } } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed MP4A codec string: " + codec); } return null; } /** Stably sorts the provided {@code list} in-place, in order of decreasing score. */ private static <T> void sortByScore(List<T> list, ScoreProvider<T> scoreProvider) { Collections.sort(list, (a, b) -> scoreProvider.getScore(b) - scoreProvider.getScore(a)); } /** Interface for providers of item scores. */ private interface ScoreProvider<T> { /** Returns the score of the provided item. */ int getScore(T t); } private interface MediaCodecListCompat { /** * The number of codecs in the list. */ int getCodecCount(); /** * The info at the specified index in the list. * * @param index The index. */ android.media.MediaCodecInfo getCodecInfoAt(int index); /** * Returns whether secure decoders are explicitly listed, if present. */ boolean secureDecodersExplicit(); /** Whether the specified {@link CodecCapabilities} {@code feature} is supported. */ boolean isFeatureSupported(String feature, String mimeType, CodecCapabilities capabilities); /** Whether the specified {@link CodecCapabilities} {@code feature} is required. */ boolean isFeatureRequired(String feature, String mimeType, CodecCapabilities capabilities); } @RequiresApi(21) private static final class MediaCodecListCompatV21 implements MediaCodecListCompat { private final int codecKind; @Nullable private android.media.MediaCodecInfo[] mediaCodecInfos; public MediaCodecListCompatV21(boolean includeSecure, boolean includeTunneling) { codecKind = includeSecure || includeTunneling ? 
MediaCodecList.ALL_CODECS : MediaCodecList.REGULAR_CODECS; } @Override public int getCodecCount() { ensureMediaCodecInfosInitialized(); return mediaCodecInfos.length; } @Override public android.media.MediaCodecInfo getCodecInfoAt(int index) { ensureMediaCodecInfosInitialized(); return mediaCodecInfos[index]; } @Override public boolean secureDecodersExplicit() { return true; } @Override public boolean isFeatureSupported( String feature, String mimeType, CodecCapabilities capabilities) { return capabilities.isFeatureSupported(feature); } @Override public boolean isFeatureRequired( String feature, String mimeType, CodecCapabilities capabilities) { return capabilities.isFeatureRequired(feature); } @EnsuresNonNull({"mediaCodecInfos"}) private void ensureMediaCodecInfosInitialized() { if (mediaCodecInfos == null) { mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos(); } } } private static final class MediaCodecListCompatV16 implements MediaCodecListCompat { @Override public int getCodecCount() { return MediaCodecList.getCodecCount(); } @Override public android.media.MediaCodecInfo getCodecInfoAt(int index) { return MediaCodecList.getCodecInfoAt(index); } @Override public boolean secureDecodersExplicit() { return false; } @Override public boolean isFeatureSupported( String feature, String mimeType, CodecCapabilities capabilities) { // Secure decoders weren't explicitly listed prior to API level 21. We assume that a secure // H264 decoder exists. return CodecCapabilities.FEATURE_SecurePlayback.equals(feature) && MimeTypes.VIDEO_H264.equals(mimeType); } @Override public boolean isFeatureRequired( String feature, String mimeType, CodecCapabilities capabilities) { return false; } } private static final class CodecKey { public final String mimeType; public final boolean secure; public final boolean tunneling; public CodecKey(String mimeType, boolean secure, boolean tunneling) { this.mimeType = mimeType; this.secure = secure; this.tunneling = tunneling; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + mimeType.hashCode(); result = prime * result + (secure ? 1231 : 1237); result = prime * result + (tunneling ? 1231 : 1237); return result; } @Override public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } if (obj == null || obj.getClass() != CodecKey.class) { return false; } CodecKey other = (CodecKey) obj; return TextUtils.equals(mimeType, other.mimeType) && secure == other.secure && tunneling == other.tunneling; } } private static int avcProfileNumberToConst(int profileNumber) { switch (profileNumber) { case 66: return CodecProfileLevel.AVCProfileBaseline; case 77: return CodecProfileLevel.AVCProfileMain; case 88: return CodecProfileLevel.AVCProfileExtended; case 100: return CodecProfileLevel.AVCProfileHigh; case 110: return CodecProfileLevel.AVCProfileHigh10; case 122: return CodecProfileLevel.AVCProfileHigh422; case 244: return CodecProfileLevel.AVCProfileHigh444; default: return -1; } } private static int avcLevelNumberToConst(int levelNumber) { // TODO: Find int for CodecProfileLevel.AVCLevel1b. 
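    // levelNumber is the decimal value of the avc1 level byte, e.g. 31 maps to AVCLevel31 (level 3.1).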
switch (levelNumber) { case 10: return CodecProfileLevel.AVCLevel1; case 11: return CodecProfileLevel.AVCLevel11; case 12: return CodecProfileLevel.AVCLevel12; case 13: return CodecProfileLevel.AVCLevel13; case 20: return CodecProfileLevel.AVCLevel2; case 21: return CodecProfileLevel.AVCLevel21; case 22: return CodecProfileLevel.AVCLevel22; case 30: return CodecProfileLevel.AVCLevel3; case 31: return CodecProfileLevel.AVCLevel31; case 32: return CodecProfileLevel.AVCLevel32; case 40: return CodecProfileLevel.AVCLevel4; case 41: return CodecProfileLevel.AVCLevel41; case 42: return CodecProfileLevel.AVCLevel42; case 50: return CodecProfileLevel.AVCLevel5; case 51: return CodecProfileLevel.AVCLevel51; case 52: return CodecProfileLevel.AVCLevel52; default: return -1; } } private static int vp9ProfileNumberToConst(int profileNumber) { switch (profileNumber) { case 0: return CodecProfileLevel.VP9Profile0; case 1: return CodecProfileLevel.VP9Profile1; case 2: return CodecProfileLevel.VP9Profile2; case 3: return CodecProfileLevel.VP9Profile3; default: return -1; } } private static int vp9LevelNumberToConst(int levelNumber) { switch (levelNumber) { case 10: return CodecProfileLevel.VP9Level1; case 11: return CodecProfileLevel.VP9Level11; case 20: return CodecProfileLevel.VP9Level2; case 21: return CodecProfileLevel.VP9Level21; case 30: return CodecProfileLevel.VP9Level3; case 31: return CodecProfileLevel.VP9Level31; case 40: return CodecProfileLevel.VP9Level4; case 41: return CodecProfileLevel.VP9Level41; case 50: return CodecProfileLevel.VP9Level5; case 51: return CodecProfileLevel.VP9Level51; case 60: return CodecProfileLevel.VP9Level6; case 61: return CodecProfileLevel.VP9Level61; case 62: return CodecProfileLevel.VP9Level62; default: return -1; } } @Nullable private static Integer hevcCodecStringToProfileLevel(@Nullable String codecString) { if (codecString == null) { return null; } switch (codecString) { case "L30": return CodecProfileLevel.HEVCMainTierLevel1; case "L60": return CodecProfileLevel.HEVCMainTierLevel2; case "L63": return CodecProfileLevel.HEVCMainTierLevel21; case "L90": return CodecProfileLevel.HEVCMainTierLevel3; case "L93": return CodecProfileLevel.HEVCMainTierLevel31; case "L120": return CodecProfileLevel.HEVCMainTierLevel4; case "L123": return CodecProfileLevel.HEVCMainTierLevel41; case "L150": return CodecProfileLevel.HEVCMainTierLevel5; case "L153": return CodecProfileLevel.HEVCMainTierLevel51; case "L156": return CodecProfileLevel.HEVCMainTierLevel52; case "L180": return CodecProfileLevel.HEVCMainTierLevel6; case "L183": return CodecProfileLevel.HEVCMainTierLevel61; case "L186": return CodecProfileLevel.HEVCMainTierLevel62; case "H30": return CodecProfileLevel.HEVCHighTierLevel1; case "H60": return CodecProfileLevel.HEVCHighTierLevel2; case "H63": return CodecProfileLevel.HEVCHighTierLevel21; case "H90": return CodecProfileLevel.HEVCHighTierLevel3; case "H93": return CodecProfileLevel.HEVCHighTierLevel31; case "H120": return CodecProfileLevel.HEVCHighTierLevel4; case "H123": return CodecProfileLevel.HEVCHighTierLevel41; case "H150": return CodecProfileLevel.HEVCHighTierLevel5; case "H153": return CodecProfileLevel.HEVCHighTierLevel51; case "H156": return CodecProfileLevel.HEVCHighTierLevel52; case "H180": return CodecProfileLevel.HEVCHighTierLevel6; case "H183": return CodecProfileLevel.HEVCHighTierLevel61; case "H186": return CodecProfileLevel.HEVCHighTierLevel62; default: return null; } } @Nullable private static Integer dolbyVisionStringToProfile(@Nullable String 
profileString) { if (profileString == null) { return null; } switch (profileString) { case "00": return CodecProfileLevel.DolbyVisionProfileDvavPer; case "01": return CodecProfileLevel.DolbyVisionProfileDvavPen; case "02": return CodecProfileLevel.DolbyVisionProfileDvheDer; case "03": return CodecProfileLevel.DolbyVisionProfileDvheDen; case "04": return CodecProfileLevel.DolbyVisionProfileDvheDtr; case "05": return CodecProfileLevel.DolbyVisionProfileDvheStn; case "06": return CodecProfileLevel.DolbyVisionProfileDvheDth; case "07": return CodecProfileLevel.DolbyVisionProfileDvheDtb; case "08": return CodecProfileLevel.DolbyVisionProfileDvheSt; case "09": return CodecProfileLevel.DolbyVisionProfileDvavSe; default: return null; } } @Nullable private static Integer dolbyVisionStringToLevel(@Nullable String levelString) { if (levelString == null) { return null; } switch (levelString) { case "01": return CodecProfileLevel.DolbyVisionLevelHd24; case "02": return CodecProfileLevel.DolbyVisionLevelHd30; case "03": return CodecProfileLevel.DolbyVisionLevelFhd24; case "04": return CodecProfileLevel.DolbyVisionLevelFhd30; case "05": return CodecProfileLevel.DolbyVisionLevelFhd60; case "06": return CodecProfileLevel.DolbyVisionLevelUhd24; case "07": return CodecProfileLevel.DolbyVisionLevelUhd30; case "08": return CodecProfileLevel.DolbyVisionLevelUhd48; case "09": return CodecProfileLevel.DolbyVisionLevelUhd60; default: return null; } } private static int av1LevelNumberToConst(int levelNumber) { // See https://aomediacodec.github.io/av1-spec/av1-spec.pdf Annex A: Profiles and levels for // more information on mapping AV1 codec strings to levels. switch (levelNumber) { case 0: return CodecProfileLevel.AV1Level2; case 1: return CodecProfileLevel.AV1Level21; case 2: return CodecProfileLevel.AV1Level22; case 3: return CodecProfileLevel.AV1Level23; case 4: return CodecProfileLevel.AV1Level3; case 5: return CodecProfileLevel.AV1Level31; case 6: return CodecProfileLevel.AV1Level32; case 7: return CodecProfileLevel.AV1Level33; case 8: return CodecProfileLevel.AV1Level4; case 9: return CodecProfileLevel.AV1Level41; case 10: return CodecProfileLevel.AV1Level42; case 11: return CodecProfileLevel.AV1Level43; case 12: return CodecProfileLevel.AV1Level5; case 13: return CodecProfileLevel.AV1Level51; case 14: return CodecProfileLevel.AV1Level52; case 15: return CodecProfileLevel.AV1Level53; case 16: return CodecProfileLevel.AV1Level6; case 17: return CodecProfileLevel.AV1Level61; case 18: return CodecProfileLevel.AV1Level62; case 19: return CodecProfileLevel.AV1Level63; case 20: return CodecProfileLevel.AV1Level7; case 21: return CodecProfileLevel.AV1Level71; case 22: return CodecProfileLevel.AV1Level72; case 23: return CodecProfileLevel.AV1Level73; default: return -1; } } private static int mp4aAudioObjectTypeToProfile(int profileNumber) { switch (profileNumber) { case 1: return CodecProfileLevel.AACObjectMain; case 2: return CodecProfileLevel.AACObjectLC; case 3: return CodecProfileLevel.AACObjectSSR; case 4: return CodecProfileLevel.AACObjectLTP; case 5: return CodecProfileLevel.AACObjectHE; case 6: return CodecProfileLevel.AACObjectScalable; case 17: return CodecProfileLevel.AACObjectERLC; case 20: return CodecProfileLevel.AACObjectERScalable; case 23: return CodecProfileLevel.AACObjectLD; case 29: return CodecProfileLevel.AACObjectHE_PS; case 39: return CodecProfileLevel.AACObjectELD; case 42: return CodecProfileLevel.AACObjectXHE; default: return -1; } } }
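A minimal usage sketch (a hypothetical caller, not part of the file above): it asks MediaCodecUtil for the preferred H.264 decoder using only the public methods shown in this file, and treats a query failure as "no decoder available". The class and method names here are illustrative only.

import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer2.util.MimeTypes;

final class PreferredDecoderExample {

  /** Returns the name of the preferred H.264 decoder, or null if none is available. */
  static String preferredH264DecoderName() {
    try {
      MediaCodecInfo decoderInfo =
          MediaCodecUtil.getDecoderInfo(
              MimeTypes.VIDEO_H264, /* secure= */ false, /* tunneling= */ false);
      return decoderInfo == null ? null : decoderInfo.name;
    } catch (DecoderQueryException e) {
      // Query failures are normally temporary (e.g. a crashed mediaserver), so fail soft.
      return null;
    }
  }

  private PreferredDecoderExample() {}
}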
library/core/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecUtil.java
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.mediacodec; import android.annotation.SuppressLint; import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecProfileLevel; import android.media.MediaCodecList; import android.text.TextUtils; import android.util.Pair; import android.util.SparseIntArray; import androidx.annotation.CheckResult; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.video.ColorInfo; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.checkerframework.checker.nullness.qual.EnsuresNonNull; /** * A utility class for querying the available codecs. */ @SuppressLint("InlinedApi") public final class MediaCodecUtil { /** * Thrown when an error occurs querying the device for its underlying media capabilities. * <p> * Such failures are not expected in normal operation and are normally temporary (e.g. if the * mediaserver process has crashed and is yet to restart). */ public static class DecoderQueryException extends Exception { private DecoderQueryException(Throwable cause) { super("Failed to query underlying media codecs", cause); } } private static final String TAG = "MediaCodecUtil"; private static final Pattern PROFILE_PATTERN = Pattern.compile("^\\D?(\\d+)$"); private static final HashMap<CodecKey, List<MediaCodecInfo>> decoderInfosCache = new HashMap<>(); // Codecs to constant mappings. // AVC. private static final SparseIntArray AVC_PROFILE_NUMBER_TO_CONST; private static final SparseIntArray AVC_LEVEL_NUMBER_TO_CONST; private static final String CODEC_ID_AVC1 = "avc1"; private static final String CODEC_ID_AVC2 = "avc2"; // VP9 private static final SparseIntArray VP9_PROFILE_NUMBER_TO_CONST; private static final SparseIntArray VP9_LEVEL_NUMBER_TO_CONST; private static final String CODEC_ID_VP09 = "vp09"; // HEVC. private static final Map<String, Integer> HEVC_CODEC_STRING_TO_PROFILE_LEVEL; private static final String CODEC_ID_HEV1 = "hev1"; private static final String CODEC_ID_HVC1 = "hvc1"; // Dolby Vision. private static final Map<String, Integer> DOLBY_VISION_STRING_TO_PROFILE; private static final Map<String, Integer> DOLBY_VISION_STRING_TO_LEVEL; // AV1. private static final SparseIntArray AV1_LEVEL_NUMBER_TO_CONST; private static final String CODEC_ID_AV01 = "av01"; // MP4A AAC. private static final SparseIntArray MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE; private static final String CODEC_ID_MP4A = "mp4a"; // Lazily initialized. 
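  // Stays at -1 until maxH264DecodableFrameSize() computes and caches the value.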
private static int maxH264DecodableFrameSize = -1; private MediaCodecUtil() {} /** * Optional call to warm the codec cache for a given mime type. * * <p>Calling this method may speed up subsequent calls to {@link #getDecoderInfo(String, boolean, * boolean)} and {@link #getDecoderInfos(String, boolean, boolean)}. * * @param mimeType The mime type. * @param secure Whether the decoder is required to support secure decryption. Always pass false * unless secure decryption really is required. * @param tunneling Whether the decoder is required to support tunneling. Always pass false unless * tunneling really is required. */ public static void warmDecoderInfoCache(String mimeType, boolean secure, boolean tunneling) { try { getDecoderInfos(mimeType, secure, tunneling); } catch (DecoderQueryException e) { // Codec warming is best effort, so we can swallow the exception. Log.e(TAG, "Codec warming failed", e); } } /** * Returns information about a decoder suitable for audio passthrough. * * @return A {@link MediaCodecInfo} describing the decoder, or null if no suitable decoder exists. * @throws DecoderQueryException If there was an error querying the available decoders. */ @Nullable public static MediaCodecInfo getPassthroughDecoderInfo() throws DecoderQueryException { return getDecoderInfo(MimeTypes.AUDIO_RAW, /* secure= */ false, /* tunneling= */ false); } /** * Returns information about the preferred decoder for a given mime type. * * @param mimeType The MIME type. * @param secure Whether the decoder is required to support secure decryption. Always pass false * unless secure decryption really is required. * @param tunneling Whether the decoder is required to support tunneling. Always pass false unless * tunneling really is required. * @return A {@link MediaCodecInfo} describing the decoder, or null if no suitable decoder exists. * @throws DecoderQueryException If there was an error querying the available decoders. */ @Nullable public static MediaCodecInfo getDecoderInfo(String mimeType, boolean secure, boolean tunneling) throws DecoderQueryException { List<MediaCodecInfo> decoderInfos = getDecoderInfos(mimeType, secure, tunneling); return decoderInfos.isEmpty() ? null : decoderInfos.get(0); } /** * Returns all {@link MediaCodecInfo}s for the given mime type, in the order given by {@link * MediaCodecList}. * * @param mimeType The MIME type. * @param secure Whether the decoder is required to support secure decryption. Always pass false * unless secure decryption really is required. * @param tunneling Whether the decoder is required to support tunneling. Always pass false unless * tunneling really is required. * @return An unmodifiable list of all {@link MediaCodecInfo}s for the given mime type, in the * order given by {@link MediaCodecList}. * @throws DecoderQueryException If there was an error querying the available decoders. */ public static synchronized List<MediaCodecInfo> getDecoderInfos( String mimeType, boolean secure, boolean tunneling) throws DecoderQueryException { CodecKey key = new CodecKey(mimeType, secure, tunneling); @Nullable List<MediaCodecInfo> cachedDecoderInfos = decoderInfosCache.get(key); if (cachedDecoderInfos != null) { return cachedDecoderInfos; } MediaCodecListCompat mediaCodecList = Util.SDK_INT >= 21 ? 
new MediaCodecListCompatV21(secure, tunneling) : new MediaCodecListCompatV16(); ArrayList<MediaCodecInfo> decoderInfos = getDecoderInfosInternal(key, mediaCodecList); if (secure && decoderInfos.isEmpty() && 21 <= Util.SDK_INT && Util.SDK_INT <= 23) { // Some devices don't list secure decoders on API level 21 [Internal: b/18678462]. Try the // legacy path. We also try this path on API levels 22 and 23 as a defensive measure. mediaCodecList = new MediaCodecListCompatV16(); decoderInfos = getDecoderInfosInternal(key, mediaCodecList); if (!decoderInfos.isEmpty()) { Log.w(TAG, "MediaCodecList API didn't list secure decoder for: " + mimeType + ". Assuming: " + decoderInfos.get(0).name); } } applyWorkarounds(mimeType, decoderInfos); List<MediaCodecInfo> unmodifiableDecoderInfos = Collections.unmodifiableList(decoderInfos); decoderInfosCache.put(key, unmodifiableDecoderInfos); return unmodifiableDecoderInfos; } /** * Returns a copy of the provided decoder list sorted such that decoders with format support are * listed first. The returned list is modifiable for convenience. */ @CheckResult public static List<MediaCodecInfo> getDecoderInfosSortedByFormatSupport( List<MediaCodecInfo> decoderInfos, Format format) { decoderInfos = new ArrayList<>(decoderInfos); sortByScore( decoderInfos, decoderInfo -> { try { return decoderInfo.isFormatSupported(format) ? 1 : 0; } catch (DecoderQueryException e) { return -1; } }); return decoderInfos; } /** * Returns the maximum frame size supported by the default H264 decoder. * * @return The maximum frame size for an H264 stream that can be decoded on the device. */ public static int maxH264DecodableFrameSize() throws DecoderQueryException { if (maxH264DecodableFrameSize == -1) { int result = 0; @Nullable MediaCodecInfo decoderInfo = getDecoderInfo(MimeTypes.VIDEO_H264, /* secure= */ false, /* tunneling= */ false); if (decoderInfo != null) { for (CodecProfileLevel profileLevel : decoderInfo.getProfileLevels()) { result = Math.max(avcLevelToMaxFrameSize(profileLevel.level), result); } // We assume support for at least 480p (SDK_INT >= 21) or 360p (SDK_INT < 21), which are // the levels mandated by the Android CDD. result = Math.max(result, Util.SDK_INT >= 21 ? (720 * 480) : (480 * 360)); } maxH264DecodableFrameSize = result; } return maxH264DecodableFrameSize; } /** * Returns profile and level (as defined by {@link CodecProfileLevel}) corresponding to the codec * description string (as defined by RFC 6381) of the given format. * * @param format Media format with a codec description string, as defined by RFC 6381. * @return A pair (profile constant, level constant) if the codec of the {@code format} is * well-formed and recognized, or null otherwise. */ @Nullable public static Pair<Integer, Integer> getCodecProfileAndLevel(Format format) { if (format.codecs == null) { return null; } String[] parts = format.codecs.split("\\."); // Dolby Vision can use DV, AVC or HEVC codec IDs, so check the MIME type first. 
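    // Illustrative RFC 6381 codec strings (examples, not from the spec table): "avc1.640028" (AVC),
    // "hev1.2.4.L153.B0" (HEVC), "vp09.00.10.08" (VP9), "av01.0.04M.08" (AV1), "mp4a.40.2" (AAC-LC)
    // and, for formats whose sample MIME type is video/dolby-vision, "dvhe.05.07".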
if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) { return getDolbyVisionProfileAndLevel(format.codecs, parts); } switch (parts[0]) { case CODEC_ID_AVC1: case CODEC_ID_AVC2: return getAvcProfileAndLevel(format.codecs, parts); case CODEC_ID_VP09: return getVp9ProfileAndLevel(format.codecs, parts); case CODEC_ID_HEV1: case CODEC_ID_HVC1: return getHevcProfileAndLevel(format.codecs, parts); case CODEC_ID_AV01: return getAv1ProfileAndLevel(format.codecs, parts, format.colorInfo); case CODEC_ID_MP4A: return getAacCodecProfileAndLevel(format.codecs, parts); default: return null; } } // Internal methods. /** * Returns {@link MediaCodecInfo}s for the given codec {@link CodecKey} in the order given by * {@code mediaCodecList}. * * @param key The codec key. * @param mediaCodecList The codec list. * @return The codec information for usable codecs matching the specified key. * @throws DecoderQueryException If there was an error querying the available decoders. */ private static ArrayList<MediaCodecInfo> getDecoderInfosInternal( CodecKey key, MediaCodecListCompat mediaCodecList) throws DecoderQueryException { try { ArrayList<MediaCodecInfo> decoderInfos = new ArrayList<>(); String mimeType = key.mimeType; int numberOfCodecs = mediaCodecList.getCodecCount(); boolean secureDecodersExplicit = mediaCodecList.secureDecodersExplicit(); // Note: MediaCodecList is sorted by the framework such that the best decoders come first. for (int i = 0; i < numberOfCodecs; i++) { android.media.MediaCodecInfo codecInfo = mediaCodecList.getCodecInfoAt(i); if (isAlias(codecInfo)) { // Skip aliases of other codecs, since they will also be listed under their canonical // names. continue; } String name = codecInfo.getName(); if (!isCodecUsableDecoder(codecInfo, name, secureDecodersExplicit, mimeType)) { continue; } @Nullable String codecMimeType = getCodecMimeType(codecInfo, name, mimeType); if (codecMimeType == null) { continue; } try { CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(codecMimeType); boolean tunnelingSupported = mediaCodecList.isFeatureSupported( CodecCapabilities.FEATURE_TunneledPlayback, codecMimeType, capabilities); boolean tunnelingRequired = mediaCodecList.isFeatureRequired( CodecCapabilities.FEATURE_TunneledPlayback, codecMimeType, capabilities); if ((!key.tunneling && tunnelingRequired) || (key.tunneling && !tunnelingSupported)) { continue; } boolean secureSupported = mediaCodecList.isFeatureSupported( CodecCapabilities.FEATURE_SecurePlayback, codecMimeType, capabilities); boolean secureRequired = mediaCodecList.isFeatureRequired( CodecCapabilities.FEATURE_SecurePlayback, codecMimeType, capabilities); if ((!key.secure && secureRequired) || (key.secure && !secureSupported)) { continue; } boolean hardwareAccelerated = isHardwareAccelerated(codecInfo); boolean softwareOnly = isSoftwareOnly(codecInfo); boolean vendor = isVendor(codecInfo); boolean forceDisableAdaptive = codecNeedsDisableAdaptationWorkaround(name); if ((secureDecodersExplicit && key.secure == secureSupported) || (!secureDecodersExplicit && !key.secure)) { decoderInfos.add( MediaCodecInfo.newInstance( name, mimeType, codecMimeType, capabilities, hardwareAccelerated, softwareOnly, vendor, forceDisableAdaptive, /* forceSecure= */ false)); } else if (!secureDecodersExplicit && secureSupported) { decoderInfos.add( MediaCodecInfo.newInstance( name + ".secure", mimeType, codecMimeType, capabilities, hardwareAccelerated, softwareOnly, vendor, forceDisableAdaptive, /* forceSecure= */ true)); // It only makes sense to 
have one synthesized secure decoder, return immediately. return decoderInfos; } } catch (Exception e) { if (Util.SDK_INT <= 23 && !decoderInfos.isEmpty()) { // Suppress error querying secondary codec capabilities up to API level 23. Log.e(TAG, "Skipping codec " + name + " (failed to query capabilities)"); } else { // Rethrow error querying primary codec capabilities, or secondary codec // capabilities if API level is greater than 23. Log.e(TAG, "Failed to query codec " + name + " (" + codecMimeType + ")"); throw e; } } } return decoderInfos; } catch (Exception e) { // If the underlying mediaserver is in a bad state, we may catch an IllegalStateException // or an IllegalArgumentException here. throw new DecoderQueryException(e); } } /** * Returns the codec's supported MIME type for media of type {@code mimeType}, or {@code null} if * the codec can't be used. * * @param info The codec information. * @param name The name of the codec * @param mimeType The MIME type. * @return The codec's supported MIME type for media of type {@code mimeType}, or {@code null} if * the codec can't be used. If non-null, the returned type will be equal to {@code mimeType} * except in cases where the codec is known to use a non-standard MIME type alias. */ @Nullable private static String getCodecMimeType( android.media.MediaCodecInfo info, String name, String mimeType) { String[] supportedTypes = info.getSupportedTypes(); for (String supportedType : supportedTypes) { if (supportedType.equalsIgnoreCase(mimeType)) { return supportedType; } } if (mimeType.equals(MimeTypes.VIDEO_DOLBY_VISION)) { // Handle decoders that declare support for DV via MIME types that aren't // video/dolby-vision. if ("OMX.MS.HEVCDV.Decoder".equals(name)) { return "video/hevcdv"; } else if ("OMX.RTK.video.decoder".equals(name) || "OMX.realtek.video.decoder.tunneled".equals(name)) { return "video/dv_hevc"; } } else if (mimeType.equals(MimeTypes.AUDIO_ALAC) && "OMX.lge.alac.decoder".equals(name)) { return "audio/x-lg-alac"; } else if (mimeType.equals(MimeTypes.AUDIO_FLAC) && "OMX.lge.flac.decoder".equals(name)) { return "audio/x-lg-flac"; } return null; } /** * Returns whether the specified codec is usable for decoding on the current device. * * @param info The codec information. * @param name The name of the codec * @param secureDecodersExplicit Whether secure decoders were explicitly listed, if present. * @param mimeType The MIME type. * @return Whether the specified codec is usable for decoding on the current device. */ private static boolean isCodecUsableDecoder( android.media.MediaCodecInfo info, String name, boolean secureDecodersExplicit, String mimeType) { if (info.isEncoder() || (!secureDecodersExplicit && name.endsWith(".secure"))) { return false; } // Work around broken audio decoders. if (Util.SDK_INT < 21 && ("CIPAACDecoder".equals(name) || "CIPMP3Decoder".equals(name) || "CIPVorbisDecoder".equals(name) || "CIPAMRNBDecoder".equals(name) || "AACDecoder".equals(name) || "MP3Decoder".equals(name))) { return false; } // Work around https://github.com/google/ExoPlayer/issues/1528 and // https://github.com/google/ExoPlayer/issues/3171. if (Util.SDK_INT < 18 && "OMX.MTK.AUDIO.DECODER.AAC".equals(name) && ("a70".equals(Util.DEVICE) || ("Xiaomi".equals(Util.MANUFACTURER) && Util.DEVICE.startsWith("HM")))) { return false; } // Work around an issue where querying/creating a particular MP3 decoder on some devices on // platform API version 16 fails. 
if (Util.SDK_INT == 16 && "OMX.qcom.audio.decoder.mp3".equals(name) && ("dlxu".equals(Util.DEVICE) // HTC Butterfly || "protou".equals(Util.DEVICE) // HTC Desire X || "ville".equals(Util.DEVICE) // HTC One S || "villeplus".equals(Util.DEVICE) || "villec2".equals(Util.DEVICE) || Util.DEVICE.startsWith("gee") // LGE Optimus G || "C6602".equals(Util.DEVICE) // Sony Xperia Z || "C6603".equals(Util.DEVICE) || "C6606".equals(Util.DEVICE) || "C6616".equals(Util.DEVICE) || "L36h".equals(Util.DEVICE) || "SO-02E".equals(Util.DEVICE))) { return false; } // Work around an issue where large timestamps are not propagated correctly. if (Util.SDK_INT == 16 && "OMX.qcom.audio.decoder.aac".equals(name) && ("C1504".equals(Util.DEVICE) // Sony Xperia E || "C1505".equals(Util.DEVICE) || "C1604".equals(Util.DEVICE) // Sony Xperia E dual || "C1605".equals(Util.DEVICE))) { return false; } // Work around https://github.com/google/ExoPlayer/issues/3249. if (Util.SDK_INT < 24 && ("OMX.SEC.aac.dec".equals(name) || "OMX.Exynos.AAC.Decoder".equals(name)) && "samsung".equals(Util.MANUFACTURER) && (Util.DEVICE.startsWith("zeroflte") // Galaxy S6 || Util.DEVICE.startsWith("zerolte") // Galaxy S6 Edge || Util.DEVICE.startsWith("zenlte") // Galaxy S6 Edge+ || "SC-05G".equals(Util.DEVICE) // Galaxy S6 || "marinelteatt".equals(Util.DEVICE) // Galaxy S6 Active || "404SC".equals(Util.DEVICE) // Galaxy S6 Edge || "SC-04G".equals(Util.DEVICE) || "SCV31".equals(Util.DEVICE))) { return false; } // Work around https://github.com/google/ExoPlayer/issues/548. // VP8 decoder on Samsung Galaxy S3/S4/S4 Mini/Tab 3/Note 2 does not render video. if (Util.SDK_INT <= 19 && "OMX.SEC.vp8.dec".equals(name) && "samsung".equals(Util.MANUFACTURER) && (Util.DEVICE.startsWith("d2") || Util.DEVICE.startsWith("serrano") || Util.DEVICE.startsWith("jflte") || Util.DEVICE.startsWith("santos") || Util.DEVICE.startsWith("t0"))) { return false; } // VP8 decoder on Samsung Galaxy S4 cannot be queried. if (Util.SDK_INT <= 19 && Util.DEVICE.startsWith("jflte") && "OMX.qcom.video.decoder.vp8".equals(name)) { return false; } // MTK E-AC3 decoder doesn't support decoding JOC streams in 2-D. See [Internal: b/69400041]. if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType) && "OMX.MTK.AUDIO.DECODER.DSPAC3".equals(name)) { return false; } return true; } /** * Modifies a list of {@link MediaCodecInfo}s to apply workarounds where we know better than the * platform. * * @param mimeType The MIME type of input media. * @param decoderInfos The list to modify. */ private static void applyWorkarounds(String mimeType, List<MediaCodecInfo> decoderInfos) { if (MimeTypes.AUDIO_RAW.equals(mimeType)) { if (Util.SDK_INT < 26 && Util.DEVICE.equals("R9") && decoderInfos.size() == 1 && decoderInfos.get(0).name.equals("OMX.MTK.AUDIO.DECODER.RAW")) { // This device does not list a generic raw audio decoder, yet it can be instantiated by // name. See <a href="https://github.com/google/ExoPlayer/issues/5782">Issue #5782</a>. decoderInfos.add( MediaCodecInfo.newInstance( /* name= */ "OMX.google.raw.decoder", /* mimeType= */ MimeTypes.AUDIO_RAW, /* codecMimeType= */ MimeTypes.AUDIO_RAW, /* capabilities= */ null, /* hardwareAccelerated= */ false, /* softwareOnly= */ true, /* vendor= */ false, /* forceDisableAdaptive= */ false, /* forceSecure= */ false)); } // Work around inconsistent raw audio decoding behavior across different devices. 
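      // Scoring below: +1 prefers the generic AOSP decoders, -1 demotes the MTK raw decoder on API
      // levels below 26, and 0 keeps everything else in the framework order (the sort is stable).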
sortByScore( decoderInfos, decoderInfo -> { String name = decoderInfo.name; if (name.startsWith("OMX.google") || name.startsWith("c2.android")) { // Prefer generic decoders over ones provided by the device. return 1; } if (Util.SDK_INT < 26 && name.equals("OMX.MTK.AUDIO.DECODER.RAW")) { // This decoder may modify the audio, so any other compatible decoders take // precedence. See [Internal: b/62337687]. return -1; } return 0; }); } if (Util.SDK_INT < 21 && decoderInfos.size() > 1) { String firstCodecName = decoderInfos.get(0).name; if ("OMX.SEC.mp3.dec".equals(firstCodecName) || "OMX.SEC.MP3.Decoder".equals(firstCodecName) || "OMX.brcm.audio.mp3.decoder".equals(firstCodecName)) { // Prefer OMX.google codecs over OMX.SEC.mp3.dec, OMX.SEC.MP3.Decoder and // OMX.brcm.audio.mp3.decoder on older devices. See: // https://github.com/google/ExoPlayer/issues/398 and // https://github.com/google/ExoPlayer/issues/4519. sortByScore(decoderInfos, decoderInfo -> decoderInfo.name.startsWith("OMX.google") ? 1 : 0); } } if (Util.SDK_INT < 30 && decoderInfos.size() > 1) { String firstCodecName = decoderInfos.get(0).name; // Prefer anything other than OMX.qti.audio.decoder.flac on older devices. See [Internal // ref: b/147278539] and [Internal ref: b/147354613]. if ("OMX.qti.audio.decoder.flac".equals(firstCodecName)) { decoderInfos.add(decoderInfos.remove(0)); } } } private static boolean isAlias(android.media.MediaCodecInfo info) { return Util.SDK_INT >= 29 && isAliasV29(info); } @RequiresApi(29) private static boolean isAliasV29(android.media.MediaCodecInfo info) { return info.isAlias(); } /** * The result of {@link android.media.MediaCodecInfo#isHardwareAccelerated()} for API levels 29+, * or a best-effort approximation for lower levels. */ private static boolean isHardwareAccelerated(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isHardwareAcceleratedV29(codecInfo); } // codecInfo.isHardwareAccelerated() != codecInfo.isSoftwareOnly() is not necessarily true. // However, we assume this to be true as an approximation. return !isSoftwareOnly(codecInfo); } @RequiresApi(29) private static boolean isHardwareAcceleratedV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isHardwareAccelerated(); } /** * The result of {@link android.media.MediaCodecInfo#isSoftwareOnly()} for API levels 29+, or a * best-effort approximation for lower levels. */ private static boolean isSoftwareOnly(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isSoftwareOnlyV29(codecInfo); } String codecName = Util.toLowerInvariant(codecInfo.getName()); if (codecName.startsWith("arc.")) { // App Runtime for Chrome (ARC) codecs return false; } return codecName.startsWith("omx.google.") || codecName.startsWith("omx.ffmpeg.") || (codecName.startsWith("omx.sec.") && codecName.contains(".sw.")) || codecName.equals("omx.qcom.video.decoder.hevcswvdec") || codecName.startsWith("c2.android.") || codecName.startsWith("c2.google.") || (!codecName.startsWith("omx.") && !codecName.startsWith("c2.")); } @RequiresApi(29) private static boolean isSoftwareOnlyV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isSoftwareOnly(); } /** * The result of {@link android.media.MediaCodecInfo#isVendor()} for API levels 29+, or a * best-effort approximation for lower levels. 
*/ private static boolean isVendor(android.media.MediaCodecInfo codecInfo) { if (Util.SDK_INT >= 29) { return isVendorV29(codecInfo); } String codecName = Util.toLowerInvariant(codecInfo.getName()); return !codecName.startsWith("omx.google.") && !codecName.startsWith("c2.android.") && !codecName.startsWith("c2.google."); } @RequiresApi(29) private static boolean isVendorV29(android.media.MediaCodecInfo codecInfo) { return codecInfo.isVendor(); } /** * Returns whether the decoder is known to fail when adapting, despite advertising itself as an * adaptive decoder. * * @param name The decoder name. * @return True if the decoder is known to fail when adapting. */ private static boolean codecNeedsDisableAdaptationWorkaround(String name) { return Util.SDK_INT <= 22 && ("ODROID-XU3".equals(Util.MODEL) || "Nexus 10".equals(Util.MODEL)) && ("OMX.Exynos.AVC.Decoder".equals(name) || "OMX.Exynos.AVC.Decoder.secure".equals(name)); } @Nullable private static Pair<Integer, Integer> getDolbyVisionProfileAndLevel( String codec, String[] parts) { if (parts.length < 3) { // The codec has fewer parts than required by the Dolby Vision codec string format. Log.w(TAG, "Ignoring malformed Dolby Vision codec string: " + codec); return null; } // The profile_space gets ignored. Matcher matcher = PROFILE_PATTERN.matcher(parts[1]); if (!matcher.matches()) { Log.w(TAG, "Ignoring malformed Dolby Vision codec string: " + codec); return null; } @Nullable String profileString = matcher.group(1); @Nullable Integer profile = DOLBY_VISION_STRING_TO_PROFILE.get(profileString); if (profile == null) { Log.w(TAG, "Unknown Dolby Vision profile string: " + profileString); return null; } String levelString = parts[2]; @Nullable Integer level = DOLBY_VISION_STRING_TO_LEVEL.get(levelString); if (level == null) { Log.w(TAG, "Unknown Dolby Vision level string: " + levelString); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getHevcProfileAndLevel(String codec, String[] parts) { if (parts.length < 4) { // The codec has fewer parts than required by the HEVC codec string format. Log.w(TAG, "Ignoring malformed HEVC codec string: " + codec); return null; } // The profile_space gets ignored. Matcher matcher = PROFILE_PATTERN.matcher(parts[1]); if (!matcher.matches()) { Log.w(TAG, "Ignoring malformed HEVC codec string: " + codec); return null; } @Nullable String profileString = matcher.group(1); int profile; if ("1".equals(profileString)) { profile = CodecProfileLevel.HEVCProfileMain; } else if ("2".equals(profileString)) { profile = CodecProfileLevel.HEVCProfileMain10; } else { Log.w(TAG, "Unknown HEVC profile string: " + profileString); return null; } @Nullable String levelString = parts[3]; @Nullable Integer level = HEVC_CODEC_STRING_TO_PROFILE_LEVEL.get(levelString); if (level == null) { Log.w(TAG, "Unknown HEVC level string: " + levelString); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getAvcProfileAndLevel(String codec, String[] parts) { if (parts.length < 2) { // The codec has fewer parts than required by the AVC codec string format. Log.w(TAG, "Ignoring malformed AVC codec string: " + codec); return null; } int profileInteger; int levelInteger; try { if (parts[1].length() == 6) { // Format: avc1.xxccyy, where xx is profile and yy level, both hexadecimal. 
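        // Example: for "avc1.640028", xx = 0x64 = 100 (High profile) and yy = 0x28 = 40 (level 4);
        // the middle "cc" constraint byte is not parsed.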
profileInteger = Integer.parseInt(parts[1].substring(0, 2), 16); levelInteger = Integer.parseInt(parts[1].substring(4), 16); } else if (parts.length >= 3) { // Format: avc1.xx.[y]yy where xx is profile and [y]yy level, both decimal. profileInteger = Integer.parseInt(parts[1]); levelInteger = Integer.parseInt(parts[2]); } else { // We don't recognize the format. Log.w(TAG, "Ignoring malformed AVC codec string: " + codec); return null; } } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed AVC codec string: " + codec); return null; } int profile = AVC_PROFILE_NUMBER_TO_CONST.get(profileInteger, -1); if (profile == -1) { Log.w(TAG, "Unknown AVC profile: " + profileInteger); return null; } int level = AVC_LEVEL_NUMBER_TO_CONST.get(levelInteger, -1); if (level == -1) { Log.w(TAG, "Unknown AVC level: " + levelInteger); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getVp9ProfileAndLevel(String codec, String[] parts) { if (parts.length < 3) { Log.w(TAG, "Ignoring malformed VP9 codec string: " + codec); return null; } int profileInteger; int levelInteger; try { profileInteger = Integer.parseInt(parts[1]); levelInteger = Integer.parseInt(parts[2]); } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed VP9 codec string: " + codec); return null; } int profile = VP9_PROFILE_NUMBER_TO_CONST.get(profileInteger, -1); if (profile == -1) { Log.w(TAG, "Unknown VP9 profile: " + profileInteger); return null; } int level = VP9_LEVEL_NUMBER_TO_CONST.get(levelInteger, -1); if (level == -1) { Log.w(TAG, "Unknown VP9 level: " + levelInteger); return null; } return new Pair<>(profile, level); } @Nullable private static Pair<Integer, Integer> getAv1ProfileAndLevel( String codec, String[] parts, @Nullable ColorInfo colorInfo) { if (parts.length < 4) { Log.w(TAG, "Ignoring malformed AV1 codec string: " + codec); return null; } int profileInteger; int levelInteger; int bitDepthInteger; try { profileInteger = Integer.parseInt(parts[1]); levelInteger = Integer.parseInt(parts[2].substring(0, 2)); bitDepthInteger = Integer.parseInt(parts[3]); } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed AV1 codec string: " + codec); return null; } if (profileInteger != 0) { Log.w(TAG, "Unknown AV1 profile: " + profileInteger); return null; } if (bitDepthInteger != 8 && bitDepthInteger != 10) { Log.w(TAG, "Unknown AV1 bit depth: " + bitDepthInteger); return null; } int profile; if (bitDepthInteger == 8) { profile = CodecProfileLevel.AV1ProfileMain8; } else if (colorInfo != null && (colorInfo.hdrStaticInfo != null || colorInfo.colorTransfer == C.COLOR_TRANSFER_HLG || colorInfo.colorTransfer == C.COLOR_TRANSFER_ST2084)) { profile = CodecProfileLevel.AV1ProfileMain10HDR10; } else { profile = CodecProfileLevel.AV1ProfileMain10; } int level = AV1_LEVEL_NUMBER_TO_CONST.get(levelInteger, -1); if (level == -1) { Log.w(TAG, "Unknown AV1 level: " + levelInteger); return null; } return new Pair<>(profile, level); } /** * Conversion values taken from ISO 14496-10 Table A-1. * * @param avcLevel one of CodecProfileLevel.AVCLevel* constants. 
* @return maximum frame size that can be decoded by a decoder with the specified avc level * (or {@code -1} if the level is not recognized) */ private static int avcLevelToMaxFrameSize(int avcLevel) { switch (avcLevel) { case CodecProfileLevel.AVCLevel1: case CodecProfileLevel.AVCLevel1b: return 99 * 16 * 16; case CodecProfileLevel.AVCLevel12: case CodecProfileLevel.AVCLevel13: case CodecProfileLevel.AVCLevel2: return 396 * 16 * 16; case CodecProfileLevel.AVCLevel21: return 792 * 16 * 16; case CodecProfileLevel.AVCLevel22: case CodecProfileLevel.AVCLevel3: return 1620 * 16 * 16; case CodecProfileLevel.AVCLevel31: return 3600 * 16 * 16; case CodecProfileLevel.AVCLevel32: return 5120 * 16 * 16; case CodecProfileLevel.AVCLevel4: case CodecProfileLevel.AVCLevel41: return 8192 * 16 * 16; case CodecProfileLevel.AVCLevel42: return 8704 * 16 * 16; case CodecProfileLevel.AVCLevel5: return 22080 * 16 * 16; case CodecProfileLevel.AVCLevel51: case CodecProfileLevel.AVCLevel52: return 36864 * 16 * 16; default: return -1; } } @Nullable private static Pair<Integer, Integer> getAacCodecProfileAndLevel(String codec, String[] parts) { if (parts.length != 3) { Log.w(TAG, "Ignoring malformed MP4A codec string: " + codec); return null; } try { // Get the object type indication, which is a hexadecimal value (see RFC 6381/ISO 14496-1). int objectTypeIndication = Integer.parseInt(parts[1], 16); @Nullable String mimeType = MimeTypes.getMimeTypeFromMp4ObjectType(objectTypeIndication); if (MimeTypes.AUDIO_AAC.equals(mimeType)) { // For MPEG-4 audio this is followed by an audio object type indication as a decimal number. int audioObjectTypeIndication = Integer.parseInt(parts[2]); int profile = MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.get(audioObjectTypeIndication, -1); if (profile != -1) { // Level is set to zero in AAC decoder CodecProfileLevels. return new Pair<>(profile, 0); } } } catch (NumberFormatException e) { Log.w(TAG, "Ignoring malformed MP4A codec string: " + codec); } return null; } /** Stably sorts the provided {@code list} in-place, in order of decreasing score. */ private static <T> void sortByScore(List<T> list, ScoreProvider<T> scoreProvider) { Collections.sort(list, (a, b) -> scoreProvider.getScore(b) - scoreProvider.getScore(a)); } /** Interface for providers of item scores. */ private interface ScoreProvider<T> { /** Returns the score of the provided item. */ int getScore(T t); } private interface MediaCodecListCompat { /** * The number of codecs in the list. */ int getCodecCount(); /** * The info at the specified index in the list. * * @param index The index. */ android.media.MediaCodecInfo getCodecInfoAt(int index); /** * Returns whether secure decoders are explicitly listed, if present. */ boolean secureDecodersExplicit(); /** Whether the specified {@link CodecCapabilities} {@code feature} is supported. */ boolean isFeatureSupported(String feature, String mimeType, CodecCapabilities capabilities); /** Whether the specified {@link CodecCapabilities} {@code feature} is required. */ boolean isFeatureRequired(String feature, String mimeType, CodecCapabilities capabilities); } @RequiresApi(21) private static final class MediaCodecListCompatV21 implements MediaCodecListCompat { private final int codecKind; @Nullable private android.media.MediaCodecInfo[] mediaCodecInfos; public MediaCodecListCompatV21(boolean includeSecure, boolean includeTunneling) { codecKind = includeSecure || includeTunneling ? 
MediaCodecList.ALL_CODECS : MediaCodecList.REGULAR_CODECS; } @Override public int getCodecCount() { ensureMediaCodecInfosInitialized(); return mediaCodecInfos.length; } @Override public android.media.MediaCodecInfo getCodecInfoAt(int index) { ensureMediaCodecInfosInitialized(); return mediaCodecInfos[index]; } @Override public boolean secureDecodersExplicit() { return true; } @Override public boolean isFeatureSupported( String feature, String mimeType, CodecCapabilities capabilities) { return capabilities.isFeatureSupported(feature); } @Override public boolean isFeatureRequired( String feature, String mimeType, CodecCapabilities capabilities) { return capabilities.isFeatureRequired(feature); } @EnsuresNonNull({"mediaCodecInfos"}) private void ensureMediaCodecInfosInitialized() { if (mediaCodecInfos == null) { mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos(); } } } private static final class MediaCodecListCompatV16 implements MediaCodecListCompat { @Override public int getCodecCount() { return MediaCodecList.getCodecCount(); } @Override public android.media.MediaCodecInfo getCodecInfoAt(int index) { return MediaCodecList.getCodecInfoAt(index); } @Override public boolean secureDecodersExplicit() { return false; } @Override public boolean isFeatureSupported( String feature, String mimeType, CodecCapabilities capabilities) { // Secure decoders weren't explicitly listed prior to API level 21. We assume that a secure // H264 decoder exists. return CodecCapabilities.FEATURE_SecurePlayback.equals(feature) && MimeTypes.VIDEO_H264.equals(mimeType); } @Override public boolean isFeatureRequired( String feature, String mimeType, CodecCapabilities capabilities) { return false; } } private static final class CodecKey { public final String mimeType; public final boolean secure; public final boolean tunneling; public CodecKey(String mimeType, boolean secure, boolean tunneling) { this.mimeType = mimeType; this.secure = secure; this.tunneling = tunneling; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + mimeType.hashCode(); result = prime * result + (secure ? 1231 : 1237); result = prime * result + (tunneling ? 1231 : 1237); return result; } @Override public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } if (obj == null || obj.getClass() != CodecKey.class) { return false; } CodecKey other = (CodecKey) obj; return TextUtils.equals(mimeType, other.mimeType) && secure == other.secure && tunneling == other.tunneling; } } static { AVC_PROFILE_NUMBER_TO_CONST = new SparseIntArray(); AVC_PROFILE_NUMBER_TO_CONST.put(66, CodecProfileLevel.AVCProfileBaseline); AVC_PROFILE_NUMBER_TO_CONST.put(77, CodecProfileLevel.AVCProfileMain); AVC_PROFILE_NUMBER_TO_CONST.put(88, CodecProfileLevel.AVCProfileExtended); AVC_PROFILE_NUMBER_TO_CONST.put(100, CodecProfileLevel.AVCProfileHigh); AVC_PROFILE_NUMBER_TO_CONST.put(110, CodecProfileLevel.AVCProfileHigh10); AVC_PROFILE_NUMBER_TO_CONST.put(122, CodecProfileLevel.AVCProfileHigh422); AVC_PROFILE_NUMBER_TO_CONST.put(244, CodecProfileLevel.AVCProfileHigh444); AVC_LEVEL_NUMBER_TO_CONST = new SparseIntArray(); AVC_LEVEL_NUMBER_TO_CONST.put(10, CodecProfileLevel.AVCLevel1); // TODO: Find int for CodecProfileLevel.AVCLevel1b. 
AVC_LEVEL_NUMBER_TO_CONST.put(11, CodecProfileLevel.AVCLevel11); AVC_LEVEL_NUMBER_TO_CONST.put(12, CodecProfileLevel.AVCLevel12); AVC_LEVEL_NUMBER_TO_CONST.put(13, CodecProfileLevel.AVCLevel13); AVC_LEVEL_NUMBER_TO_CONST.put(20, CodecProfileLevel.AVCLevel2); AVC_LEVEL_NUMBER_TO_CONST.put(21, CodecProfileLevel.AVCLevel21); AVC_LEVEL_NUMBER_TO_CONST.put(22, CodecProfileLevel.AVCLevel22); AVC_LEVEL_NUMBER_TO_CONST.put(30, CodecProfileLevel.AVCLevel3); AVC_LEVEL_NUMBER_TO_CONST.put(31, CodecProfileLevel.AVCLevel31); AVC_LEVEL_NUMBER_TO_CONST.put(32, CodecProfileLevel.AVCLevel32); AVC_LEVEL_NUMBER_TO_CONST.put(40, CodecProfileLevel.AVCLevel4); AVC_LEVEL_NUMBER_TO_CONST.put(41, CodecProfileLevel.AVCLevel41); AVC_LEVEL_NUMBER_TO_CONST.put(42, CodecProfileLevel.AVCLevel42); AVC_LEVEL_NUMBER_TO_CONST.put(50, CodecProfileLevel.AVCLevel5); AVC_LEVEL_NUMBER_TO_CONST.put(51, CodecProfileLevel.AVCLevel51); AVC_LEVEL_NUMBER_TO_CONST.put(52, CodecProfileLevel.AVCLevel52); VP9_PROFILE_NUMBER_TO_CONST = new SparseIntArray(); VP9_PROFILE_NUMBER_TO_CONST.put(0, CodecProfileLevel.VP9Profile0); VP9_PROFILE_NUMBER_TO_CONST.put(1, CodecProfileLevel.VP9Profile1); VP9_PROFILE_NUMBER_TO_CONST.put(2, CodecProfileLevel.VP9Profile2); VP9_PROFILE_NUMBER_TO_CONST.put(3, CodecProfileLevel.VP9Profile3); VP9_LEVEL_NUMBER_TO_CONST = new SparseIntArray(); VP9_LEVEL_NUMBER_TO_CONST.put(10, CodecProfileLevel.VP9Level1); VP9_LEVEL_NUMBER_TO_CONST.put(11, CodecProfileLevel.VP9Level11); VP9_LEVEL_NUMBER_TO_CONST.put(20, CodecProfileLevel.VP9Level2); VP9_LEVEL_NUMBER_TO_CONST.put(21, CodecProfileLevel.VP9Level21); VP9_LEVEL_NUMBER_TO_CONST.put(30, CodecProfileLevel.VP9Level3); VP9_LEVEL_NUMBER_TO_CONST.put(31, CodecProfileLevel.VP9Level31); VP9_LEVEL_NUMBER_TO_CONST.put(40, CodecProfileLevel.VP9Level4); VP9_LEVEL_NUMBER_TO_CONST.put(41, CodecProfileLevel.VP9Level41); VP9_LEVEL_NUMBER_TO_CONST.put(50, CodecProfileLevel.VP9Level5); VP9_LEVEL_NUMBER_TO_CONST.put(51, CodecProfileLevel.VP9Level51); VP9_LEVEL_NUMBER_TO_CONST.put(60, CodecProfileLevel.VP9Level6); VP9_LEVEL_NUMBER_TO_CONST.put(61, CodecProfileLevel.VP9Level61); VP9_LEVEL_NUMBER_TO_CONST.put(62, CodecProfileLevel.VP9Level62); HEVC_CODEC_STRING_TO_PROFILE_LEVEL = new HashMap<>(); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L30", CodecProfileLevel.HEVCMainTierLevel1); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L60", CodecProfileLevel.HEVCMainTierLevel2); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L63", CodecProfileLevel.HEVCMainTierLevel21); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L90", CodecProfileLevel.HEVCMainTierLevel3); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L93", CodecProfileLevel.HEVCMainTierLevel31); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L120", CodecProfileLevel.HEVCMainTierLevel4); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L123", CodecProfileLevel.HEVCMainTierLevel41); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L150", CodecProfileLevel.HEVCMainTierLevel5); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L153", CodecProfileLevel.HEVCMainTierLevel51); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L156", CodecProfileLevel.HEVCMainTierLevel52); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L180", CodecProfileLevel.HEVCMainTierLevel6); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L183", CodecProfileLevel.HEVCMainTierLevel61); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("L186", CodecProfileLevel.HEVCMainTierLevel62); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H30", CodecProfileLevel.HEVCHighTierLevel1); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H60", CodecProfileLevel.HEVCHighTierLevel2); 
HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H63", CodecProfileLevel.HEVCHighTierLevel21); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H90", CodecProfileLevel.HEVCHighTierLevel3); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H93", CodecProfileLevel.HEVCHighTierLevel31); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H120", CodecProfileLevel.HEVCHighTierLevel4); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H123", CodecProfileLevel.HEVCHighTierLevel41); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H150", CodecProfileLevel.HEVCHighTierLevel5); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H153", CodecProfileLevel.HEVCHighTierLevel51); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H156", CodecProfileLevel.HEVCHighTierLevel52); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H180", CodecProfileLevel.HEVCHighTierLevel6); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H183", CodecProfileLevel.HEVCHighTierLevel61); HEVC_CODEC_STRING_TO_PROFILE_LEVEL.put("H186", CodecProfileLevel.HEVCHighTierLevel62); DOLBY_VISION_STRING_TO_PROFILE = new HashMap<>(); DOLBY_VISION_STRING_TO_PROFILE.put("00", CodecProfileLevel.DolbyVisionProfileDvavPer); DOLBY_VISION_STRING_TO_PROFILE.put("01", CodecProfileLevel.DolbyVisionProfileDvavPen); DOLBY_VISION_STRING_TO_PROFILE.put("02", CodecProfileLevel.DolbyVisionProfileDvheDer); DOLBY_VISION_STRING_TO_PROFILE.put("03", CodecProfileLevel.DolbyVisionProfileDvheDen); DOLBY_VISION_STRING_TO_PROFILE.put("04", CodecProfileLevel.DolbyVisionProfileDvheDtr); DOLBY_VISION_STRING_TO_PROFILE.put("05", CodecProfileLevel.DolbyVisionProfileDvheStn); DOLBY_VISION_STRING_TO_PROFILE.put("06", CodecProfileLevel.DolbyVisionProfileDvheDth); DOLBY_VISION_STRING_TO_PROFILE.put("07", CodecProfileLevel.DolbyVisionProfileDvheDtb); DOLBY_VISION_STRING_TO_PROFILE.put("08", CodecProfileLevel.DolbyVisionProfileDvheSt); DOLBY_VISION_STRING_TO_PROFILE.put("09", CodecProfileLevel.DolbyVisionProfileDvavSe); DOLBY_VISION_STRING_TO_LEVEL = new HashMap<>(); DOLBY_VISION_STRING_TO_LEVEL.put("01", CodecProfileLevel.DolbyVisionLevelHd24); DOLBY_VISION_STRING_TO_LEVEL.put("02", CodecProfileLevel.DolbyVisionLevelHd30); DOLBY_VISION_STRING_TO_LEVEL.put("03", CodecProfileLevel.DolbyVisionLevelFhd24); DOLBY_VISION_STRING_TO_LEVEL.put("04", CodecProfileLevel.DolbyVisionLevelFhd30); DOLBY_VISION_STRING_TO_LEVEL.put("05", CodecProfileLevel.DolbyVisionLevelFhd60); DOLBY_VISION_STRING_TO_LEVEL.put("06", CodecProfileLevel.DolbyVisionLevelUhd24); DOLBY_VISION_STRING_TO_LEVEL.put("07", CodecProfileLevel.DolbyVisionLevelUhd30); DOLBY_VISION_STRING_TO_LEVEL.put("08", CodecProfileLevel.DolbyVisionLevelUhd48); DOLBY_VISION_STRING_TO_LEVEL.put("09", CodecProfileLevel.DolbyVisionLevelUhd60); // See https://aomediacodec.github.io/av1-spec/av1-spec.pdf Annex A: Profiles and levels for // more information on mapping AV1 codec strings to levels. 
AV1_LEVEL_NUMBER_TO_CONST = new SparseIntArray(); AV1_LEVEL_NUMBER_TO_CONST.put(0, CodecProfileLevel.AV1Level2); AV1_LEVEL_NUMBER_TO_CONST.put(1, CodecProfileLevel.AV1Level21); AV1_LEVEL_NUMBER_TO_CONST.put(2, CodecProfileLevel.AV1Level22); AV1_LEVEL_NUMBER_TO_CONST.put(3, CodecProfileLevel.AV1Level23); AV1_LEVEL_NUMBER_TO_CONST.put(4, CodecProfileLevel.AV1Level3); AV1_LEVEL_NUMBER_TO_CONST.put(5, CodecProfileLevel.AV1Level31); AV1_LEVEL_NUMBER_TO_CONST.put(6, CodecProfileLevel.AV1Level32); AV1_LEVEL_NUMBER_TO_CONST.put(7, CodecProfileLevel.AV1Level33); AV1_LEVEL_NUMBER_TO_CONST.put(8, CodecProfileLevel.AV1Level4); AV1_LEVEL_NUMBER_TO_CONST.put(9, CodecProfileLevel.AV1Level41); AV1_LEVEL_NUMBER_TO_CONST.put(10, CodecProfileLevel.AV1Level42); AV1_LEVEL_NUMBER_TO_CONST.put(11, CodecProfileLevel.AV1Level43); AV1_LEVEL_NUMBER_TO_CONST.put(12, CodecProfileLevel.AV1Level5); AV1_LEVEL_NUMBER_TO_CONST.put(13, CodecProfileLevel.AV1Level51); AV1_LEVEL_NUMBER_TO_CONST.put(14, CodecProfileLevel.AV1Level52); AV1_LEVEL_NUMBER_TO_CONST.put(15, CodecProfileLevel.AV1Level53); AV1_LEVEL_NUMBER_TO_CONST.put(16, CodecProfileLevel.AV1Level6); AV1_LEVEL_NUMBER_TO_CONST.put(17, CodecProfileLevel.AV1Level61); AV1_LEVEL_NUMBER_TO_CONST.put(18, CodecProfileLevel.AV1Level62); AV1_LEVEL_NUMBER_TO_CONST.put(19, CodecProfileLevel.AV1Level63); AV1_LEVEL_NUMBER_TO_CONST.put(20, CodecProfileLevel.AV1Level7); AV1_LEVEL_NUMBER_TO_CONST.put(21, CodecProfileLevel.AV1Level71); AV1_LEVEL_NUMBER_TO_CONST.put(22, CodecProfileLevel.AV1Level72); AV1_LEVEL_NUMBER_TO_CONST.put(23, CodecProfileLevel.AV1Level73); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE = new SparseIntArray(); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(1, CodecProfileLevel.AACObjectMain); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(2, CodecProfileLevel.AACObjectLC); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(3, CodecProfileLevel.AACObjectSSR); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(4, CodecProfileLevel.AACObjectLTP); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(5, CodecProfileLevel.AACObjectHE); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(6, CodecProfileLevel.AACObjectScalable); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(17, CodecProfileLevel.AACObjectERLC); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(20, CodecProfileLevel.AACObjectERScalable); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(23, CodecProfileLevel.AACObjectLD); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(29, CodecProfileLevel.AACObjectHE_PS); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(39, CodecProfileLevel.AACObjectELD); MP4A_AUDIO_OBJECT_TYPE_TO_PROFILE.put(42, CodecProfileLevel.AACObjectXHE); } }
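As a worked example of the AVC parsing path in getAvcProfileAndLevel above, here is a standalone re-computation for illustration only; the class name, codec string, and the split on "." are assumptions and are not part of the file:

public class AvcCodecStringExample {
    public static void main(String[] args) {
        // Hypothetical input: an AVC codec string in the avc1.xxccyy hexadecimal format described above.
        String codec = "avc1.640028";
        String[] parts = codec.split("\\.");
        int profileInteger = Integer.parseInt(parts[1].substring(0, 2), 16); // 0x64 = 100 -> AVCProfileHigh
        int levelInteger = Integer.parseInt(parts[1].substring(4), 16);      // 0x28 = 40  -> AVCLevel4
        System.out.println("profile number = " + profileInteger + ", level number = " + levelInteger);
    }
}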
Replacing static arrays with switch statements in MediaCodecUtil.

PiperOrigin-RevId: 322537851
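The message above describes replacing the static lookup tables with switch statements. A minimal sketch of what such a replacement could look like for the AVC profile mapping shown earlier follows; the method name is hypothetical and the actual change may differ:

// Hypothetical switch-based equivalent of the AVC_PROFILE_NUMBER_TO_CONST table (illustration only).
private static int avcProfileNumberToConst(int profileNumber) {
    switch (profileNumber) {
        case 66: return CodecProfileLevel.AVCProfileBaseline;
        case 77: return CodecProfileLevel.AVCProfileMain;
        case 88: return CodecProfileLevel.AVCProfileExtended;
        case 100: return CodecProfileLevel.AVCProfileHigh;
        case 110: return CodecProfileLevel.AVCProfileHigh10;
        case 122: return CodecProfileLevel.AVCProfileHigh422;
        case 244: return CodecProfileLevel.AVCProfileHigh444;
        default: return -1;
    }
}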
library/core/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecUtil.java
Replacing static arrays with switch statements in MediaCodecUtil.
Java
apache-2.0
3957303b9d5cf6d0f26abe63896bb39f7440f0b6
0
kageiit/buck,davido/buck,daedric/buck,shybovycha/buck,marcinkwiatkowski/buck,LegNeato/buck,SeleniumHQ/buck,dsyang/buck,clonetwin26/buck,romanoid/buck,rmaz/buck,ilya-klyuchnikov/buck,k21/buck,robbertvanginkel/buck,Addepar/buck,JoelMarcey/buck,nguyentruongtho/buck,darkforestzero/buck,zhan-xiong/buck,grumpyjames/buck,brettwooldridge/buck,shs96c/buck,vschs007/buck,zpao/buck,facebook/buck
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildRules; import com.facebook.buck.rules.NoopBuildRule; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.util.RichStream; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedSet; /** * Represents a precompilable header file, along with dependencies. * * Rules which depend on this will inherit this rule's of dependencies. For example if a given * rule R uses a precompiled header rule P, then all of P's {@code deps} will get merged into * R's {@code deps} list. */ public class CxxPrecompiledHeaderTemplate extends NoopBuildRule implements NativeLinkable { public final BuildRuleParams params; public final BuildRuleResolver ruleResolver; public final SourcePath sourcePath; /** * @param buildRuleParams the params for this PCH rule, <b>including</b> {@code deps} */ CxxPrecompiledHeaderTemplate( BuildRuleParams buildRuleParams, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, SourcePath sourcePath) { super(buildRuleParams, pathResolver); this.params = buildRuleParams; this.ruleResolver = ruleResolver; this.sourcePath = sourcePath; } private ImmutableSortedSet<BuildRule> getExportedDeps() { return BuildRules.getExportedRules(getDeps()); } /** * Returns our {@link #getDeps()}, * limited to the subset of those which are {@link NativeLinkable}. */ @Override public Iterable<? extends NativeLinkable> getNativeLinkableDeps() { return RichStream.from(getDeps()).filter(NativeLinkable.class).toImmutableList(); } /** * Returns our {@link #getExportedDeps()}, * limited to the subset of those which are {@link NativeLinkable}. */ @Override public Iterable<? extends NativeLinkable> getNativeLinkableExportedDeps() { return RichStream.from(getExportedDeps()).filter(NativeLinkable.class).toImmutableList(); } /** * Pick a linkage, any linkage. Just pick your favorite. This will be overridden * by config anyway. */ @Override public Linkage getPreferredLinkage(CxxPlatform cxxPlatform) { return Linkage.SHARED; } /** * Doesn't really apply to us. No shared libraries to add here. */ @Override public ImmutableMap<String, SourcePath> getSharedLibraries(CxxPlatform cxxPlatform) { return ImmutableMap.of(); } /** * This class doesn't add any native linkable code of its own, it just has deps * which need to be passed along and up to the top-level (e.g. a `cxx_binary`) rule. * Take all our linkable deps, then, and pass it along as our linker input. 
*/ @Override public NativeLinkableInput getNativeLinkableInput( CxxPlatform cxxPlatform, Linker.LinkableDepType type) throws NoSuchBuildTargetException { return NativeLinkables.getTransitiveNativeLinkableInput( cxxPlatform, getDeps(), Linker.LinkableDepType.SHARED, NativeLinkable.class::isInstance); } }
src/com/facebook/buck/cxx/CxxPrecompiledHeaderTemplate.java
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildRules; import com.facebook.buck.rules.NoopBuildRule; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.util.RichStream; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedSet; /** * Represents a precompilable header file, along with dependencies. * * Rules which depend on this will inherit this rule's of dependencies. For example if a given * rule R uses a precompiled header rule P, then all of P's {@code deps} will get merged into * R's {@code deps} list. */ public class CxxPrecompiledHeaderTemplate extends NoopBuildRule implements NativeLinkable { public final BuildRuleParams params; public final BuildRuleResolver ruleResolver; public final SourcePathResolver pathResolver; public final SourcePath sourcePath; /** * @param buildRuleParams the params for this PCH rule, <b>including</b> {@code deps} */ CxxPrecompiledHeaderTemplate( BuildRuleParams buildRuleParams, BuildRuleResolver ruleResolver, SourcePathResolver pathResolver, SourcePath sourcePath) { super(buildRuleParams, pathResolver); this.params = buildRuleParams; this.ruleResolver = ruleResolver; this.pathResolver = pathResolver; this.sourcePath = sourcePath; } private ImmutableSortedSet<BuildRule> getExportedDeps() { return BuildRules.getExportedRules(getDeps()); } /** * Returns our {@link #getDeps()}, * limited to the subset of those which are {@link NativeLinkable}. */ @Override public Iterable<? extends NativeLinkable> getNativeLinkableDeps() { return RichStream.from(getDeps()).filter(NativeLinkable.class).toImmutableList(); } /** * Returns our {@link #getExportedDeps()}, * limited to the subset of those which are {@link NativeLinkable}. */ @Override public Iterable<? extends NativeLinkable> getNativeLinkableExportedDeps() { return RichStream.from(getExportedDeps()).filter(NativeLinkable.class).toImmutableList(); } /** * Pick a linkage, any linkage. Just pick your favorite. This will be overridden * by config anyway. */ @Override public Linkage getPreferredLinkage(CxxPlatform cxxPlatform) { return Linkage.SHARED; } /** * Doesn't really apply to us. No shared libraries to add here. */ @Override public ImmutableMap<String, SourcePath> getSharedLibraries(CxxPlatform cxxPlatform) { return ImmutableMap.of(); } /** * This class doesn't add any native linkable code of its own, it just has deps * which need to be passed along and up to the top-level (e.g. a `cxx_binary`) rule. * Take all our linkable deps, then, and pass it along as our linker input. 
*/ @Override public NativeLinkableInput getNativeLinkableInput( CxxPlatform cxxPlatform, Linker.LinkableDepType type) throws NoSuchBuildTargetException { return NativeLinkables.getTransitiveNativeLinkableInput( cxxPlatform, getDeps(), Linker.LinkableDepType.SHARED, NativeLinkable.class::isInstance); } }
Remove unused field

Test Plan: CI

Reviewed By: Coneko

fbshipit-source-id: 9c094f1
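The delta between the two versions of CxxPrecompiledHeaderTemplate shown in this record amounts to removing the unused field and its constructor assignment (reconstructed excerpt, not a verbatim patch):

-  public final SourcePathResolver pathResolver;   (field declaration)
-  this.pathResolver = pathResolver;               (constructor assignment)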
src/com/facebook/buck/cxx/CxxPrecompiledHeaderTemplate.java
Remove unused field
Java
apache-2.0
da54c44a919658e0dd36822da53fb188c8484051
0
apache/velocity-engine
package org.apache.velocity.runtime.directive; /* * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2001 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Velocity", and "Apache Software * Foundation" must not be used to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache" * nor may "Apache" appear in their names without prior written * permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ import java.io.Writer; import java.io.IOException; import java.lang.reflect.Method; import java.util.Collection; import java.util.Enumeration; import java.util.Iterator; import java.util.Map; import java.util.Vector; import org.apache.velocity.runtime.RuntimeServices; import org.apache.velocity.runtime.RuntimeConstants; import org.apache.velocity.context.InternalContextAdapter; import org.apache.velocity.util.ArrayIterator; import org.apache.velocity.util.EnumerationIterator; import org.apache.velocity.runtime.parser.Token; import org.apache.velocity.runtime.parser.ParserTreeConstants; import org.apache.velocity.runtime.parser.node.Node; import org.apache.velocity.exception.MethodInvocationException; import org.apache.velocity.exception.ParseErrorException; import org.apache.velocity.exception.ResourceNotFoundException; import org.apache.velocity.util.introspection.Introspector; import org.apache.velocity.util.introspection.IntrospectionCacheData; /** * Foreach directive used for moving through arrays, * or objects that provide an Iterator. * * @author <a href="mailto:[email protected]">Jason van Zyl</a> * @author <a href="mailto:[email protected]">Geir Magnusson Jr.</a> * @version $Id: Foreach.java,v 1.40 2001/10/24 03:06:17 geirm Exp $ */ public class Foreach extends Directive { /** * Return name of this directive. */ public String getName() { return "foreach"; } /** * Return type of this directive. */ public int getType() { return BLOCK; } private final static int UNKNOWN = -1; /** * Flag to indicate that the list object being used * in an array. */ private final static int INFO_ARRAY = 1; /** * Flag to indicate that the list object being used * provides an Iterator. */ private final static int INFO_ITERATOR = 2; /** * Flag to indicate that the list object being used * is a Map. */ private final static int INFO_MAP = 3; /** * Flag to indicate that the list object being used * is a Collection. */ private final static int INFO_COLLECTION = 4; /** * Flag to indicate that the list object being used * is an Enumeration */ private final static int INFO_ENUMERATION = 5; /** * The name of the variable to use when placing * the counter value into the context. Right * now the default is $velocityCount. */ private String counterName; /** * What value to start the loop counter at. */ private int counterInitialValue; /** * The reference name used to access each * of the elements in the list object. It * is the $item in the following: * * #foreach ($item in $list) * * This can be used class wide because * it is immutable. */ private String elementKey; /** * simple init - init the tree and get the elementKey from * the AST */ public void init( RuntimeServices rs, InternalContextAdapter context, Node node) throws Exception { super.init( rs, context, node ); counterName = rsvc.getString(RuntimeConstants.COUNTER_NAME); counterInitialValue = rsvc.getInt(RuntimeConstants.COUNTER_INITIAL_VALUE); /* * this is really the only thing we can do here as everything * else is context sensitive */ elementKey = node.jjtGetChild(0).getFirstToken().image.substring(1); } /** * returns an Iterator to the collection in the #foreach() * * @param context current context * @param node AST node * @return Iterator to do the dataset */ private Iterator getIterator( InternalContextAdapter context, Node node ) throws MethodInvocationException { /* * get our list object, and punt if it's null. 
*/ Object listObject = node.jjtGetChild(2).value(context); if (listObject == null) return null; /* * See if we already know what type this is. * Use the introspection cache */ int type = UNKNOWN; IntrospectionCacheData icd = context.icacheGet( this ); Class c = listObject.getClass(); /* * if we have an entry in the cache, and the Class we have * cached is the same as the Class of the data object * then we are ok */ if ( icd != null && icd.contextData == c ) { /* dig the type out of the cata object */ type = ((Integer) icd.thingy ).intValue(); } /* * If we still don't know what this is, * figure out what type of object the list * element is, and get the iterator for it */ if ( type == UNKNOWN ) { if ( listObject.getClass().isArray() ) type = INFO_ARRAY; else if ( listObject instanceof Collection) type = INFO_COLLECTION; else if ( listObject instanceof Map ) type = INFO_MAP; else if ( listObject instanceof Iterator ) type = INFO_ITERATOR; else if ( listObject instanceof Enumeration ) type = INFO_ENUMERATION; /* * if we did figure it out, cache it */ if ( type != UNKNOWN ) { icd = new IntrospectionCacheData(); icd.thingy = new Integer( type ); icd.contextData = c; context.icachePut( this, icd ); } } /* * now based on the type from either cache or examination... */ switch( type ) { case INFO_COLLECTION : return ( (Collection) listObject).iterator(); case INFO_ITERATOR : rsvc.warn ("Warning! The reference " + node.jjtGetChild(2).getFirstToken().image + " is an Iterator in the #foreach() loop at [" + getLine() + "," + getColumn() + "]" + " in template " + context.getCurrentTemplateName() + ". Because it's not resetable," + " if used in more than once, this may lead to" + " unexpected results."); return ( (Iterator) listObject); case INFO_ENUMERATION : rsvc.warn ("Warning! The reference " + node.jjtGetChild(2).getFirstToken().image + " is an Enumeration in the #foreach() loop at [" + getLine() + "," + getColumn() + "]" + " in template " + context.getCurrentTemplateName() + ". 
Because it's not resetable," + " if used in more than once, this may lead to" + " unexpected results."); return new EnumerationIterator( (Enumeration) listObject ); case INFO_ARRAY: return new ArrayIterator( listObject ); case INFO_MAP: return ( (Map) listObject).values().iterator(); default: /* we have no clue what this is */ rsvc.warn ("Could not determine type of iterator in " + "#foreach loop for " + node.jjtGetChild(2).getFirstToken().image + " at [" + getLine() + "," + getColumn() + "]" + " in template " + context.getCurrentTemplateName() ); return null; } } /** * renders the #foreach() block */ public boolean render( InternalContextAdapter context, Writer writer, Node node ) throws IOException, MethodInvocationException, ResourceNotFoundException, ParseErrorException { /* * do our introspection to see what our collection is */ Iterator i = getIterator( context, node ); if ( i == null ) return false; int counter = counterInitialValue; /* * save the element key if there is one, * and the loop counter */ Object o = context.get( elementKey ); Object ctr = context.get( counterName); while (i.hasNext()) { context.put( counterName , new Integer(counter)); context.put(elementKey,i.next()); node.jjtGetChild(3).render(context, writer); counter++; } /* * restores the loop counter (if we were nested) * if we have one, else just removes */ if( ctr != null) { context.put( counterName, ctr ); } else { context.remove( counterName ); } /* * restores element key if exists * otherwise just removes */ if (o != null) { context.put( elementKey, o ); } else { context.remove(elementKey); } return true; } }
src/java/org/apache/velocity/runtime/directive/Foreach.java
package org.apache.velocity.runtime.directive; /* * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2001 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Velocity", and "Apache Software * Foundation" must not be used to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache" * nor may "Apache" appear in their names without prior written * permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ import java.io.Writer; import java.io.IOException; import java.lang.reflect.Method; import java.util.Collection; import java.util.Enumeration; import java.util.Iterator; import java.util.Map; import java.util.Vector; import org.apache.velocity.runtime.RuntimeServices; import org.apache.velocity.runtime.RuntimeConstants; import org.apache.velocity.context.InternalContextAdapter; import org.apache.velocity.util.ArrayIterator; import org.apache.velocity.util.EnumerationIterator; import org.apache.velocity.runtime.parser.Token; import org.apache.velocity.runtime.parser.ParserTreeConstants; import org.apache.velocity.runtime.parser.node.Node; import org.apache.velocity.exception.MethodInvocationException; import org.apache.velocity.exception.ParseErrorException; import org.apache.velocity.exception.ResourceNotFoundException; import org.apache.velocity.util.introspection.Introspector; import org.apache.velocity.util.introspection.IntrospectionCacheData; /** * Foreach directive used for moving through arrays, * or objects that provide an Iterator. * * @author <a href="mailto:[email protected]">Jason van Zyl</a> * @author <a href="mailto:[email protected]">Geir Magnusson Jr.</a> * @version $Id: Foreach.java,v 1.39 2001/10/22 03:53:24 jon Exp $ */ public class Foreach extends Directive { /** * Return name of this directive. */ public String getName() { return "foreach"; } /** * Return type of this directive. */ public int getType() { return BLOCK; } private final static int UNKNOWN = -1; /** * Flag to indicate that the list object being used * in an array. */ private final static int INFO_ARRAY = 1; /** * Flag to indicate that the list object being used * provides an Iterator. */ private final static int INFO_ITERATOR = 2; /** * Flag to indicate that the list object being used * is a Map. */ private final static int INFO_MAP = 3; /** * Flag to indicate that the list object being used * is a Collection. */ private final static int INFO_COLLECTION = 4; /** * Flag to indicate that the list object being used * is an Enumeration */ private final static int INFO_ENUMERATION = 5; /** * The name of the variable to use when placing * the counter value into the context. Right * now the default is $velocityCount. */ private String counterName; /** * What value to start the loop counter at. */ private int counterInitialValue; /** * The reference name used to access each * of the elements in the list object. It * is the $item in the following: * * #foreach ($item in $list) * * This can be used class wide because * it is immutable. */ private String elementKey; /** * simple init - init the tree and get the elementKey from * the AST */ public void init( RuntimeServices rs, InternalContextAdapter context, Node node) throws Exception { super.init( rs, context, node ); counterName = rsvc.getString(RuntimeConstants.COUNTER_NAME); counterInitialValue = rsvc.getInt(RuntimeConstants.COUNTER_INITIAL_VALUE); /* * this is really the only thing we can do here as everything * else is context sensitive */ elementKey = node.jjtGetChild(0).getFirstToken().image.substring(1); } /** * returns an Iterator to the collection in the #foreach() * * @param context current context * @param node AST node * @return Iterator to do the dataset */ private Iterator getIterator( InternalContextAdapter context, Node node ) throws MethodInvocationException { /* * get our list object, and punt if it's null. 
*/ Object listObject = node.jjtGetChild(2).value(context); if (listObject == null) return null; /* * See if we already know what type this is. * Use the introspection cache */ int type = UNKNOWN; IntrospectionCacheData icd = context.icacheGet( this ); Class c = listObject.getClass(); /* * if we have an entry in the cache, and the Class we have * cached is the same as the Class of the data object * then we are ok */ if ( icd != null && icd.contextData == c ) { /* dig the type out of the cata object */ type = ((Integer) icd.thingy ).intValue(); } /* * If we still don't know what this is, * figure out what type of object the list * element is, and get the iterator for it */ if ( type == UNKNOWN ) { if (listObject instanceof Object[]) type = INFO_ARRAY; else if ( listObject instanceof Collection) type = INFO_COLLECTION; else if ( listObject instanceof Map ) type = INFO_MAP; else if ( listObject instanceof Iterator ) type = INFO_ITERATOR; else if ( listObject instanceof Enumeration ) type = INFO_ENUMERATION; /* * if we did figure it out, cache it */ if ( type != UNKNOWN ) { icd = new IntrospectionCacheData(); icd.thingy = new Integer( type ); icd.contextData = c; context.icachePut( this, icd ); } } /* * now based on the type from either cache or examination... */ switch( type ) { case INFO_COLLECTION : return ( (Collection) listObject).iterator(); case INFO_ITERATOR : rsvc.warn ("Warning! The reference " + node.jjtGetChild(2).getFirstToken().image + " is an Iterator in the #foreach() loop at [" + getLine() + "," + getColumn() + "]" + " in template " + context.getCurrentTemplateName() + ". Because it's not resetable," + " if used in more than once, this may lead to" + " unexpected results."); return ( (Iterator) listObject); case INFO_ENUMERATION : rsvc.warn ("Warning! The reference " + node.jjtGetChild(2).getFirstToken().image + " is an Enumeration in the #foreach() loop at [" + getLine() + "," + getColumn() + "]" + " in template " + context.getCurrentTemplateName() + ". 
Because it's not resetable," + " if used in more than once, this may lead to" + " unexpected results."); return new EnumerationIterator( (Enumeration) listObject ); case INFO_ARRAY: return new ArrayIterator( (Object [] ) listObject ); case INFO_MAP: return ( (Map) listObject).values().iterator(); default: /* we have no clue what this is */ rsvc.warn ("Could not determine type of iterator in " + "#foreach loop for " + node.jjtGetChild(2).getFirstToken().image + " at [" + getLine() + "," + getColumn() + "]" + " in template " + context.getCurrentTemplateName() ); return null; } } /** * renders the #foreach() block */ public boolean render( InternalContextAdapter context, Writer writer, Node node ) throws IOException, MethodInvocationException, ResourceNotFoundException, ParseErrorException { /* * do our introspection to see what our collection is */ Iterator i = getIterator( context, node ); if ( i == null ) return false; int counter = counterInitialValue; /* * save the element key if there is one, * and the loop counter */ Object o = context.get( elementKey ); Object ctr = context.get( counterName); while (i.hasNext()) { context.put( counterName , new Integer(counter)); context.put(elementKey,i.next()); node.jjtGetChild(3).render(context, writer); counter++; } /* * restores the loop counter (if we were nested) * if we have one, else just removes */ if( ctr != null) { context.put( counterName, ctr ); } else { context.remove( counterName ); } /* * restores element key if exists * otherwise just removes */ if (o != null) { context.put( elementKey, o ); } else { context.remove(elementKey); } return true; } }
Supports alteration to ArrayIterator to support array of primitives, addressing bug# 4370.

PR:
Obtained from:
Submitted by:
Reviewed by:

git-svn-id: 7267684f36935cb3df12efc1f4c0216d758271d4@75368 13f79535-47bb-0310-9956-ffa450edef68
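The fix above swaps the old listObject instanceof Object[] check for listObject.getClass().isArray(). A minimal standalone illustration of why that matters for primitive arrays follows (the class name is hypothetical; this is not part of the original change):

public class PrimitiveArrayCheckExample {
    public static void main(String[] args) {
        Object listObject = new int[] {1, 2, 3};
        // An int[] is not an Object[], so the old check misses primitive arrays.
        System.out.println(listObject instanceof Object[]);   // false
        // Class.isArray() is true for both primitive and object arrays.
        System.out.println(listObject.getClass().isArray());  // true
    }
}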
src/java/org/apache/velocity/runtime/directive/Foreach.java
Supports alteration to ArrayIterator to support array of primitives, addressing bug# 4370. PR: Obtained from: Submitted by: Reviewed by:
Java
apache-2.0
2d5e985f427e20709003a577aead9b326cf30831
0
mgormley/pacaya
package edu.jhu.autodiff.erma; import edu.jhu.autodiff.Module; import edu.jhu.autodiff.Tensor; import edu.jhu.autodiff.TensorIdentity; import edu.jhu.autodiff.TopoOrder; import edu.jhu.autodiff.erma.ErmaObjective.DlFactory; import edu.jhu.autodiff.tensor.ElemLinear; import edu.jhu.gm.model.Var.VarType; import edu.jhu.gm.model.VarConfig; import edu.jhu.gm.model.VarSet; import edu.jhu.util.collections.Lists; import edu.jhu.util.semiring.Algebras; import edu.jhu.util.semiring.LogSignAlgebra; /** * Softmax MBR decoder for dependency parsing evaluated with expected recall. * * @author mgormley */ public class DepParseDecodeLoss extends TopoOrder<Tensor> implements Module<Tensor> { /** * This factory defines the decoder / loss module as non-stationary: the softmax parameter on * the MBR decoder is annealed linearly from a starting temperature to a small epsilon. * * Optionally, this loss function can be annealed from MSE to softmax MBR with expected recall. */ public static class DepParseDecodeLossFactory implements DlFactory { public double startTemp = 10; public double endTemp = .1; public boolean annealMse = true; @Override public Module<Tensor> getDl(VarConfig goldConfig, ExpFamFactorsModule effm, Module<Beliefs> inf, int curIter, int maxIter) { double temperature = getTemperature(curIter, maxIter); TensorIdentity temp = new TensorIdentity(Tensor.getScalarTensor(Algebras.REAL_ALGEBRA, temperature)); if (annealMse) { double prop = (double) curIter / maxIter; Module<Tensor> mse = new MeanSquaredError(inf, goldConfig); Module<Tensor> dep = new DepParseDecodeLoss(inf, goldConfig, temp); Module<Tensor> lin = new ElemLinear(mse, dep, (1.0-prop), prop); return new TopoOrder<Tensor>(Lists.getList(inf, temp), lin); } else { return new DepParseDecodeLoss(inf, goldConfig, temp); } } public double getTemperature(int curIter, int maxIter) { double prop = (double) curIter / maxIter; double temp = (1.0 - prop) * startTemp + prop * endTemp; assert !Double.isNaN(temp); return temp; } } public DepParseDecodeLoss(Module<Beliefs> inf, VarConfig vc, Module<Tensor> temperature) { super(); shallowCopy(build(inf, vc, temperature)); } private static TopoOrder<Tensor> build(Module<Beliefs> inf, VarConfig goldConfig, Module<Tensor> temperature) { // Decoding. DepTensorFromBeliefs b2d = new DepTensorFromBeliefs(inf); SoftmaxMbrDepParse mbr = new SoftmaxMbrDepParse(b2d, temperature, new LogSignAlgebra()); DepTensorToBeliefs d2b = new DepTensorToBeliefs(mbr, inf); // Loss. VarSet predVars = VarSet.getVarsOfType(goldConfig.getVars(), VarType.PREDICTED); VarConfig predConfig = goldConfig.getSubset(predVars); ExpectedRecall er = new ExpectedRecall(d2b, predConfig); return new TopoOrder<Tensor>(Lists.getList(inf, temperature), er); } }
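The factory javadoc above describes linearly annealing the softmax temperature from a starting value to a small epsilon. Below is a standalone re-computation of that schedule using the defaults shown in the class, startTemp = 10 and endTemp = 0.1, over a hypothetical 10-iteration run (illustration only, not project code):

public class AnnealingScheduleExample {
    public static void main(String[] args) {
        double startTemp = 10;
        double endTemp = 0.1;
        int maxIter = 10;
        for (int curIter = 0; curIter <= maxIter; curIter += 5) {
            double prop = (double) curIter / maxIter;
            double temp = (1.0 - prop) * startTemp + prop * endTemp;
            // Prints 10.00 at iteration 0, 5.05 halfway, and 0.10 at the final iteration.
            System.out.printf("iter %d/%d -> temperature %.2f%n", curIter, maxIter, temp);
        }
    }
}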
src/main/java/edu/jhu/autodiff/erma/DepParseDecodeLoss.java
package edu.jhu.autodiff.erma; import edu.jhu.autodiff.Module; import edu.jhu.autodiff.Tensor; import edu.jhu.autodiff.TensorIdentity; import edu.jhu.autodiff.TopoOrder; import edu.jhu.autodiff.erma.ErmaObjective.DlFactory; import edu.jhu.autodiff.tensor.ElemLinear; import edu.jhu.gm.model.Var.VarType; import edu.jhu.gm.model.VarConfig; import edu.jhu.gm.model.VarSet; import edu.jhu.util.collections.Lists; import edu.jhu.util.semiring.Algebras; import edu.jhu.util.semiring.LogSignAlgebra; /** * Softmax MBR decoder for dependency parsing evaluated with expected recall. * * @author mgormley */ public class DepParseDecodeLoss extends TopoOrder<Tensor> implements Module<Tensor> { /** * This factory defines the decoder / loss module as non-stationary: the softmax parameter on * the MBR decoder is annealed linearly from a starting temperature to a small epsilon. * * Optionally, this loss function can be annealed from MSE to softmax MBR with expected recall. */ public static class DepParseDecodeLossFactory implements DlFactory { public double startTemp = 10; public double endTemp = .1; public boolean annealMse = true; @Override public Module<Tensor> getDl(VarConfig goldConfig, ExpFamFactorsModule effm, Module<Beliefs> inf, int curIter, int maxIter) { double temperature = getTemperature(curIter, maxIter); TensorIdentity temp = new TensorIdentity(Tensor.getScalarTensor(Algebras.REAL_ALGEBRA, temperature)); if (annealMse) { double prop = (double) curIter / maxIter; Module<Tensor> mse = new MeanSquaredError(inf, goldConfig); Module<Tensor> dep = new DepParseDecodeLoss(inf, goldConfig, temp); Module<Tensor> lin = new ElemLinear(mse, dep, (1.0-prop), prop); return new TopoOrder<Tensor>(Lists.getList(inf), lin); } else { return new DepParseDecodeLoss(inf, goldConfig, temp); } } public double getTemperature(int curIter, int maxIter) { double prop = (double) curIter / maxIter; double temp = (1.0 - prop) * startTemp + prop * endTemp; assert !Double.isNaN(temp); return temp; } } public DepParseDecodeLoss(Module<Beliefs> inf, VarConfig vc, Module<Tensor> temperature) { super(); shallowCopy(build(inf, vc, temperature)); } private static TopoOrder<Tensor> build(Module<Beliefs> inf, VarConfig goldConfig, Module<Tensor> temperature) { // Decoding. DepTensorFromBeliefs b2d = new DepTensorFromBeliefs(inf); SoftmaxMbrDepParse mbr = new SoftmaxMbrDepParse(b2d, temperature, new LogSignAlgebra()); DepTensorToBeliefs d2b = new DepTensorToBeliefs(mbr, inf); // Loss. VarSet predVars = VarSet.getVarsOfType(goldConfig.getVars(), VarType.PREDICTED); VarConfig predConfig = goldConfig.getSubset(predVars); ExpectedRecall er = new ExpectedRecall(d2b, predConfig); return new TopoOrder<Tensor>(Lists.getList(inf, temperature), er); } }
Bug fix: incorrect leaf set was missing temperature
src/main/java/edu/jhu/autodiff/erma/DepParseDecodeLoss.java
Bug fix: incorrect leaf set was missing temperature
Java
apache-2.0
558a99e4153baa344b40ef8647359bdd95a5a010
0
vespa-engine/vespa
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.search.dispatch; import com.yahoo.collections.ListMap; import com.yahoo.compress.CompressionType; import com.yahoo.compress.Compressor; import com.yahoo.container.protect.Error; import com.yahoo.data.access.Inspector; import com.yahoo.data.access.slime.SlimeAdapter; import com.yahoo.prelude.fastsearch.DocumentDatabase; import com.yahoo.prelude.fastsearch.FastHit; import com.yahoo.prelude.fastsearch.TimeoutException; import com.yahoo.search.Query; import com.yahoo.search.Result; import com.yahoo.search.query.SessionId; import com.yahoo.search.result.ErrorMessage; import com.yahoo.search.result.Hit; import com.yahoo.slime.ArrayTraverser; import com.yahoo.slime.BinaryFormat; import com.yahoo.slime.Cursor; import com.yahoo.slime.Slime; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; /** * {@link FillInvoker} implementation using RPC * * @author bratseth * @author ollivir */ public class RpcFillInvoker extends FillInvoker { private static final Logger log = Logger.getLogger(RpcFillInvoker.class.getName()); private final DocumentDatabase documentDb; private final RpcResourcePool resourcePool; private GetDocsumsResponseReceiver responseReceiver; RpcFillInvoker(RpcResourcePool resourcePool, DocumentDatabase documentDb) { this.documentDb = documentDb; this.resourcePool = resourcePool; } @Override protected void sendFillRequest(Result result, String summaryClass) { ListMap<Integer, FastHit> hitsByNode = hitsByNode(result); CompressionType compression = CompressionType .valueOf(result.getQuery().properties().getString(RpcResourcePool.dispatchCompression, "LZ4").toUpperCase()); if (result.getQuery().getTraceLevel() >= 3) result.getQuery().trace("Sending " + hitsByNode.size() + " summary fetch RPC requests", 3); responseReceiver = new GetDocsumsResponseReceiver(hitsByNode.size(), resourcePool.compressor(), result); for (Map.Entry<Integer, List<FastHit>> nodeHits : hitsByNode.entrySet()) { sendGetDocsumsRequest(nodeHits.getKey(), nodeHits.getValue(), summaryClass, compression, result, responseReceiver); } } @Override protected void getFillResults(Result result, String summaryClass) { try { responseReceiver.processResponses(result.getQuery(), summaryClass, documentDb); result.hits().setSorted(false); result.analyzeHits(); } catch (TimeoutException e) { result.hits().addError(ErrorMessage.createTimeout("Summary data is incomplete: " + e.getMessage())); } } @Override protected void release() { // nothing to release } /** Return a map of hits by their search node (partition) id */ private static ListMap<Integer, FastHit> hitsByNode(Result result) { ListMap<Integer, FastHit> hitsByNode = new ListMap<>(); for (Iterator<Hit> i = result.hits().unorderedDeepIterator(); i.hasNext();) { Hit h = i.next(); if (!(h instanceof FastHit)) continue; FastHit hit = (FastHit) h; hitsByNode.put(hit.getDistributionKey(), hit); } return hitsByNode; } /** Send a getDocsums request to a node. Responses will be added to the given receiver. 
*/ private void sendGetDocsumsRequest(int nodeId, List<FastHit> hits, String summaryClass, CompressionType compression, Result result, GetDocsumsResponseReceiver responseReceiver) { Client.NodeConnection node = resourcePool.nodeConnections().get(nodeId); if (node == null) { String error = "Could not fill hits from unknown node " + nodeId; responseReceiver.receive(Client.GetDocsumsResponseOrError.fromError(error)); result.hits().addError(ErrorMessage.createEmptyDocsums(error)); log.warning("Got hits with partid " + nodeId + ", which is not included in the current dispatch config"); return; } Query query = result.getQuery(); String rankProfile = query.getRanking().getProfile(); byte[] serializedSlime = BinaryFormat .encode(toSlime(rankProfile, summaryClass, query.getModel().getDocumentDb(), query.getSessionId(false), hits)); double timeoutSeconds = ((double) query.getTimeLeft() - 3.0) / 1000.0; Compressor.Compression compressionResult = resourcePool.compressor().compress(compression, serializedSlime); resourcePool.client().getDocsums(hits, node, compressionResult.type(), serializedSlime.length, compressionResult.data(), responseReceiver, timeoutSeconds); } static private Slime toSlime(String rankProfile, String summaryClass, String docType, SessionId sessionId, List<FastHit> hits) { Slime slime = new Slime(); Cursor root = slime.setObject(); if (summaryClass != null) { root.setString("class", summaryClass); } if (sessionId != null) { root.setData("sessionid", sessionId.asUtf8String().getBytes()); } if (docType != null) { root.setString("doctype", docType); } if (rankProfile != null) { root.setString("ranking", rankProfile); } Cursor gids = root.setArray("gids"); for (FastHit hit : hits) { gids.addData(hit.getGlobalId().getRawId()); } return slime; } /** Receiver of the responses to a set of getDocsums requests */ public static class GetDocsumsResponseReceiver { private final BlockingQueue<Client.GetDocsumsResponseOrError> responses; private final Compressor compressor; private final Result result; /** Whether we have already logged/notified about an error - to avoid spamming */ private boolean hasReportedError = false; /** The number of responses we should receive (and process) before this is complete */ private int outstandingResponses; GetDocsumsResponseReceiver(int requestCount, Compressor compressor, Result result) { this.compressor = compressor; responses = new LinkedBlockingQueue<>(requestCount); outstandingResponses = requestCount; this.result = result; } /** Called by a thread belonging to the client when a valid response becomes available */ public void receive(Client.GetDocsumsResponseOrError response) { responses.add(response); } private void throwTimeout() throws TimeoutException { throw new TimeoutException("Timed out waiting for summary data. " + outstandingResponses + " responses outstanding."); } /** * Call this from the dispatcher thread to initiate and complete processing of responses. * This will block until all responses are available and processed, or to timeout. 
*/ void processResponses(Query query, String summaryClass, DocumentDatabase documentDb) throws TimeoutException { try { int skippedHits = 0; while (outstandingResponses > 0) { long timeLeftMs = query.getTimeLeft(); if (timeLeftMs <= 0) { throwTimeout(); } Client.GetDocsumsResponseOrError response = responses.poll(timeLeftMs, TimeUnit.MILLISECONDS); if (response == null) throwTimeout(); skippedHits += processResponse(response, summaryClass, documentDb); outstandingResponses--; } if (skippedHits != 0) { result.hits().addError(com.yahoo.search.result.ErrorMessage.createEmptyDocsums("Missing hit summary data for summary " + summaryClass + " for " + skippedHits + " hits")); } } catch (InterruptedException e) { // TODO: Add error } } private int processResponse(Client.GetDocsumsResponseOrError responseOrError, String summaryClass, DocumentDatabase documentDb) { if (responseOrError.error().isPresent()) { if (hasReportedError) return 0; String error = responseOrError.error().get(); result.hits().addError(ErrorMessage.createBackendCommunicationError(error)); log.log(Level.WARNING, "Error fetching summary data: "+ error); } else { Client.GetDocsumsResponse response = responseOrError.response().get(); CompressionType compression = CompressionType.valueOf(response.compression()); byte[] slimeBytes = compressor.decompress(response.compressedSlimeBytes(), compression, response.uncompressedSize()); return fill(response.hitsContext(), summaryClass, documentDb, slimeBytes); } return 0; } private void addErrors(com.yahoo.slime.Inspector errors) { errors.traverse((ArrayTraverser) (int index, com.yahoo.slime.Inspector value) -> { int errorCode = ("timeout".equalsIgnoreCase(value.field("type").asString())) ? Error.TIMEOUT.code : Error.UNSPECIFIED.code; result.hits().addError(new ErrorMessage(errorCode, value.field("message").asString(), value.field("details").asString())); }); } private int fill(List<FastHit> hits, String summaryClass, DocumentDatabase documentDb, byte[] slimeBytes) { com.yahoo.slime.Inspector root = BinaryFormat.decode(slimeBytes).get(); com.yahoo.slime.Inspector errors = root.field("errors"); boolean hasErrors = errors.valid() && (errors.entries() > 0); if (hasErrors) { addErrors(errors); } Inspector summaries = new SlimeAdapter(root.field("docsums")); if ( ! summaries.valid()) return 0; // No summaries; Perhaps we requested a non-existing summary class int skippedHits = 0; for (int i = 0; i < hits.size(); i++) { Inspector summary = summaries.entry(i).field("docsum"); if (summary.fieldCount() != 0) { hits.get(i).setField(Hit.SDDOCNAME_FIELD, documentDb.getName()); hits.get(i).addSummary(documentDb.getDocsumDefinitionSet().getDocsum(summaryClass), summary); hits.get(i).setFilled(summaryClass); } else { skippedHits++; } } return skippedHits; } } }
container-search/src/main/java/com/yahoo/search/dispatch/RpcFillInvoker.java
// Copyright 2018 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.search.dispatch; import com.yahoo.collections.ListMap; import com.yahoo.compress.CompressionType; import com.yahoo.compress.Compressor; import com.yahoo.container.protect.Error; import com.yahoo.data.access.Inspector; import com.yahoo.data.access.slime.SlimeAdapter; import com.yahoo.prelude.fastsearch.DocumentDatabase; import com.yahoo.prelude.fastsearch.FastHit; import com.yahoo.prelude.fastsearch.TimeoutException; import com.yahoo.search.Query; import com.yahoo.search.Result; import com.yahoo.search.query.SessionId; import com.yahoo.search.result.ErrorMessage; import com.yahoo.search.result.Hit; import com.yahoo.slime.ArrayTraverser; import com.yahoo.slime.BinaryFormat; import com.yahoo.slime.Cursor; import com.yahoo.slime.Slime; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; /** * {@link FillInvoker} implementation using RPC * * @author bratseth * @author ollivir */ public class RpcFillInvoker extends FillInvoker { private static final Logger log = Logger.getLogger(RpcFillInvoker.class.getName()); private final DocumentDatabase documentDb; private final RpcResourcePool resourcePool; private GetDocsumsResponseReceiver responseReceiver; public RpcFillInvoker(RpcResourcePool resourcePool, DocumentDatabase documentDb) { this.documentDb = documentDb; this.resourcePool = resourcePool; } @Override protected void sendFillRequest(Result result, String summaryClass) { ListMap<Integer, FastHit> hitsByNode = hitsByNode(result); CompressionType compression = CompressionType .valueOf(result.getQuery().properties().getString(RpcResourcePool.dispatchCompression, "LZ4").toUpperCase()); if (result.getQuery().getTraceLevel() >= 3) result.getQuery().trace("Sending " + hitsByNode.size() + " summary fetch RPC requests", 3); responseReceiver = new GetDocsumsResponseReceiver(hitsByNode.size(), resourcePool.compressor(), result); for (Map.Entry<Integer, List<FastHit>> nodeHits : hitsByNode.entrySet()) { sendGetDocsumsRequest(nodeHits.getKey(), nodeHits.getValue(), summaryClass, compression, result, responseReceiver); } } @Override protected void getFillResults(Result result, String summaryClass) { try { responseReceiver.processResponses(result.getQuery(), summaryClass, documentDb); result.hits().setSorted(false); result.analyzeHits(); } catch (TimeoutException e) { result.hits().addError(ErrorMessage.createTimeout("Summary data is incomplete: " + e.getMessage())); } } @Override protected void release() { // nothing to release } /** Return a map of hits by their search node (partition) id */ private static ListMap<Integer, FastHit> hitsByNode(Result result) { ListMap<Integer, FastHit> hitsByNode = new ListMap<>(); for (Iterator<Hit> i = result.hits().unorderedDeepIterator(); i.hasNext();) { Hit h = i.next(); if (!(h instanceof FastHit)) continue; FastHit hit = (FastHit) h; hitsByNode.put(hit.getDistributionKey(), hit); } return hitsByNode; } /** Send a getDocsums request to a node. Responses will be added to the given receiver. 
*/ private void sendGetDocsumsRequest(int nodeId, List<FastHit> hits, String summaryClass, CompressionType compression, Result result, GetDocsumsResponseReceiver responseReceiver) { Client.NodeConnection node = resourcePool.nodeConnections().get(nodeId); if (node == null) { result.hits().addError(ErrorMessage.createEmptyDocsums("Could not fill hits from unknown node " + nodeId)); log.warning("Got hits with partid " + nodeId + ", which is not included in the current dispatch config"); return; } Query query = result.getQuery(); String rankProfile = query.getRanking().getProfile(); byte[] serializedSlime = BinaryFormat .encode(toSlime(rankProfile, summaryClass, query.getModel().getDocumentDb(), query.getSessionId(false), hits)); double timeoutSeconds = ((double) query.getTimeLeft() - 3.0) / 1000.0; Compressor.Compression compressionResult = resourcePool.compressor().compress(compression, serializedSlime); resourcePool.client().getDocsums(hits, node, compressionResult.type(), serializedSlime.length, compressionResult.data(), responseReceiver, timeoutSeconds); } static private Slime toSlime(String rankProfile, String summaryClass, String docType, SessionId sessionId, List<FastHit> hits) { Slime slime = new Slime(); Cursor root = slime.setObject(); if (summaryClass != null) { root.setString("class", summaryClass); } if (sessionId != null) { root.setData("sessionid", sessionId.asUtf8String().getBytes()); } if (docType != null) { root.setString("doctype", docType); } if (rankProfile != null) { root.setString("ranking", rankProfile); } Cursor gids = root.setArray("gids"); for (FastHit hit : hits) { gids.addData(hit.getGlobalId().getRawId()); } return slime; } /** Receiver of the responses to a set of getDocsums requests */ public static class GetDocsumsResponseReceiver { private final BlockingQueue<Client.GetDocsumsResponseOrError> responses; private final Compressor compressor; private final Result result; /** Whether we have already logged/notified about an error - to avoid spamming */ private boolean hasReportedError = false; /** The number of responses we should receive (and process) before this is complete */ private int outstandingResponses; public GetDocsumsResponseReceiver(int requestCount, Compressor compressor, Result result) { this.compressor = compressor; responses = new LinkedBlockingQueue<>(requestCount); outstandingResponses = requestCount; this.result = result; } /** Called by a thread belonging to the client when a valid response becomes available */ public void receive(Client.GetDocsumsResponseOrError response) { responses.add(response); } private void throwTimeout() throws TimeoutException { throw new TimeoutException("Timed out waiting for summary data. " + outstandingResponses + " responses outstanding."); } /** * Call this from the dispatcher thread to initiate and complete processing of responses. * This will block until all responses are available and processed, or to timeout. 
*/ public void processResponses(Query query, String summaryClass, DocumentDatabase documentDb) throws TimeoutException { try { int skippedHits = 0; while (outstandingResponses > 0) { long timeLeftMs = query.getTimeLeft(); if (timeLeftMs <= 0) { throwTimeout(); } Client.GetDocsumsResponseOrError response = responses.poll(timeLeftMs, TimeUnit.MILLISECONDS); if (response == null) throwTimeout(); skippedHits += processResponse(response, summaryClass, documentDb); outstandingResponses--; } if (skippedHits != 0) { result.hits().addError(com.yahoo.search.result.ErrorMessage.createEmptyDocsums("Missing hit summary data for summary " + summaryClass + " for " + skippedHits + " hits")); } } catch (InterruptedException e) { // TODO: Add error } } private int processResponse(Client.GetDocsumsResponseOrError responseOrError, String summaryClass, DocumentDatabase documentDb) { if (responseOrError.error().isPresent()) { if (hasReportedError) return 0; String error = responseOrError.error().get(); result.hits().addError(ErrorMessage.createBackendCommunicationError(error)); log.log(Level.WARNING, "Error fetching summary data: "+ error); } else { Client.GetDocsumsResponse response = responseOrError.response().get(); CompressionType compression = CompressionType.valueOf(response.compression()); byte[] slimeBytes = compressor.decompress(response.compressedSlimeBytes(), compression, response.uncompressedSize()); return fill(response.hitsContext(), summaryClass, documentDb, slimeBytes); } return 0; } private void addErrors(com.yahoo.slime.Inspector errors) { errors.traverse((ArrayTraverser) (int index, com.yahoo.slime.Inspector value) -> { int errorCode = ("timeout".equalsIgnoreCase(value.field("type").asString())) ? Error.TIMEOUT.code : Error.UNSPECIFIED.code; result.hits().addError(new ErrorMessage(errorCode, value.field("message").asString(), value.field("details").asString())); }); } private int fill(List<FastHit> hits, String summaryClass, DocumentDatabase documentDb, byte[] slimeBytes) { com.yahoo.slime.Inspector root = BinaryFormat.decode(slimeBytes).get(); com.yahoo.slime.Inspector errors = root.field("errors"); boolean hasErrors = errors.valid() && (errors.entries() > 0); if (hasErrors) { addErrors(errors); } Inspector summaries = new SlimeAdapter(root.field("docsums")); if ( ! summaries.valid()) return 0; // No summaries; Perhaps we requested a non-existing summary class int skippedHits = 0; for (int i = 0; i < hits.size(); i++) { Inspector summary = summaries.entry(i).field("docsum"); if (summary.fieldCount() != 0) { hits.get(i).setField(Hit.SDDOCNAME_FIELD, documentDb.getName()); hits.get(i).addSummary(documentDb.getDocsumDefinitionSet().getDocsum(summaryClass), summary); hits.get(i).setFilled(summaryClass); } else { skippedHits++; } } return skippedHits; } } }
If you promise to deliver an answer, you must provide it. Even if you already know it is negative.
container-search/src/main/java/com/yahoo/search/dispatch/RpcFillInvoker.java
If you promise to deliver an answer, you must provide it. Even if you already know it is negative.
Java
apache-2.0
e412f6b7a5fd3f444e1511f4c1ef4ffc0ce37f57
0
trixon/netbeans-nbgames,trixon/netbeans-nbgames
/* * Copyright 2017 Patrik Karlsson. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.nbgames.core.actions; import org.openide.awt.ActionID; import org.openide.awt.ActionReference; import org.openide.awt.ActionReferences; import org.openide.awt.ActionRegistration; import org.openide.util.NbBundle; /** * * @author Patrik Karlsson */ public class CallbackOptionsAction { @ActionID(category = "Game", id = "org.nbgames.core.actions.OptionsAction") @ActionRegistration(displayName = "#CTL_OptionsAction") @ActionReferences({ @ActionReference(path = "Shortcuts", name = "D-P") , @ActionReference(path = "Shortcuts", name = "D-COMMA") }) @NbBundle.Messages("CTL_OptionsAction=Options") public static final String KEY = "OptionsAction"; }
core/src/main/java/org/nbgames/core/actions/CallbackOptionsAction.java
/* * Copyright 2017 Patrik Karlsson. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.nbgames.core.actions; import org.openide.awt.ActionID; import org.openide.awt.ActionReference; import org.openide.awt.ActionReferences; import org.openide.awt.ActionRegistration; import org.openide.util.NbBundle; /** * * @author Patrik Karlsson */ public class CallbackOptionsAction { @ActionID(category = "Game", id = "org.nbgames.core.actions.OptionsAction") @ActionRegistration(displayName = "#CTL_OptionsAction") @ActionReferences({ @ActionReference(path = "Shortcuts", name = "D-P") , @ActionReference(path = "Shortcuts", name = "C-PERIOD") }) @NbBundle.Messages("CTL_OptionsAction=Options") public static final String KEY = "OptionsAction"; }
Core: Use cmd comma for options
core/src/main/java/org/nbgames/core/actions/CallbackOptionsAction.java
Core: Use cmd comma for options
Java
apache-2.0
bb4f164195cd3b1b61812d3c6095fcfb3611745f
0
orientechnologies/orientdb,orientechnologies/orientdb,orientechnologies/orientdb,orientechnologies/orientdb
package com.orientechnologies.orient.graph.blueprints; import com.orientechnologies.orient.core.exception.OConcurrentModificationException; import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.impls.orient.OrientGraph; import com.tinkerpop.blueprints.impls.orient.OrientGraphFactory; import com.tinkerpop.blueprints.impls.orient.OrientVertex; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; public class ConcurrentTxTest { private final static String STORAGE_ENGINE = "memory"; private final static String DATABASE_URL = STORAGE_ENGINE + ":" + ConcurrentTxTest.class.getSimpleName(); private final static String PROPERTY_NAME = "pn"; OrientGraphFactory graphFactory; @Before public void setUpGraph() { graphFactory = new OrientGraphFactory(DATABASE_URL); graphFactory.setAutoStartTx(false); graphFactory.setupPool(0, 200); } @After public void tearDownGraph() { graphFactory.drop(); } @Test(expected = OConcurrentModificationException.class) public void testMultithreadedProvokeOConcurrentModificationException2() throws Throwable { // Create vertex OrientGraph mainTx = graphFactory.getTx(); mainTx.begin(); OrientVertex vertex = mainTx.addVertex(null, PROPERTY_NAME, "init"); mainTx.commit(); mainTx.shutdown(); int threadCount = 200; final Object recordId = vertex.getId(); final CyclicBarrier barrier = new CyclicBarrier(threadCount); List<Thread> threads = new ArrayList<Thread>(); final AtomicReference<Throwable> t = new AtomicReference<Throwable>(null); // Spawn two threads and modify the vertex for (int i = 0; i < threadCount; i++) { final int threadNo = i; Thread thread = run(new Runnable() { @Override public void run() { OrientGraph tx = graphFactory.getTx(); try { tx.begin(); OrientVertex secondVertexHandle = tx.getVertex(recordId); secondVertexHandle.setProperty(PROPERTY_NAME, threadNo); waitFor(barrier); tx.commit(); } catch (Exception e) { t.set(e); } finally { tx.shutdown(); } } }); threads.add(thread); } // Wait for threads for (Thread thread : threads) { thread.join(); } if (t.get() != null) { throw t.get(); } } @Test(expected = OConcurrentModificationException.class) public void testMultithreadedProvokeOConcurrentModificationException() throws Throwable { final int firstValue = 0; final int secondValue = 1; // Create vertex OrientGraph mainTx = graphFactory.getTx(); mainTx.begin(); OrientVertex firstVertexHandle = mainTx.addVertex(null, PROPERTY_NAME, firstValue); mainTx.commit(); mainTx.shutdown(); final Object recordId = firstVertexHandle.getId(); final CyclicBarrier barrier = new CyclicBarrier(2); List<Thread> threads = new ArrayList<Thread>(); final AtomicReference<Throwable> t = new AtomicReference<Throwable>(null); // Spawn two threads and modify the vertex for (int i = 0; i < 2; i++) { Thread thread = run(new Runnable() { @Override public void run() { OrientGraph tx = graphFactory.getTx(); try { tx.begin(); Vertex secondVertexHandle = tx.getVertex(recordId); secondVertexHandle.setProperty(PROPERTY_NAME, secondValue); waitFor(barrier); tx.commit(); } catch (Exception e) { t.set(e); } finally { tx.shutdown(); } } }); threads.add(thread); } // Wait for threads for (Thread thread : threads) { thread.join(); } if (t.get() != null) { throw t.get(); } } private void waitFor(CyclicBarrier barrier) { try { barrier.await(1000, TimeUnit.SECONDS); } catch (Exception e) { 
e.printStackTrace(); } } @Test(expected = OConcurrentModificationException.class) public void testProvokeOConcurrentModificationException() throws Exception { final int firstValue = 0; final int secondValue = 1; final int thirdValue = 3; // Create vertex OrientGraph tx = graphFactory.getTx(); tx.begin(); final OrientVertex firstVertexHandle = tx.addVertex(null, PROPERTY_NAME, firstValue); tx.commit(); //tx.shutdown(); final Object recordId = firstVertexHandle.getId(); Thread updateThread = new Thread(new Runnable() { @Override public void run() { // 1. Update OrientGraph tx2 = graphFactory.getTx(); try { tx2.begin(); Vertex secondVertexHandle = tx2.getVertex(recordId); secondVertexHandle.setProperty(PROPERTY_NAME, secondValue); tx2.commit(); } finally { tx2.shutdown(); } } }); updateThread.start(); updateThread.join(); // 2. Update OrientGraph tx3 = graphFactory.getTx(); try { tx3.begin(); Vertex thirdVertexHandle = tx3.getVertex(recordId); thirdVertexHandle.setProperty(PROPERTY_NAME, thirdValue); //commit tx3.commit(); } finally { tx3.shutdown(); tx.shutdown(); } } public static Thread run(Runnable runnable) { Thread thread = new Thread(runnable); thread.start(); return thread; } }
graphdb/src/test/java/com/orientechnologies/orient/graph/blueprints/ConcurrentTxTest.java
package com.orientechnologies.orient.graph.blueprints; import com.orientechnologies.orient.core.exception.OConcurrentModificationException; import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.impls.orient.OrientGraph; import com.tinkerpop.blueprints.impls.orient.OrientGraphFactory; import com.tinkerpop.blueprints.impls.orient.OrientVertex; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; public class ConcurrentTxTest { private final static String STORAGE_ENGINE = "memory"; private final static String DATABASE_URL = STORAGE_ENGINE + ":" + ConcurrentTxTest.class.getSimpleName(); private final static String PROPERTY_NAME = "pn"; OrientGraphFactory graphFactory; @Before public void setUpGraph() { graphFactory = new OrientGraphFactory(DATABASE_URL); graphFactory.setAutoStartTx(false); graphFactory.setupPool(0, 200); } @After public void tearDownGraph() { graphFactory.drop(); } @Test(expected = OConcurrentModificationException.class) public void testMultithreadedProvokeOConcurrentModificationException2() throws Throwable { // Create vertex OrientGraph mainTx = graphFactory.getTx(); mainTx.begin(); OrientVertex vertex = mainTx.addVertex(null, PROPERTY_NAME, "init"); mainTx.commit(); mainTx.shutdown(); int threadCount = 200; final Object recordId = vertex.getId(); final CyclicBarrier barrier = new CyclicBarrier(threadCount); List<Thread> threads = new ArrayList<Thread>(); final AtomicReference<Throwable> t = new AtomicReference<Throwable>(null); // Spawn two threads and modify the vertex for (int i = 0; i < threadCount; i++) { final int threadNo = i; Thread thread = run(new Runnable() { @Override public void run() { OrientGraph tx = graphFactory.getTx(); try { tx.begin(); OrientVertex secondVertexHandle = tx.getVertex(recordId); secondVertexHandle.setProperty(PROPERTY_NAME, threadNo); waitFor(barrier); tx.commit(); } catch (Exception e) { t.set(e); } finally { tx.shutdown(); } } }); threads.add(thread); } // Wait for threads for (Thread thread : threads) { thread.join(); } if (t.get() != null) { throw t.get(); } } @Test(expected = OConcurrentModificationException.class) public void testMultithreadedProvokeOConcurrentModificationException() throws Throwable { final int firstValue = 0; final int secondValue = 1; // Create vertex OrientGraph mainTx = graphFactory.getTx(); mainTx.begin(); OrientVertex firstVertexHandle = mainTx.addVertex(null, PROPERTY_NAME, firstValue); mainTx.commit(); final Object recordId = firstVertexHandle.getId(); final CyclicBarrier barrier = new CyclicBarrier(2); List<Thread> threads = new ArrayList<Thread>(); final AtomicReference<Throwable> t = new AtomicReference<Throwable>(null); // Spawn two threads and modify the vertex for (int i = 0; i < 2; i++) { Thread thread = run(new Runnable() { @Override public void run() { OrientGraph tx = graphFactory.getTx(); try { tx.begin(); Vertex secondVertexHandle = tx.getVertex(recordId); secondVertexHandle.setProperty(PROPERTY_NAME, secondValue); waitFor(barrier); tx.commit(); } catch (Exception e) { t.set(e); } finally { tx.shutdown(); } } }); threads.add(thread); } // Wait for threads for (Thread thread : threads) { thread.join(); } if (t.get() != null) { throw t.get(); } } private void waitFor(CyclicBarrier barrier) { try { barrier.await(1000, TimeUnit.SECONDS); } catch (Exception e) { e.printStackTrace(); } } 
@Test(expected = OConcurrentModificationException.class) public void testProvokeOConcurrentModificationException() throws Exception { final int firstValue = 0; final int secondValue = 1; final int thirdValue = 3; // Create vertex OrientGraph tx = graphFactory.getTx(); tx.begin(); final OrientVertex firstVertexHandle = tx.addVertex(null, PROPERTY_NAME, firstValue); tx.commit(); final Object recordId = firstVertexHandle.getId(); Thread updateThread = new Thread(new Runnable() { @Override public void run() { // 1. Update OrientGraph tx2 = graphFactory.getTx(); tx2.begin(); Vertex secondVertexHandle = tx2.getVertex(recordId); secondVertexHandle.setProperty(PROPERTY_NAME, secondValue); tx2.commit(); } }); updateThread.start(); updateThread.join(); // 2. Update OrientGraph tx3 = graphFactory.getTx(); tx3.begin(); Vertex thirdVertexHandle = tx3.getVertex(recordId); thirdVertexHandle.setProperty(PROPERTY_NAME, thirdValue); //commit tx3.commit(); } public static Thread run(Runnable runnable) { Thread thread = new Thread(runnable); thread.start(); return thread; } }
fixed test failure for test order
graphdb/src/test/java/com/orientechnologies/orient/graph/blueprints/ConcurrentTxTest.java
fixed test failure for test order
Java
bsd-3-clause
6e488ae3469d1965b4ee96c1734de5d04f8f9742
0
Condroidapp/android
package cz.quinix.condroid.database; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.util.Log; import android.widget.Toast; import cz.quinix.condroid.model.Annotation; import cz.quinix.condroid.model.Convention; import cz.quinix.condroid.model.ProgramLine; public class DataProvider { public static String AUTHORITY = "cz.quinix.condroid.database.DataProvider"; public static Uri CONTENT_URI = Uri.parse("content://"+ AUTHORITY + "/database"); public static int ITEMS_PER_PAGE = 40; private CondroidDatabase mDatabase; private Convention con; private List<Integer> favorited; private static volatile DataProvider instance; private static HashMap<Integer, String> programLines = null; private DataProvider(Context context) { mDatabase = new CondroidDatabase(context); } public static DataProvider getInstance(Context context) { if(instance == null) { synchronized (CondroidDatabase.class) { if(instance == null) { instance = new DataProvider(context); } } } return instance; } public boolean hasData() { return !mDatabase.isEmpty(); } public void setConvention(Convention convention) { con = convention; } public DatabaseLoader prepareInsert() { if(!mDatabase.isEmpty()) { mDatabase.purge(); programLines = null; } return new DatabaseLoader(null, mDatabase, con); } public List<Annotation> getAnnotations(String condition, int page) { List<Annotation> ret = new ArrayList<Annotation>(); Cursor c = this.mDatabase.query(CondroidDatabase.ANNOTATION_TABLE, null, condition, null, "startTime ASC, lid ASC, title ASC", (page*ITEMS_PER_PAGE) + ","+ ITEMS_PER_PAGE); while(c.moveToNext()) { ret.add(readAnnotation(c)); } c.close(); return ret; } public ProgramLine getProgramLine (int lid) { ProgramLine pl = new ProgramLine(); if(programLines == null) { this.loadProgramLines(); } if(programLines.containsKey(lid)) { pl.setLid(lid); pl.setName(programLines.get(lid)); } return pl; } public HashMap<Integer, String> getProgramLines() { if(programLines == null) { this.loadProgramLines(); } return programLines; } private void loadProgramLines() { programLines = new HashMap<Integer, String>(); Cursor c = this.mDatabase.query(CondroidDatabase.LINE_TABLE, null, null, null, "title ASC", null); while(c.moveToNext()) { programLines.put(c.getInt(c.getColumnIndex("id")), c.getString(c.getColumnIndex("title"))); } c.close(); } public List<Date> getDates() { Cursor c = this.mDatabase.query("SELECT DISTINCT STRFTIME('%Y-%m-%d',startTime) AS sDate FROM "+CondroidDatabase.ANNOTATION_TABLE+" ORDER by STRFTIME('%Y-%m-%d',startTime) ASC"); List<Date> map = new ArrayList<Date>(); DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); if(c.getCount() > 0) { do { try { map.add(df.parse(c.getString(c.getColumnIndex("sDate")))); } catch (ParseException e) { Log.w("DB", e); } } while(c.moveToNext()); } c.close(); return map; } public List<Annotation> getRunningAndNext() { return this.getRunningAndNext(false); } public List<Annotation> getRunningAndNext(boolean favoritedOnly) { List<Annotation> l = new ArrayList<Annotation>(); String favoritedcondition = ""; if(favoritedOnly) { List<Integer> f = this.getFavorited(); if(f.size() > 0) { for (Integer integer : f) { favoritedcondition += integer+","; } favoritedcondition = " AND pid IN ("+favoritedcondition.substring(0, favoritedcondition.length()-1)+")"; } else { 
throw new IllegalStateException("No favorited"); } } Cursor c = this.mDatabase.query(CondroidDatabase.ANNOTATION_TABLE, null, "startTime < DATETIME('now') AND endTime > DATETIME('now')"+favoritedcondition, null, "startTime DESC", null, false, null); while (c.moveToNext()) { if(c.isFirst()) { Annotation a = new Annotation(); a.setTitle("break"); a.setSQLStartTime(c.getString(c.getColumnIndex("startTime"))); a.setAnnotation("now"); l.add(a); } Annotation annotation = readAnnotation(c); l.add(annotation); } c.close(); Cursor c2 = this.mDatabase.query(CondroidDatabase.ANNOTATION_TABLE, null, "startTime > DATETIME('now')"+favoritedcondition, null, "startTime ASC, lid ASC", "0,100", false, null); String previous = ""; int hours = 0; while (c2.moveToNext()) { if (!previous.equals(c2.getString(c2.getColumnIndex("startTime")))) { if(hours++ > 5) break; Annotation a = new Annotation(); a.setTitle("break"); a.setSQLStartTime(c2.getString(c2.getColumnIndex("startTime"))); l.add(a); previous = c2.getString(c2.getColumnIndex("startTime")); } Annotation annotation = readAnnotation(c2); l.add(annotation); } c2.close(); return l; } private Annotation readAnnotation(Cursor c) { Annotation annotation = new Annotation(); annotation.setPid(c.getString(c.getColumnIndex("pid"))); annotation.setTitle(c.getString(c.getColumnIndex("title"))); annotation.setAnnotation(c.getString(c.getColumnIndex("annotation"))); annotation.setAuthor(c.getString(c.getColumnIndex("talker"))); annotation.setSQLEndTime(c.getString(c.getColumnIndex("endTime"))); //annotation.setLength(c.getString(c.getColumnIndex("length"))); annotation.setLocation(c.getString(c.getColumnIndex("location"))); annotation.setLid(c.getInt(c.getColumnIndex("lid"))); annotation.setSQLStartTime(c.getString(c.getColumnIndex("startTime"))); annotation.setType(c.getString(c.getColumnIndex("mainType"))); annotation.setAdditonalTypes(c.getString(c.getColumnIndex("additionalTypes"))); return annotation; } public Convention getCon() { if (this.con != null) { return con; } Cursor c = this.mDatabase.query(CondroidDatabase.CON_TABLE, null, null, null, null, null); Convention co = new Convention(); while (c.moveToNext()) { co.setCid(c.getInt(c.getColumnIndex("id"))); co.setDataUrl(c.getString(c.getColumnIndex("dataUrl"))); co.setDate(c.getString(c.getColumnIndex("date"))); co.setIconUrl(c.getString(c.getColumnIndex("iconUrl"))); co.setName(c.getString(c.getColumnIndex("name"))); co.setMessage(c.getString(c.getColumnIndex("message"))); co.setLocationsFile(c.getString(c.getColumnIndex("locationsFile"))); } c.close(); this.con = co; return co; } public List<Integer> getFavorited() { if(favorited != null) { return favorited; } Cursor c = this.mDatabase.query(CondroidDatabase.FAVORITE_TABLE, null, null, null, "pid ASC", null); favorited = new ArrayList<Integer>(); while (c.moveToNext()) { favorited.add(c.getInt(c.getColumnIndex("pid"))); } return favorited; } public boolean doFavorite(String pid) { Cursor c = this.mDatabase.query(CondroidDatabase.FAVORITE_TABLE, null, "pid="+pid, null, null, null); favorited = null; if(c.getCount() > 0) { this.mDatabase.query("DELETE FROM " + CondroidDatabase.FAVORITE_TABLE + " WHERE pid = '"+pid+"'"); return false; } else { this.mDatabase.query("INSERT INTO "+CondroidDatabase.FAVORITE_TABLE +" (pid) VALUES ('"+pid +"')"); return true; } } }
src/cz/quinix/condroid/database/DataProvider.java
package cz.quinix.condroid.database; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.util.Log; import android.widget.Toast; import cz.quinix.condroid.model.Annotation; import cz.quinix.condroid.model.Convention; import cz.quinix.condroid.model.ProgramLine; public class DataProvider { public static String AUTHORITY = "cz.quinix.condroid.database.DataProvider"; public static Uri CONTENT_URI = Uri.parse("content://"+ AUTHORITY + "/database"); public static int ITEMS_PER_PAGE = 40; private CondroidDatabase mDatabase; private Convention con; private List<Integer> favorited; private static volatile DataProvider instance; private static HashMap<Integer, String> programLines = null; private DataProvider(Context context) { mDatabase = new CondroidDatabase(context); } public static DataProvider getInstance(Context context) { if(instance == null) { synchronized (CondroidDatabase.class) { if(instance == null) { instance = new DataProvider(context); } } } return instance; } public boolean hasData() { return !mDatabase.isEmpty(); } public void setConvention(Convention convention) { con = convention; } public DatabaseLoader prepareInsert() { if(!mDatabase.isEmpty()) { mDatabase.purge(); programLines = null; } return new DatabaseLoader(null, mDatabase, con); } public List<Annotation> getAnnotations(String condition, int page) { List<Annotation> ret = new ArrayList<Annotation>(); Cursor c = this.mDatabase.query(CondroidDatabase.ANNOTATION_TABLE, null, condition, null, "startTime ASC, lid ASC, title ASC", (page*ITEMS_PER_PAGE) + ","+ ITEMS_PER_PAGE); while(c.moveToNext()) { ret.add(readAnnotation(c)); } c.close(); return ret; } public ProgramLine getProgramLine (int lid) { ProgramLine pl = new ProgramLine(); if(programLines == null) { this.loadProgramLines(); } if(programLines.containsKey(lid)) { pl.setLid(lid); pl.setName(programLines.get(lid)); } return pl; } public HashMap<Integer, String> getProgramLines() { if(programLines == null) { this.loadProgramLines(); } return programLines; } private void loadProgramLines() { programLines = new HashMap<Integer, String>(); Cursor c = this.mDatabase.query(CondroidDatabase.LINE_TABLE, null, null, null, "title ASC", null); while(c.moveToNext()) { programLines.put(c.getInt(c.getColumnIndex("id")), c.getString(c.getColumnIndex("title"))); } c.close(); } public List<Date> getDates() { Cursor c = this.mDatabase.query("SELECT DISTINCT STRFTIME('%Y-%m-%d',startTime) AS sDate FROM "+CondroidDatabase.ANNOTATION_TABLE+" ORDER by STRFTIME('%Y-%m-%d',startTime) ASC"); List<Date> map = new ArrayList<Date>(); DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); while(c.moveToNext()) { try { map.add(df.parse(c.getString(c.getColumnIndex("sDate")))); } catch (ParseException e) { Log.w("DB", e); } } c.close(); return map; } public List<Annotation> getRunningAndNext() { return this.getRunningAndNext(false); } public List<Annotation> getRunningAndNext(boolean favoritedOnly) { List<Annotation> l = new ArrayList<Annotation>(); String favoritedcondition = ""; if(favoritedOnly) { List<Integer> f = this.getFavorited(); if(f.size() > 0) { for (Integer integer : f) { favoritedcondition += integer+","; } favoritedcondition = " AND pid IN ("+favoritedcondition.substring(0, favoritedcondition.length()-1)+")"; } else { throw new 
IllegalStateException("No favorited"); } } Cursor c = this.mDatabase.query(CondroidDatabase.ANNOTATION_TABLE, null, "startTime < DATETIME('now') AND endTime > DATETIME('now')"+favoritedcondition, null, "startTime DESC", null, false, null); while (c.moveToNext()) { if(c.isFirst()) { Annotation a = new Annotation(); a.setTitle("break"); a.setSQLStartTime(c.getString(c.getColumnIndex("startTime"))); a.setAnnotation("now"); l.add(a); } Annotation annotation = readAnnotation(c); l.add(annotation); } c.close(); Cursor c2 = this.mDatabase.query(CondroidDatabase.ANNOTATION_TABLE, null, "startTime > DATETIME('now')"+favoritedcondition, null, "startTime ASC, lid ASC", "0,100", false, null); String previous = ""; int hours = 0; while (c2.moveToNext()) { if (!previous.equals(c2.getString(c2.getColumnIndex("startTime")))) { if(hours++ > 5) break; Annotation a = new Annotation(); a.setTitle("break"); a.setSQLStartTime(c2.getString(c2.getColumnIndex("startTime"))); l.add(a); previous = c2.getString(c2.getColumnIndex("startTime")); } Annotation annotation = readAnnotation(c2); l.add(annotation); } c2.close(); return l; } private Annotation readAnnotation(Cursor c) { Annotation annotation = new Annotation(); annotation.setPid(c.getString(c.getColumnIndex("pid"))); annotation.setTitle(c.getString(c.getColumnIndex("title"))); annotation.setAnnotation(c.getString(c.getColumnIndex("annotation"))); annotation.setAuthor(c.getString(c.getColumnIndex("talker"))); annotation.setSQLEndTime(c.getString(c.getColumnIndex("endTime"))); //annotation.setLength(c.getString(c.getColumnIndex("length"))); annotation.setLocation(c.getString(c.getColumnIndex("location"))); annotation.setLid(c.getInt(c.getColumnIndex("lid"))); annotation.setSQLStartTime(c.getString(c.getColumnIndex("startTime"))); annotation.setType(c.getString(c.getColumnIndex("mainType"))); annotation.setAdditonalTypes(c.getString(c.getColumnIndex("additionalTypes"))); return annotation; } public Convention getCon() { if (this.con != null) { return con; } Cursor c = this.mDatabase.query(CondroidDatabase.CON_TABLE, null, null, null, null, null); Convention co = new Convention(); while (c.moveToNext()) { co.setCid(c.getInt(c.getColumnIndex("id"))); co.setDataUrl(c.getString(c.getColumnIndex("dataUrl"))); co.setDate(c.getString(c.getColumnIndex("date"))); co.setIconUrl(c.getString(c.getColumnIndex("iconUrl"))); co.setName(c.getString(c.getColumnIndex("name"))); co.setMessage(c.getString(c.getColumnIndex("message"))); co.setLocationsFile(c.getString(c.getColumnIndex("locationsFile"))); } c.close(); this.con = co; return co; } public List<Integer> getFavorited() { if(favorited != null) { return favorited; } Cursor c = this.mDatabase.query(CondroidDatabase.FAVORITE_TABLE, null, null, null, "pid ASC", null); favorited = new ArrayList<Integer>(); while (c.moveToNext()) { favorited.add(c.getInt(c.getColumnIndex("pid"))); } return favorited; } public boolean doFavorite(String pid) { Cursor c = this.mDatabase.query(CondroidDatabase.FAVORITE_TABLE, null, "pid="+pid, null, null, null); favorited = null; if(c.getCount() > 0) { this.mDatabase.query("DELETE FROM " + CondroidDatabase.FAVORITE_TABLE + " WHERE pid = '"+pid+"'"); return false; } else { this.mDatabase.query("INSERT INTO "+CondroidDatabase.FAVORITE_TABLE +" (pid) VALUES ('"+pid +"')"); return true; } } }
App doesn't eat first row of dates now :-)
src/cz/quinix/condroid/database/DataProvider.java
App doesn't eat first row of dates now :-)
Java
bsd-3-clause
427b01bb86136bb8e360f0c1e6e703fbece73ac0
0
edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon
package org.lockss.test; import org.lockss.app.LockssDaemon; import java.util.*; import org.lockss.hasher.*; import org.lockss.protocol.*; import org.lockss.poller.*; import org.lockss.state.*; import org.lockss.repository.*; import org.lockss.proxy.*; import org.lockss.crawler.*; import org.lockss.plugin.*; import org.lockss.app.*; public class MockLockssDaemon extends LockssDaemon { HashService hashService = null; PollManager pollManager = null; LcapComm commManager = null; LockssRepository lockssRepository = null; HistoryRepository historyRepository = null; ProxyHandler proxyHandler = null; CrawlManager crawlManager = null; PluginManager pluginManager = null; IdentityManager identityManager = null; NodeManagerService nodeManagerService = null; public MockLockssDaemon() { this(null); } public MockLockssDaemon(List urls) { super(urls); } public void startDaemon() throws Exception { } public void stopDaemon() { hashService = null; pollManager = null; commManager = null; lockssRepository = null; historyRepository = null; proxyHandler = null; crawlManager = null; pluginManager = null; identityManager = null; nodeManagerService = null; } /** * return the hash service instance * @return the HashService */ public HashService getHashService() { if (hashService == null) { hashService = new HashService(); try { hashService.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.HASH_SERVICE, hashService); } return hashService; } /** * return the poll manager instance * @return the PollManager */ public PollManager getPollManager() { if (pollManager == null) { pollManager = new PollManager(); try { pollManager.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.POLL_MANAGER, pollManager); } return pollManager; } /** * return the communication manager instance * @return the LcapComm */ public LcapComm getCommManager() { if (commManager == null) { commManager = new LcapComm(); try { commManager.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.COMM_MANAGER, hashService); } return commManager; } /** * get a Lockss Repository instance. This is broken and not AU specific, * because using the proper factory method required Configuration parameters * which weren't always set in the tests. 
* @param au the ArchivalUnit (ignored) * @return the LockssRepository */ public LockssRepository getLockssRepository(ArchivalUnit au) { if (lockssRepository == null) { LockssRepositoryImpl impl = new LockssRepositoryImpl(); try { impl.initService(this); } catch (LockssDaemonException ex) { } lockssRepository = impl; theManagers.put(LockssDaemon.LOCKSS_REPOSITORY, lockssRepository); } return lockssRepository; } /** * return the history repository instance * @return the HistoryRepository */ public HistoryRepository getHistoryRepository() { if (historyRepository == null) { HistoryRepositoryImpl impl = new HistoryRepositoryImpl(); try { impl.initService(this); } catch (LockssDaemonException ex) { } historyRepository = impl; theManagers.put(LockssDaemon.HISTORY_REPOSITORY, historyRepository); } return historyRepository; } /** * return the node manager service * @return the NodeManagerService */ public NodeManagerService getNodeManagerService() { if (nodeManagerService == null) { nodeManagerService = new MockNodeManagerService(); try { nodeManagerService.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.NODE_MANAGER_SERVICE, nodeManagerService); } return nodeManagerService; } /** * return the node manager instance. Uses NodeManagerService. * @param au the ArchivalUnit * @return the NodeManager */ public NodeManager getNodeManager(ArchivalUnit au) { getNodeManagerService().addNodeManager(au); return nodeManagerService.getNodeManager(au); } /** * return the proxy handler instance * @return the ProxyHandler */ public ProxyHandler getProxyHandler() { if (proxyHandler == null) { proxyHandler = new ProxyHandler(); try { proxyHandler.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.PROXY_HANDLER, proxyHandler); } return proxyHandler; } /** * return the crawl manager instance * @return the CrawlManager */ public CrawlManager getCrawlManager() { if (crawlManager == null) { CrawlManagerImpl impl = new CrawlManagerImpl(); try { impl.initService(this); } catch (LockssDaemonException ex) { } crawlManager = impl; theManagers.put(LockssDaemon.CRAWL_MANAGER, crawlManager); } return crawlManager; } /** * return the plugin manager instance * @return the PluginManager */ public PluginManager getPluginManager() { if (pluginManager == null) { pluginManager = new PluginManager(); try { pluginManager.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.PLUGIN_MANAGER, pluginManager); } return pluginManager; } /** * return the Identity Manager * @return IdentityManager */ public IdentityManager getIdentityManager() { if (identityManager == null) { identityManager = new IdentityManager(); try { identityManager.initService(this); } catch (LockssDaemonException ex) { } } theManagers.put(LockssDaemon.IDENTITY_MANAGER, identityManager); return identityManager; } /** * Set the CommManager * @param commMan the new manager */ public void setCommManager(LcapComm commMan) { commManager = commMan; theManagers.put(LockssDaemon.COMM_MANAGER, hashService); } /** * Set the CrawlManager * @param crawlMan the new manager */ public void setCrawlManager(CrawlManager crawlMan) { crawlManager = crawlMan; theManagers.put(LockssDaemon.CRAWL_MANAGER, crawlManager); } /** * Set the HashService * @param hashServ the new service */ public void setHashService(HashService hashServ) { hashService = hashServ; theManagers.put(LockssDaemon.HASH_SERVICE, hashService); } /** * Set the HistoryRepository * @param histRepo the new repository */ 
public void setHistoryRepository(HistoryRepository histRepo) { historyRepository = histRepo; theManagers.put(LockssDaemon.HISTORY_REPOSITORY, historyRepository); } /** * Set the IdentityManager * @param idMan the new manager */ public void setIdentityManager(IdentityManager idMan) { identityManager = idMan; theManagers.put(LockssDaemon.IDENTITY_MANAGER, identityManager); } /** * Set the LockssRepository * @param lockssRepo the new repository */ public void setLockssRepository(LockssRepository lockssRepo) { lockssRepository = lockssRepo; theManagers.put(LockssDaemon.LOCKSS_REPOSITORY, lockssRepository); } /** * Set a new NodeManagerService. * @param nms the new service */ public void setNodeManagerService(NodeManagerService nms) { nodeManagerService = nms; } /** * Set the NodeManager for a given AU. Requires a MocKNodeManagerService * (the default). * @param nodeMan the new manager * @param au the ArchivalUnit */ public void setNodeManager(NodeManager nodeMan, ArchivalUnit au) { getNodeManagerService(); if (nodeManagerService instanceof MockNodeManagerService) { ((MockNodeManagerService)nodeManagerService).auMaps.put(au, nodeMan); } else { throw new UnsupportedOperationException("Couldn't setNodeManager with"+ "a non-Mock service."); } } /** * Set the PluginManager * @param pluginMan the new manager */ public void setPluginManager(PluginManager pluginMan) { pluginManager = pluginMan; theManagers.put(LockssDaemon.PLUGIN_MANAGER, pluginManager); } /** * Set the PollManager * @param pollMan the new manager */ public void setPollManager(PollManager pollMan) { pollManager = pollMan; theManagers.put(LockssDaemon.POLL_MANAGER, pollManager); } /** * Set the ProxyHandler * @param proxyHand the new handler */ public void setProxyHandler(ProxyHandler proxyHand) { proxyHandler = proxyHand; theManagers.put(LockssDaemon.PROXY_HANDLER, proxyHandler); } }
test/src/org/lockss/test/MockLockssDaemon.java
package org.lockss.test; import org.lockss.app.LockssDaemon; import java.util.*; import org.lockss.hasher.*; import org.lockss.protocol.*; import org.lockss.poller.*; import org.lockss.state.*; import org.lockss.repository.*; import org.lockss.proxy.*; import org.lockss.crawler.*; import org.lockss.plugin.*; import org.lockss.app.*; public class MockLockssDaemon extends LockssDaemon { HashService hashService = null; PollManager pollManager = null; LcapComm commManager = null; LockssRepository lockssRepository = null; HistoryRepository historyRepository = null; ProxyHandler proxyHandler = null; CrawlManager crawlManager = null; PluginManager pluginManager = null; IdentityManager identityManager = null; NodeManagerService nodeManagerService = null; public MockLockssDaemon(List urls) { super(urls); } public void startDaemon() throws Exception { } public void stopDaemon() { hashService = null; pollManager = null; commManager = null; lockssRepository = null; historyRepository = null; proxyHandler = null; crawlManager = null; pluginManager = null; identityManager = null; nodeManagerService = null; } /** * return the hash service instance * @return the HashService */ public HashService getHashService() { if (hashService == null) { hashService = new HashService(); try { hashService.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.HASH_SERVICE, hashService); } return hashService; } /** * return the poll manager instance * @return the PollManager */ public PollManager getPollManager() { if (pollManager == null) { pollManager = new PollManager(); try { pollManager.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.POLL_MANAGER, pollManager); } return pollManager; } /** * return the communication manager instance * @return the LcapComm */ public LcapComm getCommManager() { if (commManager == null) { commManager = new LcapComm(); try { commManager.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.COMM_MANAGER, hashService); } return commManager; } /** * get a Lockss Repository instance. This is broken and not AU specific, * because using the proper factory method required Configuration parameters * which weren't always set in the tests. * @param au the ArchivalUnit (ignored) * @return the LockssRepository */ public LockssRepository getLockssRepository(ArchivalUnit au) { if (lockssRepository == null) { LockssRepositoryImpl impl = new LockssRepositoryImpl(); try { impl.initService(this); } catch (LockssDaemonException ex) { } lockssRepository = impl; theManagers.put(LockssDaemon.LOCKSS_REPOSITORY, lockssRepository); } return lockssRepository; } /** * return the history repository instance * @return the HistoryRepository */ public HistoryRepository getHistoryRepository() { if (historyRepository == null) { HistoryRepositoryImpl impl = new HistoryRepositoryImpl(); try { impl.initService(this); } catch (LockssDaemonException ex) { } historyRepository = impl; theManagers.put(LockssDaemon.HISTORY_REPOSITORY, historyRepository); } return historyRepository; } /** * return the node manager service * @return the NodeManagerService */ public NodeManagerService getNodeManagerService() { if (nodeManagerService == null) { nodeManagerService = new MockNodeManagerService(); try { nodeManagerService.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.NODE_MANAGER_SERVICE, nodeManagerService); } return nodeManagerService; } /** * return the node manager instance. 
Uses NodeManagerService. * @param au the ArchivalUnit * @return the NodeManager */ public NodeManager getNodeManager(ArchivalUnit au) { getNodeManagerService().addNodeManager(au); return nodeManagerService.getNodeManager(au); } /** * return the proxy handler instance * @return the ProxyHandler */ public ProxyHandler getProxyHandler() { if (proxyHandler == null) { proxyHandler = new ProxyHandler(); try { proxyHandler.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.PROXY_HANDLER, proxyHandler); } return proxyHandler; } /** * return the crawl manager instance * @return the CrawlManager */ public CrawlManager getCrawlManager() { if (crawlManager == null) { CrawlManagerImpl impl = new CrawlManagerImpl(); try { impl.initService(this); } catch (LockssDaemonException ex) { } crawlManager = impl; theManagers.put(LockssDaemon.CRAWL_MANAGER, crawlManager); } return crawlManager; } /** * return the plugin manager instance * @return the PluginManager */ public PluginManager getPluginManager() { if (pluginManager == null) { pluginManager = new PluginManager(); try { pluginManager.initService(this); } catch (LockssDaemonException ex) { } theManagers.put(LockssDaemon.PLUGIN_MANAGER, pluginManager); } return pluginManager; } /** * return the Identity Manager * @return IdentityManager */ public IdentityManager getIdentityManager() { if (identityManager == null) { identityManager = new IdentityManager(); try { identityManager.initService(this); } catch (LockssDaemonException ex) { } } theManagers.put(LockssDaemon.IDENTITY_MANAGER, identityManager); return identityManager; } /** * Set the CommManager * @param commMan the new manager */ public void setCommManager(LcapComm commMan) { commManager = commMan; theManagers.put(LockssDaemon.COMM_MANAGER, hashService); } /** * Set the CrawlManager * @param crawlMan the new manager */ public void setCrawlManager(CrawlManager crawlMan) { crawlManager = crawlMan; theManagers.put(LockssDaemon.CRAWL_MANAGER, crawlManager); } /** * Set the HashService * @param hashServ the new service */ public void setHashService(HashService hashServ) { hashService = hashServ; theManagers.put(LockssDaemon.HASH_SERVICE, hashService); } /** * Set the HistoryRepository * @param histRepo the new repository */ public void setHistoryRepository(HistoryRepository histRepo) { historyRepository = histRepo; theManagers.put(LockssDaemon.HISTORY_REPOSITORY, historyRepository); } /** * Set the IdentityManager * @param idMan the new manager */ public void setIdentityManager(IdentityManager idMan) { identityManager = idMan; theManagers.put(LockssDaemon.IDENTITY_MANAGER, identityManager); } /** * Set the LockssRepository * @param lockssRepo the new repository */ public void setLockssRepository(LockssRepository lockssRepo) { lockssRepository = lockssRepo; theManagers.put(LockssDaemon.LOCKSS_REPOSITORY, lockssRepository); } /** * Set a new NodeManagerService. * @param nms the new service */ public void setNodeManagerService(NodeManagerService nms) { nodeManagerService = nms; } /** * Set the NodeManager for a given AU. Requires a MocKNodeManagerService * (the default). 
* @param nodeMan the new manager * @param au the ArchivalUnit */ public void setNodeManager(NodeManager nodeMan, ArchivalUnit au) { getNodeManagerService(); if (nodeManagerService instanceof MockNodeManagerService) { ((MockNodeManagerService)nodeManagerService).auMaps.put(au, nodeMan); } else { throw new UnsupportedOperationException("Couldn't setNodeManager with"+ "a non-Mock service."); } } /** * Set the PluginManager * @param pluginMan the new manager */ public void setPluginManager(PluginManager pluginMan) { pluginManager = pluginMan; theManagers.put(LockssDaemon.PLUGIN_MANAGER, pluginManager); } /** * Set the PollManager * @param pollMan the new manager */ public void setPollManager(PollManager pollMan) { pollManager = pollMan; theManagers.put(LockssDaemon.POLL_MANAGER, pollManager); } /** * Set the ProxyHandler * @param proxyHand the new handler */ public void setProxyHandler(ProxyHandler proxyHand) { proxyHandler = proxyHand; theManagers.put(LockssDaemon.PROXY_HANDLER, proxyHandler); } }
org.lockss.test.MockLockssDaemon *added null constructor git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@787 4f837ed2-42f5-46e7-a7a5-fa17313484d4
test/src/org/lockss/test/MockLockssDaemon.java
org.lockss.test.MockLockssDaemon *added null constructor
Java
bsd-3-clause
e2b7c7d9866740a2f9ea6789b5456b78d50257af
0
NCIP/camod,NCIP/camod,NCIP/camod,NCIP/camod
/** * @author dgeorge * * $Id: QueryManagerImpl.java,v 1.15 2005-10-12 17:24:23 georgeda Exp $ * * $Log: not supported by cvs2svn $ * Revision 1.14 2005/10/11 18:15:10 georgeda * More comment changes * * Revision 1.13 2005/10/10 14:09:41 georgeda * Changes for comment curation and performance improvement * * Revision 1.12 2005/10/05 20:27:59 guruswas * implementation of drug screening search page * * Revision 1.11 2005/10/05 15:23:34 georgeda * Changed the theraputic approaches query to return all models w/ TA's if the search string passed in was blank * * Revision 1.10 2005/10/05 13:04:47 georgeda * Completed advanced search * * Revision 1.9 2005/10/04 20:19:43 georgeda * Updates from search changes * * Revision 1.6 2005/09/27 16:46:59 georgeda * Added environmental factor dropdown query * * Revision 1.5 2005/09/26 14:04:14 georgeda * Use HQL instead of SQL * * Revision 1.4 2005/09/16 19:30:04 guruswas * Display invivo data (from DTP) in the therapuetic approaches page * * Revision 1.3 2005/09/16 15:52:57 georgeda * Changes due to manager re-write * * */ package gov.nih.nci.camod.service.impl; import gov.nih.nci.camod.domain.Agent; import gov.nih.nci.camod.domain.AnimalModel; import gov.nih.nci.camod.domain.Comments; import gov.nih.nci.camod.domain.Log; import gov.nih.nci.camod.domain.Person; import gov.nih.nci.camod.util.DrugScreenResult; import gov.nih.nci.camod.webapp.form.SearchData; import gov.nih.nci.common.persistence.Search; import gov.nih.nci.common.persistence.exception.PersistenceException; import gov.nih.nci.common.persistence.hibernate.HQLParameter; import gov.nih.nci.common.persistence.hibernate.HibernateUtil; import java.sql.ResultSet; import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer; import org.hibernate.Hibernate; import org.hibernate.Query; /** * Implementation of a wrapper around the HQL/JDBC interface. Used for more * complex instances where QBE does not have sufficient power. */ public class QueryManagerImpl extends BaseManager { /** * Return the list of environmental factor names * * @param inType * the type of environmental factor * * @return a sorted list of unique environmental factors * @throws PersistenceException */ public List getEnvironmentalFactors(String inType) throws PersistenceException { log.trace("Entering QueryManagerImpl.getAdministrativeRoutes"); // Format the query HQLParameter[] theParams = new HQLParameter[1]; theParams[0] = new HQLParameter(); theParams[0].setName("type"); theParams[0].setValue(inType); theParams[0].setType(Hibernate.STRING); String theHQLQuery = "select distinct ef.name from EnvironmentalFactor as ef where ef.type = :type and ef.name is not null order by ef.name asc "; List theList = Search.query(theHQLQuery, theParams); log.debug("Found matching items: " + theList.size()); log.trace("Exiting QueryManagerImpl.getAdministrativeRoutes"); return theList; } /** * Return the list of environmental factor names * * @param inType * the type of environmental factor * * @return a sorted list of unique environmental factors * @throws PersistenceException */ public List getQueryOnlyEnvironmentalFactors(String inType) throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyEnvironmentalFactors"); ResultSet theResultSet = null; List theEnvFactors = new ArrayList(); try { // Format the query String theSQLQuery = "SELECT distinct ef.name " + "FROM env_factor ef " + "WHERE ef.type = ? 
" + " AND ef.name IS NOT null " + " AND ef.env_factor_id IN (SELECT t.env_factor_id " + " FROM therapy t, animal_model_therapy at, abs_cancer_model am WHERE t.therapeutic_experiment = 0 " + " AND t.therapy_id = at.therapy_id " + " AND am.abs_cancer_model_id = at.abs_cancer_model_id AND am.state = 'Edited-approved') ORDER BY ef.name asc "; Object[] theParams = new Object[1]; theParams[0] = inType; theResultSet = Search.query(theSQLQuery, theParams); while (theResultSet.next()) { theEnvFactors.add(theResultSet.getString(1)); } log.trace("Exiting QueryManagerImpl.getQueryOnlyEnvironmentalFactors"); } catch (Exception e) { log.error("Exception in getQueryOnlyEnvironmentalFactors", e); throw new PersistenceException("Exception in getQueryOnlyEnvironmentalFactors: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return theEnvFactors; } /** * Return the list of environmental factor names which were used to induce a * mutation * * @return a sorted list of unique environmental factors * @throws PersistenceException */ public List getQueryOnlyInducedMutationAgents() throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyInducedMutationAgents"); ResultSet theResultSet = null; List theAgents = new ArrayList(); try { // Format the query String theSQLQuery = "SELECT distinct ef.name FROM env_factor ef, env_fac_ind_mutation im " + "WHERE ef.name IS NOT null AND ef.env_factor_id = im.env_factor_id"; Object[] theParams = new Object[0]; theResultSet = Search.query(theSQLQuery, theParams); while (theResultSet.next()) { theAgents.add(theResultSet.getString(1)); } log.trace("Exiting QueryManagerImpl.getQueryOnlyInducedMutationAgents"); } catch (Exception e) { log.error("Exception in getQueryOnlyInducedMutationAgents", e); throw new PersistenceException("Exception in getQueryOnlyInducedMutationAgents: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return theAgents; } /** * Return the list of species associated with animal models * * @return a sorted list of unique species * * @throws PersistenceException */ public List getQueryOnlySpecies() throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyEnvironmentalFactors"); // Format the query String theSQLString = "SELECT distinct scientific_name FROM taxon WHERE scientific_name IS NOT NULL " + " AND taxon_id IN (select distinct taxon_id from abs_cancer_model where state = 'Edited-approved')"; ResultSet theResultSet = null; List theSpeciesList = new ArrayList(); try { log.info("getModelsIds - SQL: " + theSQLString); Object[] params = new Object[0]; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { theSpeciesList.add(theResultSet.getString(1)); } } catch (Exception e) { log.error("Exception in getQueryOnlySpecies", e); throw new PersistenceException("Exception in getQueryOnlySpecies: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return theSpeciesList; } /** * Return the list of PI's sorted by last name * * @return a sorted list of People objects * @throws PersistenceException */ public List getPrincipalInvestigators() throws PersistenceException { log.trace("Entering QueryManagerImpl.getPrincipalInvestigators"); // Format the query HQLParameter[] theParams = new HQLParameter[0]; String theHQLQuery = "from Person where is_principal_investigator = 1 order by last_name asc"; List theList = Search.query(theHQLQuery, 
theParams); log.debug("Found matching items: " + theList.size()); log.trace("Exiting QueryManagerImpl.getPrincipalInvestigators"); return theList; } /** * Return the list of species associated with animal models * * @return a sorted list of unique species * * @throws PersistenceException */ public List getQueryOnlyPrincipalInvestigators() throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyPrincipalInvestigators"); // Format the query String theSQLString = "SELECT last_name, first_name " + "FROM party " + "WHERE is_principal_investigator = 1 " + " AND first_name IS NOT NULL " + " AND last_name IS NOT NULL " + " AND party_id IN (SELECT DISTINCT principal_investigator_id FROM abs_cancer_model WHERE state = 'Edited-approved')" + "ORDER BY last_name ASC"; ResultSet theResultSet = null; List thePIList = new ArrayList(); try { log.info("getQueryOnlyPrincipalInvestigators - SQL: " + theSQLString); Object[] params = new Object[0]; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { String thePIEntry = theResultSet.getString(1) + "," + theResultSet.getString(2); thePIList.add(thePIEntry); } } catch (Exception e) { log.error("Exception in getQueryOnlyPrincipalInvestigators", e); throw new PersistenceException("Exception in getQueryOnlyPrincipalInvestigators: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return thePIList; } /** * Return the latest log for an animal model * * @param inModel * the animal model to get the latest log for * * @return the current log for an animal model * @throws PersistenceException */ public Log getCurrentLog(AnimalModel inModel) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLog"); HQLParameter[] theParams = new HQLParameter[1]; theParams[0] = new HQLParameter(); theParams[0].setName("abs_cancer_model_id"); theParams[0].setValue(inModel.getId()); theParams[0].setType(Hibernate.LONG); String theHQLQuery = "from Log where abs_cancer_model_id = :abs_cancer_model_id order by timestamp desc"; log.debug("The HQL query: " + theHQLQuery); List theLogs = Search.query(theHQLQuery, theParams); Log theLog = null; if (theLogs != null && theLogs.size() > 0) { theLog = (Log) theLogs.get(0); log.debug("Found a matching object: " + theLog.getId()); } else { log.debug("No object found."); } log.trace("Exiting QueryManagerImpl.getCurrentLog"); return theLog; } /** * Return the latest log for an animal model/user combo * * @param inModel * the animal model to get the latest log for * @param inUser * the user to get the latest log for * * @return the current log for an animal model/user combination * @throws PersistenceException */ public Log getCurrentLogForUser(AnimalModel inModel, Person inUser) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLogForUser"); // Format the query HQLParameter[] theParams = new HQLParameter[2]; theParams[0] = new HQLParameter(); theParams[0].setName("abs_cancer_model_id"); theParams[0].setValue(inModel.getId()); theParams[0].setType(Hibernate.LONG); theParams[1] = new HQLParameter(); theParams[1].setName("party_id"); theParams[1].setValue(inUser.getId()); theParams[1].setType(Hibernate.LONG); String theHQLQuery = "from Log where abs_cancer_model_id = :abs_cancer_model_id and party_id = :party_id " + "and comments_id is null order by timestamp desc"; log.debug("the HQL Query: " + theHQLQuery); List theLogs = Search.query(theHQLQuery, theParams); Log theLog = null; if (theLogs != null && 
theLogs.size() > 0) { theLog = (Log) theLogs.get(0); log.debug("Found a matching object: " + theLog.getId()); } else { log.debug("No object found."); } log.trace("Exiting QueryManagerImpl.getCurrentLogForUser"); return theLog; } /** * Return the latest log for a comment/user combo * * @param inComments * the comments to get the latest log for * @param inUser * the user to get the latest log for * * @return the current log for an comments/user combination * * @throws PersistenceException */ public Log getCurrentLogForUser(Comments inComments, Person inUser) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLogForUser"); // Format the query HQLParameter[] theParams = new HQLParameter[3]; theParams[0] = new HQLParameter(); theParams[0].setName("abs_cancer_model_id"); theParams[0].setValue(inComments.getCancerModel().getId()); theParams[0].setType(Hibernate.LONG); theParams[1] = new HQLParameter(); theParams[1].setName("party_id"); theParams[1].setValue(inUser.getId()); theParams[1].setType(Hibernate.LONG); theParams[2] = new HQLParameter(); theParams[2].setName("comments_id"); theParams[2].setValue(inComments.getId()); theParams[2].setType(Hibernate.LONG); System.out.println("Comments id: " + inComments.getId()); System.out.println("Party id: " + inUser.getId()); System.out.println("CM id: " + inComments.getCancerModel().getId()); String theHQLQuery = "from Log where abs_cancer_model_id = :abs_cancer_model_id and party_id = :party_id " + "and comments_id = :comments_id order by timestamp desc"; log.debug("the HQL Query: " + theHQLQuery); List theLogs = Search.query(theHQLQuery, theParams); Log theLog = null; if (theLogs != null && theLogs.size() > 0) { theLog = (Log) theLogs.get(0); log.debug("Found a matching object: " + theLog.getId()); } else { log.debug("No object found."); } log.trace("Exiting QueryManagerImpl.getCurrentLogForUser"); return theLog; } /** * Return all of the comments associated with a person that match the state * passed in * * @param inState * the state of the comment * * @param inPerson * the person to match * * @return a list of matching comments * * @throws PersistenceException */ public List getCommentsBySection(String inSection, Person inPerson, AnimalModel inModel) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCommentsBySectionForPerson"); // If no person, only get approved items String theStateHQL = "(c.state = 'Screened-approved'"; if (inPerson == null) { theStateHQL += ") "; } else { theStateHQL += "or c.submitter = :party_id) "; } // TODO: make the states a constant String theHQLQuery = "from Comments as c where " + theStateHQL + " and c.cancerModel in (" + "from AnimalModel as am where am.id = :abs_cancer_model_id) and c.modelSection in (from ModelSection where name = :name)"; log.debug("The HQL query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("abs_cancer_model_id", inModel.getId()); theQuery.setParameter("name", inSection); // Only query for party if the passed in party wasn't null if (inPerson != null) { theQuery.setParameter("party_id", inPerson.getId()); } List theComments = theQuery.list(); if (theComments == null) { theComments = new ArrayList(); } log.trace("Exiting QueryManagerImpl.getCommentsByStateForPerson"); return theComments; } /** * Return all of the comments associated with a person that match the state * passed in * * @param inState * the state of the comment * * @param inPerson * the person to match * * @return a list of 
matching comments * * @throws PersistenceException */ public List getCommentsByStateForPerson(String inState, Person inPerson) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCommentsByStateForPerson"); String theHQLQuery = "from Comments as c where c.state = :state and c.id in (" + "select l.comment from Log as l where l.submitter = :party_id and l.type = :state)"; log.debug("The HQL query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("party_id", inPerson.getId()); theQuery.setParameter("state", inState); List theComments = theQuery.list(); if (theComments == null) { theComments = new ArrayList(); } log.trace("Exiting QueryManagerImpl.getCommentsByStateForPerson"); return theComments; } /** * Return all of the models associated with a person that match the state * passed in * * @param inState * the state of the comment * * @param inPerson * the person to match * * @return a list of matching models * * @throws PersistenceException */ public List getModelsByStateForPerson(String inState, Person inPerson) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLog"); String theHQLQuery = "from AnimalModel as am where am.state = :state and am.id in (" + "select l.cancerModel from Log as l where l.submitter = :party_id and l.type = :state)"; log.debug("The HQL query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("party_id", inPerson.getId()); theQuery.setParameter("state", inState); List theComments = theQuery.list(); if (theComments == null) { theComments = new ArrayList(); } return theComments; } /** * Get yeast screen result data (ave inibition etc.) for a given Agent * (drug) * * @param agent * refers to the compound that was used in the yeast experiment * @param stage * is the stage of the experiment (0, 1, or 2) * * @return the results * @throws PersistenceException */ public DrugScreenResult getYeastScreenResults(Agent agent, String stage) throws PersistenceException { DrugScreenResult dsr = new DrugScreenResult(); ResultSet theResultSet = null; try { String theSQLString = "select tx.ethnicity_strain," + "\n" + " t.dosage," + "\n" + " sr.aveinh aveinh," + "\n" + " sr.diffinh diffinh" + "\n" + " from screening_Result sr," + "\n" + " env_factor a," + "\n" + " YST_MDL_SCRNING_RESULT ymsr," + "\n" + " abs_cancer_model acm," + "\n" + " treatment t," + "\n" + " taxon tx" + "\n" + " where sr.agent_id = a.env_factor_id" + "\n" + " and sr.screening_result_id = ymsr.screening_result_id" + "\n" + " and sr.treatment_id = t.treatment_id" + "\n" + " and ymsr.abs_cancer_model_id = acm.abs_cancer_model_id" + "\n" + " and acm.taxon_id = tx.taxon_id" + "\n" + " and a.nsc_number = ?" + "\n" + " and sr.stage = ?" 
+ "\n" + " order by 1, 2"; log.info("getYeastScreenResults - SQL: " + theSQLString); Object[] params = new Object[2]; params[0] = agent.getNscNumber(); params[1] = stage; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { final String strain = theResultSet.getString(1); final String dosage = theResultSet.getString(2); final float aveinh = theResultSet.getFloat(3); final float diffinh = theResultSet.getFloat(4); dsr.addEntry(strain, dosage, aveinh, diffinh); } log.info("Got " + dsr.strainCount + " strains"); } catch (Exception e) { log.error("Exception in getYeastScreenResults", e); throw new PersistenceException("Exception in getYeastScreenResults: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return dsr; } /** * Get the invivo (Xenograft) data (from DTP) for a given Agent.nscNumber * (drug) * * @param agent * refers to the compound that was used in the xenograft * experiment * * @return the results (list of abs_cancer_model_id, model_descriptor, and # * of records * @throws PersistenceException */ public List getInvivoResults(Agent agent) throws PersistenceException { List results = new ArrayList(); int cc = 0; ResultSet theResultSet = null; try { String theSQLString = "select acm.abs_cancer_model_id," + "\n" + " acm.model_descriptor," + "\n" + " tx.ethnicity_strain," + "\n" + " count(*)" + "\n" + " from invivo_Result sr," + "\n" + " env_factor a," + "\n" + " XENOGRAFT_INVIVO_RESULT ymsr," + "\n" + " abs_cancer_model acm," + "\n" + " treatment t," + "\n" + " taxon tx" + "\n" + " where sr.agent_id = a.env_factor_id" + "\n" + " and sr.invivo_result_id = ymsr.invivo_result_id" + "\n" + " and sr.treatment_id = t.treatment_id" + "\n" + " and ymsr.abs_cancer_model_id = acm.abs_cancer_model_id" + "\n" + " and acm.taxon_id = tx.taxon_id" + "\n" + " and a.nsc_number = ?" + "\n" + " group by acm.abs_cancer_model_id, acm.model_descriptor, tx.ethnicity_strain" + "\n" + " order by 3, 2"; log.info("getInvivoResults - SQL: " + theSQLString); Object[] params = new Object[1]; params[0] = agent.getNscNumber(); theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { String[] item = new String[4]; item[0] = theResultSet.getString(1); // the id item[1] = theResultSet.getString(2); // model descriptor item[2] = theResultSet.getString(3); // strain item[3] = theResultSet.getString(4); // record count results.add(item); cc++; } log.info("Got " + cc + " xenograft models"); } catch (Exception e) { log.error("Exception in getYeastScreenResults", e); throw new PersistenceException("Exception in getYeastScreenResults: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return results; } /** * Get the model id's for any model that has a histopathology associated * with a specific organ. 
* * @param inOrgan * the organ to search for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForHistopathologyOrgan(String inConceptCodes) throws PersistenceException { String theConceptCodeList = ""; StringTokenizer theTokenizer = new StringTokenizer(inConceptCodes, ","); while (theTokenizer.hasMoreElements()) { theConceptCodeList += "'" + theTokenizer.nextToken() + "'"; // Only tack on a , if it's not the last element if (theTokenizer.hasMoreElements()) { theConceptCodeList += ","; } } String theSQLString = "SELECT distinct ani_hist.abs_cancer_model_id " + "FROM ani_mod_histopathology ani_hist " + "WHERE ani_hist.histopathology_id IN (SELECT h.histopathology_id " + " FROM histopathology h, organ_histopathology oh, organ o " + " WHERE h.histopathology_id = oh.histopathology_id AND oh.organ_id = o.organ_id " + " AND o.concept_code IN (" + theConceptCodeList + "))"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a histopathology with a parent * histopathology * * @return a list of matching model ids * * @throws PersistenceException */ private String getModelIdsForHistoMetastasis() throws PersistenceException { String theSQLString = "SELECT distinct ani_hist.abs_cancer_model_id FROM ani_mod_histopathology ani_hist " + "WHERE ani_hist.histopathology_id IN (SELECT h.histopathology_id FROM histopathology h " + " WHERE h.parent_histopathology_id IS NOT NULL)"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has associated microarray data * * @return a list of matching model ids * * @throws PersistenceException */ private String getModelIdsForMicroArrayData() throws PersistenceException { String theSQLString = "SELECT distinct abs_cancer_model_id FROM ani_mod_mic_array_data"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a cellline w/ a matching name * * @param inCellLineName * the text to search for in the cell-line * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForCellLine(String inCellLineName) throws PersistenceException { String theSQLString = "SELECT distinct ani_cell.abs_cancer_model_id FROM ani_mod_cell_line ani_cell " + "WHERE ani_cell.cell_line_id IN (SELECT c.cell_line_id FROM cell_line c " + " WHERE c.name LIKE ?)"; Object[] theParams = new Object[1]; theParams[0] = "%" + inCellLineName + "%"; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a histopathology associated * with a specific organ. 
* * @param inDisease * the disease to search for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForTherapeuticApproach(String inTherapeuticApproach) throws PersistenceException { String theSQLString = "SELECT distinct ani_ther.abs_cancer_model_id " + "FROM animal_model_therapy ani_ther " + "WHERE ani_ther.therapy_id IN (SELECT t.therapy_id FROM therapy t, env_factor e " + " WHERE t.therapeutic_experiment = 1 AND t.env_factor_id = e.env_factor_id " + " AND e.name like ?)"; String theSQLTheraputicApproach = "%"; if (inTherapeuticApproach != null && inTherapeuticApproach.trim().length() > 0) { theSQLTheraputicApproach = "%" + inTherapeuticApproach + "%"; } Object[] theParams = new Object[1]; theParams[0] = theSQLTheraputicApproach; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a histopathology associated * with a specific organ. * * @param inDisease * the disease to search for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForHistopathologyDisease(String inConceptCodes) throws PersistenceException { String theConceptCodeList = ""; StringTokenizer theTokenizer = new StringTokenizer(inConceptCodes, ","); while (theTokenizer.hasMoreElements()) { theConceptCodeList += "'" + theTokenizer.nextToken() + "'"; // Only tack on a , if it's not the last element if (theTokenizer.hasMoreElements()) { theConceptCodeList += ","; } } String theSQLString = "SELECT distinct ani_hist.abs_cancer_model_id " + "FROM ani_mod_histopathology ani_hist " + "WHERE ani_hist.histopathology_id IN (SELECT h.histopathology_id " + " FROM histopathology h, histopathology_disease hd, disease d " + " WHERE h.histopathology_id = hd.histopathology_id " + " AND hd.disease_id = d.disease_id AND d.concept_code IN (" + theConceptCodeList + "))"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the models with the associated engineered gene * * @param inGeneName * the name of the transgene or targeted modification * * @param isEngineeredTransgene * are we looking for a transgene? * * @param isEngineeredTransgene * are we looking for an induced mutation? * * @param inGenomicSegDesignator * the name of the genomic segment designator * * @param inInducedMutationAgent * the name of Agent which induced the mutation. Exact match. * * @return a list of matching model id * * @throws PersistenceException * */ private String getModelIdsForEngineeredGenes(String inGeneName, boolean isEngineeredTransgene, boolean isTargetedModification, String inGenomicSegDesignator, String inInducedMutationAgent) throws PersistenceException { List theList = new ArrayList(); String theSQLString = "SELECT distinct ani_ge.abs_cancer_model_id " + "FROM ani_mod_engineered_gene ani_ge WHERE "; String OR = " "; if (isEngineeredTransgene == true && inGeneName.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE name LIKE ? AND engineered_gene_type = 'T')"; OR = " OR "; theList.add("%" + inGeneName + "%"); } if (isTargetedModification == true && inGeneName.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE name LIKE ? 
AND engineered_gene_type = 'TM')"; OR = " OR "; theList.add("%" + inGeneName + "%"); } if (inInducedMutationAgent != null && inInducedMutationAgent.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE engineered_gene_id IN (" + " SELECT distinct im.engineered_gene_id FROM env_factor ef, env_fac_ind_mutation im " + " WHERE ef.name = ? " + " AND ef.env_factor_id = im.env_factor_id) AND engineered_gene_type = 'IM')"; OR = " OR "; theList.add(inInducedMutationAgent); } if (inGenomicSegDesignator != null && inGenomicSegDesignator.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE clone_designator LIKE ? AND engineered_gene_type = 'GS')"; theList.add(inGenomicSegDesignator); } // Convert the params Object[] theParams = new Object[theList.size()]; for (int i = 0; i < theList.size(); i++) { theParams[i] = theList.get(i); } return getModelIds(theSQLString, theParams); } /** * Get the models with the associated engineered gene * * @param inKeyword * the keyword to search for * * @return a list of matching model id * * @throws PersistenceException * */ private String getModelIdsForAnyEngineeredGene(String inKeyword) throws PersistenceException { String theSQLString = "SELECT distinct ani_ge.abs_cancer_model_id " + "FROM ani_mod_engineered_gene ani_ge WHERE "; theSQLString += " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE name LIKE ?)"; theSQLString += " OR ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE engineered_gene_id IN (" + " SELECT distinct im.engineered_gene_id FROM env_factor ef, env_fac_ind_mutation im " + " WHERE ef.name = ? " + " AND ef.env_factor_id = im.env_factor_id) AND engineered_gene_type = 'IM')"; theSQLString += " OR ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE clone_designator LIKE ? AND engineered_gene_type = 'GS')"; // Convert the params Object[] theParams = new Object[3]; theParams[0] = inKeyword; theParams[1] = inKeyword; theParams[2] = inKeyword; return getModelIds(theSQLString, theParams); } /** * Get the model id's that have a matching environmental factor * * @param inType * the EF type * @param inName * the name to look for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForEnvironmentalFactor(String inType, String inName) throws PersistenceException { String theSQLString = "SELECT distinct ani_th.abs_cancer_model_id FROM animal_model_therapy ani_th " + "WHERE ani_th.therapy_id IN (SELECT t.therapy_id FROM therapy t, env_factor ef" + " WHERE t.env_factor_id = ef.env_factor_id AND ef.name = ? 
AND ef.type = ?)"; Object[] theParams = new Object[2]; theParams[0] = inName; theParams[1] = inType; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model has a keyword match in the env factor * * @param inKeyword * the name to look for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForAnyEnvironmentalFactor(String inKeyword) throws PersistenceException { String theSQLString = "SELECT distinct ani_th.abs_cancer_model_id FROM animal_model_therapy ani_th " + "WHERE ani_th.therapy_id IN (SELECT t.therapy_id FROM therapy t, env_factor ef" + " WHERE t.env_factor_id = ef.env_factor_id AND ef.name like ?)"; Object[] theParams = new Object[1]; theParams[0] = inKeyword; return getModelIds(theSQLString, theParams); } public List searchForAnimalModels(SearchData inSearchData) throws Exception { log.trace("Entering searchForAnimalModels"); List theAnimalModels = null; String theFromClause = "from AnimalModel as am where am.state = 'Edited-approved' AND am.availability.releaseDate < sysdate "; String theOrderByClause = " ORDER BY am.modelDescriptor asc"; if (inSearchData.getKeyword() != null && inSearchData.getKeyword().length() > 0) { log.debug("Doing a keyword search: " + inSearchData.getKeyword()); theAnimalModels = keywordSearch(theFromClause, theOrderByClause, inSearchData.getKeyword()); } else { log.debug("Doing a criteria search"); theAnimalModels = criteriaSearch(theFromClause, theOrderByClause, inSearchData); } log.trace("Exiting searchForAnimalModels"); return theAnimalModels; } private List keywordSearch(String inFromClause, String inOrderByClause, String inKeyword) throws Exception { // Use the like search functionality String theKeyword = "%" + inKeyword + "%"; String theWhereClause = ""; theWhereClause += " AND (am.modelDescriptor like :keyword "; theWhereClause += " OR am.species IN (from Taxon as t where t.scientificName like :keyword )"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForHistopathologyOrgan(theKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForHistopathologyDisease(theKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForAnyEnvironmentalFactor(inKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForAnyEngineeredGene(inKeyword) + ")"; theWhereClause += " OR am.phenotype IN (from Phenotype as p where p.description like :keyword )"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForCellLine(inKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForTherapeuticApproach(inKeyword) + "))"; List theAnimalModels = null; try { String theHQLQuery = inFromClause + theWhereClause + inOrderByClause; log.info("HQL Query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("keyword", theKeyword); theAnimalModels = theQuery.list(); } catch (Exception e) { log.error("Exception occurred searching for models", e); throw e; } return theAnimalModels; } private List criteriaSearch(String inFromClause, String inOrderByClause, SearchData inSearchData) throws Exception { String theWhereClause = ""; // PI criteria if (inSearchData.getPiName() != null && inSearchData.getPiName().length() > 0) { StringTokenizer theTokenizer = new StringTokenizer(inSearchData.getPiName()); String theLastName = theTokenizer.nextToken(",").trim(); String theFirstName = theTokenizer.nextToken().trim(); theWhereClause += " AND 
am.principalInvestigator IN (from Person as p where p.lastName = '" + theLastName + "' AND p.firstName = '" + theFirstName + "')"; } // Model descriptor criteria if (inSearchData.getModelDescriptor() != null && inSearchData.getModelDescriptor().length() > 0) { theWhereClause += " AND am.modelDescriptor like '%" + inSearchData.getModelDescriptor() + "%'"; } // Species criteria if (inSearchData.getSpecies() != null && inSearchData.getSpecies().length() > 0) { theWhereClause += "AND am.species IN (from Taxon as t where t.scientificName = '" + inSearchData.getSpecies() + "')"; } // Search for organ if (inSearchData.getOrganTissueCode() != null && inSearchData.getOrganTissueCode().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForHistopathologyOrgan(inSearchData.getOrganTissueCode()) + ")"; } // Search for disease if (inSearchData.getDiagnosisCode() != null && inSearchData.getDiagnosisCode().trim().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForHistopathologyDisease(inSearchData.getDiagnosisCode()) + ")"; } // /////////////////////////////////////// // Carcinogenic interventions // /////////////////////////////////////// if (inSearchData.isSearchCarcinogenicInterventions() == true) { log.debug("Searching for Carcinogenic Interventions"); // Search for chemical/drug if (inSearchData.getChemicalDrug() != null && inSearchData.getChemicalDrug().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Chemical / Drug", inSearchData.getChemicalDrug()) + ")"; } // Search for Surgery if (inSearchData.getSurgery() != null && inSearchData.getSurgery().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Surgery", inSearchData.getSurgery()) + ")"; } // Search for Hormone if (inSearchData.getHormone() != null && inSearchData.getHormone().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Hormone", inSearchData.getHormone()) + ")"; } // Search for Growth Factor if (inSearchData.getGrowthFactor() != null && inSearchData.getGrowthFactor().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Growth Factor", inSearchData.getGrowthFactor()) + ")"; } // Search for Radiation if (inSearchData.getRadiation() != null && inSearchData.getRadiation().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Radiation", inSearchData.getRadiation()) + ")"; } // Search for Viral if (inSearchData.getViral() != null && inSearchData.getViral().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Viral", inSearchData.getViral()) + ")"; } } // Only call if some of the data is set if ((inSearchData.getGeneName() != null && inSearchData.getGeneName().length() > 0) || (inSearchData.getGenomicSegDesignator() != null && inSearchData.getGenomicSegDesignator().length() > 0) || (inSearchData.getInducedMutationAgent() != null && inSearchData.getInducedMutationAgent().length() > 0)) { // Search for engineered genes theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEngineeredGenes(inSearchData.getGeneName(), inSearchData.isEngineeredTransgene(), inSearchData.isTargetedModification(), inSearchData.getGenomicSegDesignator(), inSearchData .getInducedMutationAgent()) + ")"; } // Search for phenotype if (inSearchData.getPhenotype() != null && 
inSearchData.getPhenotype().length() > 0) { theWhereClause += " AND am.phenotype IN (from Phenotype as p where p.description like '%" + inSearchData.getPhenotype() + "%')"; } // Search for cellline if (inSearchData.getCellLine() != null && inSearchData.getCellLine().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForCellLine(inSearchData.getCellLine()) + ")"; } // Search for therapeutic approaches if (inSearchData.isSearchTherapeuticApproaches()) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForTherapeuticApproach(inSearchData.getTherapeuticApproach()) + ")"; } // Search for therapeutic approaches if (inSearchData.isSearchHistoMetastasis()) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForHistoMetastasis() + ")"; } // Search for therapeutic approaches if (inSearchData.isSearchMicroArrayData()) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForMicroArrayData() + ")"; } List theAnimalModels = null; try { String theHQLQuery = inFromClause + theWhereClause + inOrderByClause; log.info("HQL Query: " + theHQLQuery); // HQLParameter[] theParameters = new HQLParameter[0]; // animalModels = Search.query(theHQLQuery, theParameters); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theAnimalModels = theQuery.list(); } catch (Exception e) { log.error("Exception occurred searching for models", e); throw e; } return theAnimalModels; } /** * Extract the model ID's for an sql query with a specific organ. * * @param inSQLString * the SQL string that returns a set of IDs * @param inParameters * the parameters to bind in the query * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIds(String inSQLString, Object inParameters[]) throws PersistenceException { log.trace("In getModelIds"); String theModelIds = ""; ResultSet theResultSet = null; try { log.info("getModelsIds - SQL: " + inSQLString); theResultSet = Search.query(inSQLString, inParameters); if (theResultSet.next()) { theModelIds += theResultSet.getString(1); } while (theResultSet.next()) { theModelIds += "," + theResultSet.getString(1); } } catch (Exception e) { log.error("Exception in getModelIds", e); throw new PersistenceException("Exception in getModelIds: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } if (theModelIds.equals("")) { theModelIds = "-1"; } return theModelIds; } public List getModelsForThisCompound(Long nscNumber) throws PersistenceException { List models = new ArrayList(); int cc = 0; ResultSet theResultSet = null; try { String theSQLString = "select acm.abs_cancer_model_id, " + "\n" + " acm.model_descriptor," + "\n" + " tx.abbreviation || ' ' || tx.ethnicity_strain" + "\n" + " from abs_cancer_model acm," + "\n" + " animal_model_therapy amt," + "\n" + " therapy t," + "\n" + " env_factor ef," + "\n" + " taxon tx" + "\n" + " where acm.abs_cancer_model_id = amt.abs_cancer_model_id" + "\n" + " and acm.abs_cancer_model_type = 'AM'" + "\n" + " and amt.therapy_id = t.therapy_id" + "\n" + " and t.env_factor_id = ef.env_factor_id" + "\n" + " and acm.taxon_id = tx.taxon_id" + "\n" + " and ef.nsc_number = ?"; log.info("getInvivoResults - SQL: " + theSQLString); Object[] params = new Object[1]; params[0] = nscNumber; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { String[] item = new String[3]; item[0] = theResultSet.getString(1); // the id item[1] = theResultSet.getString(2); // model descriptor 
item[2] = theResultSet.getString(3); // strain models.add(item); cc++; } log.info("Got " + cc + " animal models"); } catch (Exception e) { log.error("Exception in getModelsForThisCompound", e); throw new PersistenceException("Exception in getModelsForThisCompound: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return models; } public static void main(String[] inArgs) { try { System.out.println("Model ids: " + QueryManagerSingleton.instance().getModelIdsForHistopathologyOrgan("Skin")); } catch (Exception e) { e.printStackTrace(); } } }
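Several of the search helpers above work by assembling small SQL fragments as strings: getModelIds() folds the first column of a JDBC ResultSet into a comma-separated id list (falling back to "-1" so an empty result matches nothing in an IN clause), and getModelIdsForHistopathologyOrgan() turns a comma-separated concept-code string into a quoted IN (...) list. The self-contained sketch below reproduces just those two string-building steps with JDK types; the class and method names are illustrative and not part of caMOD.

import java.util.Arrays;
import java.util.List;
import java.util.StringTokenizer;

// Illustrative helpers mirroring the string-building steps used by the search methods above.
public class QueryStringHelpers {

    // Mirrors how getModelIds() accumulates ids: first id unprefixed, the rest comma-separated,
    // with "-1" as the sentinel so "abs_cancer_model_id IN (-1)" matches no rows.
    static String joinModelIds(List<String> ids) {
        if (ids.isEmpty()) {
            return "-1";
        }
        StringBuilder sb = new StringBuilder(ids.get(0));
        for (int i = 1; i < ids.size(); i++) {
            sb.append(",").append(ids.get(i));
        }
        return sb.toString();
    }

    // Mirrors the StringTokenizer loop in getModelIdsForHistopathologyOrgan():
    // "C123,C456" becomes "'C123','C456'" for splicing into an IN (...) clause.
    static String quoteConceptCodes(String codes) {
        StringBuilder sb = new StringBuilder();
        StringTokenizer t = new StringTokenizer(codes, ",");
        while (t.hasMoreElements()) {
            sb.append("'").append(t.nextToken()).append("'");
            if (t.hasMoreElements()) {
                sb.append(",");
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(joinModelIds(Arrays.asList("10", "42")));   // 10,42
        System.out.println(joinModelIds(Arrays.<String>asList()));     // -1
        System.out.println(quoteConceptCodes("C123,C456"));            // 'C123','C456'
    }
}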
src/gov/nih/nci/camod/service/impl/QueryManagerImpl.java
/** * @author dgeorge * * $Id: QueryManagerImpl.java,v 1.14 2005-10-11 18:15:10 georgeda Exp $ * * $Log: not supported by cvs2svn $ * Revision 1.13 2005/10/10 14:09:41 georgeda * Changes for comment curation and performance improvement * * Revision 1.12 2005/10/05 20:27:59 guruswas * implementation of drug screening search page * * Revision 1.11 2005/10/05 15:23:34 georgeda * Changed the theraputic approaches query to return all models w/ TA's if the search string passed in was blank * * Revision 1.10 2005/10/05 13:04:47 georgeda * Completed advanced search * * Revision 1.9 2005/10/04 20:19:43 georgeda * Updates from search changes * * Revision 1.6 2005/09/27 16:46:59 georgeda * Added environmental factor dropdown query * * Revision 1.5 2005/09/26 14:04:14 georgeda * Use HQL instead of SQL * * Revision 1.4 2005/09/16 19:30:04 guruswas * Display invivo data (from DTP) in the therapuetic approaches page * * Revision 1.3 2005/09/16 15:52:57 georgeda * Changes due to manager re-write * * */ package gov.nih.nci.camod.service.impl; import gov.nih.nci.camod.domain.Agent; import gov.nih.nci.camod.domain.AnimalModel; import gov.nih.nci.camod.domain.Comments; import gov.nih.nci.camod.domain.Log; import gov.nih.nci.camod.domain.Person; import gov.nih.nci.camod.util.DrugScreenResult; import gov.nih.nci.camod.webapp.form.SearchData; import gov.nih.nci.common.persistence.Search; import gov.nih.nci.common.persistence.exception.PersistenceException; import gov.nih.nci.common.persistence.hibernate.HQLParameter; import gov.nih.nci.common.persistence.hibernate.HibernateUtil; import java.sql.ResultSet; import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer; import org.hibernate.Hibernate; import org.hibernate.Query; /** * Implementation of a wrapper around the HQL/JDBC interface. Used for more * complex instances where QBE does not have sufficient power. */ public class QueryManagerImpl extends BaseManager { /** * Return the list of environmental factor names * * @param inType * the type of environmental factor * * @return a sorted list of unique environmental factors * @throws PersistenceException */ public List getEnvironmentalFactors(String inType) throws PersistenceException { log.trace("Entering QueryManagerImpl.getAdministrativeRoutes"); // Format the query HQLParameter[] theParams = new HQLParameter[1]; theParams[0] = new HQLParameter(); theParams[0].setName("type"); theParams[0].setValue(inType); theParams[0].setType(Hibernate.STRING); String theHQLQuery = "select distinct ef.name from EnvironmentalFactor as ef where ef.type = :type and ef.name is not null order by ef.name asc "; List theList = Search.query(theHQLQuery, theParams); log.debug("Found matching items: " + theList.size()); log.trace("Exiting QueryManagerImpl.getAdministrativeRoutes"); return theList; } /** * Return the list of environmental factor names * * @param inType * the type of environmental factor * * @return a sorted list of unique environmental factors * @throws PersistenceException */ public List getQueryOnlyEnvironmentalFactors(String inType) throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyEnvironmentalFactors"); ResultSet theResultSet = null; List theEnvFactors = new ArrayList(); try { // Format the query String theSQLQuery = "SELECT distinct ef.name " + "FROM env_factor ef " + "WHERE ef.type = ? 
" + " AND ef.name IS NOT null " + " AND ef.env_factor_id IN (SELECT t.env_factor_id " + " FROM therapy t, animal_model_therapy at WHERE t.therapeutic_experiment = 0 " + " AND t.therapy_id = at.therapy_id) ORDER BY ef.name asc "; Object[] theParams = new Object[1]; theParams[0] = inType; theResultSet = Search.query(theSQLQuery, theParams); while (theResultSet.next()) { theEnvFactors.add(theResultSet.getString(1)); } log.trace("Exiting QueryManagerImpl.getQueryOnlyEnvironmentalFactors"); } catch (Exception e) { log.error("Exception in getQueryOnlyEnvironmentalFactors", e); throw new PersistenceException("Exception in getQueryOnlyEnvironmentalFactors: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return theEnvFactors; } /** * Return the list of environmental factor names which were used to induce a * mutation * * @return a sorted list of unique environmental factors * @throws PersistenceException */ public List getQueryOnlyInducedMutationAgents() throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyInducedMutationAgents"); ResultSet theResultSet = null; List theAgents = new ArrayList(); try { // Format the query String theSQLQuery = "SELECT distinct ef.name " + "FROM env_factor ef, env_fac_ind_mutation im " + "WHERE ef.name IS NOT null " + " AND ef.env_factor_id = im.env_factor_id"; Object[] theParams = new Object[0]; theResultSet = Search.query(theSQLQuery, theParams); while (theResultSet.next()) { theAgents.add(theResultSet.getString(1)); } log.trace("Exiting QueryManagerImpl.getQueryOnlyInducedMutationAgents"); } catch (Exception e) { log.error("Exception in getQueryOnlyInducedMutationAgents", e); throw new PersistenceException("Exception in getQueryOnlyInducedMutationAgents: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return theAgents; } /** * Return the list of species associated with animal models * * @return a sorted list of unique species * * @throws PersistenceException */ public List getQueryOnlySpecies() throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyEnvironmentalFactors"); // Format the query String theSQLString = "SELECT distinct scientific_name FROM taxon " + "WHERE scientific_name IS NOT NULL " + " AND taxon_id IN (select distinct taxon_id from abs_cancer_model)"; ResultSet theResultSet = null; List theSpeciesList = new ArrayList(); try { log.info("getModelsIds - SQL: " + theSQLString); Object[] params = new Object[0]; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { theSpeciesList.add(theResultSet.getString(1)); } } catch (Exception e) { log.error("Exception in getQueryOnlySpecies", e); throw new PersistenceException("Exception in getQueryOnlySpecies: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return theSpeciesList; } /** * Return the list of PI's sorted by last name * * @return a sorted list of People objects * @throws PersistenceException */ public List getPrincipalInvestigators() throws PersistenceException { log.trace("Entering QueryManagerImpl.getPrincipalInvestigators"); // Format the query HQLParameter[] theParams = new HQLParameter[0]; String theHQLQuery = "from Person where is_principal_investigator = 1 order by last_name asc"; List theList = Search.query(theHQLQuery, theParams); log.debug("Found matching items: " + theList.size()); log.trace("Exiting QueryManagerImpl.getPrincipalInvestigators"); 
return theList; } /** * Return the list of species associated with animal models * * @return a sorted list of unique species * * @throws PersistenceException */ public List getQueryOnlyPrincipalInvestigators() throws PersistenceException { log.trace("Entering QueryManagerImpl.getQueryOnlyPrincipalInvestigators"); // Format the query String theSQLString = "SELECT last_name, first_name " + "FROM party " + "WHERE is_principal_investigator = 1 " + " AND first_name IS NOT NULL " + " AND last_name IS NOT NULL " + " AND party_id IN (SELECT DISTINCT principal_investigator_id FROM abs_cancer_model WHERE state = 'Edited-approved')" + "ORDER BY last_name ASC"; ResultSet theResultSet = null; List thePIList = new ArrayList(); try { log.info("getQueryOnlyPrincipalInvestigators - SQL: " + theSQLString); Object[] params = new Object[0]; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { String thePIEntry = theResultSet.getString(1) + "," + theResultSet.getString(2); thePIList.add(thePIEntry); } } catch (Exception e) { log.error("Exception in getQueryOnlyPrincipalInvestigators", e); throw new PersistenceException("Exception in getQueryOnlyPrincipalInvestigators: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return thePIList; } /** * Return the latest log for an animal model * * @param inModel * the animal model to get the latest log for * * @return the current log for an animal model * @throws PersistenceException */ public Log getCurrentLog(AnimalModel inModel) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLog"); HQLParameter[] theParams = new HQLParameter[1]; theParams[0] = new HQLParameter(); theParams[0].setName("abs_cancer_model_id"); theParams[0].setValue(inModel.getId()); theParams[0].setType(Hibernate.LONG); String theHQLQuery = "from Log where abs_cancer_model_id = :abs_cancer_model_id order by timestamp desc"; log.debug("The HQL query: " + theHQLQuery); List theLogs = Search.query(theHQLQuery, theParams); Log theLog = null; if (theLogs != null && theLogs.size() > 0) { theLog = (Log) theLogs.get(0); log.debug("Found a matching object: " + theLog.getId()); } else { log.debug("No object found."); } log.trace("Exiting QueryManagerImpl.getCurrentLog"); return theLog; } /** * Return the latest log for an animal model/user combo * * @param inModel * the animal model to get the latest log for * @param inUser * the user to get the latest log for * * @return the current log for an animal model/user combination * @throws PersistenceException */ public Log getCurrentLogForUser(AnimalModel inModel, Person inUser) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLogForUser"); // Format the query HQLParameter[] theParams = new HQLParameter[2]; theParams[0] = new HQLParameter(); theParams[0].setName("abs_cancer_model_id"); theParams[0].setValue(inModel.getId()); theParams[0].setType(Hibernate.LONG); theParams[1] = new HQLParameter(); theParams[1].setName("party_id"); theParams[1].setValue(inUser.getId()); theParams[1].setType(Hibernate.LONG); String theHQLQuery = "from Log where abs_cancer_model_id = :abs_cancer_model_id and party_id = :party_id " + "and comments_id is null order by timestamp desc"; log.debug("the HQL Query: " + theHQLQuery); List theLogs = Search.query(theHQLQuery, theParams); Log theLog = null; if (theLogs != null && theLogs.size() > 0) { theLog = (Log) theLogs.get(0); log.debug("Found a matching object: " + theLog.getId()); } else { log.debug("No 
object found."); } log.trace("Exiting QueryManagerImpl.getCurrentLogForUser"); return theLog; } /** * Return the latest log for a comment/user combo * * @param inComments * the comments to get the latest log for * @param inUser * the user to get the latest log for * * @return the current log for an comments/user combination * * @throws PersistenceException */ public Log getCurrentLogForUser(Comments inComments, Person inUser) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLogForUser"); // Format the query HQLParameter[] theParams = new HQLParameter[3]; theParams[0] = new HQLParameter(); theParams[0].setName("abs_cancer_model_id"); theParams[0].setValue(inComments.getCancerModel().getId()); theParams[0].setType(Hibernate.LONG); theParams[1] = new HQLParameter(); theParams[1].setName("party_id"); theParams[1].setValue(inUser.getId()); theParams[1].setType(Hibernate.LONG); theParams[2] = new HQLParameter(); theParams[2].setName("comments_id"); theParams[2].setValue(inComments.getId()); theParams[2].setType(Hibernate.LONG); System.out.println("Comments id: " + inComments.getId()); System.out.println("Party id: " + inUser.getId()); System.out.println("CM id: " + inComments.getCancerModel().getId()); String theHQLQuery = "from Log where abs_cancer_model_id = :abs_cancer_model_id and party_id = :party_id " + "and comments_id = :comments_id order by timestamp desc"; log.debug("the HQL Query: " + theHQLQuery); List theLogs = Search.query(theHQLQuery, theParams); Log theLog = null; if (theLogs != null && theLogs.size() > 0) { theLog = (Log) theLogs.get(0); log.debug("Found a matching object: " + theLog.getId()); } else { log.debug("No object found."); } log.trace("Exiting QueryManagerImpl.getCurrentLogForUser"); return theLog; } /** * Return all of the comments associated with a person that match the state * passed in * * @param inState * the state of the comment * * @param inPerson * the person to match * * @return a list of matching comments * * @throws PersistenceException */ public List getCommentsBySection(String inSection, Person inPerson, AnimalModel inModel) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCommentsBySectionForPerson"); // If no person, only get approved items String theStateHQL = "(c.state = 'Screened-approved'"; if (inPerson == null) { theStateHQL += ") "; } else { theStateHQL += "or c.submitter = :party_id) "; } // TODO: make the states a constant String theHQLQuery = "from Comments as c where " + theStateHQL + " and c.cancerModel in (" + "from AnimalModel as am where am.id = :abs_cancer_model_id) and c.modelSection in (from ModelSection where name = :name)"; log.debug("The HQL query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("abs_cancer_model_id", inModel.getId()); theQuery.setParameter("name", inSection); // Only query for party if the passed in party wasn't null if (inPerson != null) { theQuery.setParameter("party_id", inPerson.getId()); } List theComments = theQuery.list(); if (theComments == null) { theComments = new ArrayList(); } log.trace("Exiting QueryManagerImpl.getCommentsByStateForPerson"); return theComments; } /** * Return all of the comments associated with a person that match the state * passed in * * @param inState * the state of the comment * * @param inPerson * the person to match * * @return a list of matching comments * * @throws PersistenceException */ public List getCommentsByStateForPerson(String inState, Person inPerson) throws 
PersistenceException { log.trace("Entering QueryManagerImpl.getCommentsByStateForPerson"); String theHQLQuery = "from Comments as c where c.state = :state and c.id in (" + "select l.comment from Log as l where l.submitter = :party_id and l.type = :state)"; log.debug("The HQL query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("party_id", inPerson.getId()); theQuery.setParameter("state", inState); List theComments = theQuery.list(); if (theComments == null) { theComments = new ArrayList(); } log.trace("Exiting QueryManagerImpl.getCommentsByStateForPerson"); return theComments; } /** * Return all of the models associated with a person that match the state * passed in * * @param inState * the state of the comment * * @param inPerson * the person to match * * @return a list of matching models * * @throws PersistenceException */ public List getModelsByStateForPerson(String inState, Person inPerson) throws PersistenceException { log.trace("Entering QueryManagerImpl.getCurrentLog"); String theHQLQuery = "from AnimalModel as am where am.state = :state and am.id in (" + "select l.cancerModel from Log as l where l.submitter = :party_id and l.type = :state)"; log.debug("The HQL query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("party_id", inPerson.getId()); theQuery.setParameter("state", inState); List theComments = theQuery.list(); if (theComments == null) { theComments = new ArrayList(); } return theComments; } /** * Get yeast screen result data (ave inibition etc.) for a given Agent * (drug) * * @param agent * refers to the compound that was used in the yeast experiment * @param stage * is the stage of the experiment (0, 1, or 2) * * @return the results * @throws PersistenceException */ public DrugScreenResult getYeastScreenResults(Agent agent, String stage) throws PersistenceException { DrugScreenResult dsr = new DrugScreenResult(); ResultSet theResultSet = null; try { String theSQLString = "select tx.ethnicity_strain," + "\n" + " t.dosage," + "\n" + " sr.aveinh aveinh," + "\n" + " sr.diffinh diffinh" + "\n" + " from screening_Result sr," + "\n" + " env_factor a," + "\n" + " YST_MDL_SCRNING_RESULT ymsr," + "\n" + " abs_cancer_model acm," + "\n" + " treatment t," + "\n" + " taxon tx" + "\n" + " where sr.agent_id = a.env_factor_id" + "\n" + " and sr.screening_result_id = ymsr.screening_result_id" + "\n" + " and sr.treatment_id = t.treatment_id" + "\n" + " and ymsr.abs_cancer_model_id = acm.abs_cancer_model_id" + "\n" + " and acm.taxon_id = tx.taxon_id" + "\n" + " and a.nsc_number = ?" + "\n" + " and sr.stage = ?" 
+ "\n" + " order by 1, 2"; log.info("getYeastScreenResults - SQL: " + theSQLString); Object[] params = new Object[2]; params[0] = agent.getNscNumber(); params[1] = stage; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { final String strain = theResultSet.getString(1); final String dosage = theResultSet.getString(2); final float aveinh = theResultSet.getFloat(3); final float diffinh = theResultSet.getFloat(4); dsr.addEntry(strain, dosage, aveinh, diffinh); } log.info("Got " + dsr.strainCount + " strains"); } catch (Exception e) { log.error("Exception in getYeastScreenResults", e); throw new PersistenceException("Exception in getYeastScreenResults: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return dsr; } /** * Get the invivo (Xenograft) data (from DTP) for a given Agent.nscNumber * (drug) * * @param agent * refers to the compound that was used in the xenograft * experiment * * @return the results (list of abs_cancer_model_id, model_descriptor, and # * of records * @throws PersistenceException */ public List getInvivoResults(Agent agent) throws PersistenceException { List results = new ArrayList(); int cc = 0; ResultSet theResultSet = null; try { String theSQLString = "select acm.abs_cancer_model_id," + "\n" + " acm.model_descriptor," + "\n" + " tx.ethnicity_strain," + "\n" + " count(*)" + "\n" + " from invivo_Result sr," + "\n" + " env_factor a," + "\n" + " XENOGRAFT_INVIVO_RESULT ymsr," + "\n" + " abs_cancer_model acm," + "\n" + " treatment t," + "\n" + " taxon tx" + "\n" + " where sr.agent_id = a.env_factor_id" + "\n" + " and sr.invivo_result_id = ymsr.invivo_result_id" + "\n" + " and sr.treatment_id = t.treatment_id" + "\n" + " and ymsr.abs_cancer_model_id = acm.abs_cancer_model_id" + "\n" + " and acm.taxon_id = tx.taxon_id" + "\n" + " and a.nsc_number = ?" + "\n" + " group by acm.abs_cancer_model_id, acm.model_descriptor, tx.ethnicity_strain" + "\n" + " order by 3, 2"; log.info("getInvivoResults - SQL: " + theSQLString); Object[] params = new Object[1]; params[0] = agent.getNscNumber(); theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { String[] item = new String[4]; item[0] = theResultSet.getString(1); // the id item[1] = theResultSet.getString(2); // model descriptor item[2] = theResultSet.getString(3); // strain item[3] = theResultSet.getString(4); // record count results.add(item); cc++; } log.info("Got " + cc + " xenograft models"); } catch (Exception e) { log.error("Exception in getYeastScreenResults", e); throw new PersistenceException("Exception in getYeastScreenResults: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return results; } /** * Get the model id's for any model that has a histopathology associated * with a specific organ. 
* * @param inOrgan * the organ to search for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForHistopathologyOrgan(String inConceptCodes) throws PersistenceException { String theConceptCodeList = ""; StringTokenizer theTokenizer = new StringTokenizer(inConceptCodes, ","); while (theTokenizer.hasMoreElements()) { theConceptCodeList += "'" + theTokenizer.nextToken() + "'"; // Only tack on a , if it's not the last element if (theTokenizer.hasMoreElements()) { theConceptCodeList += ","; } } String theSQLString = "SELECT distinct ani_hist.abs_cancer_model_id " + "FROM ani_mod_histopathology ani_hist " + "WHERE ani_hist.histopathology_id IN (SELECT h.histopathology_id " + " FROM histopathology h, organ_histopathology oh, organ o " + " WHERE h.histopathology_id = oh.histopathology_id AND oh.organ_id = o.organ_id " + " AND o.concept_code IN (" + theConceptCodeList + "))"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a histopathology with a parent * histopathology * * @return a list of matching model ids * * @throws PersistenceException */ private String getModelIdsForHistoMetastasis() throws PersistenceException { String theSQLString = "SELECT distinct ani_hist.abs_cancer_model_id FROM ani_mod_histopathology ani_hist " + "WHERE ani_hist.histopathology_id IN (SELECT h.histopathology_id FROM histopathology h " + " WHERE h.parent_histopathology_id IS NOT NULL)"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has associated microarray data * * @return a list of matching model ids * * @throws PersistenceException */ private String getModelIdsForMicroArrayData() throws PersistenceException { String theSQLString = "SELECT distinct abs_cancer_model_id FROM ani_mod_mic_array_data"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a cellline w/ a matching name * * @param inCellLineName * the text to search for in the cell-line * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForCellLine(String inCellLineName) throws PersistenceException { String theSQLString = "SELECT distinct ani_cell.abs_cancer_model_id FROM ani_mod_cell_line ani_cell " + "WHERE ani_cell.cell_line_id IN (SELECT c.cell_line_id FROM cell_line c " + " WHERE c.name LIKE ?)"; Object[] theParams = new Object[1]; theParams[0] = "%" + inCellLineName + "%"; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a histopathology associated * with a specific organ. 
* * @param inDisease * the disease to search for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForTherapeuticApproach(String inTherapeuticApproach) throws PersistenceException { String theSQLString = "SELECT distinct ani_ther.abs_cancer_model_id " + "FROM animal_model_therapy ani_ther " + "WHERE ani_ther.therapy_id IN (SELECT t.therapy_id FROM therapy t, env_factor e " + " WHERE t.therapeutic_experiment = 1 AND t.env_factor_id = e.env_factor_id " + " AND e.name like ?)"; String theSQLTheraputicApproach = "%"; if (inTherapeuticApproach != null && inTherapeuticApproach.trim().length() > 0) { theSQLTheraputicApproach = "%" + inTherapeuticApproach + "%"; } Object[] theParams = new Object[1]; theParams[0] = theSQLTheraputicApproach; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model that has a histopathology associated * with a specific organ. * * @param inDisease * the disease to search for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForHistopathologyDisease(String inConceptCodes) throws PersistenceException { String theConceptCodeList = ""; StringTokenizer theTokenizer = new StringTokenizer(inConceptCodes, ","); while (theTokenizer.hasMoreElements()) { theConceptCodeList += "'" + theTokenizer.nextToken() + "'"; // Only tack on a , if it's not the last element if (theTokenizer.hasMoreElements()) { theConceptCodeList += ","; } } String theSQLString = "SELECT distinct ani_hist.abs_cancer_model_id " + "FROM ani_mod_histopathology ani_hist " + "WHERE ani_hist.histopathology_id IN (SELECT h.histopathology_id " + " FROM histopathology h, histopathology_disease hd, disease d " + " WHERE h.histopathology_id = hd.histopathology_id " + " AND hd.disease_id = d.disease_id AND d.concept_code IN (" + theConceptCodeList + "))"; Object[] theParams = new Object[0]; return getModelIds(theSQLString, theParams); } /** * Get the models with the associated engineered gene * * @param inGeneName * the name of the transgene or targeted modification * * @param isEngineeredTransgene * are we looking for a transgene? * * @param isEngineeredTransgene * are we looking for an induced mutation? * * @param inGenomicSegDesignator * the name of the genomic segment designator * * @param inInducedMutationAgent * the name of Agent which induced the mutation. Exact match. * * @return a list of matching model id * * @throws PersistenceException * */ private String getModelIdsForEngineeredGenes(String inGeneName, boolean isEngineeredTransgene, boolean isTargetedModification, String inGenomicSegDesignator, String inInducedMutationAgent) throws PersistenceException { List theList = new ArrayList(); String theSQLString = "SELECT distinct ani_ge.abs_cancer_model_id " + "FROM ani_mod_engineered_gene ani_ge WHERE "; String OR = " "; if (isEngineeredTransgene == true && inGeneName.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE name LIKE ? AND engineered_gene_type = 'T')"; OR = " OR "; theList.add("%" + inGeneName + "%"); } if (isTargetedModification == true && inGeneName.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE name LIKE ? 
AND engineered_gene_type = 'TM')"; OR = " OR "; theList.add("%" + inGeneName + "%"); } if (inInducedMutationAgent != null && inInducedMutationAgent.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE engineered_gene_id IN (" + " SELECT distinct im.engineered_gene_id FROM env_factor ef, env_fac_ind_mutation im " + " WHERE ef.name = ? " + " AND ef.env_factor_id = im.env_factor_id) AND engineered_gene_type = 'IM')"; OR = " OR "; theList.add(inInducedMutationAgent); } if (inGenomicSegDesignator != null && inGenomicSegDesignator.length() > 0) { theSQLString += OR + " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE clone_designator LIKE ? AND engineered_gene_type = 'GS')"; theList.add(inGenomicSegDesignator); } // Convert the params Object[] theParams = new Object[theList.size()]; for (int i = 0; i < theList.size(); i++) { theParams[i] = theList.get(i); } return getModelIds(theSQLString, theParams); } /** * Get the models with the associated engineered gene * * @param inKeyword * the keyword to search for * * @return a list of matching model id * * @throws PersistenceException * */ private String getModelIdsForAnyEngineeredGene(String inKeyword) throws PersistenceException { String theSQLString = "SELECT distinct ani_ge.abs_cancer_model_id " + "FROM ani_mod_engineered_gene ani_ge WHERE "; theSQLString += " ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE name LIKE ?)"; theSQLString += " OR ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE engineered_gene_id IN (" + " SELECT distinct im.engineered_gene_id FROM env_factor ef, env_fac_ind_mutation im " + " WHERE ef.name = ? " + " AND ef.env_factor_id = im.env_factor_id) AND engineered_gene_type = 'IM')"; theSQLString += " OR ani_ge.engineered_gene_id IN (SELECT distinct engineered_gene_id " + " FROM engineered_gene WHERE clone_designator LIKE ? AND engineered_gene_type = 'GS')"; // Convert the params Object[] theParams = new Object[3]; theParams[0] = inKeyword; theParams[1] = inKeyword; theParams[2] = inKeyword; return getModelIds(theSQLString, theParams); } /** * Get the model id's that have a matching environmental factor * * @param inType * the EF type * @param inName * the name to look for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForEnvironmentalFactor(String inType, String inName) throws PersistenceException { String theSQLString = "SELECT distinct ani_th.abs_cancer_model_id FROM animal_model_therapy ani_th " + "WHERE ani_th.therapy_id IN (SELECT t.therapy_id FROM therapy t, env_factor ef" + " WHERE t.env_factor_id = ef.env_factor_id AND ef.name = ? 
AND ef.type = ?)"; Object[] theParams = new Object[2]; theParams[0] = inName; theParams[1] = inType; return getModelIds(theSQLString, theParams); } /** * Get the model id's for any model has a keyword match in the env factor * * @param inKeyword * the name to look for * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIdsForAnyEnvironmentalFactor(String inKeyword) throws PersistenceException { String theSQLString = "SELECT distinct ani_th.abs_cancer_model_id FROM animal_model_therapy ani_th " + "WHERE ani_th.therapy_id IN (SELECT t.therapy_id FROM therapy t, env_factor ef" + " WHERE t.env_factor_id = ef.env_factor_id AND ef.name like ?)"; Object[] theParams = new Object[1]; theParams[0] = inKeyword; return getModelIds(theSQLString, theParams); } public List searchForAnimalModels(SearchData inSearchData) throws Exception { log.trace("Entering searchForAnimalModels"); List theAnimalModels = null; String theFromClause = "from AnimalModel as am where am.state = 'Edited-approved' AND am.availability.releaseDate < sysdate "; String theOrderByClause = " ORDER BY am.modelDescriptor asc"; if (inSearchData.getKeyword() != null && inSearchData.getKeyword().length() > 0) { log.debug("Doing a keyword search: " + inSearchData.getKeyword()); theAnimalModels = keywordSearch(theFromClause, theOrderByClause, inSearchData.getKeyword()); } else { log.debug("Doing a criteria search"); theAnimalModels = criteriaSearch(theFromClause, theOrderByClause, inSearchData); } log.trace("Exiting searchForAnimalModels"); return theAnimalModels; } private List keywordSearch(String inFromClause, String inOrderByClause, String inKeyword) throws Exception { // Use the like search functionality String theKeyword = "%" + inKeyword + "%"; String theWhereClause = ""; theWhereClause += " AND (am.modelDescriptor like :keyword "; theWhereClause += " OR am.species IN (from Taxon as t where t.scientificName like :keyword )"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForHistopathologyOrgan(theKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForHistopathologyDisease(theKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForAnyEnvironmentalFactor(inKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForAnyEngineeredGene(inKeyword) + ")"; theWhereClause += " OR am.phenotype IN (from Phenotype as p where p.description like :keyword )"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForCellLine(inKeyword) + ")"; theWhereClause += " OR abs_cancer_model_id IN (" + getModelIdsForTherapeuticApproach(inKeyword) + "))"; List theAnimalModels = null; try { String theHQLQuery = inFromClause + theWhereClause + inOrderByClause; log.info("HQL Query: " + theHQLQuery); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theQuery.setParameter("keyword", theKeyword); theAnimalModels = theQuery.list(); } catch (Exception e) { log.error("Exception occurred searching for models", e); throw e; } return theAnimalModels; } private List criteriaSearch(String inFromClause, String inOrderByClause, SearchData inSearchData) throws Exception { String theWhereClause = ""; // PI criteria if (inSearchData.getPiName() != null && inSearchData.getPiName().length() > 0) { StringTokenizer theTokenizer = new StringTokenizer(inSearchData.getPiName()); String theLastName = theTokenizer.nextToken(",").trim(); String theFirstName = theTokenizer.nextToken().trim(); theWhereClause += " AND 
am.principalInvestigator IN (from Person as p where p.lastName = '" + theLastName + "' AND p.firstName = '" + theFirstName + "')"; } // Model descriptor criteria if (inSearchData.getModelDescriptor() != null && inSearchData.getModelDescriptor().length() > 0) { theWhereClause += " AND am.modelDescriptor like '%" + inSearchData.getModelDescriptor() + "%'"; } // Species criteria if (inSearchData.getSpecies() != null && inSearchData.getSpecies().length() > 0) { theWhereClause += "AND am.species IN (from Taxon as t where t.scientificName = '" + inSearchData.getSpecies() + "')"; } // Search for organ if (inSearchData.getOrganTissueCode() != null && inSearchData.getOrganTissueCode().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForHistopathologyOrgan(inSearchData.getOrganTissueCode()) + ")"; } // Search for disease if (inSearchData.getDiagnosisCode() != null && inSearchData.getDiagnosisCode().trim().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForHistopathologyDisease(inSearchData.getDiagnosisCode()) + ")"; } // /////////////////////////////////////// // Carcinogenic interventions // /////////////////////////////////////// if (inSearchData.isSearchCarcinogenicInterventions() == true) { log.debug("Searching for Carcinogenic Interventions"); // Search for chemical/drug if (inSearchData.getChemicalDrug() != null && inSearchData.getChemicalDrug().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Chemical / Drug", inSearchData.getChemicalDrug()) + ")"; } // Search for Surgery if (inSearchData.getSurgery() != null && inSearchData.getSurgery().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Surgery", inSearchData.getSurgery()) + ")"; } // Search for Hormone if (inSearchData.getHormone() != null && inSearchData.getHormone().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Hormone", inSearchData.getHormone()) + ")"; } // Search for Growth Factor if (inSearchData.getGrowthFactor() != null && inSearchData.getGrowthFactor().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Growth Factor", inSearchData.getGrowthFactor()) + ")"; } // Search for Radiation if (inSearchData.getRadiation() != null && inSearchData.getRadiation().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Radiation", inSearchData.getRadiation()) + ")"; } // Search for Viral if (inSearchData.getViral() != null && inSearchData.getViral().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEnvironmentalFactor("Viral", inSearchData.getViral()) + ")"; } } // Only call if some of the data is set if ((inSearchData.getGeneName() != null && inSearchData.getGeneName().length() > 0) || (inSearchData.getGenomicSegDesignator() != null && inSearchData.getGenomicSegDesignator().length() > 0) || (inSearchData.getInducedMutationAgent() != null && inSearchData.getInducedMutationAgent().length() > 0)) { // Search for engineered genes theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForEngineeredGenes(inSearchData.getGeneName(), inSearchData.isEngineeredTransgene(), inSearchData.isTargetedModification(), inSearchData.getGenomicSegDesignator(), inSearchData .getInducedMutationAgent()) + ")"; } // Search for phenotype if (inSearchData.getPhenotype() != null && 
inSearchData.getPhenotype().length() > 0) { theWhereClause += " AND am.phenotype IN (from Phenotype as p where p.description like '%" + inSearchData.getPhenotype() + "%')"; } // Search for cellline if (inSearchData.getCellLine() != null && inSearchData.getCellLine().length() > 0) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForCellLine(inSearchData.getCellLine()) + ")"; } // Search for therapeutic approaches if (inSearchData.isSearchTherapeuticApproaches()) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForTherapeuticApproach(inSearchData.getTherapeuticApproach()) + ")"; } // Search for therapeutic approaches if (inSearchData.isSearchHistoMetastasis()) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForHistoMetastasis() + ")"; } // Search for therapeutic approaches if (inSearchData.isSearchMicroArrayData()) { theWhereClause += " AND abs_cancer_model_id IN (" + getModelIdsForMicroArrayData() + ")"; } List theAnimalModels = null; try { String theHQLQuery = inFromClause + theWhereClause + inOrderByClause; log.info("HQL Query: " + theHQLQuery); // HQLParameter[] theParameters = new HQLParameter[0]; // animalModels = Search.query(theHQLQuery, theParameters); Query theQuery = HibernateUtil.getSession().createQuery(theHQLQuery); theAnimalModels = theQuery.list(); } catch (Exception e) { log.error("Exception occurred searching for models", e); throw e; } return theAnimalModels; } /** * Extract the model ID's for an sql query with a specific organ. * * @param inSQLString * the SQL string that returns a set of IDs * @param inParameters * the parameters to bind in the query * * @return a list of matching model id * * @throws PersistenceException */ private String getModelIds(String inSQLString, Object inParameters[]) throws PersistenceException { log.trace("In getModelIds"); String theModelIds = ""; ResultSet theResultSet = null; try { log.info("getModelsIds - SQL: " + inSQLString); theResultSet = Search.query(inSQLString, inParameters); if (theResultSet.next()) { theModelIds += theResultSet.getString(1); } while (theResultSet.next()) { theModelIds += "," + theResultSet.getString(1); } } catch (Exception e) { log.error("Exception in getModelIds", e); throw new PersistenceException("Exception in getModelIds: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } if (theModelIds.equals("")) { theModelIds = "-1"; } return theModelIds; } public List getModelsForThisCompound(Long nscNumber) throws PersistenceException { List models = new ArrayList(); int cc = 0; ResultSet theResultSet = null; try { String theSQLString = "select acm.abs_cancer_model_id, " + "\n" + " acm.model_descriptor," + "\n" + " tx.abbreviation || ' ' || tx.ethnicity_strain" + "\n" + " from abs_cancer_model acm," + "\n" + " animal_model_therapy amt," + "\n" + " therapy t," + "\n" + " env_factor ef," + "\n" + " taxon tx" + "\n" + " where acm.abs_cancer_model_id = amt.abs_cancer_model_id" + "\n" + " and acm.abs_cancer_model_type = 'AM'" + "\n" + " and amt.therapy_id = t.therapy_id" + "\n" + " and t.env_factor_id = ef.env_factor_id" + "\n" + " and acm.taxon_id = tx.taxon_id" + "\n" + " and ef.nsc_number = ?"; log.info("getInvivoResults - SQL: " + theSQLString); Object[] params = new Object[1]; params[0] = nscNumber; theResultSet = Search.query(theSQLString, params); while (theResultSet.next()) { String[] item = new String[3]; item[0] = theResultSet.getString(1); // the id item[1] = theResultSet.getString(2); // model descriptor 
item[2] = theResultSet.getString(3); // strain models.add(item); cc++; } log.info("Got " + cc + " animal models"); } catch (Exception e) { log.error("Exception in getModelsForThisCompound", e); throw new PersistenceException("Exception in getModelsForThisCompound: " + e); } finally { if (theResultSet != null) { try { theResultSet.close(); } catch (Exception e) { } } } return models; } public static void main(String[] inArgs) { try { System.out.println("Model ids: " + QueryManagerSingleton.instance().getModelIdsForHistopathologyOrgan("Skin")); } catch (Exception e) { e.printStackTrace(); } } }
Get species only from approved models SVN-Revision: 657
src/gov/nih/nci/camod/service/impl/QueryManagerImpl.java
Get species only from approved models
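The QueryManagerImpl record above builds most of its lookups with bound HQL parameters (setParameter) rather than string concatenation. A minimal sketch of that pattern follows, assuming a Hibernate 3 style Session obtained elsewhere; the entity names Comments and Log mirror the record, everything else is illustrative and not part of the repository.

import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;

public class CommentQueriesSketch {

    // Minimal sketch of the bound-parameter HQL pattern used in the record:
    // values go in through setParameter, never through string concatenation,
    // so user input stays out of the query text.
    @SuppressWarnings("unchecked")
    public static List<Object> commentsByStateForPerson(Session session, String state, Long partyId) {
        String hql = "from Comments as c"
                + " where c.state = :state"
                + " and c.id in (select l.comment from Log as l"
                + "              where l.submitter = :party_id and l.type = :state)";
        Query query = session.createQuery(hql);
        query.setParameter("state", state);
        query.setParameter("party_id", partyId);
        return query.list();
    }
}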
Java
bsd-3-clause
121f643967c71e028b8ad893eb9da1361adaf90e
0
ceejii/jcomponent-search
package com.ceejii.gui.component; import java.awt.event.ActionEvent; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import javax.swing.JButton; import com.ceejii.gui.SearchSuggestionListener; import com.ceejii.gui.data.SearchSuggestion; public class DefaultSearchResultButton extends JButton implements SearchSuggestion { private String id; private String clue; private String name; private SearchSuggestionListener searchSuggestionListener; public DefaultSearchResultButton(String name, String id, String clue) { super(name); this.name = name; this.id = id; this.setToolTipText(this.clue); this.clue = clue; this.addMouseListener(new MouseListener(){ public void mouseClicked(MouseEvent arg0) { System.out.println("Mouse Clicked on button: " + DefaultSearchResultButton.this.getText() + " " + DefaultSearchResultButton.this.name); searchSuggestionListener.resultChosen(new ActionEvent(DefaultSearchResultButton.this,0,DefaultSearchResultButton.this.getText())); } public void mouseEntered(MouseEvent arg0) { System.out.println("Mouse Entered over button: " + DefaultSearchResultButton.this.getText() + " " + DefaultSearchResultButton.this.name); if(searchSuggestionListener != null){ searchSuggestionListener.resultHovered(new ActionEvent(DefaultSearchResultButton.this,0,DefaultSearchResultButton.this.getToolTipText())); } } public void mouseExited(MouseEvent arg0) { System.out.println("Mouse Exited over button: " + DefaultSearchResultButton.this.getText() + " " + DefaultSearchResultButton.this.name); if(searchSuggestionListener != null){ searchSuggestionListener.resultNoLongerHovered(new ActionEvent(DefaultSearchResultButton.this,0,DefaultSearchResultButton.this.getToolTipText())); } } public void mousePressed(MouseEvent arg0) { } public void mouseReleased(MouseEvent arg0) { } }); } public String getSuggestionId() { return this.id; } public String getSuggestionClue() { return this.clue; } public String getSuggestionName() { return this.name; } public void addSearchSuggestionListener(SearchSuggestionListener listener) { this.searchSuggestionListener = listener; } }
src/com/ceejii/gui/component/DefaultSearchResultButton.java
package com.ceejii.gui.component; import java.awt.event.ActionEvent; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import javax.swing.JButton; import com.ceejii.gui.SearchSuggestionListener; import com.ceejii.gui.data.SearchSuggestion; public class DefaultSearchResultButton extends JButton implements SearchSuggestion { private String id; private String clue; private String name; private SearchSuggestionListener searchSuggestionListener; public DefaultSearchResultButton(String name, String id, String clue) { super(name); this.name = name; this.id = id; this.setToolTipText(this.clue); this.clue = clue; this.addMouseListener(new MouseListener(){ public void mouseClicked(MouseEvent arg0) { System.out.println("Mouse Clicked on button: " + DefaultSearchResultButton.this.getText() + " " + DefaultSearchResultButton.this.name); searchSuggestionListener.resultChosen(new ActionEvent(DefaultSearchResultButton.this,0,DefaultSearchResultButton.this.getText())); } public void mouseEntered(MouseEvent arg0) { System.out.println("Mouse Entered over button: " + DefaultSearchResultButton.this.getText() + " " + DefaultSearchResultButton.this.name); if(searchSuggestionListener != null){ searchSuggestionListener.resultHovered(new ActionEvent(DefaultSearchResultButton.this,0,DefaultSearchResultButton.this.getToolTipText())); } } public void mouseExited(MouseEvent arg0) { System.out.println("Mouse Exited over button: " + DefaultSearchResultButton.this.getText() + " " + DefaultSearchResultButton.this.name); if(searchSuggestionListener != null){ searchSuggestionListener.resultHovered(new ActionEvent(DefaultSearchResultButton.this,0,DefaultSearchResultButton.this.getToolTipText())); } } public void mousePressed(MouseEvent arg0) { } public void mouseReleased(MouseEvent arg0) { } }); } public String getSuggestionId() { return this.id; } public String getSuggestionClue() { return this.clue; } public String getSuggestionName() { return this.name; } public void addSearchSuggestionListener(SearchSuggestionListener listener) { this.searchSuggestionListener = listener; } }
Fixed a bug that caused mouse hover events to fire when the mouse actually left the button.
src/com/ceejii/gui/component/DefaultSearchResultButton.java
Fixed a bug that caused mouse hover events to fire when the mouse actually left the button.
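The fix above boils down to mouseExited reporting the opposite state from mouseEntered. A minimal, self-contained sketch of that split follows; HoverCallback is a hypothetical stand-in for the SearchSuggestionListener used in the record.

import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JButton;

public class HoverButtonSketch {

    // Hypothetical callback standing in for the record's SearchSuggestionListener.
    public interface HoverCallback {
        void hovered(String text);
        void noLongerHovered(String text);
    }

    public static JButton newHoverButton(String label, final HoverCallback callback) {
        final JButton button = new JButton(label);
        button.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseEntered(MouseEvent e) {
                callback.hovered(button.getText());
            }

            @Override
            public void mouseExited(MouseEvent e) {
                // The original bug invoked the "hovered" callback here as well;
                // exiting must report the opposite state.
                callback.noLongerHovered(button.getText());
            }
        });
        return button;
    }
}

Keeping the two callbacks separate is what lets the listener clear a tooltip or highlight as soon as the pointer leaves the button.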
Java
bsd-3-clause
5d3b9b611375b03240e5009a2da0d07011ceda6a
0
TREAD-3219/Robot_2017
// RobotBuilder Version: 2.0 // // This file was generated by RobotBuilder. It contains sections of // code that are automatically generated and assigned by robotbuilder. // These sections will be updated in the future when you export to // Java from RobotBuilder. Do not put any code or make any change in // the blocks indicating autogenerated code or it will be lost on an // update. Deleting the comments indicating the section will prevent // it from being updated in the future. package org.usfirst.frc3219.TREAD; import org.usfirst.frc3219.TREAD.subsystems.GearSlot; import org.usfirst.frc3219.TREAD.commands.autonomous.StandardAutonomous; import org.usfirst.frc3219.TREAD.commands.shooter.AimRight; import org.usfirst.frc3219.TREAD.subsystems.Ballfeeder; import org.usfirst.frc3219.TREAD.subsystems.Drive; import org.usfirst.frc3219.TREAD.subsystems.BallIntake; import org.usfirst.frc3219.TREAD.subsystems.Turntable; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.command.Command; import org.usfirst.frc3219.TREAD.subsystems.*; import edu.wpi.first.wpilibj.command.Scheduler; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SendableChooser; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class Robot extends IterativeRobot { // Command Declarations Command autonomousCommand; private static SendableChooser posChooser; public static String position = "Default"; public static boolean blueAlliance = true; // Subsystem Declarations public static OI oi; public static Climber climber; public static Turntable turntable; public static Drive drive; public static BallIntake intake; public static Ballfeeder ballfeeder; public static GearSlot gearSlot; public static Shooter shooter; public static Sensors sensors; /** * This function is run when the robot is first started up and should be * used for any initialization code. */ public void robotInit() { RobotMap.init(); // Subsystem Construction, OI must be last. climber = new Climber(); turntable = new Turntable(); drive = new Drive(); intake = new BallIntake(); ballfeeder = new Ballfeeder(); gearSlot = new GearSlot(); shooter= new Shooter(); sensors = new Sensors(); // OI must be constructed after subsystems. If the OI creates Commands // (which it very likely will), subsystems are not guaranteed to be // constructed yet. Thus, their requires() statements may grab null // pointers. Bad news. Don't move it. oi = new OI(); // instantiate the command chooser used for selecting autonomous posChooser = new SendableChooser(); posChooser.addDefault("Middle", "Middle"); posChooser.addObject("Left", "Left"); posChooser.addObject("Right", "Right"); SmartDashboard.putData("Position", posChooser); autonomousCommand = new StandardAutonomous(); } /** * This function is called when the disabled button is hit. You can use it * to reset subsystems before shutting down. 
*/ public void disabledInit() { } public void disabledPeriodic() { Scheduler.getInstance().run(); } public void autonomousInit() { DriverStation.Alliance alliance = DriverStation.getInstance().getAlliance(); blueAlliance = alliance.equals(DriverStation.Alliance.Blue); position = (String) posChooser.getSelected(); autonomousCommand = new StandardAutonomous(); // schedule the autonomous command (example) if (autonomousCommand != null) autonomousCommand.start(); } /** * This function is called periodically during autonomous */ public void autonomousPeriodic() { Scheduler.getInstance().run(); } public void teleopInit() { DriverStation.Alliance alliance = DriverStation.getInstance().getAlliance(); blueAlliance = alliance.equals(DriverStation.Alliance.Blue); // This makes sure that the autonomous stops running when // teleop starts running. If you want the autonomous to // continue until interrupted by another command, remove // this line or comment it out. if (autonomousCommand != null) autonomousCommand.cancel(); } /** * This function is called periodically during operator control */ public void teleopPeriodic() { Scheduler.getInstance().run(); } /** * This function is called periodically during test mode */ public void testPeriodic() { LiveWindow.run(); } public static void addCommand(Command command) { Scheduler.getInstance().add(command); } }
src/org/usfirst/frc3219/TREAD/Robot.java
// RobotBuilder Version: 2.0 // // This file was generated by RobotBuilder. It contains sections of // code that are automatically generated and assigned by robotbuilder. // These sections will be updated in the future when you export to // Java from RobotBuilder. Do not put any code or make any change in // the blocks indicating autogenerated code or it will be lost on an // update. Deleting the comments indicating the section will prevent // it from being updated in the future. package org.usfirst.frc3219.TREAD; import org.usfirst.frc3219.TREAD.subsystems.GearSlot; import org.usfirst.frc3219.TREAD.commands.autonomous.StandardAutonomous; import org.usfirst.frc3219.TREAD.commands.shooter.AimRight; import org.usfirst.frc3219.TREAD.subsystems.Ballfeeder; import org.usfirst.frc3219.TREAD.subsystems.Drive; import org.usfirst.frc3219.TREAD.subsystems.BallIntake; import org.usfirst.frc3219.TREAD.subsystems.Turntable; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.IterativeRobot; import edu.wpi.first.wpilibj.command.Command; import org.usfirst.frc3219.TREAD.subsystems.*; import edu.wpi.first.wpilibj.command.Scheduler; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.smartdashboard.SendableChooser; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the IterativeRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class Robot extends IterativeRobot { // Command Declarations Command autonomousCommand; private static SendableChooser posChooser; public static String position = "Default"; public static boolean blueAlliance = true; // Subsystem Declarations public static OI oi; public static Climber climber; public static Turntable turntable; public static Drive drive; public static BallIntake intake; public static Ballfeeder ballfeeder; public static GearSlot gearSlot; public static Shooter shooter; public static Sensors sensors; /** * This function is run when the robot is first started up and should be * used for any initialization code. */ public void robotInit() { RobotMap.init(); // Subsystem Construction, OI must be last. climber = new Climber(); turntable = new Turntable(); drive = new Drive(); intake = new BallIntake(); ballfeeder = new Ballfeeder(); gearSlot = new GearSlot(); shooter= new Shooter(); sensors = new Sensors(); // OI must be constructed after subsystems. If the OI creates Commands // (which it very likely will), subsystems are not guaranteed to be // constructed yet. Thus, their requires() statements may grab null // pointers. Bad news. Don't move it. oi = new OI(); // instantiate the command chooser used for selecting autonomous posChooser = new SendableChooser(); posChooser.addDefault("Middle", "Middle"); posChooser.addObject("Side", "Diag"); SmartDashboard.putData("Position", posChooser); autonomousCommand = new StandardAutonomous(); } /** * This function is called when the disabled button is hit. You can use it * to reset subsystems before shutting down. 
*/ public void disabledInit() { } public void disabledPeriodic() { Scheduler.getInstance().run(); } public void autonomousInit() { DriverStation.Alliance alliance = DriverStation.getInstance().getAlliance(); blueAlliance = alliance.equals(DriverStation.Alliance.Blue); position = (String) posChooser.getSelected(); autonomousCommand = new StandardAutonomous(); // schedule the autonomous command (example) if (autonomousCommand != null) autonomousCommand.start(); } /** * This function is called periodically during autonomous */ public void autonomousPeriodic() { Scheduler.getInstance().run(); } public void teleopInit() { DriverStation.Alliance alliance = DriverStation.getInstance().getAlliance(); blueAlliance = alliance.equals(DriverStation.Alliance.Blue); // This makes sure that the autonomous stops running when // teleop starts running. If you want the autonomous to // continue until interrupted by another command, remove // this line or comment it out. if (autonomousCommand != null) autonomousCommand.cancel(); } /** * This function is called periodically during operator control */ public void teleopPeriodic() { Scheduler.getInstance().run(); } /** * This function is called periodically during test mode */ public void testPeriodic() { LiveWindow.run(); } public static void addCommand(Command command) { Scheduler.getInstance().add(command); } }
Fix for position chooser
src/org/usfirst/frc3219/TREAD/Robot.java
Fix for position chooser
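The change above only swaps the options registered on the position chooser. A minimal sketch of the pre-2018 SendableChooser pattern it relies on follows, using only calls that appear in the record (addDefault, addObject, getSelected); the option strings are illustrative.

import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

public class PositionChooserSketch {

    // Raw type mirrors the record's 2017-era WPILib usage.
    private final SendableChooser posChooser = new SendableChooser();

    public void robotInit() {
        // Register the options once; the default is pre-selected on the dashboard.
        posChooser.addDefault("Middle", "Middle");
        posChooser.addObject("Left", "Left");
        posChooser.addObject("Right", "Right");
        SmartDashboard.putData("Position", posChooser);
    }

    public String autonomousInit() {
        // The raw chooser returns Object in this WPILib version, so cast.
        return (String) posChooser.getSelected();
    }
}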
Java
mit
4b9599f07142d8c1765638e0f2eb098ce15d4cdb
0
cjburkey01/ClaimChunk,cjburkey01/ClaimChunk
package com.cjburkey.claimchunk.event; import com.cjburkey.claimchunk.ChunkHelper; import com.cjburkey.claimchunk.ClaimChunk; import com.cjburkey.claimchunk.Config; import java.util.Objects; import org.bukkit.entity.Animals; import org.bukkit.entity.EntityType; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.block.BlockBreakEvent; import org.bukkit.event.block.BlockPlaceEvent; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.entity.EntityExplodeEvent; import org.bukkit.event.hanging.HangingBreakByEntityEvent; import org.bukkit.event.hanging.HangingPlaceEvent; import org.bukkit.event.player.PlayerInteractEntityEvent; import org.bukkit.event.player.PlayerInteractEvent; @SuppressWarnings("unused") public class CancellableChunkEvents implements Listener { // Block Break @EventHandler public void onBlockBroken(BlockBreakEvent e) { if (e != null) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getBlock().getChunk(), e); } } // Clicking on Blocks @EventHandler public void onPlayerInteract(PlayerInteractEvent e) { if (e != null && e.getClickedBlock() != null) { if (e.getAction() == Action.LEFT_CLICK_BLOCK || e.getAction() == Action.LEFT_CLICK_AIR || e.getAction() == Action.RIGHT_CLICK_AIR) { return; } ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getClickedBlock().getChunk(), e); } } // Placing Blocks @EventHandler public void onBlockPlaced(BlockPlaceEvent e) { if (e != null) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getBlock().getChunk(), e); } } // Item Frame Rotation @EventHandler public void onPlayerInteract(PlayerInteractEntityEvent e) { if (e != null && (e.getRightClicked().getType().equals(EntityType.ITEM_FRAME) || e.getRightClicked().getType().equals(EntityType.PAINTING))) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getRightClicked().getLocation().getChunk(), e); } } // Item Frame Break @EventHandler public void onItemFrameBroken(HangingBreakByEntityEvent e) { if (e != null && Objects.requireNonNull(e.getRemover()).getType().equals(EntityType.PLAYER)) { ChunkHelper.cancelEventIfNotOwned((Player) e.getRemover(), e.getEntity().getLocation().getChunk(), e); } } // Item Frame Place @EventHandler public void onItemFramePlaced(HangingPlaceEvent e) { if (e != null && e.getPlayer() != null) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getEntity().getLocation().getChunk(), e); } } // Item Frame Remove Item @EventHandler public void onItemFramePlaced(EntityDamageByEntityEvent e) { if (e != null && (e.getEntity().getType().equals(EntityType.ITEM_FRAME) || e.getEntity().getType().equals(EntityType.PAINTING)) && e.getDamager().getType().equals(EntityType.PLAYER)) { ChunkHelper.cancelEventIfNotOwned((Player) e.getDamager(), e.getEntity().getLocation().getChunk(), e); } } // TnT and Creeper explosions @EventHandler public void onEntityExplode(EntityExplodeEvent e) { if (!e.isCancelled()) { if (ClaimChunk.getInstance().getChunkHandler().isUnclaimed(e.getLocation().getChunk()) && Config.getBool("protection", "blockUnclaimedChunks")) { return; } ChunkHelper.cancelExplosionIfConfig(e); } } // Animal damage @EventHandler() public void onEntityDamage(EntityDamageByEntityEvent e) { if (ClaimChunk.getInstance().getChunkHandler().isUnclaimed(e.getEntity().getLocation().getChunk())) { return; } if (e.getDamager() instanceof Player && e.getEntity() instanceof Animals) ChunkHelper.cancelAnimalDamage((Player) e.getDamager(), 
e.getDamager().getLocation().getChunk(), e); } }
src/main/java/com/cjburkey/claimchunk/event/CancellableChunkEvents.java
package com.cjburkey.claimchunk.event; import com.cjburkey.claimchunk.ChunkHelper; import com.cjburkey.claimchunk.ClaimChunk; import com.cjburkey.claimchunk.Config; import java.util.Objects; import org.bukkit.entity.Animals; import org.bukkit.entity.EntityType; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.block.BlockBreakEvent; import org.bukkit.event.block.BlockPlaceEvent; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.entity.EntityExplodeEvent; import org.bukkit.event.hanging.HangingBreakByEntityEvent; import org.bukkit.event.hanging.HangingPlaceEvent; import org.bukkit.event.player.PlayerInteractEntityEvent; import org.bukkit.event.player.PlayerInteractEvent; @SuppressWarnings("unused") public class CancellableChunkEvents implements Listener { // Block Break @EventHandler public void onBlockBroken(BlockBreakEvent e) { if (e != null) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getBlock().getChunk(), e); } } // Clicking on Blocks @EventHandler public void onPlayerInteract(PlayerInteractEvent e) { if (e != null && e.getClickedBlock() != null) { if (e.getAction() == Action.LEFT_CLICK_BLOCK) { return; } if (e.getAction() == Action.LEFT_CLICK_AIR) { return; } if (e.getAction() == Action.RIGHT_CLICK_AIR) { return; } ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getClickedBlock().getChunk(), e); } } // Placing Blocks @EventHandler public void onBlockPlaced(BlockPlaceEvent e) { if (e != null) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getBlock().getChunk(), e); } } // Item Frame Rotation @EventHandler public void onPlayerInteract(PlayerInteractEntityEvent e) { if (e != null && e.getRightClicked().getType().equals(EntityType.ITEM_FRAME)) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getRightClicked().getLocation().getChunk(), e); } } // Item Frame Break @EventHandler public void onItemFrameBroken(HangingBreakByEntityEvent e) { if (e != null && e.getEntity().getType().equals(EntityType.ITEM_FRAME) && Objects.requireNonNull(e.getRemover()).getType().equals(EntityType.PLAYER)) { ChunkHelper.cancelEventIfNotOwned((Player) e.getRemover(), e.getEntity().getLocation().getChunk(), e); } } // Item Frame Place @EventHandler public void onItemFramePlaced(HangingPlaceEvent e) { if (e != null && e.getEntity().getType().equals(EntityType.ITEM_FRAME) && e.getPlayer() != null) { ChunkHelper.cancelEventIfNotOwned(e.getPlayer(), e.getEntity().getLocation().getChunk(), e); } } // Item Frame Remove Item @EventHandler public void onItemFramePlaced(EntityDamageByEntityEvent e) { if (e != null && e.getEntity().getType().equals(EntityType.ITEM_FRAME) && e.getDamager().getType().equals(EntityType.PLAYER)) { ChunkHelper.cancelEventIfNotOwned((Player) e.getDamager(), e.getEntity().getLocation().getChunk(), e); } } // TnT and Creeper explosions @EventHandler public void onEntityExplode(EntityExplodeEvent e) { if (!e.isCancelled()) { if (ClaimChunk.getInstance().getChunkHandler().isUnclaimed(e.getLocation().getChunk()) && Config.getBool("protection", "blockUnclaimedChunks")) { return; } ChunkHelper.cancelExplosionIfConfig(e); } } // Animal damage @EventHandler() public void onEntityDamage(EntityDamageByEntityEvent e) { if (ClaimChunk.getInstance().getChunkHandler().isUnclaimed(e.getEntity().getLocation().getChunk())) { return; } if (e.getDamager() instanceof Player && e.getEntity() instanceof Animals) ChunkHelper.cancelAnimalDamage((Player) 
e.getDamager(), e.getDamager().getLocation().getChunk(), e); } }
Add painting protection
src/main/java/com/cjburkey/claimchunk/event/CancellableChunkEvents.java
Add painting protection
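The commit above treats paintings the same as item frames when cancelling hanging-entity events. A minimal sketch of that listener shape follows; isProtectedFrom is a hypothetical placeholder for the plugin's real chunk-ownership check, not ClaimChunk's actual API.

import org.bukkit.Chunk;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.hanging.HangingBreakByEntityEvent;

public class HangingProtectionSketch implements Listener {

    @EventHandler
    public void onHangingBreak(HangingBreakByEntityEvent e) {
        // Protect both item frames and paintings, as in the commit above.
        EntityType type = e.getEntity().getType();
        boolean protectedType = type == EntityType.ITEM_FRAME || type == EntityType.PAINTING;
        if (!protectedType || !(e.getRemover() instanceof Player)) {
            return;
        }
        Player player = (Player) e.getRemover();
        Chunk chunk = e.getEntity().getLocation().getChunk();
        if (isProtectedFrom(player, chunk)) {
            e.setCancelled(true); // block the break inside someone else's claim
        }
    }

    // Hypothetical ownership check; the real plugin consults its chunk handler.
    private boolean isProtectedFrom(Player player, Chunk chunk) {
        return false;
    }
}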
Java
mit
f5102e4c2c94e57f2489b0660d51b6a92c28633e
0
eXsio/clock,eXsio/clock,eXsio/clock
package com.exsio.clock.configuration; import com.beust.jcommander.internal.Lists; import com.exsio.clock.configuration.ApplicationConfiguration; import com.exsio.clock.configuration.support.AtmosphereArgumentResolver; import org.atmosphere.cpr.AtmosphereConfig; import org.atmosphere.cpr.AtmosphereFramework; import org.atmosphere.cpr.BroadcasterFactory; import org.springframework.boot.context.embedded.ServletRegistrationBean; import org.springframework.web.method.support.HandlerMethodArgumentResolver; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistration; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; import org.testng.Assert; import org.testng.annotations.Test; import javax.naming.NamingException; import java.io.IOException; import java.util.List; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; public class ApplicationConfigurationTest { ApplicationConfiguration underTest = new ApplicationConfiguration(); @Test public void testAdditionOfAtmosphereArgumentResolver() { List<HandlerMethodArgumentResolver> resolvers = Lists.newArrayList(); underTest.addArgumentResolvers(resolvers); Assert.assertEquals(resolvers.size(), 1); Assert.assertTrue(resolvers.get(0) instanceof AtmosphereArgumentResolver); } @Test public void test_atmosphereframework() { AtmosphereFramework result = underTest.atmosphereFramework(); assertNotNull(result); } @Test public void test_broadcasterFactory() { AtmosphereFramework atmosphereFramework = mock(AtmosphereFramework.class); AtmosphereConfig atmosphereConfig = mock(AtmosphereConfig.class); BroadcasterFactory broadcasterFactory = mock(BroadcasterFactory.class); when(atmosphereFramework.getAtmosphereConfig()).thenReturn(atmosphereConfig); when(atmosphereConfig.getBroadcasterFactory()).thenReturn(broadcasterFactory); BroadcasterFactory result = underTest.broadcasterFactory(atmosphereFramework); assertEquals(result, broadcasterFactory); } @Test public void test_jacksonConverter() { assertNotNull(underTest.jacksonHttpMessageConverter()); } @Test public void test_servletRegistrationBean() { ServletRegistrationBean result = underTest.servletRegistrationBean(); assertNotNull(result); assertEquals(result.getServletName(), "push"); assertTrue(result.getUrlMappings().contains("*.push")); assertEquals(result.getInitParameters().size(), 5); } @Test public void test_addResourceHandlers() { ResourceHandlerRegistry registry = mock(ResourceHandlerRegistry.class); ResourceHandlerRegistration registration = mock(ResourceHandlerRegistration.class); when(registry.addResourceHandler(anyString())).thenReturn(registration); underTest.addResourceHandlers(registry); verify(registry, times(7)).addResourceHandler(anyString()); verify(registration,times(7)).addResourceLocations(anyString()); verifyNoMoreInteractions(registration); verifyNoMoreInteractions(registry); } @Test public void test_properties() throws IOException, NamingException { assertNotNull(underTest.properties()); } }
src/test/java/com/exsio/clock/configuration/ApplicationConfigurationTest.java
package com.exsio.clock.configuration; import com.beust.jcommander.internal.Lists; import com.exsio.clock.configuration.ApplicationConfiguration; import com.exsio.clock.configuration.support.AtmosphereArgumentResolver; import org.atmosphere.cpr.AtmosphereConfig; import org.atmosphere.cpr.AtmosphereFramework; import org.atmosphere.cpr.BroadcasterFactory; import org.springframework.boot.context.embedded.ServletRegistrationBean; import org.springframework.web.method.support.HandlerMethodArgumentResolver; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistration; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; import org.testng.Assert; import org.testng.annotations.Test; import java.util.List; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; public class ApplicationConfigurationTest { ApplicationConfiguration underTest = new ApplicationConfiguration(); @Test public void testAdditionOfAtmosphereArgumentResolver() { List<HandlerMethodArgumentResolver> resolvers = Lists.newArrayList(); underTest.addArgumentResolvers(resolvers); Assert.assertEquals(resolvers.size(), 1); Assert.assertTrue(resolvers.get(0) instanceof AtmosphereArgumentResolver); } @Test public void test_atmosphereframework() { AtmosphereFramework result = underTest.atmosphereFramework(); assertNotNull(result); } @Test public void test_broadcasterFactory() { AtmosphereFramework atmosphereFramework = mock(AtmosphereFramework.class); AtmosphereConfig atmosphereConfig = mock(AtmosphereConfig.class); BroadcasterFactory broadcasterFactory = mock(BroadcasterFactory.class); when(atmosphereFramework.getAtmosphereConfig()).thenReturn(atmosphereConfig); when(atmosphereConfig.getBroadcasterFactory()).thenReturn(broadcasterFactory); BroadcasterFactory result = underTest.broadcasterFactory(atmosphereFramework); assertEquals(result, broadcasterFactory); } @Test public void test_jacksonConverter() { assertNotNull(underTest.jacksonHttpMessageConverter()); } @Test public void test_servletRegistrationBean() { ServletRegistrationBean result = underTest.servletRegistrationBean(); assertNotNull(result); assertEquals(result.getServletName(), "push"); assertTrue(result.getUrlMappings().contains("*.push")); assertEquals(result.getInitParameters().size(), 5); } @Test public void test_addResourceHandlers() { ResourceHandlerRegistry registry = mock(ResourceHandlerRegistry.class); ResourceHandlerRegistration registration = mock(ResourceHandlerRegistration.class); when(registry.addResourceHandler(anyString())).thenReturn(registration); underTest.addResourceHandlers(registry); verify(registry, times(7)).addResourceHandler(anyString()); verify(registration,times(7)).addResourceLocations(anyString()); verifyNoMoreInteractions(registration); verifyNoMoreInteractions(registry); } }
tests
src/test/java/com/exsio/clock/configuration/ApplicationConfigurationTest.java
tests
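The test added above follows a plain stub, call, verify flow with Mockito under TestNG. A minimal generic sketch of that flow follows; Registry and Registration are illustrative interfaces standing in for the Spring MVC types mocked in the record.

import org.testng.annotations.Test;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

public class StubCallVerifySketchTest {

    // Illustrative collaborators; not the Spring MVC types from the record.
    public interface Registration {
        Registration addLocation(String location);
    }

    public interface Registry {
        Registration addHandler(String pattern);
    }

    // Code under test: registers one handler with one location.
    static void configure(Registry registry) {
        registry.addHandler("/static/**").addLocation("classpath:/static/");
    }

    @Test
    public void configurationRegistersOneHandler() {
        Registry registry = mock(Registry.class);
        Registration registration = mock(Registration.class);
        when(registry.addHandler("/static/**")).thenReturn(registration);

        configure(registry);

        verify(registry, times(1)).addHandler("/static/**");
        verify(registration, times(1)).addLocation("classpath:/static/");
        verifyNoMoreInteractions(registry, registration);
    }
}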
Java
mit
0c145b1d875f1cbd7f6461f0871b066d6b06add7
0
GluuFederation/oxAuth,madumlao/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,madumlao/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,madumlao/oxAuth,madumlao/oxAuth,madumlao/oxAuth
/* * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text. * * Copyright (c) 2014, Gluu */ package org.xdi.oxauth.authorize.ws.rs; import org.apache.commons.lang.StringUtils; import org.codehaus.jettison.json.JSONException; import org.jboss.seam.Component; import org.jboss.seam.ScopeType; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Logger; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.Scope; import org.jboss.seam.faces.FacesManager; import org.jboss.seam.international.LocaleSelector; import org.jboss.seam.log.Log; import org.jboss.seam.security.Identity; import org.xdi.model.AuthenticationScriptUsageType; import org.xdi.model.custom.script.conf.CustomScriptConfiguration; import org.xdi.oxauth.auth.Authenticator; import org.xdi.oxauth.model.authorize.AuthorizeErrorResponseType; import org.xdi.oxauth.model.authorize.AuthorizeParamsValidator; import org.xdi.oxauth.model.common.Prompt; import org.xdi.oxauth.model.common.SessionIdState; import org.xdi.oxauth.model.common.SessionState; import org.xdi.oxauth.model.common.User; import org.xdi.oxauth.model.config.ConfigurationFactory; import org.xdi.oxauth.model.config.Constants; import org.xdi.oxauth.model.error.ErrorResponseFactory; import org.xdi.oxauth.model.federation.FederationTrust; import org.xdi.oxauth.model.federation.FederationTrustStatus; import org.xdi.oxauth.model.jwt.JwtClaimName; import org.xdi.oxauth.model.ldap.ClientAuthorizations; import org.xdi.oxauth.model.registration.Client; import org.xdi.oxauth.model.util.LocaleUtil; import org.xdi.oxauth.model.util.Util; import org.xdi.oxauth.service.*; import org.xdi.oxauth.service.external.ExternalAuthenticationService; import org.xdi.service.net.NetworkService; import org.xdi.util.StringHelper; import javax.faces.context.ExternalContext; import javax.faces.context.FacesContext; import java.io.UnsupportedEncodingException; import java.util.*; /** * @author Javier Rojas Blum * @author Yuriy Movchan * @version December 15, 2015 */ @Name("authorizeAction") @Scope(ScopeType.EVENT) // Do not change scope, we try to keep server without http sessions public class AuthorizeAction { @Logger private Log log; @In private ClientService clientService; @In private ErrorResponseFactory errorResponseFactory; @In private UserGroupService userGroupService; @In private FederationDataService federationDataService; @In private SessionStateService sessionStateService; @In private UserService userService; @In private RedirectionUriService redirectionUriService; @In private AuthenticationService authenticationService; @In private ClientAuthorizationsService clientAuthorizationsService; @In private ExternalAuthenticationService externalAuthenticationService; @In(value = AppInitializer.DEFAULT_AUTH_MODE_NAME, required = false) private String defaultAuthenticationMethod; @In("org.jboss.seam.international.localeSelector") private LocaleSelector localeSelector; @In private NetworkService networkService; @In private Identity identity; // OAuth 2.0 request parameters private String scope; private String responseType; private String clientId; private String redirectUri; private String state; // OpenID Connect request parameters private String responseMode; private String nonce; private String display; private String prompt; private Integer maxAge; private String uiLocales; private String idTokenHint; private String loginHint; private String acrValues; private String amrValues; private String request; private String 
requestUri; // custom oxAuth parameters private String sessionState; public void checkUiLocales() { List<String> uiLocalesList = null; if (StringUtils.isNotBlank(uiLocales)) { uiLocalesList = Util.splittedStringAsList(uiLocales, " "); FacesContext facesContext = FacesContext.getCurrentInstance(); List<Locale> supportedLocales = new ArrayList<Locale>(); for (Iterator<Locale> it = facesContext.getApplication().getSupportedLocales(); it.hasNext(); ) { supportedLocales.add(it.next()); } Locale matchingLocale = LocaleUtil.localeMatch(uiLocalesList, supportedLocales); if (matchingLocale != null) { localeSelector.setLocale(matchingLocale); } } } public String checkPermissionGranted() { SessionState session = getSession(); List<Prompt> prompts = Prompt.fromString(prompt, " "); try { session = sessionStateService.assertAuthenticatedSessionCorrespondsToNewRequest(session, redirectUri, acrValues); } catch (AcrChangedException e) { log.debug("There is already existing session which has another acr then {0}, session: {1}", acrValues, session.getId()); if (prompts.contains(Prompt.LOGIN)) { session = handleAcrChange(session, prompts); } else { log.error("Please provide prompt=login to force login with new ACR or otherwise perform logout and re-authenticate."); permissionDenied(); return Constants.RESULT_FAILURE; } } if (session == null || session.getUserDn() == null || SessionIdState.AUTHENTICATED != session.getState()) { final ExternalContext externalContext = FacesContext.getCurrentInstance().getExternalContext(); Map<String, String> parameterMap = externalContext.getRequestParameterMap(); Map<String, String> requestParameterMap = authenticationService.getAllowedParameters(parameterMap); String redirectTo = "/login.xhtml"; boolean useExternalAuthenticator = externalAuthenticationService.isEnabled(AuthenticationScriptUsageType.INTERACTIVE); if (useExternalAuthenticator) { List<String> acrValuesList = acrValuesList(); if (acrValuesList.isEmpty()) { if (StringHelper.isNotEmpty(defaultAuthenticationMethod)) { acrValuesList = Arrays.asList(defaultAuthenticationMethod); } else { CustomScriptConfiguration defaultExternalAuthenticator = externalAuthenticationService.getDefaultExternalAuthenticator(AuthenticationScriptUsageType.INTERACTIVE); if (defaultExternalAuthenticator != null) { acrValuesList = Arrays.asList(defaultExternalAuthenticator.getName()); } } } CustomScriptConfiguration customScriptConfiguration = externalAuthenticationService.determineCustomScriptConfiguration(AuthenticationScriptUsageType.INTERACTIVE, acrValuesList); if (customScriptConfiguration == null) { log.error("Failed to get CustomScriptConfiguration. 
auth_step: {0}, acr_values: {1}", 1, this.acrValues); permissionDenied(); return Constants.RESULT_FAILURE; } String acr = customScriptConfiguration.getName(); requestParameterMap.put(JwtClaimName.AUTHENTICATION_METHOD_REFERENCES, acr); requestParameterMap.put("auth_step", Integer.toString(1)); String tmpRedirectTo = externalAuthenticationService.executeExternalGetPageForStep(customScriptConfiguration, 1); if (StringHelper.isNotEmpty(tmpRedirectTo)) { log.trace("Redirect to person authentication login page: {0}", tmpRedirectTo); redirectTo = tmpRedirectTo; } } // Store Remote IP String remoteIp = networkService.getRemoteIp(); requestParameterMap.put(Constants.REMOTE_IP, remoteIp); // Create unauthenticated session SessionState unauthenticatedSession = sessionStateService.generateSessionState(null, new Date(), SessionIdState.UNAUTHENTICATED, requestParameterMap, false); unauthenticatedSession.setSessionAttributes(requestParameterMap); boolean persisted = sessionStateService.persistSessionState(unauthenticatedSession, !prompts.contains(Prompt.NONE)); // always persist is prompt is not none if (persisted && log.isTraceEnabled()) { log.trace("Session '{0}' persisted to LDAP", unauthenticatedSession.getId()); } this.sessionState = unauthenticatedSession.getId(); sessionStateService.createSessionStateCookie(this.sessionState); FacesManager.instance().redirect(redirectTo, null, false); return Constants.RESULT_FAILURE; } if (clientId != null && !clientId.isEmpty()) { final Client client = clientService.getClient(clientId); if (client != null) { if (StringUtils.isBlank(redirectionUriService.validateRedirectionUri(clientId, redirectUri))) { permissionDenied(); } final User user = userService.getUserByDn(session.getUserDn()); log.trace("checkPermissionGranted, user = " + user); // OXAUTH-87 : if user is not in group then deny permission if (user != null && client.hasUserGroups()) { // if user is not in any group then deny permissions if (!userGroupService.isInAnyGroup(client.getUserGroups(), user.getDn())) { permissionDenied(); } } // OXAUTH-88 : federation support if (ConfigurationFactory.instance().getConfiguration().getFederationEnabled()) { final List<FederationTrust> list = federationDataService.getTrustByClient(client, FederationTrustStatus.ACTIVE); if (list == null || list.isEmpty()) { log.trace("Deny authorization, client is not in any federation trust, client: {0}", client.getDn()); permissionDenied(); } else if (FederationDataService.skipAuthorization(list)) { log.trace("Skip authorization (permissions granted), client is in federation trust where skip is allowed, client: {1}", client.getDn()); permissionGranted(session); } } if (AuthorizeParamsValidator.validatePrompt(prompts)) { ClientAuthorizations clientAuthorizations = clientAuthorizationsService.findClientAuthorizations(user.getAttribute("inum"), client.getClientId()); if (clientAuthorizations != null && clientAuthorizations.getScopes() != null && Arrays.asList(clientAuthorizations.getScopes()).containsAll( org.xdi.oxauth.model.util.StringUtils.spaceSeparatedToList(scope))) { permissionGranted(session); } else if (ConfigurationFactory.instance().getConfiguration().getTrustedClientEnabled()) { // if trusted client = true, then skip authorization page and grant access directly if (client.getTrustedClient() && !prompts.contains(Prompt.CONSENT)) { permissionGranted(session); } } else { consentRequired(); } } else { invalidRequest(); } } } return Constants.RESULT_FAILURE; } private SessionState handleAcrChange(SessionState session, 
List<Prompt> prompts) { if (session != null && prompts.contains(Prompt.LOGIN)) { // change session state only if prompt=none if (session.getState() == SessionIdState.AUTHENTICATED) { session.getSessionAttributes().put("prompt", prompt); session.setState(SessionIdState.UNAUTHENTICATED); // Update Remote IP String remoteIp = networkService.getRemoteIp(); session.getSessionAttributes().put(Constants.REMOTE_IP, remoteIp); sessionStateService.updateSessionState(session); sessionStateService.reinitLogin(session, false); } } return session; } /** * By definition we expects space separated acr values as it is defined in spec. But we also try maybe some client * sent it to us as json array. So we try both. * * @return acr value list */ private List<String> acrValuesList() { List<String> acrs; try { acrs = Util.jsonArrayStringAsList(this.acrValues); } catch (JSONException ex) { acrs = Util.splittedStringAsList(acrValues, " "); } return acrs; } private SessionState getSession() { if (StringUtils.isBlank(sessionState)) { sessionState = sessionStateService.getSessionStateFromCookie(); if (StringUtils.isBlank(this.sessionState)) { return null; } } if (!identity.isLoggedIn()) { final Authenticator authenticator = (Authenticator) Component.getInstance(Authenticator.class, true); authenticator.authenticateBySessionState(sessionState); } SessionState ldapSessionState = sessionStateService.getSessionState(sessionState); if (ldapSessionState == null) { identity.logout(); } return ldapSessionState; } public List<org.xdi.oxauth.model.common.Scope> getScopes() { List<org.xdi.oxauth.model.common.Scope> scopes = new ArrayList<org.xdi.oxauth.model.common.Scope>(); ScopeService scopeService = ScopeService.instance(); if (scope != null && !scope.isEmpty()) { String[] scopesName = scope.split(" "); for (String scopeName : scopesName) { org.xdi.oxauth.model.common.Scope s = scopeService.getScopeByDisplayName(scopeName); if (s != null && s.getDescription() != null) { scopes.add(s); } } } return scopes; } /** * Returns the scope of the access request. * * @return The scope of the access request. */ public String getScope() { return scope; } /** * Sets the scope of the access request. * * @param scope The scope of the access request. */ public void setScope(String scope) { this.scope = scope; } /** * Returns the response type: <code>code</code> for requesting an authorization code (authorization code grant) or * <strong>token</strong> for requesting an access token (implicit grant). * * @return The response type. */ public String getResponseType() { return responseType; } /** * Sets the response type. * * @param responseType The response type. */ public void setResponseType(String responseType) { this.responseType = responseType; } /** * Returns the client identifier. * * @return The client identifier. */ public String getClientId() { return clientId; } /** * Sets the client identifier. * * @param clientId The client identifier. */ public void setClientId(String clientId) { this.clientId = clientId; } /** * Returns the redirection URI. * * @return The redirection URI. */ public String getRedirectUri() { return redirectUri; } /** * Sets the redirection URI. * * @param redirectUri The redirection URI. */ public void setRedirectUri(String redirectUri) { this.redirectUri = redirectUri; } /** * Returns an opaque value used by the client to maintain state between the request and callback. The authorization * server includes this value when redirecting the user-agent back to the client. 
The parameter should be used for * preventing cross-site request forgery. * * @return The state between the request and callback. */ public String getState() { return state; } /** * Sets the state between the request and callback. * * @param state The state between the request and callback. */ public void setState(String state) { this.state = state; } /** * Returns the mechanism to be used for returning parameters from the Authorization Endpoint. * * @return The response mode. */ public String getResponseMode() { return responseMode; } /** * Sets the mechanism to be used for returning parameters from the Authorization Endpoint. * * @param responseMode The response mode. */ public void setResponseMode(String responseMode) { this.responseMode = responseMode; } /** * Return a string value used to associate a user agent session with an ID Token, and to mitigate replay attacks. * * @return The nonce value. */ public String getNonce() { return nonce; } /** * Sets a string value used to associate a user agent session with an ID Token, and to mitigate replay attacks. * * @param nonce The nonce value. */ public void setNonce(String nonce) { this.nonce = nonce; } /** * Returns an ASCII string value that specifies how the Authorization Server displays the authentication page * to the End-User. * * @return The display value. */ public String getDisplay() { return display; } /** * Sets an ASCII string value that specifies how the Authorization Server displays the authentication page * to the End-User. * * @param display The display value */ public void setDisplay(String display) { this.display = display; } /** * Returns a space delimited list of ASCII strings that can contain the values * login, consent, select_account, and none. * * @return A list of prompt options. */ public String getPrompt() { return prompt; } /** * Sets a space delimited list of ASCII strings that can contain the values * login, consent, select_account, and none. * * @param prompt A list of prompt options. */ public void setPrompt(String prompt) { this.prompt = prompt; } public Integer getMaxAge() { return maxAge; } public void setMaxAge(Integer maxAge) { this.maxAge = maxAge; } public String getUiLocales() { return uiLocales; } public void setUiLocales(String uiLocales) { this.uiLocales = uiLocales; } public String getIdTokenHint() { return idTokenHint; } public void setIdTokenHint(String idTokenHint) { this.idTokenHint = idTokenHint; } public String getLoginHint() { return loginHint; } public void setLoginHint(String loginHint) { this.loginHint = loginHint; } public String getAcrValues() { return acrValues; } public void setAcrValues(String acrValues) { this.acrValues = acrValues; } public String getAmrValues() { return amrValues; } public void setAmrValues(String amrValues) { this.amrValues = amrValues; } /** * Returns a JWT encoded OpenID Request Object. * * @return A JWT encoded OpenID Request Object. */ public String getRequest() { return request; } /** * Sets a JWT encoded OpenID Request Object. * * @param request A JWT encoded OpenID Request Object. */ public void setRequest(String request) { this.request = request; } /** * Returns an URL that points to an OpenID Request Object. * * @return An URL that points to an OpenID Request Object. */ public String getRequestUri() { return requestUri; } /** * Sets an URL that points to an OpenID Request Object. * * @param requestUri An URL that points to an OpenID Request Object. 
*/ public void setRequestUri(String requestUri) { this.requestUri = requestUri; } public String getSessionState() { return sessionState; } public void setSessionState(String p_sessionState) { sessionState = p_sessionState; } public void permissionGranted() { final SessionState session = getSession(); permissionGranted(session); } public void permissionGranted(SessionState session) { try { final User user = userService.getUserByDn(session.getUserDn()); if (user == null) { log.error("Permission denied. Failed to find session user: userDn = " + session.getUserDn() + "."); permissionDenied(); return; } final Client client = clientService.getClient(clientId); if(client.getPersistClientAuthorizations()){ client.setTrustedClient(true); clientService.merge(client); } final List<String> scopes = org.xdi.oxauth.model.util.StringUtils.spaceSeparatedToList(scope); clientAuthorizationsService.add(user.getAttribute("inum"), client.getClientId(), scopes); session.addPermission(clientId, true); sessionStateService.updateSessionState(session); // OXAUTH-297 - set session_state cookie SessionStateService.instance().createSessionStateCookie(sessionState); Map<String, String> sessionAttribute = authenticationService.getAllowedParameters(session.getSessionAttributes()); final String parametersAsString = authenticationService.parametersAsString(sessionAttribute); final String uri = "seam/resource/restv1/oxauth/authorize?" + parametersAsString; log.trace("permissionGranted, redirectTo: {0}", uri); FacesManager.instance().redirectToExternalURL(uri); } catch (UnsupportedEncodingException e) { log.trace(e.getMessage(), e); } } public void permissionDenied() { log.trace("permissionDenied"); StringBuilder sb = new StringBuilder(); sb.append(redirectUri); if (redirectUri != null && redirectUri.contains("?")) { sb.append("&"); } else { sb.append("?"); } sb.append(errorResponseFactory.getErrorAsQueryString(AuthorizeErrorResponseType.ACCESS_DENIED, getState())); FacesManager.instance().redirectToExternalURL(sb.toString()); } public void invalidRequest() { log.trace("invalidRequest"); StringBuilder sb = new StringBuilder(); sb.append(redirectUri); if (redirectUri != null && redirectUri.contains("?")) { sb.append("&"); } else { sb.append("?"); } sb.append(errorResponseFactory.getErrorAsQueryString(AuthorizeErrorResponseType.INVALID_REQUEST, getState())); FacesManager.instance().redirectToExternalURL(sb.toString()); } public void consentRequired() { StringBuilder sb = new StringBuilder(); sb.append(redirectUri); if (redirectUri != null && redirectUri.contains("?")) { sb.append("&"); } else { sb.append("?"); } sb.append(errorResponseFactory.getErrorAsQueryString(AuthorizeErrorResponseType.CONSENT_REQUIRED, getState())); FacesManager.instance().redirectToExternalURL(sb.toString()); } }
Server/src/main/java/org/xdi/oxauth/authorize/ws/rs/AuthorizeAction.java
/* * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text. * * Copyright (c) 2014, Gluu */ package org.xdi.oxauth.authorize.ws.rs; import org.apache.commons.lang.StringUtils; import org.codehaus.jettison.json.JSONException; import org.jboss.seam.Component; import org.jboss.seam.ScopeType; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Logger; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.Scope; import org.jboss.seam.faces.FacesManager; import org.jboss.seam.international.LocaleSelector; import org.jboss.seam.log.Log; import org.jboss.seam.security.Identity; import org.xdi.model.AuthenticationScriptUsageType; import org.xdi.model.custom.script.conf.CustomScriptConfiguration; import org.xdi.oxauth.auth.Authenticator; import org.xdi.oxauth.model.authorize.AuthorizeErrorResponseType; import org.xdi.oxauth.model.authorize.AuthorizeParamsValidator; import org.xdi.oxauth.model.common.Prompt; import org.xdi.oxauth.model.common.SessionIdState; import org.xdi.oxauth.model.common.SessionState; import org.xdi.oxauth.model.common.User; import org.xdi.oxauth.model.config.ConfigurationFactory; import org.xdi.oxauth.model.config.Constants; import org.xdi.oxauth.model.error.ErrorResponseFactory; import org.xdi.oxauth.model.federation.FederationTrust; import org.xdi.oxauth.model.federation.FederationTrustStatus; import org.xdi.oxauth.model.jwt.JwtClaimName; import org.xdi.oxauth.model.ldap.ClientAuthorizations; import org.xdi.oxauth.model.registration.Client; import org.xdi.oxauth.model.util.LocaleUtil; import org.xdi.oxauth.model.util.Util; import org.xdi.oxauth.service.*; import org.xdi.oxauth.service.external.ExternalAuthenticationService; import org.xdi.service.net.NetworkService; import org.xdi.util.StringHelper; import javax.faces.context.ExternalContext; import javax.faces.context.FacesContext; import java.io.UnsupportedEncodingException; import java.util.*; /** * @author Javier Rojas Blum * @author Yuriy Movchan * @version December 15, 2015 */ @Name("authorizeAction") @Scope(ScopeType.EVENT) // Do not change scope, we try to keep server without http sessions public class AuthorizeAction { @Logger private Log log; @In private ClientService clientService; @In private ErrorResponseFactory errorResponseFactory; @In private UserGroupService userGroupService; @In private FederationDataService federationDataService; @In private SessionStateService sessionStateService; @In private UserService userService; @In private RedirectionUriService redirectionUriService; @In private AuthenticationService authenticationService; @In private ClientAuthorizationsService clientAuthorizationsService; @In private ExternalAuthenticationService externalAuthenticationService; @In(value = AppInitializer.DEFAULT_AUTH_MODE_NAME, required = false) private String defaultAuthenticationMethod; @In("org.jboss.seam.international.localeSelector") private LocaleSelector localeSelector; @In private NetworkService networkService; @In private Identity identity; // OAuth 2.0 request parameters private String scope; private String responseType; private String clientId; private String redirectUri; private String state; // OpenID Connect request parameters private String responseMode; private String nonce; private String display; private String prompt; private Integer maxAge; private String uiLocales; private String idTokenHint; private String loginHint; private String acrValues; private String amrValues; private String request; private String 
requestUri; // custom oxAuth parameters private String sessionState; public void checkUiLocales() { List<String> uiLocalesList = null; if (StringUtils.isNotBlank(uiLocales)) { uiLocalesList = Util.splittedStringAsList(uiLocales, " "); FacesContext facesContext = FacesContext.getCurrentInstance(); List<Locale> supportedLocales = new ArrayList<Locale>(); for (Iterator<Locale> it = facesContext.getApplication().getSupportedLocales(); it.hasNext(); ) { supportedLocales.add(it.next()); } Locale matchingLocale = LocaleUtil.localeMatch(uiLocalesList, supportedLocales); if (matchingLocale != null) { localeSelector.setLocale(matchingLocale); } } } public String checkPermissionGranted() { SessionState session = getSession(); List<Prompt> prompts = Prompt.fromString(prompt, " "); try { session = sessionStateService.assertAuthenticatedSessionCorrespondsToNewRequest(session, redirectUri, acrValues); } catch (AcrChangedException e) { log.debug("There is already existing session which has another acr then {0}, session: {1}", acrValues, session.getId()); if (prompts.contains(Prompt.LOGIN)) { session = handleAcrChange(session, prompts); } else { log.error("Please provide prompt=login to force login with new ACR or otherwise perform logout and re-authenticate."); permissionDenied(); return Constants.RESULT_FAILURE; } } if (session == null || session.getUserDn() == null || SessionIdState.AUTHENTICATED != session.getState()) { final ExternalContext externalContext = FacesContext.getCurrentInstance().getExternalContext(); Map<String, String> parameterMap = externalContext.getRequestParameterMap(); Map<String, String> requestParameterMap = authenticationService.getAllowedParameters(parameterMap); String redirectTo = "/login.xhtml"; boolean useExternalAuthenticator = externalAuthenticationService.isEnabled(AuthenticationScriptUsageType.INTERACTIVE); if (useExternalAuthenticator) { List<String> acrValuesList = acrValuesList(); if (acrValuesList.isEmpty()) { if (StringHelper.isNotEmpty(defaultAuthenticationMethod)) { acrValuesList = Arrays.asList(defaultAuthenticationMethod); } else { CustomScriptConfiguration defaultExternalAuthenticator = externalAuthenticationService.getDefaultExternalAuthenticator(AuthenticationScriptUsageType.INTERACTIVE); if (defaultExternalAuthenticator != null) { acrValuesList = Arrays.asList(defaultExternalAuthenticator.getName()); } } } CustomScriptConfiguration customScriptConfiguration = externalAuthenticationService.determineCustomScriptConfiguration(AuthenticationScriptUsageType.INTERACTIVE, acrValuesList); if (customScriptConfiguration == null) { log.error("Failed to get CustomScriptConfiguration. 
auth_step: {0}, acr_values: {1}", 1, this.acrValues); permissionDenied(); return Constants.RESULT_FAILURE; } String acr = customScriptConfiguration.getName(); requestParameterMap.put(JwtClaimName.AUTHENTICATION_METHOD_REFERENCES, acr); requestParameterMap.put("auth_step", Integer.toString(1)); String tmpRedirectTo = externalAuthenticationService.executeExternalGetPageForStep(customScriptConfiguration, 1); if (StringHelper.isNotEmpty(tmpRedirectTo)) { log.trace("Redirect to person authentication login page: {0}", tmpRedirectTo); redirectTo = tmpRedirectTo; } } // Store Remote IP String remoteIp = networkService.getRemoteIp(); requestParameterMap.put(Constants.REMOTE_IP, remoteIp); // Create unauthenticated session SessionState unauthenticatedSession = sessionStateService.generateSessionState(null, new Date(), SessionIdState.UNAUTHENTICATED, requestParameterMap, false); unauthenticatedSession.setSessionAttributes(requestParameterMap); boolean persisted = sessionStateService.persistSessionState(unauthenticatedSession, !prompts.contains(Prompt.NONE)); // always persist is prompt is not none if (persisted && log.isTraceEnabled()) { log.trace("Session '{0}' persisted to LDAP", unauthenticatedSession.getId()); } this.sessionState = unauthenticatedSession.getId(); sessionStateService.createSessionStateCookie(this.sessionState); FacesManager.instance().redirect(redirectTo, null, false); return Constants.RESULT_FAILURE; } if (clientId != null && !clientId.isEmpty()) { final Client client = clientService.getClient(clientId); if (client != null) { if(!client.getTrustedClient()){ return Constants.RESULT_SUCCESS; } if (StringUtils.isBlank(redirectionUriService.validateRedirectionUri(clientId, redirectUri))) { permissionDenied(); } final User user = userService.getUserByDn(session.getUserDn()); log.trace("checkPermissionGranted, user = " + user); // OXAUTH-87 : if user is not in group then deny permission if (user != null && client.hasUserGroups()) { // if user is not in any group then deny permissions if (!userGroupService.isInAnyGroup(client.getUserGroups(), user.getDn())) { permissionDenied(); } } // OXAUTH-88 : federation support if (ConfigurationFactory.instance().getConfiguration().getFederationEnabled()) { final List<FederationTrust> list = federationDataService.getTrustByClient(client, FederationTrustStatus.ACTIVE); if (list == null || list.isEmpty()) { log.trace("Deny authorization, client is not in any federation trust, client: {0}", client.getDn()); permissionDenied(); } else if (FederationDataService.skipAuthorization(list)) { log.trace("Skip authorization (permissions granted), client is in federation trust where skip is allowed, client: {1}", client.getDn()); permissionGranted(session); } } if (AuthorizeParamsValidator.validatePrompt(prompts)) { ClientAuthorizations clientAuthorizations = clientAuthorizationsService.findClientAuthorizations(user.getAttribute("inum"), client.getClientId()); if (clientAuthorizations != null && clientAuthorizations.getScopes() != null && Arrays.asList(clientAuthorizations.getScopes()).containsAll( org.xdi.oxauth.model.util.StringUtils.spaceSeparatedToList(scope))) { permissionGranted(session); } else if (ConfigurationFactory.instance().getConfiguration().getTrustedClientEnabled()) { // if trusted client = true, then skip authorization page and grant access directly if (client.getTrustedClient() && !prompts.contains(Prompt.CONSENT)) { permissionGranted(session); } } else { consentRequired(); } } else { invalidRequest(); } } } return Constants.RESULT_FAILURE; } 
private SessionState handleAcrChange(SessionState session, List<Prompt> prompts) { if (session != null && prompts.contains(Prompt.LOGIN)) { // change session state only if prompt=none if (session.getState() == SessionIdState.AUTHENTICATED) { session.getSessionAttributes().put("prompt", prompt); session.setState(SessionIdState.UNAUTHENTICATED); // Update Remote IP String remoteIp = networkService.getRemoteIp(); session.getSessionAttributes().put(Constants.REMOTE_IP, remoteIp); sessionStateService.updateSessionState(session); sessionStateService.reinitLogin(session, false); } } return session; } /** * By definition we expects space separated acr values as it is defined in spec. But we also try maybe some client * sent it to us as json array. So we try both. * * @return acr value list */ private List<String> acrValuesList() { List<String> acrs; try { acrs = Util.jsonArrayStringAsList(this.acrValues); } catch (JSONException ex) { acrs = Util.splittedStringAsList(acrValues, " "); } return acrs; } private SessionState getSession() { if (StringUtils.isBlank(sessionState)) { sessionState = sessionStateService.getSessionStateFromCookie(); if (StringUtils.isBlank(this.sessionState)) { return null; } } if (!identity.isLoggedIn()) { final Authenticator authenticator = (Authenticator) Component.getInstance(Authenticator.class, true); authenticator.authenticateBySessionState(sessionState); } SessionState ldapSessionState = sessionStateService.getSessionState(sessionState); if (ldapSessionState == null) { identity.logout(); } return ldapSessionState; } public List<org.xdi.oxauth.model.common.Scope> getScopes() { List<org.xdi.oxauth.model.common.Scope> scopes = new ArrayList<org.xdi.oxauth.model.common.Scope>(); ScopeService scopeService = ScopeService.instance(); if (scope != null && !scope.isEmpty()) { String[] scopesName = scope.split(" "); for (String scopeName : scopesName) { org.xdi.oxauth.model.common.Scope s = scopeService.getScopeByDisplayName(scopeName); if (s != null && s.getDescription() != null) { scopes.add(s); } } } return scopes; } /** * Returns the scope of the access request. * * @return The scope of the access request. */ public String getScope() { return scope; } /** * Sets the scope of the access request. * * @param scope The scope of the access request. */ public void setScope(String scope) { this.scope = scope; } /** * Returns the response type: <code>code</code> for requesting an authorization code (authorization code grant) or * <strong>token</strong> for requesting an access token (implicit grant). * * @return The response type. */ public String getResponseType() { return responseType; } /** * Sets the response type. * * @param responseType The response type. */ public void setResponseType(String responseType) { this.responseType = responseType; } /** * Returns the client identifier. * * @return The client identifier. */ public String getClientId() { return clientId; } /** * Sets the client identifier. * * @param clientId The client identifier. */ public void setClientId(String clientId) { this.clientId = clientId; } /** * Returns the redirection URI. * * @return The redirection URI. */ public String getRedirectUri() { return redirectUri; } /** * Sets the redirection URI. * * @param redirectUri The redirection URI. */ public void setRedirectUri(String redirectUri) { this.redirectUri = redirectUri; } /** * Returns an opaque value used by the client to maintain state between the request and callback. 
The authorization * server includes this value when redirecting the user-agent back to the client. The parameter should be used for * preventing cross-site request forgery. * * @return The state between the request and callback. */ public String getState() { return state; } /** * Sets the state between the request and callback. * * @param state The state between the request and callback. */ public void setState(String state) { this.state = state; } /** * Returns the mechanism to be used for returning parameters from the Authorization Endpoint. * * @return The response mode. */ public String getResponseMode() { return responseMode; } /** * Sets the mechanism to be used for returning parameters from the Authorization Endpoint. * * @param responseMode The response mode. */ public void setResponseMode(String responseMode) { this.responseMode = responseMode; } /** * Return a string value used to associate a user agent session with an ID Token, and to mitigate replay attacks. * * @return The nonce value. */ public String getNonce() { return nonce; } /** * Sets a string value used to associate a user agent session with an ID Token, and to mitigate replay attacks. * * @param nonce The nonce value. */ public void setNonce(String nonce) { this.nonce = nonce; } /** * Returns an ASCII string value that specifies how the Authorization Server displays the authentication page * to the End-User. * * @return The display value. */ public String getDisplay() { return display; } /** * Sets an ASCII string value that specifies how the Authorization Server displays the authentication page * to the End-User. * * @param display The display value */ public void setDisplay(String display) { this.display = display; } /** * Returns a space delimited list of ASCII strings that can contain the values * login, consent, select_account, and none. * * @return A list of prompt options. */ public String getPrompt() { return prompt; } /** * Sets a space delimited list of ASCII strings that can contain the values * login, consent, select_account, and none. * * @param prompt A list of prompt options. */ public void setPrompt(String prompt) { this.prompt = prompt; } public Integer getMaxAge() { return maxAge; } public void setMaxAge(Integer maxAge) { this.maxAge = maxAge; } public String getUiLocales() { return uiLocales; } public void setUiLocales(String uiLocales) { this.uiLocales = uiLocales; } public String getIdTokenHint() { return idTokenHint; } public void setIdTokenHint(String idTokenHint) { this.idTokenHint = idTokenHint; } public String getLoginHint() { return loginHint; } public void setLoginHint(String loginHint) { this.loginHint = loginHint; } public String getAcrValues() { return acrValues; } public void setAcrValues(String acrValues) { this.acrValues = acrValues; } public String getAmrValues() { return amrValues; } public void setAmrValues(String amrValues) { this.amrValues = amrValues; } /** * Returns a JWT encoded OpenID Request Object. * * @return A JWT encoded OpenID Request Object. */ public String getRequest() { return request; } /** * Sets a JWT encoded OpenID Request Object. * * @param request A JWT encoded OpenID Request Object. */ public void setRequest(String request) { this.request = request; } /** * Returns an URL that points to an OpenID Request Object. * * @return An URL that points to an OpenID Request Object. */ public String getRequestUri() { return requestUri; } /** * Sets an URL that points to an OpenID Request Object. * * @param requestUri An URL that points to an OpenID Request Object. 
*/ public void setRequestUri(String requestUri) { this.requestUri = requestUri; } public String getSessionState() { return sessionState; } public void setSessionState(String p_sessionState) { sessionState = p_sessionState; } public void permissionGranted() { final SessionState session = getSession(); permissionGranted(session); } public void permissionGranted(SessionState session) { try { final User user = userService.getUserByDn(session.getUserDn()); if (user == null) { log.error("Permission denied. Failed to find session user: userDn = " + session.getUserDn() + "."); permissionDenied(); return; } final Client client = clientService.getClient(clientId); if(client.getPersistClientAuthorizations()){ client.setTrustedClient(true); clientService.merge(client); } final List<String> scopes = org.xdi.oxauth.model.util.StringUtils.spaceSeparatedToList(scope); clientAuthorizationsService.add(user.getAttribute("inum"), client.getClientId(), scopes); session.addPermission(clientId, true); sessionStateService.updateSessionState(session); // OXAUTH-297 - set session_state cookie SessionStateService.instance().createSessionStateCookie(sessionState); Map<String, String> sessionAttribute = authenticationService.getAllowedParameters(session.getSessionAttributes()); final String parametersAsString = authenticationService.parametersAsString(sessionAttribute); final String uri = "seam/resource/restv1/oxauth/authorize?" + parametersAsString; log.trace("permissionGranted, redirectTo: {0}", uri); FacesManager.instance().redirectToExternalURL(uri); } catch (UnsupportedEncodingException e) { log.trace(e.getMessage(), e); } } public void permissionDenied() { log.trace("permissionDenied"); StringBuilder sb = new StringBuilder(); sb.append(redirectUri); if (redirectUri != null && redirectUri.contains("?")) { sb.append("&"); } else { sb.append("?"); } sb.append(errorResponseFactory.getErrorAsQueryString(AuthorizeErrorResponseType.ACCESS_DENIED, getState())); FacesManager.instance().redirectToExternalURL(sb.toString()); } public void invalidRequest() { log.trace("invalidRequest"); StringBuilder sb = new StringBuilder(); sb.append(redirectUri); if (redirectUri != null && redirectUri.contains("?")) { sb.append("&"); } else { sb.append("?"); } sb.append(errorResponseFactory.getErrorAsQueryString(AuthorizeErrorResponseType.INVALID_REQUEST, getState())); FacesManager.instance().redirectToExternalURL(sb.toString()); } public void consentRequired() { StringBuilder sb = new StringBuilder(); sb.append(redirectUri); if (redirectUri != null && redirectUri.contains("?")) { sb.append("&"); } else { sb.append("?"); } sb.append(errorResponseFactory.getErrorAsQueryString(AuthorizeErrorResponseType.CONSENT_REQUIRED, getState())); FacesManager.instance().redirectToExternalURL(sb.toString()); } }
Remove wrong condition
Server/src/main/java/org/xdi/oxauth/authorize/ws/rs/AuthorizeAction.java
Remove wrong condition
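The diff between the new and old contents above is a single guard inside checkPermissionGranted(): the old version returned Constants.RESULT_SUCCESS as soon as the client was found to be non-trusted, which skipped the redirect-URI validation, the user-group check and the consent logic that follow. Below is a minimal, self-contained sketch of that hunk; the class and constant names in the sketch are stand-ins for illustration and are not the oxAuth types.

// Stand-in types for illustration only; the real logic lives in AuthorizeAction.checkPermissionGranted().
class ClientStandIn {
    boolean getTrustedClient() { return false; }
}

class PermissionCheckSketch {
    static final String RESULT_SUCCESS = "success";
    static final String RESULT_FAILURE = "failure";

    // old_contents: a non-trusted client short-circuited with RESULT_SUCCESS,
    // bypassing redirect-URI validation, group checks and consent handling.
    String checkOld(ClientStandIn client) {
        if (client != null) {
            if (!client.getTrustedClient()) {
                return RESULT_SUCCESS; // the "wrong condition" the commit removes
            }
            // ... redirect-URI validation, group check, consent handling ...
        }
        return RESULT_FAILURE;
    }

    // new_contents: the guard is gone, so every client goes through the full checks.
    String checkNew(ClientStandIn client) {
        if (client != null) {
            // ... redirect-URI validation, group check, consent handling ...
        }
        return RESULT_FAILURE;
    }
}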
Java
mit
148e301839587b421ff5a1f6d399cd35f4106581
0
se-edu/addressbook-level3
package seedu.address.commons.util; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.FileNotFoundException; import java.util.Optional; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class StringUtilTest { @Rule public ExpectedException thrown = ExpectedException.none(); //---------------- Tests for isNonZeroUnsignedInteger -------------------------------------- @Test public void isNonZeroUnsignedInteger() { // EP: empty strings assertFalse(StringUtil.isNonZeroUnsignedInteger("")); // Boundary value assertFalse(StringUtil.isNonZeroUnsignedInteger(" ")); // EP: not a number assertFalse(StringUtil.isNonZeroUnsignedInteger("a")); assertFalse(StringUtil.isNonZeroUnsignedInteger("aaa")); // EP: zero assertFalse(StringUtil.isNonZeroUnsignedInteger("0")); // EP: zero as prefix assertTrue(StringUtil.isNonZeroUnsignedInteger("01")); // EP: signed numbers assertFalse(StringUtil.isNonZeroUnsignedInteger("-1")); assertFalse(StringUtil.isNonZeroUnsignedInteger("+1")); // EP: numbers with white space assertFalse(StringUtil.isNonZeroUnsignedInteger(" 10 ")); // Leading/trailing spaces assertFalse(StringUtil.isNonZeroUnsignedInteger("1 0")); // Spaces in the middle // EP: number larger than Integer.MAX_VALUE assertFalse(StringUtil.isNonZeroUnsignedInteger(Long.toString(Integer.MAX_VALUE + 1))); // EP: valid numbers, should return true assertTrue(StringUtil.isNonZeroUnsignedInteger("1")); // Boundary value assertTrue(StringUtil.isNonZeroUnsignedInteger("10")); } //---------------- Tests for containsWordIgnoreCase -------------------------------------- /* * Invalid equivalence partitions for word: null, empty, multiple words * Invalid equivalence partitions for sentence: null * The four test cases below test one invalid input at a time. */ @Test public void containsWordIgnoreCase_nullWord_throwsNullPointerException() { assertExceptionThrown(NullPointerException.class, "typical sentence", null, Optional.empty()); } private void assertExceptionThrown(Class<? 
extends Throwable> exceptionClass, String sentence, String word, Optional<String> errorMessage) { thrown.expect(exceptionClass); errorMessage.ifPresent(message -> thrown.expectMessage(message)); StringUtil.containsWordIgnoreCase(sentence, word); } @Test public void containsWordIgnoreCase_emptyWord_throwsIllegalArgumentException() { assertExceptionThrown(IllegalArgumentException.class, "typical sentence", " ", Optional.of("Word parameter cannot be empty")); } @Test public void containsWordIgnoreCase_multipleWords_throwsIllegalArgumentException() { assertExceptionThrown(IllegalArgumentException.class, "typical sentence", "aaa BBB", Optional.of("Word parameter should be a single word")); } @Test public void containsWordIgnoreCase_nullSentence_throwsNullPointerException() { assertExceptionThrown(NullPointerException.class, null, "abc", Optional.empty()); } /* * Valid equivalence partitions for word: * - any word * - word containing symbols/numbers * - word with leading/trailing spaces * * Valid equivalence partitions for sentence: * - empty string * - one word * - multiple words * - sentence with extra spaces * * Possible scenarios returning true: * - matches first word in sentence * - last word in sentence * - middle word in sentence * - matches multiple words * * Possible scenarios returning false: * - query word matches part of a sentence word * - sentence word matches part of the query word * * The test method below tries to verify all above with a reasonably low number of test cases. */ @Test public void containsWordIgnoreCase_validInputs_correctResult() { // Empty sentence assertFalse(StringUtil.containsWordIgnoreCase("", "abc")); // Boundary case assertFalse(StringUtil.containsWordIgnoreCase(" ", "123")); // Matches a partial word only assertFalse(StringUtil.containsWordIgnoreCase("aaa bbb ccc", "bb")); // Sentence word bigger than query word assertFalse(StringUtil.containsWordIgnoreCase("aaa bbb ccc", "bbbb")); // Query word bigger than sentence word // Matches word in the sentence, different upper/lower case letters assertTrue(StringUtil.containsWordIgnoreCase("aaa bBb ccc", "Bbb")); // First word (boundary case) assertTrue(StringUtil.containsWordIgnoreCase("aaa bBb ccc@1", "CCc@1")); // Last word (boundary case) assertTrue(StringUtil.containsWordIgnoreCase(" AAA bBb ccc ", "aaa")); // Sentence has extra spaces assertTrue(StringUtil.containsWordIgnoreCase("Aaa", "aaa")); // Only one word in sentence (boundary case) assertTrue(StringUtil.containsWordIgnoreCase("aaa bbb ccc", " ccc ")); // Leading/trailing spaces // Matches multiple words in sentence assertTrue(StringUtil.containsWordIgnoreCase("AAA bBb ccc bbb", "bbB")); } //---------------- Tests for getDetails -------------------------------------- /* * Equivalence Partitions: null, valid throwable object */ @Test public void getDetails_exceptionGiven() { assertThat(StringUtil.getDetails(new FileNotFoundException("file not found")), containsString("java.io.FileNotFoundException: file not found")); } @Test public void getDetails_nullGiven_throwsNullPointerException() { thrown.expect(NullPointerException.class); StringUtil.getDetails(null); } }
src/test/java/seedu/address/commons/util/StringUtilTest.java
package seedu.address.commons.util; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.FileNotFoundException; import java.util.Optional; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class StringUtilTest { @Rule public ExpectedException thrown = ExpectedException.none(); //---------------- Tests for isUnsignedPositiveInteger -------------------------------------- @Test public void isUnsignedPositiveInteger() { // EP: empty strings assertFalse(StringUtil.isNonZeroUnsignedInteger("")); // Boundary value assertFalse(StringUtil.isNonZeroUnsignedInteger(" ")); // EP: not a number assertFalse(StringUtil.isNonZeroUnsignedInteger("a")); assertFalse(StringUtil.isNonZeroUnsignedInteger("aaa")); // EP: zero assertFalse(StringUtil.isNonZeroUnsignedInteger("0")); // EP: zero as prefix assertTrue(StringUtil.isNonZeroUnsignedInteger("01")); // EP: signed numbers assertFalse(StringUtil.isNonZeroUnsignedInteger("-1")); assertFalse(StringUtil.isNonZeroUnsignedInteger("+1")); // EP: numbers with white space assertFalse(StringUtil.isNonZeroUnsignedInteger(" 10 ")); // Leading/trailing spaces assertFalse(StringUtil.isNonZeroUnsignedInteger("1 0")); // Spaces in the middle // EP: number larger than Integer.MAX_VALUE assertFalse(StringUtil.isNonZeroUnsignedInteger(Long.toString(Integer.MAX_VALUE + 1))); // EP: valid numbers, should return true assertTrue(StringUtil.isNonZeroUnsignedInteger("1")); // Boundary value assertTrue(StringUtil.isNonZeroUnsignedInteger("10")); } //---------------- Tests for containsWordIgnoreCase -------------------------------------- /* * Invalid equivalence partitions for word: null, empty, multiple words * Invalid equivalence partitions for sentence: null * The four test cases below test one invalid input at a time. */ @Test public void containsWordIgnoreCase_nullWord_throwsNullPointerException() { assertExceptionThrown(NullPointerException.class, "typical sentence", null, Optional.empty()); } private void assertExceptionThrown(Class<? 
extends Throwable> exceptionClass, String sentence, String word, Optional<String> errorMessage) { thrown.expect(exceptionClass); errorMessage.ifPresent(message -> thrown.expectMessage(message)); StringUtil.containsWordIgnoreCase(sentence, word); } @Test public void containsWordIgnoreCase_emptyWord_throwsIllegalArgumentException() { assertExceptionThrown(IllegalArgumentException.class, "typical sentence", " ", Optional.of("Word parameter cannot be empty")); } @Test public void containsWordIgnoreCase_multipleWords_throwsIllegalArgumentException() { assertExceptionThrown(IllegalArgumentException.class, "typical sentence", "aaa BBB", Optional.of("Word parameter should be a single word")); } @Test public void containsWordIgnoreCase_nullSentence_throwsNullPointerException() { assertExceptionThrown(NullPointerException.class, null, "abc", Optional.empty()); } /* * Valid equivalence partitions for word: * - any word * - word containing symbols/numbers * - word with leading/trailing spaces * * Valid equivalence partitions for sentence: * - empty string * - one word * - multiple words * - sentence with extra spaces * * Possible scenarios returning true: * - matches first word in sentence * - last word in sentence * - middle word in sentence * - matches multiple words * * Possible scenarios returning false: * - query word matches part of a sentence word * - sentence word matches part of the query word * * The test method below tries to verify all above with a reasonably low number of test cases. */ @Test public void containsWordIgnoreCase_validInputs_correctResult() { // Empty sentence assertFalse(StringUtil.containsWordIgnoreCase("", "abc")); // Boundary case assertFalse(StringUtil.containsWordIgnoreCase(" ", "123")); // Matches a partial word only assertFalse(StringUtil.containsWordIgnoreCase("aaa bbb ccc", "bb")); // Sentence word bigger than query word assertFalse(StringUtil.containsWordIgnoreCase("aaa bbb ccc", "bbbb")); // Query word bigger than sentence word // Matches word in the sentence, different upper/lower case letters assertTrue(StringUtil.containsWordIgnoreCase("aaa bBb ccc", "Bbb")); // First word (boundary case) assertTrue(StringUtil.containsWordIgnoreCase("aaa bBb ccc@1", "CCc@1")); // Last word (boundary case) assertTrue(StringUtil.containsWordIgnoreCase(" AAA bBb ccc ", "aaa")); // Sentence has extra spaces assertTrue(StringUtil.containsWordIgnoreCase("Aaa", "aaa")); // Only one word in sentence (boundary case) assertTrue(StringUtil.containsWordIgnoreCase("aaa bbb ccc", " ccc ")); // Leading/trailing spaces // Matches multiple words in sentence assertTrue(StringUtil.containsWordIgnoreCase("AAA bBb ccc bbb", "bbB")); } //---------------- Tests for getDetails -------------------------------------- /* * Equivalence Partitions: null, valid throwable object */ @Test public void getDetails_exceptionGiven() { assertThat(StringUtil.getDetails(new FileNotFoundException("file not found")), containsString("java.io.FileNotFoundException: file not found")); } @Test public void getDetails_nullGiven_throwsNullPointerException() { thrown.expect(NullPointerException.class); StringUtil.getDetails(null); } }
StringUtilTest: rename isUnsignedPositiveInteger() to isNonZeroUnsignedInteger() Currently, StringUtilTest has a method named isUnsignedPositiveInteger. However, the method tested is actually named isNonZeroUnsignedInteger. This mismatch in naming is confusing. It appears that, as early as #85 [1], the tested method was originally isUnsignedInteger(), but the test method was already named isUnsignedPositiveInteger(), which was incorrect. Additionally, in #460 [2], the tested method was renamed to isNonZeroUnsignedInteger(); however, the test was not renamed at that point. To maintain a consistent naming convention, let's rename the test method to isNonZeroUnsignedInteger(). [1]: https://github.com/se-edu/addressbook-level4/commit/92bc32b631c62041625a021719adf7c280711570 [2]: https://github.com/se-edu/addressbook-level4/commit/0352e6b6e3f51910edbff7a786f6ae24f453d38f
src/test/java/seedu/address/commons/util/StringUtilTest.java
StringUtilTest: rename isUnsignedPositiveInteger() to isNonZeroUnsignedInteger()
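The change recorded above is purely a rename of the test method so that it matches the production method it exercises; nothing else in the class moves. A minimal sketch of the renamed test follows; the class name RenameSketchTest is hypothetical, and JUnit 4 plus the project's StringUtil are assumed to be on the classpath.

import static org.junit.Assert.assertTrue;

import org.junit.Test;

import seedu.address.commons.util.StringUtil;

public class RenameSketchTest {
    // Before: the test method was named isUnsignedPositiveInteger(), which matched nothing in StringUtil.
    // After: it is named after the method under test, StringUtil.isNonZeroUnsignedInteger().
    @Test
    public void isNonZeroUnsignedInteger() {
        assertTrue(StringUtil.isNonZeroUnsignedInteger("1"));
    }
}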
Java
mit
f3f9c88fae4617e6f776539587e9eab20de4ac09
0
uservoice/uservoice-android-sdk
package com.uservoice.uservoicesdk.model; import android.os.Parcel; import android.os.Parcelable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.json.JSONException; import org.json.JSONObject; import com.uservoice.uservoicesdk.R; import com.uservoice.uservoicesdk.Session; import com.uservoice.uservoicesdk.rest.Callback; import com.uservoice.uservoicesdk.rest.RestTaskCallback; public class Topic extends BaseModel implements Parcelable { protected String name; private int numberOfArticles; public Topic() {} public static Topic ALL_ARTICLES = new Topic() {{ this.name = Session.getInstance().getContext().getString(R.string.uv_all_articles); this.id = -1; }}; public static void loadTopics(final Callback<List<Topic>> callback) { Map<String, String> params = new HashMap<String, String>(); params.put("per_page", "100"); doGet(apiPath("/topics.json"), params, new RestTaskCallback(callback) { @Override public void onComplete(JSONObject object) throws JSONException { List<Topic> allTopics = deserializeList(object, "topics", Topic.class); List<Topic> topicsWithArticles = new ArrayList<Topic>(allTopics.size()); for (Topic topic : allTopics) { if (topic.getNumberOfArticles() > 0) topicsWithArticles.add(topic); } callback.onModel(topicsWithArticles); } }); } public static void loadTopic(int topicId, final Callback<Topic> callback) { doGet(apiPath("/topics/%d.json", topicId), new RestTaskCallback(callback) { @Override public void onComplete(JSONObject object) throws JSONException { callback.onModel(deserializeObject(object, "topic", Topic.class)); } }); } @Override public void load(JSONObject object) throws JSONException { super.load(object); name = getString(object, "name"); numberOfArticles = object.getInt("article_count"); } public String getName() { return name; } public int getNumberOfArticles() { return numberOfArticles; } @Override public String toString() { return name; } // // Parcelable // public int describeContents() { return 0; } public void writeToParcel(Parcel out, int flags) { out.writeInt(id); out.writeString(name); out.writeInt(numberOfArticles); } public static final Parcelable.Creator<Topic> CREATOR = new Parcelable.Creator<Topic>() { public Topic createFromParcel(Parcel in) { return new Topic(in); } public Topic[] newArray(int size) { return new Topic[size]; } }; private Topic(Parcel in) { id = in.readInt(); name = in.readString(); numberOfArticles = in.readInt(); } }
UserVoiceSDK/src/com/uservoice/uservoicesdk/model/Topic.java
package com.uservoice.uservoicesdk.model; import android.os.Parcel; import android.os.Parcelable; import java.util.ArrayList; import java.util.List; import org.json.JSONException; import org.json.JSONObject; import com.uservoice.uservoicesdk.R; import com.uservoice.uservoicesdk.Session; import com.uservoice.uservoicesdk.rest.Callback; import com.uservoice.uservoicesdk.rest.RestTaskCallback; public class Topic extends BaseModel implements Parcelable { protected String name; private int numberOfArticles; public Topic() {} public static Topic ALL_ARTICLES = new Topic() {{ this.name = Session.getInstance().getContext().getString(R.string.uv_all_articles); this.id = -1; }}; public static void loadTopics(final Callback<List<Topic>> callback) { Map<String, String> params = new HashMap<String, String>(); params.put("per_page", "100"); doGet(apiPath("/topics.json"), params, new RestTaskCallback(callback) { @Override public void onComplete(JSONObject object) throws JSONException { List<Topic> allTopics = deserializeList(object, "topics", Topic.class); List<Topic> topicsWithArticles = new ArrayList<Topic>(allTopics.size()); for (Topic topic : allTopics) { if (topic.getNumberOfArticles() > 0) topicsWithArticles.add(topic); } callback.onModel(topicsWithArticles); } }); } public static void loadTopic(int topicId, final Callback<Topic> callback) { doGet(apiPath("/topics/%d.json", topicId), new RestTaskCallback(callback) { @Override public void onComplete(JSONObject object) throws JSONException { callback.onModel(deserializeObject(object, "topic", Topic.class)); } }); } @Override public void load(JSONObject object) throws JSONException { super.load(object); name = getString(object, "name"); numberOfArticles = object.getInt("article_count"); } public String getName() { return name; } public int getNumberOfArticles() { return numberOfArticles; } @Override public String toString() { return name; } // // Parcelable // public int describeContents() { return 0; } public void writeToParcel(Parcel out, int flags) { out.writeInt(id); out.writeString(name); out.writeInt(numberOfArticles); } public static final Parcelable.Creator<Topic> CREATOR = new Parcelable.Creator<Topic>() { public Topic createFromParcel(Parcel in) { return new Topic(in); } public Topic[] newArray(int size) { return new Topic[size]; } }; private Topic(Parcel in) { id = in.readInt(); name = in.readString(); numberOfArticles = in.readInt(); } }
Imports
UserVoiceSDK/src/com/uservoice/uservoicesdk/model/Topic.java
Imports
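The "Imports" commit above adds exactly two lines: the old version of Topic.java builds a Map<String, String> of query parameters in loadTopics() but never imports java.util.Map or java.util.HashMap, so it does not compile. The sketch below isolates that pattern in a standalone class; the class and method names are illustrative only.

import java.util.HashMap;
import java.util.Map; // the two imports the commit adds to Topic.java

class ParamsSketch {
    // Same pattern as Topic.loadTopics(): build the query parameters for the topics request.
    static Map<String, String> topicParams() {
        Map<String, String> params = new HashMap<String, String>();
        params.put("per_page", "100");
        return params;
    }
}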
Java
mit
5a7d33a7196b21d2e248ed5f77c974f045399159
0
sake/bouncycastle-java
package org.bouncycastle.x509.extension; import java.io.IOException; import java.security.InvalidKeyException; import java.security.PublicKey; import java.security.cert.CertificateParsingException; import java.security.cert.X509Certificate; import org.bouncycastle.asn1.ASN1InputStream; import org.bouncycastle.asn1.ASN1OctetString; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; import org.bouncycastle.asn1.x509.GeneralName; import org.bouncycastle.asn1.x509.GeneralNames; import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; import org.bouncycastle.asn1.x509.X509Extensions; import org.bouncycastle.jce.PrincipalUtil; /** * A high level authority key identifier. */ public class AuthorityKeyIdentifierStructure extends AuthorityKeyIdentifier { /** * Constructor which will take the byte[] returned from getExtensionValue() * * @param encodedValue a DER octet encoded string with the extension structure in it. * @throws IOException on parsing errors. */ public AuthorityKeyIdentifierStructure( byte[] encodedValue) throws IOException { super((ASN1Sequence)X509ExtensionUtil.fromExtensionValue(encodedValue)); } private static ASN1Sequence fromCertificate( X509Certificate certificate) throws CertificateParsingException { try { if (certificate.getVersion() != 3) { GeneralName genName = new GeneralName(PrincipalUtil.getIssuerX509Principal(certificate)); SubjectPublicKeyInfo info = new SubjectPublicKeyInfo( (ASN1Sequence)new ASN1InputStream(certificate.getPublicKey().getEncoded()).readObject()); return (ASN1Sequence)new AuthorityKeyIdentifier( info, new GeneralNames(genName), certificate.getSerialNumber()).toASN1Object(); } else { GeneralName genName = new GeneralName(PrincipalUtil.getIssuerX509Principal(certificate)); byte[] ext = certificate.getExtensionValue(X509Extensions.SubjectKeyIdentifier.getId()); if (ext != null) { ASN1OctetString str = (ASN1OctetString)X509ExtensionUtil.fromExtensionValue(ext); return (ASN1Sequence)new AuthorityKeyIdentifier( str.getOctets(), new GeneralNames(genName), certificate.getSerialNumber()).toASN1Object(); } else { SubjectPublicKeyInfo info = new SubjectPublicKeyInfo( (ASN1Sequence)new ASN1InputStream(certificate.getPublicKey().getEncoded()).readObject()); return (ASN1Sequence)new AuthorityKeyIdentifier( info, new GeneralNames(genName), certificate.getSerialNumber()).toASN1Object(); } } } catch (Exception e) { throw new CertificateParsingException("Exception extracting certificate details: " + e.toString()); } } private static ASN1Sequence fromKey( PublicKey pubKey) throws InvalidKeyException { try { SubjectPublicKeyInfo info = new SubjectPublicKeyInfo( (ASN1Sequence)new ASN1InputStream(pubKey.getEncoded()).readObject()); return (ASN1Sequence)new AuthorityKeyIdentifier(info).toASN1Object(); } catch (Exception e) { throw new InvalidKeyException("can't process key: " + e); } } /** * Create an AuthorityKeyIdentifier using the passed in certificate's public * key, issuer and serial number. * * @param certificate the certificate providing the information. * @throws CertificateParsingException if there is a problem processing the certificate */ public AuthorityKeyIdentifierStructure( X509Certificate certificate) throws CertificateParsingException { super(fromCertificate(certificate)); } /** * Create an AuthorityKeyIdentifier using just the hash of the * public key. * * @param pubKey the key to generate the hash from. * @throws InvalidKeyException if there is a problem using the key. 
*/ public AuthorityKeyIdentifierStructure( PublicKey pubKey) throws InvalidKeyException { super(fromKey(pubKey)); } }
src/org/bouncycastle/x509/extension/AuthorityKeyIdentifierStructure.java
package org.bouncycastle.x509.extension; import java.io.IOException; import java.security.InvalidKeyException; import java.security.PublicKey; import java.security.cert.CertificateParsingException; import java.security.cert.X509Certificate; import org.bouncycastle.asn1.ASN1InputStream; import org.bouncycastle.asn1.ASN1OctetString; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; import org.bouncycastle.asn1.x509.GeneralName; import org.bouncycastle.asn1.x509.GeneralNames; import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; import org.bouncycastle.asn1.x509.X509Extensions; import org.bouncycastle.jce.PrincipalUtil; /** * A high level authority key identifier. */ public class AuthorityKeyIdentifierStructure extends AuthorityKeyIdentifier { private AuthorityKeyIdentifier authKeyID; /** * Constructor which will take the byte[] returned from getExtensionValue() * * @param encodedValue a DER octet encoded string with the extension structure in it. * @throws IOException on parsing errors. */ public AuthorityKeyIdentifierStructure( byte[] encodedValue) throws IOException { super((ASN1Sequence)X509ExtensionUtil.fromExtensionValue(encodedValue)); } private static ASN1Sequence fromCertificate( X509Certificate certificate) throws CertificateParsingException { try { if (certificate.getVersion() != 3) { GeneralName genName = new GeneralName(PrincipalUtil.getSubjectX509Principal(certificate)); SubjectPublicKeyInfo info = new SubjectPublicKeyInfo( (ASN1Sequence)new ASN1InputStream(certificate.getPublicKey().getEncoded()).readObject()); return (ASN1Sequence)new AuthorityKeyIdentifier( info, new GeneralNames(genName), certificate.getSerialNumber()).toASN1Object(); } else { GeneralName genName = new GeneralName(PrincipalUtil.getSubjectX509Principal(certificate)); byte[] ext = certificate.getExtensionValue(X509Extensions.SubjectKeyIdentifier.getId()); if (ext != null) { ASN1OctetString str = (ASN1OctetString)X509ExtensionUtil.fromExtensionValue(ext); return (ASN1Sequence)new AuthorityKeyIdentifier( str.getOctets(), new GeneralNames(genName), certificate.getSerialNumber()).toASN1Object(); } else { SubjectPublicKeyInfo info = new SubjectPublicKeyInfo( (ASN1Sequence)new ASN1InputStream(certificate.getPublicKey().getEncoded()).readObject()); return (ASN1Sequence)new AuthorityKeyIdentifier( info, new GeneralNames(genName), certificate.getSerialNumber()).toASN1Object(); } } } catch (Exception e) { throw new CertificateParsingException("Exception extracting certificate details: " + e.toString()); } } private static ASN1Sequence fromKey( PublicKey pubKey) throws InvalidKeyException { try { SubjectPublicKeyInfo info = new SubjectPublicKeyInfo( (ASN1Sequence)new ASN1InputStream(pubKey.getEncoded()).readObject()); return (ASN1Sequence)new AuthorityKeyIdentifier(info).toASN1Object(); } catch (Exception e) { throw new InvalidKeyException("can't process key: " + e); } } /** * Create an AuthorityKeyIdentifier using the passed in certificate's public * key, issuer and serial number. * * @param certificate the certificate providing the information. * @throws CertificateParsingException if there is a problem processing the certificate */ public AuthorityKeyIdentifierStructure( X509Certificate certificate) throws CertificateParsingException { super(fromCertificate(certificate)); } /** * Create an AuthorityKeyIdentifier using just the hash of the * public key. * * @param pubKey the key to generate the hash from. 
* @throws InvalidKeyException if there is a problem using the key. */ public AuthorityKeyIdentifierStructure( PublicKey pubKey) throws InvalidKeyException { super(fromKey(pubKey)); } }
fixed to use issuer name
src/org/bouncycastle/x509/extension/AuthorityKeyIdentifierStructure.java
fixed to use issuer name
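A minimal usage sketch for the record above, grounded only in the two constructors it shows (one taking an X509Certificate, one a PublicKey). The X509V3CertificateGenerator call, and the names certGen, caCert and caKeyPair, are assumptions for illustration and are not part of the record.

import java.security.KeyPair;
import java.security.cert.X509Certificate;
import org.bouncycastle.asn1.x509.X509Extensions;
import org.bouncycastle.x509.X509V3CertificateGenerator;
import org.bouncycastle.x509.extension.AuthorityKeyIdentifierStructure;

public class AuthorityKeyIdentifierExample {
    // Attach an authority key identifier extension that refers back to the issuing CA certificate.
    static void addAkidFromCaCert(X509V3CertificateGenerator certGen, X509Certificate caCert) throws Exception {
        certGen.addExtension(X509Extensions.AuthorityKeyIdentifier, false,
                new AuthorityKeyIdentifierStructure(caCert));
    }

    // Alternative shown in the record: derive the identifier from the CA public key alone (hash of the key).
    static void addAkidFromCaKey(X509V3CertificateGenerator certGen, KeyPair caKeyPair) throws Exception {
        certGen.addExtension(X509Extensions.AuthorityKeyIdentifier, false,
                new AuthorityKeyIdentifierStructure(caKeyPair.getPublic()));
    }
}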
Java
mit
7074d01a1682ffb40a835a0265165578bd90044f
0
crysxd/Studiportal-Checker
package de.hfu.studiportal.view; import java.text.SimpleDateFormat; import java.util.Date; import android.app.AlertDialog; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.content.SharedPreferences.OnSharedPreferenceChangeListener; import android.os.Bundle; import android.preference.Preference; import android.preference.Preference.OnPreferenceClickListener; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import de.hfu.funfpunktnull.R; import de.hfu.studiportal.network.RefreshTaskStarter; /** * Fragment to display the settings.xml * @author preussjan * @since 1.0 * @version 1.0 */ public class PreferencesFragment extends PreferenceFragment implements OnSharedPreferenceChangeListener { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); addPreferencesFromResource(R.xml.preferences); updateSummaries(); PreferenceManager.setDefaultValues(this.getActivity(), R.xml.preferences, false); PreferenceManager.getDefaultSharedPreferences(this.getActivity()).registerOnSharedPreferenceChangeListener(this); Preference login = this.findPreference(getResources().getString(R.string.preference_login)); login.setOnPreferenceClickListener (new OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { Intent i = new Intent(getActivity(), LoginActivity.class); getActivity().startActivity(i); return true; } }); Preference logout = this.findPreference(getResources().getString(R.string.preference_logout)); logout.setOnPreferenceClickListener (new OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { // Use the Builder class for convenient dialog construction AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setMessage(R.string.text_logout_dialog) .setNegativeButton(R.string.text_cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { } }) .setPositiveButton(R.string.preferences_logout_title, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Stop update task RefreshTaskStarter.cancelRefreshTask(getActivity()); //Delete login-info Editor sp = PreferenceManager.getDefaultSharedPreferences(getActivity()).edit(); sp.putString(getResources().getString(R.string.preference_last_studiportal_data), ""); sp.putString(getResources().getString(R.string.preference_password), ""); sp.apply(); //Restart refresh task, will cause loginActivity to show up RefreshTaskStarter.startRefreshTask(getActivity()); } }); // Create the AlertDialog object and return it builder.create().show(); return true; } }); } @SuppressWarnings("deprecation") @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = super.onCreateView(inflater, container, savedInstanceState); view.setBackgroundDrawable(getResources().getDrawable(R.drawable.bg_stripes)); return view; } @Override public void onDestroy() { super.onDestroy(); PreferenceManager.getDefaultSharedPreferences(this.getActivity()).unregisterOnSharedPreferenceChangeListener(this); } @Override public void onResume() { super.onResume(); updateSummaries(); } private void updateSummaries() { //Get Timestamp of last 
check String key = getResources().getString(R.string.preference_last_check); long lastCheck = PreferenceManager.getDefaultSharedPreferences(this.getActivity()).getLong(key, 0); //inti date string String dateString = ""; //If it was refreshed if(lastCheck > 0) { Date d = new Date(lastCheck); dateString = getResources().getString(R.string.text_last_updated); dateString += new SimpleDateFormat("dd.MM.yyyy, HH:mm:ss").format(d); Log.i(this.getClass().getSimpleName(), dateString); } //Set the summary or an empty string if never refreshed Preference p = this.findPreference(getResources().getString(R.string.preference_refresh_rate)); p.setSummary(dateString); //Display username key = getResources().getString(R.string.preference_user); String username = PreferenceManager.getDefaultSharedPreferences(this.getActivity()).getString(key, ""); p = this.findPreference(getResources().getString(R.string.preference_login)); p.setSummary(username); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { updateSummaries(); if(key.equals(getResources().getString(R.string.preference_refresh_rate))) { RefreshTaskStarter.startRefreshTask(this.getActivity()); } } }
Studiportal_Checker/src/de/hfu/studiportal/view/PreferencesFragment.java
package de.hfu.studiportal.view; import java.text.SimpleDateFormat; import java.util.Date; import android.app.AlertDialog; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.content.SharedPreferences.Editor; import android.content.SharedPreferences.OnSharedPreferenceChangeListener; import android.os.Bundle; import android.preference.Preference; import android.preference.Preference.OnPreferenceClickListener; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import de.hfu.funfpunktnull.R; import de.hfu.studiportal.network.RefreshTaskStarter; /** * Fragment to display the settings.xml * @author preussjan * @since 1.0 * @version 1.0 */ public class PreferencesFragment extends PreferenceFragment implements OnSharedPreferenceChangeListener { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); addPreferencesFromResource(R.xml.preferences); updateSummaries(); PreferenceManager.setDefaultValues(this.getActivity(), R.xml.preferences, false); PreferenceManager.getDefaultSharedPreferences(this.getActivity()).registerOnSharedPreferenceChangeListener(this); Preference login = this.findPreference(getResources().getString(R.string.preference_login)); login.setOnPreferenceClickListener (new OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { Intent i = new Intent(getActivity(), LoginActivity.class); getActivity().startActivity(i); return true; } }); Preference logout = this.findPreference(getResources().getString(R.string.preference_logout)); logout.setOnPreferenceClickListener (new OnPreferenceClickListener() { @Override public boolean onPreferenceClick(Preference preference) { // Use the Builder class for convenient dialog construction AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); builder.setMessage(R.string.text_logout_dialog) .setNegativeButton(R.string.text_cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { } }) .setPositiveButton(R.string.preferences_logout_title, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { //Stop update task RefreshTaskStarter.cancelRefreshTask(getActivity()); //Delete login-info Editor sp = PreferenceManager.getDefaultSharedPreferences(getActivity()).edit(); sp.putString(getResources().getString(R.string.preference_password), ""); sp.apply(); //Restart refresh task, will cause loginActivity to show up RefreshTaskStarter.startRefreshTask(getActivity()); } }); // Create the AlertDialog object and return it builder.create().show(); return true; } }); } @SuppressWarnings("deprecation") @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = super.onCreateView(inflater, container, savedInstanceState); view.setBackgroundDrawable(getResources().getDrawable(R.drawable.bg_stripes)); return view; } @Override public void onDestroy() { super.onDestroy(); PreferenceManager.getDefaultSharedPreferences(this.getActivity()).unregisterOnSharedPreferenceChangeListener(this); } @Override public void onResume() { super.onResume(); updateSummaries(); } private void updateSummaries() { //Get Timestamp of last check String key = getResources().getString(R.string.preference_last_check); long 
lastCheck = PreferenceManager.getDefaultSharedPreferences(this.getActivity()).getLong(key, 0); //inti date string String dateString = ""; //If it was refreshed if(lastCheck > 0) { Date d = new Date(lastCheck); dateString = getResources().getString(R.string.text_last_updated); dateString += new SimpleDateFormat("dd.MM.yyyy, HH:mm:ss").format(d); Log.i(this.getClass().getSimpleName(), dateString); } //Set the summary or an empty string if never refreshed Preference p = this.findPreference(getResources().getString(R.string.preference_refresh_rate)); p.setSummary(dateString); //Display username key = getResources().getString(R.string.preference_user); String username = PreferenceManager.getDefaultSharedPreferences(this.getActivity()).getString(key, ""); p = this.findPreference(getResources().getString(R.string.preference_login)); p.setSummary(username); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { updateSummaries(); if(key.equals(getResources().getString(R.string.preference_refresh_rate))) { RefreshTaskStarter.startRefreshTask(this.getActivity()); } } }
Removing saved data when logging out
Studiportal_Checker/src/de/hfu/studiportal/view/PreferencesFragment.java
Removing saved data when logging out
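The change recorded above makes logout clear the cached portal data in addition to the stored password. A minimal sketch of that clean-up pattern, assuming only the Android SharedPreferences calls already used in the record; the class name and the literal preference keys below are illustrative stand-ins for the app's real resource strings.

import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;

public final class LogoutHelper {
    // Wipe the cached portal data and the stored password so the next refresh forces a fresh login.
    public static void clearSavedData(Context context) {
        SharedPreferences.Editor editor =
                PreferenceManager.getDefaultSharedPreferences(context).edit();
        editor.putString("last_studiportal_data", ""); // cached exam data (illustrative key)
        editor.putString("password", "");              // stored credential (illustrative key)
        editor.apply();
    }
}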
Java
mit
a6f3af94dd55fc7e78d75e76e28e8c502255df72
0
aterai/java-swing-tips,aterai/java-swing-tips,aoguren/java-swing-tips,mhcrnl/java-swing-tips,mhcrnl/java-swing-tips,aterai/java-swing-tips,aoguren/java-swing-tips,aoguren/java-swing-tips,aterai/java-swing-tips,mhcrnl/java-swing-tips
package example; //-*- mode:java; encoding:utf8n; coding:utf-8 -*- // vim:set fileencoding=utf-8: //@homepage@ import java.awt.*; import java.awt.event.*; import java.awt.font.*; import java.awt.geom.*; import javax.swing.*; import javax.swing.border.*; import javax.swing.text.*; public class MainPanel extends JPanel { private final JLabel l0 = new JLabel("打率"); private final JLabel l1 = new JLabel("打率", JLabel.RIGHT); private final JLabel l2 = new JustifiedLabel("打率"); private final JLabel l3 = new JLabel("出塁率", JLabel.CENTER); private final JLabel l4 = new JustifiedLabel("出塁率"); private final JLabel l5 = new JustifiedLabel("チーム出塁率"); public MainPanel() { super(new BorderLayout()); JPanel p = new JPanel(new GridBagLayout()); Border inside = BorderFactory.createEmptyBorder(10,5+2,10,10+2); Border outside = BorderFactory.createTitledBorder("JLabel text-align:justify"); p.setBorder(BorderFactory.createCompoundBorder(outside, inside)); GridBagConstraints c = new GridBagConstraints(); c.gridheight = 1; c.gridx = 0; c.insets = new Insets(5, 5, 5, 0); c.fill = GridBagConstraints.HORIZONTAL; c.gridy = 0; p.add(l0, c); c.gridy = 1; p.add(l1, c); c.gridy = 2; p.add(l2, c); c.gridy = 3; p.add(l3, c); c.gridy = 4; p.add(l4, c); c.gridy = 5; p.add(l5, c); c.gridx = 1; c.weightx = 1.0; c.gridy = 0; p.add(new JTextField(), c); c.gridy = 1; p.add(new JTextField(), c); c.gridy = 2; p.add(new JTextField(), c); c.gridy = 3; p.add(new JTextField(), c); c.gridy = 4; p.add(new JTextField(), c); c.gridy = 5; p.add(new JTextField(), c); add(p); add(new JustifiedLabel("あいうえおかきくけこ"), BorderLayout.SOUTH); setBorder(BorderFactory.createEmptyBorder(5,5,5,5)); setPreferredSize(new Dimension(320, 240)); } public static void main(String[] args) { EventQueue.invokeLater(new Runnable() { @Override public void run() { createAndShowGUI(); } }); } public static void createAndShowGUI() { try{ UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); }catch(Exception e) { e.printStackTrace(); } JFrame frame = new JFrame("@title@"); frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); frame.getContentPane().add(new MainPanel()); frame.pack(); frame.setLocationRelativeTo(null); frame.setVisible(true); } } class JustifiedLabel extends JLabel { private GlyphVector gvtext; private int prev_width = -1; public JustifiedLabel() { this(null); } public JustifiedLabel(String str) { super(str); } @Override protected void paintComponent(Graphics g) { Graphics2D g2 = (Graphics2D)g; Insets i = getInsets(); int w = getWidth() - i.left - i.right; if(w!=prev_width) { gvtext = getWrappedGlyphVector(getText(), w, getFont(), g2.getFontRenderContext()); prev_width = w; } if(gvtext!=null) { g2.drawGlyphVector(gvtext, i.left, i.top + getFont().getSize()); }else{ super.paintComponent(g); } } private GlyphVector getWrappedGlyphVector(String str, float wrapping, Font font, FontRenderContext frc) { GlyphVector gv = font.createGlyphVector(frc, str); float ga = 0.0f; for(int i=0;i<gv.getNumGlyphs();i++) { ga = ga + gv.getGlyphMetrics(i).getAdvance(); } if(wrapping<ga) return null; float xx = (wrapping-ga) / (float)(gv.getNumGlyphs()-1); float xpos = 0.0f; Point2D gmPos = new Point2D.Double(0.0d, 0.0d); for(int i=0;i<gv.getNumGlyphs();i++) { GlyphMetrics gm = gv.getGlyphMetrics(i); gmPos.setLocation(xpos, 0); gv.setGlyphPosition(i, gmPos); xpos = xpos + gm.getAdvance() + xx; } return gv; } }
JustifiedLabel/src/java/example/MainPanel.java
package example; //-*- mode:java; encoding:utf8n; coding:utf-8 -*- // vim:set fileencoding=utf-8: //@homepage@ import java.awt.*; import java.awt.event.*; import java.awt.font.*; import java.awt.geom.*; import javax.swing.*; import javax.swing.border.*; import javax.swing.text.*; public class MainPanel extends JPanel { private final JLabel l0 = new JLabel("打率"); private final JLabel l1 = new JLabel("打率", JLabel.RIGHT); private final JLabel l2 = new JustifiedLabel("打率"); private final JLabel l3 = new JLabel("出塁率", JLabel.CENTER); private final JLabel l4 = new JustifiedLabel("出塁率"); private final JLabel l5 = new JustifiedLabel("チーム出塁率"); public MainPanel() { super(new BorderLayout()); JPanel p = new JPanel(new GridBagLayout()); Border inside = BorderFactory.createEmptyBorder(10,5+2,10,10+2); Border outside = BorderFactory.createTitledBorder("JLabel text-align:justify"); p.setBorder(BorderFactory.createCompoundBorder(outside, inside)); GridBagConstraints c = new GridBagConstraints(); c.gridheight = 1; c.gridx = 0; c.insets = new Insets(5, 5, 5, 0); c.fill = GridBagConstraints.HORIZONTAL; c.gridy = 0; p.add(l0, c); c.gridy = 1; p.add(l1, c); c.gridy = 2; p.add(l2, c); c.gridy = 3; p.add(l3, c); c.gridy = 4; p.add(l4, c); c.gridy = 5; p.add(l5, c); c.gridx = 1; c.weightx = 1.0; c.gridy = 0; p.add(new JTextField(), c); c.gridy = 1; p.add(new JTextField(), c); c.gridy = 2; p.add(new JTextField(), c); c.gridy = 3; p.add(new JTextField(), c); c.gridy = 4; p.add(new JTextField(), c); c.gridy = 5; p.add(new JTextField(), c); add(p); add(new JustifiedLabel("あいうえおかきくけこ"), BorderLayout.SOUTH); setBorder(BorderFactory.createEmptyBorder(5,5,5,5)); setPreferredSize(new Dimension(320, 240)); } public static void main(String[] args) { EventQueue.invokeLater(new Runnable() { @Override public void run() { createAndShowGUI(); } }); } public static void createAndShowGUI() { try{ UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); }catch(Exception e) { e.printStackTrace(); } JFrame frame = new JFrame("@title@"); frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); frame.getContentPane().add(new MainPanel()); frame.pack(); frame.setLocationRelativeTo(null); frame.setVisible(true); } } class JustifiedLabel extends JLabel { private GlyphVector gvtext; private int prev_width = -1; public JustifiedLabel() { this(null); } public JustifiedLabel(String str) { super(str); } @Override protected void paintComponent(Graphics g) { Graphics2D g2 = (Graphics2D)g; Insets i = getInsets(); int w = getWidth() - i.left - i.right; if(w!=prev_width) { gvtext = getWrappedGlyphVector(getText(), w, getFont(), g2.getFontRenderContext()); prev_width = w; } if(gvtext!=null) { g2.drawGlyphVector(gvtext, i.left, (getHeight() + getFont().getSize()) / 2); }else{ super.paintComponent(g); } } private GlyphVector getWrappedGlyphVector(String str, float wrapping, Font font, FontRenderContext frc) { GlyphVector gv = font.createGlyphVector(frc, str); float ga = 0.0f; for(int i=0;i<gv.getNumGlyphs();i++) { ga = ga + gv.getGlyphMetrics(i).getAdvance(); } if(wrapping<ga) return null; float xx = (wrapping-ga) / (float)(gv.getNumGlyphs()-1); float xpos = 0.0f; Point2D gmPos = new Point2D.Double(0.0d, 0.0d); for(int i=0;i<gv.getNumGlyphs();i++) { GlyphMetrics gm = gv.getGlyphMetrics(i); gmPos.setLocation(xpos, 0); gv.setGlyphPosition(i, gmPos); xpos = xpos + gm.getAdvance() + xx; } return gv; } }
FindBugs: ICAST_IDIV_CAST_TO_DOUBLE
JustifiedLabel/src/java/example/MainPanel.java
FindBugs: ICAST_IDIV_CAST_TO_DOUBLE
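The JustifiedLabel in the record above justifies text by distributing the unused width evenly across the gaps between glyphs. A compact sketch of that core calculation using the same java.awt.font API; the class and method names here are illustrative.

import java.awt.Font;
import java.awt.font.FontRenderContext;
import java.awt.font.GlyphVector;
import java.awt.geom.Point2D;

final class GlyphJustifier {
    // Spread the glyphs of 'text' so they span 'targetWidth': the leftover width is split
    // evenly into the gaps between consecutive glyphs.
    static GlyphVector justify(String text, float targetWidth, Font font, FontRenderContext frc) {
        GlyphVector gv = font.createGlyphVector(frc, text);
        float naturalWidth = 0f;
        for (int i = 0; i < gv.getNumGlyphs(); i++) {
            naturalWidth += gv.getGlyphMetrics(i).getAdvance();
        }
        if (targetWidth < naturalWidth || gv.getNumGlyphs() < 2) {
            return gv; // nothing sensible to stretch
        }
        float extraPerGap = (targetWidth - naturalWidth) / (gv.getNumGlyphs() - 1);
        float x = 0f;
        for (int i = 0; i < gv.getNumGlyphs(); i++) {
            gv.setGlyphPosition(i, new Point2D.Float(x, 0f));
            x += gv.getGlyphMetrics(i).getAdvance() + extraPerGap;
        }
        return gv;
    }
}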
Java
agpl-3.0
8827e3744631bef11d6ea6fbb38ba1436eb636aa
0
TheLanguageArchive/Arbil,TheLanguageArchive/Arbil,TheLanguageArchive/Arbil,TheLanguageArchive/Arbil,TheLanguageArchive/Arbil
/** * Copyright (C) 2012 Max Planck Institute for Psycholinguistics * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU General Public License as published by the Free Software * Foundation; either version 2 of the License, or (at your option) any later * version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License along with * this program; if not, write to the Free Software Foundation, Inc., 59 Temple * Place - Suite 330, Boston, MA 02111-1307, USA. */ package nl.mpi.arbilcommons.journal; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.LineNumberReader; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; import nl.mpi.arbil.plugin.JournalWatcherPlugin; import nl.mpi.arbil.plugin.PluginBugCatcher; import nl.mpi.arbil.plugin.PluginDialogHandler; import nl.mpi.arbil.plugin.PluginException; import nl.mpi.arbil.plugin.PluginField; import nl.mpi.arbil.plugin.PluginJournal; import nl.mpi.arbil.plugin.PluginSessionStorage; /** * Document : ArbilJournal Created on : * * @author [email protected] */ public class ArbilJournal implements PluginJournal { private static PluginDialogHandler messageDialogHandler; private final HashSet<JournalWatcherPlugin> jounalWatchers; public static void setMessageDialogHandler(PluginDialogHandler handler) { messageDialogHandler = handler; } private static PluginSessionStorage sessionStorage; public static void setSessionStorage(PluginSessionStorage sessionStorageInstance) { sessionStorage = sessionStorageInstance; } private static PluginBugCatcher bugCatcher; public static void setBugCatcher(PluginBugCatcher bugCatcher) { ArbilJournal.bugCatcher = bugCatcher; } private File getJournalFile() { return new File(sessionStorage.getProjectDirectory(), "ChangeJournal.log"); } public enum UndoType { Value, LanguageId, KeyName } private static class HistoryItem { PluginField targetField; String oldValue; String newValue; UndoType undoType; } private ArbilJournal() { jounalWatchers = new HashSet<JournalWatcherPlugin>(); } static private ArbilJournal singleInstance = null; static synchronized public ArbilJournal getSingleInstance() { if (singleInstance == null) { singleInstance = new ArbilJournal(); } return singleInstance; } ArrayList<HistoryItem> fieldChangeHistory; int currentFieldChangeHistoryItem = 0; public synchronized void recordFieldChange(PluginField targetField, String oldValue, String newValue, UndoType undoType) { if (fieldChangeHistory == null) { fieldChangeHistory = new ArrayList<HistoryItem>(); currentFieldChangeHistoryItem = 0; } if (currentFieldChangeHistoryItem < fieldChangeHistory.size()) { fieldChangeHistory = new ArrayList(fieldChangeHistory.subList(0, currentFieldChangeHistoryItem)); } HistoryItem historyItem = new HistoryItem(); historyItem.targetField = targetField; historyItem.oldValue = oldValue; historyItem.newValue = newValue; historyItem.undoType = undoType; fieldChangeHistory.add(historyItem); currentFieldChangeHistoryItem++; } public boolean canUndo() { //fieldChangeHistory.size(); return currentFieldChangeHistoryItem > 0; } public boolean canRedo() { return fieldChangeHistory != null && 
currentFieldChangeHistoryItem < fieldChangeHistory.size(); } public void undoFromFieldChangeHistory() { if (canUndo()) { HistoryItem changeHistoryItem = fieldChangeHistory.get(--currentFieldChangeHistoryItem); HistoryItem reversedHistoryItem = new HistoryItem(); reversedHistoryItem.newValue = changeHistoryItem.oldValue; reversedHistoryItem.oldValue = changeHistoryItem.newValue; reversedHistoryItem.targetField = changeHistoryItem.targetField; reversedHistoryItem.undoType = changeHistoryItem.undoType; makeChangeFromHistoryItem(reversedHistoryItem); } } public void redoFromFieldChangeHistory() { if (canRedo()) { HistoryItem changeHistoryItem = fieldChangeHistory.get(currentFieldChangeHistoryItem++); makeChangeFromHistoryItem(changeHistoryItem); } } public void clearFieldChangeHistory() { fieldChangeHistory = null; currentFieldChangeHistoryItem = 0; } private void makeChangeFromHistoryItem(HistoryItem historyItem) { String currentValue = null; switch (historyItem.undoType) { case KeyName: currentValue = historyItem.targetField.getKeyName(); break; case LanguageId: currentValue = historyItem.targetField.getLanguageId(); break; case Value: currentValue = historyItem.targetField.getFieldValue(); break; } if (currentValue != null && !currentValue.equals(historyItem.oldValue)) { messageDialogHandler.addMessageDialogToQueue("The field value is out of sync with the history item", "Undo/Redo"); bugCatcher.logException(new PluginException("ChangeFromHistory old value does not match current value")); } else { switch (historyItem.undoType) { case KeyName: historyItem.targetField.setKeyName(historyItem.newValue, true, true); break; case LanguageId: historyItem.targetField.setLanguageId(historyItem.newValue, true, true); break; case Value: historyItem.targetField.setFieldValue(historyItem.newValue, true, true); break; } } } // this is also use to record an import event public boolean saveJournalEntry(String imdiUrl, String imdiNodePath, String oldValue, String newValue, String eventType) { boolean returnValue = false; FileWriter journalFile = null; try { journalFile = new FileWriter(getJournalFile(), true); System.out.println("Journal: " + imdiUrl + "," + imdiNodePath + "," + oldValue + "," + newValue); journalFile.append("\"" + imdiUrl + imdiNodePath + "\",\"" + oldValue + "\",\"" + newValue + "\",\"" + eventType + "\"\n"); journalFile.close(); journalFile = null; returnValue = true; wakeJounalWatchers(getJournalFile().length()); } catch (IOException ex) { returnValue = false; bugCatcher.logException(new PluginException("failed to write to the journal: " + ex.getMessage())); System.err.println("failed to write to the journal: " + ex.getMessage()); } finally { if (journalFile != null) { try { journalFile.close(); } catch (IOException ioe) { bugCatcher.logException(new PluginException("Failed to close the journal: " + ioe.getMessage())); } } } return (returnValue); } synchronized public long getChangedFiles(long lastChangeIndex, Set<String> changedURIs) throws PluginException { try { final File journalFile = getJournalFile(); final long journalLength = journalFile.length(); if (journalLength > lastChangeIndex) { final FileReader fileReader = new FileReader(journalFile); LineNumberReader lineNumberReader = new LineNumberReader(fileReader); lineNumberReader.skip(lastChangeIndex); String readLine; while (null != (readLine = lineNumberReader.readLine())) { // todo: extract the URI changedURIs.add(readLine); } } return journalLength; } catch (FileNotFoundException exception) { throw new PluginException("Failed 
to read the journal file: " + exception.getMessage()); } catch (IOException exception) { throw new PluginException("Failed to read the journal file: " + exception.getMessage()); } } private void wakeJounalWatchers(final long journalLength) { new Thread(new Runnable() { public void run() { for (JournalWatcherPlugin jounalWatcher : jounalWatchers) { try { jounalWatcher.journalEvent(journalLength); } catch (PluginException exception) { messageDialogHandler.addMessageDialogToQueue("Journal watcher plugin failed and has been removed:\n" + exception.getMessage(), "Wake Jounal Watchers"); jounalWatchers.remove(jounalWatcher); } } } }, "JounalWatcherPlugins").start(); } public void addJounalWatcher(JournalWatcherPlugin jounalWatcher) { jounalWatchers.add(jounalWatcher); } }
arbil-commons/src/main/java/nl/mpi/arbilcommons/journal/ArbilJournal.java
/** * Copyright (C) 2012 Max Planck Institute for Psycholinguistics * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU General Public License as published by the Free Software * Foundation; either version 2 of the License, or (at your option) any later * version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License along with * this program; if not, write to the Free Software Foundation, Inc., 59 Temple * Place - Suite 330, Boston, MA 02111-1307, USA. */ package nl.mpi.arbilcommons.journal; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.LineNumberReader; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; import nl.mpi.arbil.plugin.PluginBugCatcher; import nl.mpi.arbil.plugin.PluginDialogHandler; import nl.mpi.arbil.plugin.PluginException; import nl.mpi.arbil.plugin.PluginField; import nl.mpi.arbil.plugin.PluginJournal; import nl.mpi.arbil.plugin.PluginSessionStorage; /** * Document : ArbilJournal Created on : * * @author [email protected] */ public class ArbilJournal implements PluginJournal { private static PluginDialogHandler messageDialogHandler; private final HashSet<Runnable> jounalWatchers; public static void setMessageDialogHandler(PluginDialogHandler handler) { messageDialogHandler = handler; } private static PluginSessionStorage sessionStorage; public static void setSessionStorage(PluginSessionStorage sessionStorageInstance) { sessionStorage = sessionStorageInstance; } private static PluginBugCatcher bugCatcher; public static void setBugCatcher(PluginBugCatcher bugCatcher) { ArbilJournal.bugCatcher = bugCatcher; } private File getJournalFile() { return new File(sessionStorage.getProjectDirectory(), "ChangeJournal.log"); } public enum UndoType { Value, LanguageId, KeyName } private static class HistoryItem { PluginField targetField; String oldValue; String newValue; UndoType undoType; } private ArbilJournal() { jounalWatchers = new HashSet<Runnable>(); } static private ArbilJournal singleInstance = null; static synchronized public ArbilJournal getSingleInstance() { if (singleInstance == null) { singleInstance = new ArbilJournal(); } return singleInstance; } ArrayList<HistoryItem> fieldChangeHistory; int currentFieldChangeHistoryItem = 0; public synchronized void recordFieldChange(PluginField targetField, String oldValue, String newValue, UndoType undoType) { if (fieldChangeHistory == null) { fieldChangeHistory = new ArrayList<HistoryItem>(); currentFieldChangeHistoryItem = 0; } if (currentFieldChangeHistoryItem < fieldChangeHistory.size()) { fieldChangeHistory = new ArrayList(fieldChangeHistory.subList(0, currentFieldChangeHistoryItem)); } HistoryItem historyItem = new HistoryItem(); historyItem.targetField = targetField; historyItem.oldValue = oldValue; historyItem.newValue = newValue; historyItem.undoType = undoType; fieldChangeHistory.add(historyItem); currentFieldChangeHistoryItem++; } public boolean canUndo() { //fieldChangeHistory.size(); return currentFieldChangeHistoryItem > 0; } public boolean canRedo() { return fieldChangeHistory != null && currentFieldChangeHistoryItem < fieldChangeHistory.size(); } public void 
undoFromFieldChangeHistory() { if (canUndo()) { HistoryItem changeHistoryItem = fieldChangeHistory.get(--currentFieldChangeHistoryItem); HistoryItem reversedHistoryItem = new HistoryItem(); reversedHistoryItem.newValue = changeHistoryItem.oldValue; reversedHistoryItem.oldValue = changeHistoryItem.newValue; reversedHistoryItem.targetField = changeHistoryItem.targetField; reversedHistoryItem.undoType = changeHistoryItem.undoType; makeChangeFromHistoryItem(reversedHistoryItem); } } public void redoFromFieldChangeHistory() { if (canRedo()) { HistoryItem changeHistoryItem = fieldChangeHistory.get(currentFieldChangeHistoryItem++); makeChangeFromHistoryItem(changeHistoryItem); } } public void clearFieldChangeHistory() { fieldChangeHistory = null; currentFieldChangeHistoryItem = 0; } private void makeChangeFromHistoryItem(HistoryItem historyItem) { String currentValue = null; switch (historyItem.undoType) { case KeyName: currentValue = historyItem.targetField.getKeyName(); break; case LanguageId: currentValue = historyItem.targetField.getLanguageId(); break; case Value: currentValue = historyItem.targetField.getFieldValue(); break; } if (currentValue != null && !currentValue.equals(historyItem.oldValue)) { messageDialogHandler.addMessageDialogToQueue("The field value is out of sync with the history item", "Undo/Redo"); bugCatcher.logException(new PluginException("ChangeFromHistory old value does not match current value")); } else { switch (historyItem.undoType) { case KeyName: historyItem.targetField.setKeyName(historyItem.newValue, true, true); break; case LanguageId: historyItem.targetField.setLanguageId(historyItem.newValue, true, true); break; case Value: historyItem.targetField.setFieldValue(historyItem.newValue, true, true); break; } } } // this is also use to record an import event public boolean saveJournalEntry(String imdiUrl, String imdiNodePath, String oldValue, String newValue, String eventType) { boolean returnValue = false; FileWriter journalFile = null; try { journalFile = new FileWriter(getJournalFile(), true); System.out.println("Journal: " + imdiUrl + "," + imdiNodePath + "," + oldValue + "," + newValue); journalFile.append("\"" + imdiUrl + imdiNodePath + "\",\"" + oldValue + "\",\"" + newValue + "\",\"" + eventType + "\"\n"); journalFile.close(); journalFile = null; returnValue = true; wakeJounalWatchers(); } catch (IOException ex) { returnValue = false; bugCatcher.logException(new PluginException("failed to write to the journal: " + ex.getMessage())); System.err.println("failed to write to the journal: " + ex.getMessage()); } finally { if (journalFile != null) { try { journalFile.close(); } catch (IOException ioe) { bugCatcher.logException(new PluginException("Failed to close the journal: " + ioe.getMessage())); } } } return (returnValue); } synchronized public long getChangedFiles(long lastChangeIndex, Set<String> changedURIs) throws PluginException { try { final File journalFile = getJournalFile(); final long journalLength = journalFile.length(); if (journalLength > lastChangeIndex) { final FileReader fileReader = new FileReader(journalFile); LineNumberReader lineNumberReader = new LineNumberReader(fileReader); lineNumberReader.skip(lastChangeIndex); String readLine; while (null != (readLine = lineNumberReader.readLine())) { // todo: extract the URI changedURIs.add(readLine); } } return journalLength; } catch (FileNotFoundException exception) { throw new PluginException("Failed to read the journal file: " + exception.getMessage()); } catch (IOException exception) { throw new 
PluginException("Failed to read the journal file: " + exception.getMessage()); } } private void wakeJounalWatchers() { for (Runnable jounalWatcher : jounalWatchers) { new Thread(jounalWatcher, "JounalWatcherPlugin").start(); } } public void addJounalWatcher(Runnable runnableWatcher) { jounalWatchers.add(runnableWatcher); } }
Created a journal following debug panel plugin.
arbil-commons/src/main/java/nl/mpi/arbilcommons/journal/ArbilJournal.java
Created a journal following debug panel plugin.
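The change above replaces the journal's plain Runnable watchers with typed watchers that receive the current journal length, are notified off the calling thread, and are unregistered when they throw. A minimal sketch of that pattern; the interface and class names are illustrative stand-ins, not the Arbil plugin API, and a copy-on-write set is used here (instead of the record's HashSet) so removing a watcher during notification is safe.

import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;

final class JournalNotifier {
    interface JournalWatcher {
        void journalEvent(long journalLength) throws Exception;
    }

    private final Set<JournalWatcher> watchers = new CopyOnWriteArraySet<JournalWatcher>();

    void addWatcher(JournalWatcher watcher) {
        watchers.add(watcher);
    }

    // Called after a journal entry has been appended; notification happens on a worker thread.
    void wakeWatchers(final long journalLength) {
        new Thread(new Runnable() {
            public void run() {
                for (JournalWatcher watcher : watchers) {
                    try {
                        watcher.journalEvent(journalLength);
                    } catch (Exception e) {
                        watchers.remove(watcher); // misbehaving watchers are dropped
                    }
                }
            }
        }, "JournalWatchers").start();
    }
}

Iterating a CopyOnWriteArraySet works on a snapshot, so the remove() inside the loop cannot throw the ConcurrentModificationException that the same removal on a plain HashSet would risk.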
Java
agpl-3.0
a5e563efe46106a5b1855db03e197d4b5a24d8dd
0
rdkgit/opennms,aihua/opennms,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms,aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,aihua/opennms,aihua/opennms,tdefilip/opennms,rdkgit/opennms,tdefilip/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,aihua/opennms,aihua/opennms,aihua/opennms,aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,roskens/opennms-pre-github,tdefilip/opennms,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2012 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <[email protected]> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.netmgt.provision.service.vmware; import com.vmware.vim25.*; import com.vmware.vim25.mo.*; import org.apache.commons.io.IOExceptionWithCause; import org.apache.commons.lang.StringUtils; import org.apache.http.conn.util.InetAddressUtils; import org.exolab.castor.xml.MarshalException; import org.exolab.castor.xml.ValidationException; import org.opennms.core.utils.BeanUtils; import org.opennms.core.utils.url.GenericURLConnection; import org.opennms.core.xml.JaxbUtils; import org.opennms.netmgt.model.PrimaryType; import org.opennms.netmgt.provision.persist.ForeignSourceRepository; import org.opennms.netmgt.provision.persist.requisition.*; import org.opennms.protocols.vmware.VmwareViJavaAccess; import org.sblim.wbem.cim.CIMException; import org.sblim.wbem.cim.CIMObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.xml.bind.JAXBException; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.*; import java.rmi.RemoteException; import java.util.*; /** * The Class VmwareRequisitionUrlConnection * * <p>This class is used for the automtic requisition of Vmware related entities.</p> * * @author Christian Pape <[email protected]> * @author Alejandro Galue <[email protected]> */ public class VmwareRequisitionUrlConnection extends GenericURLConnection { /** * the logger */ private Logger logger = LoggerFactory.getLogger(VmwareRequisitionUrlConnection.class); private static final String VMWARE_HOSTSYSTEM_SERVICES = "hostSystemServices"; private static final String VMWARE_VIRTUALMACHINE_SERVICES = "virtualMachineServices"; private String[] m_hostSystemServices; private String[] m_virtualMachineServices; private String m_hostname = null; private String m_username = null; private String m_password = null; private String m_foreignSource = null; private boolean m_importVMPoweredOn = true; private boolean m_importVMPoweredOff = false; private boolean m_importVMSuspended = false; private boolean m_importHostPoweredOn = true; private boolean m_importHostPoweredOff = false; private boolean m_importHostStandBy = false; private boolean m_importHostUnknown = false; private boolean m_persistIPv4 = true; private boolean m_persistIPv6 = true; private boolean m_persistVMs = true; private boolean m_persistHosts = true; /* * Host system managedObjectId to name mapping 
*/ private Map<String, String> m_hostSystemMap = new HashMap<String, String>(); /** * the query arguments */ private Map<String, String> m_args = null; /** * requisition object */ private Requisition m_requisition = null; /** * Constructor for creating an instance of this class. * * @param url the URL to use * @throws MalformedURLException * @throws RemoteException */ public VmwareRequisitionUrlConnection(URL url) throws MalformedURLException, RemoteException { super(url); m_hostname = url.getHost(); m_username = getUsername(); m_password = getPassword(); m_args = getQueryArgs(); boolean importVMOnly = queryParameter("importVMOnly", false); boolean importHostOnly = queryParameter("importHostOnly", false); if (importHostOnly && importVMOnly) { throw new MalformedURLException("importHostOnly and importVMOnly can't be true simultaneously"); } if (importHostOnly) { m_persistVMs = false; } if (importVMOnly) { m_persistHosts = false; } boolean importIPv4Only = queryParameter("importIPv4Only", false); boolean importIPv6Only = queryParameter("importIPv6Only", false); if (importIPv4Only && importIPv6Only) { throw new MalformedURLException("importIPv4Only and importIPv6Only can't be true simultaneously"); } if (importIPv4Only) { m_persistIPv6 = false; } if (importIPv6Only) { m_persistIPv4 = false; } m_importVMPoweredOn = queryParameter("importVMPoweredOn", true); m_importVMPoweredOff = queryParameter("importVMPoweredOff", false); m_importVMSuspended = queryParameter("importVMSuspended", false); m_importHostPoweredOn = queryParameter("importHostPoweredOn", true); m_importHostPoweredOff = queryParameter("importHostPoweredOff", false); m_importHostStandBy = queryParameter("importHostStandBy", false); m_importHostUnknown = queryParameter("importHostUnknown", false); if (queryParameter("importHostAll", false)) { m_importHostPoweredOn = true; m_importHostPoweredOff = true; m_importHostStandBy = true; m_importHostUnknown = true; } if (queryParameter("importVMAll", false)) { m_importVMPoweredOff = true; m_importVMPoweredOn = true; m_importVMSuspended = true; } String path = url.getPath(); path = path.replaceAll("^/", ""); path = path.replaceAll("/$", ""); String pathElements[] = path.split("/"); if (pathElements.length == 1) { if ("".equals(pathElements[0])) { m_foreignSource = "vmware-" + m_hostname; } else { m_foreignSource = pathElements[0]; } } else { throw new MalformedURLException("Error processing path element of URL (vmware://username:password@host[/foreign-source]?keyA=valueA;keyB=valueB;...)"); } } /** * Returns a boolean representation for a given on/off parameter. * * @param key the parameter's name * @param defaultValue the default value to use * @return the boolean value */ private boolean queryParameter(String key, boolean defaultValue) { if (m_args.get(key) == null) { return defaultValue; } else { String value = m_args.get(key).toLowerCase(); return ("yes".equals(value) || "true".equals(value) || "on".equals(value) || "1".equals(value)); } } @Override public void connect() throws IOException { // To change body of implemented methods use File | Settings | File // Templates. 
} private boolean reachableCimService(VmwareViJavaAccess vmwareViJavaAccess, HostSystem hostSystem, String ipAddress) { if (!vmwareViJavaAccess.setTimeout(3000)) { logger.warn("Error setting connection timeout"); } List<CIMObject> cimObjects = null; try { cimObjects = vmwareViJavaAccess.queryCimObjects(hostSystem, "CIM_NumericSensor", ipAddress); } catch (ConnectException e) { return false; } catch (RemoteException e) { return false; } catch (CIMException e) { return false; } return cimObjects != null; } /** * Creates a requisition node for the given managed entity and type. * * @param ipAddresses the set of Ip addresses * @param managedEntity the managed entity * @return the generated requisition node */ private RequisitionNode createRequisitionNode(Set<String> ipAddresses, ManagedEntity managedEntity, int apiVersion, VmwareViJavaAccess vmwareViJavaAccess) { RequisitionNode requisitionNode = new RequisitionNode(); // Setting the node label requisitionNode.setNodeLabel(managedEntity.getName()); // Foreign Id consisting of managed entity Id requisitionNode.setForeignId(managedEntity.getMOR().getVal()); /* * Original version: * * Foreign Id consisting of VMware management server's hostname and managed entity id * * requisitionNode.setForeignId(m_hostname + "/" + managedEntity.getMOR().getVal()); */ if (managedEntity instanceof VirtualMachine) { boolean firstInterface = true; // add all given interfaces for (String ipAddress : ipAddresses) { try { if ((m_persistIPv4 && InetAddressUtils.isIPv4Address(ipAddress)) || (m_persistIPv6 && InetAddressUtils.isIPv6Address(ipAddress))) { InetAddress inetAddress = InetAddress.getByName(ipAddress); if (!inetAddress.isLoopbackAddress()) { RequisitionInterface requisitionInterface = new RequisitionInterface(); requisitionInterface.setIpAddr(ipAddress); // the first one will be primary if (firstInterface) { requisitionInterface.setSnmpPrimary(PrimaryType.PRIMARY); for (String service : m_virtualMachineServices) { requisitionInterface.insertMonitoredService(new RequisitionMonitoredService(service.trim())); } firstInterface = false; } else { requisitionInterface.setSnmpPrimary(PrimaryType.SECONDARY); } requisitionInterface.setManaged(Boolean.TRUE); requisitionInterface.setStatus(Integer.valueOf(1)); requisitionNode.putInterface(requisitionInterface); } } } catch (UnknownHostException unknownHostException) { logger.warn("Invalid IP address '{}'", unknownHostException.getMessage()); } } } else { if (managedEntity instanceof HostSystem) { boolean reachableInterfaceFound = false, firstInterface = true; List<RequisitionInterface> requisitionInterfaceList = new ArrayList<RequisitionInterface>(); RequisitionInterface primaryInterfaceCandidate = null; // add all given interfaces for (String ipAddress : ipAddresses) { try { if ((m_persistIPv4 && InetAddressUtils.isIPv4Address(ipAddress)) || (m_persistIPv6 && InetAddressUtils.isIPv6Address(ipAddress))) { InetAddress inetAddress = InetAddress.getByName(ipAddress); if (!inetAddress.isLoopbackAddress()) { RequisitionInterface requisitionInterface = new RequisitionInterface(); requisitionInterface.setIpAddr(ipAddress); if (firstInterface) { primaryInterfaceCandidate = requisitionInterface; firstInterface = false; } if (!reachableInterfaceFound && reachableCimService(vmwareViJavaAccess, (HostSystem) managedEntity, ipAddress)) { primaryInterfaceCandidate = requisitionInterface; reachableInterfaceFound = true; } requisitionInterface.setManaged(Boolean.TRUE); requisitionInterface.setStatus(Integer.valueOf(1)); 
requisitionInterface.setSnmpPrimary(PrimaryType.SECONDARY); requisitionInterfaceList.add(requisitionInterface); } } } catch (UnknownHostException unknownHostException) { logger.warn("Invalid IP address '{}'", unknownHostException.getMessage()); } } if (primaryInterfaceCandidate != null) { if (reachableInterfaceFound) { logger.warn("Found reachable primary interface '{}'", primaryInterfaceCandidate.getIpAddr()); } else { logger.warn("Only non-reachable interfaces found, using first one for primary interface '{}'", primaryInterfaceCandidate.getIpAddr()); } primaryInterfaceCandidate.setSnmpPrimary(PrimaryType.PRIMARY); for (String service : m_hostSystemServices) { if (reachableInterfaceFound || !"VMwareCim-HostSystem".equals(service)) { primaryInterfaceCandidate.insertMonitoredService(new RequisitionMonitoredService(service.trim())); } } } else { logger.warn("No primary interface found"); } for (RequisitionInterface requisitionInterface : requisitionInterfaceList) { requisitionNode.putInterface(requisitionInterface); } } else { logger.error("Undefined type of managedEntity '{}'", managedEntity.getMOR().getType()); return null; } } /* * For now we use displaycategory, notifycategory and pollercategory for storing * the vcenter Ip address, the username and the password */ String powerState = "unknown"; StringBuffer vmwareTopologyInfo = new StringBuffer(); // putting parents to topology information ManagedEntity parentEntity = managedEntity.getParent(); // TODO: Is this the best algorithm to build the topology info ? // TODO: How to deal with a big list of networks on the ESX Hosts ? do { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { if (parentEntity != null && parentEntity.getMOR() != null) { vmwareTopologyInfo.append(parentEntity.getMOR().getVal() + "/" + URLEncoder.encode(parentEntity.getName(), "UTF-8")); } else { logger.warn("Can't add topologyInformation because either the parentEntity or the MOR is null for " + managedEntity.getName()); } } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } parentEntity = parentEntity == null ? 
null : parentEntity.getParent(); } while (parentEntity != null); if (managedEntity instanceof HostSystem) { HostSystem hostSystem = (HostSystem) managedEntity; HostRuntimeInfo hostRuntimeInfo = hostSystem.getRuntime(); if (hostRuntimeInfo == null) { logger.debug("hostRuntimeInfo=null"); } else { HostSystemPowerState hostSystemPowerState = hostRuntimeInfo.getPowerState(); if (hostSystemPowerState == null) { logger.debug("hostSystemPowerState=null"); } else { powerState = hostSystemPowerState.toString(); } } try { for (Datastore datastore : hostSystem.getDatastores()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(datastore.getMOR().getVal() + "/" + URLEncoder.encode(datastore.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve datastores for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } try { for (Network network : hostSystem.getNetworks()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(network.getMOR().getVal() + "/" + URLEncoder.encode(network.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve networks for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } } else { if (managedEntity instanceof VirtualMachine) { VirtualMachine virtualMachine = (VirtualMachine) managedEntity; VirtualMachineRuntimeInfo virtualMachineRuntimeInfo = virtualMachine.getRuntime(); if (virtualMachineRuntimeInfo == null) { logger.debug("virtualMachineRuntimeInfo=null"); } else { VirtualMachinePowerState virtualMachinePowerState = virtualMachineRuntimeInfo.getPowerState(); if (virtualMachinePowerState == null) { logger.debug("virtualMachinePowerState=null"); } else { powerState = virtualMachinePowerState.toString(); } } try { for (Datastore datastore : virtualMachine.getDatastores()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(datastore.getMOR().getVal() + "/" + URLEncoder.encode(datastore.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve datastores for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } try { for (Network network : virtualMachine.getNetworks()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(network.getMOR().getVal() + "/" + URLEncoder.encode(network.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve networks for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(virtualMachine.getRuntime().getHost().getVal() + "/" + URLEncoder.encode(m_hostSystemMap.get(virtualMachine.getRuntime().getHost().getVal()), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } else { logger.error("Undefined type of 
managedEntity '{}'", managedEntity.getMOR().getType()); return null; } } RequisitionAsset requisitionAssetHostname = new RequisitionAsset("vmwareManagementServer", m_hostname); requisitionNode.putAsset(requisitionAssetHostname); RequisitionAsset requisitionAssetType = new RequisitionAsset("vmwareManagedEntityType", (managedEntity instanceof HostSystem ? "HostSystem" : "VirtualMachine")); requisitionNode.putAsset(requisitionAssetType); RequisitionAsset requisitionAssetId = new RequisitionAsset("vmwareManagedObjectId", managedEntity.getMOR().getVal()); requisitionNode.putAsset(requisitionAssetId); RequisitionAsset requisitionAssetTopologyInfo = new RequisitionAsset("vmwareTopologyInfo", vmwareTopologyInfo.toString()); requisitionNode.putAsset(requisitionAssetTopologyInfo); RequisitionAsset requisitionAssetState = new RequisitionAsset("vmwareState", powerState); requisitionNode.putAsset(requisitionAssetState); requisitionNode.putCategory(new RequisitionCategory("VMware" + apiVersion)); return requisitionNode; } /** * Builds the complete requisition object. * * @return the requisition object */ private Requisition buildVMwareRequisition() { VmwareViJavaAccess vmwareViJavaAccess = null; // for now, set the foreign source to the specified vcenter host m_requisition = new Requisition(m_foreignSource); if ((m_username == null || "".equals(m_username)) || (m_password == null || "".equals(m_password))) { try { vmwareViJavaAccess = new VmwareViJavaAccess(m_hostname); } catch (MarshalException e) { logger.warn("Error initialising VMware connection to '{}': '{}'", m_hostname, e.getMessage()); return null; } catch (ValidationException e) { logger.warn("Error initialising VMware connection to '{}': '{}'", m_hostname, e.getMessage()); return null; } catch (IOException e) { logger.warn("Error initialising VMware connection to '{}': '{}'", m_hostname, e.getMessage()); return null; } } else { vmwareViJavaAccess = new VmwareViJavaAccess(m_hostname, m_username, m_password); } try { vmwareViJavaAccess.connect(); } catch (MalformedURLException e) { logger.warn("Error connecting VMware management server '{}': '{}'", m_hostname, e.getMessage()); return null; } catch (RemoteException e) { logger.warn("Error connecting VMware management server '{}': '{}'", m_hostname, e.getMessage()); return null; } try { int apiVersion = vmwareViJavaAccess.getMajorApiVersion(); // get services to be added to host systems // m_hostSystemServices = getHostSystemServices(apiVersion); if (m_args != null && m_args.get(VMWARE_HOSTSYSTEM_SERVICES) != null) { m_hostSystemServices = m_args.get(VMWARE_HOSTSYSTEM_SERVICES).split(","); } else { m_hostSystemServices = new String[]{"VMware-ManagedEntity", "VMware-HostSystem", "VMwareCim-HostSystem"}; } // get services to be added to virtual machines // m_virtualMachineServices = getVirtualMachineServices(apiVersion); if (m_args != null && m_args.get(VMWARE_VIRTUALMACHINE_SERVICES) != null) { m_virtualMachineServices = m_args.get(VMWARE_VIRTUALMACHINE_SERVICES).split(","); } else { m_virtualMachineServices = new String[]{"VMware-ManagedEntity", "VMware-VirtualMachine"}; } iterateHostSystems(vmwareViJavaAccess, apiVersion); iterateVirtualMachines(vmwareViJavaAccess, apiVersion); } catch (RemoteException e) { logger.warn("Error retrieving managed objects from VMware management server '{}': '{}'", m_hostname, e.getMessage()); return null; } finally { vmwareViJavaAccess.disconnect(); } return m_requisition; } /** * Checks whether the host system should be imported into the requisition. 
* * @param hostSystem the system to check * @return true for import, false otherwise */ private boolean checkHostPowerState(HostSystem hostSystem) { String powerState = hostSystem.getRuntime().getPowerState().toString(); if ("poweredOn".equals(powerState) && m_importHostPoweredOn) { return true; } if ("poweredOff".equals(powerState) && m_importHostPoweredOff) { return true; } if ("standBy".equals(powerState) && m_importHostStandBy) { return true; } if ("unknown".equals(powerState) && m_importHostUnknown) { return true; } return false; } /** * Checks whether the virtual machine should be imported into the requisition. * * @param virtualMachine the system to check * @return true for import, false otherwise */ private boolean checkVMPowerState(VirtualMachine virtualMachine) { String powerState = virtualMachine.getRuntime().getPowerState().toString(); if ("poweredOn".equals(powerState) && m_importVMPoweredOn) { return true; } if ("poweredOff".equals(powerState) && m_importVMPoweredOff) { return true; } if ("suspended".equals(powerState) && m_importVMSuspended) { return true; } return false; } /** * Iterates through the host systems and adds them to the requisition object. * * @param vmwareViJavaAccess the access/connection to use * @throws RemoteException */ private void iterateHostSystems(VmwareViJavaAccess vmwareViJavaAccess, int apiVersion) throws RemoteException { ManagedEntity[] hostSystems; // search for host systems (esx hosts) hostSystems = vmwareViJavaAccess.searchManagedEntities("HostSystem"); if (hostSystems != null) { for (ManagedEntity managedEntity : hostSystems) { HostSystem hostSystem = (HostSystem) managedEntity; m_hostSystemMap.put(hostSystem.getMOR().getVal(), hostSystem.getName()); // check for correct key/value-pair if (checkHostPowerState(hostSystem) && checkForAttribute(hostSystem)) { logger.debug("Adding Host System '{}'", hostSystem.getName()); // iterate over all service console networks and add interface Ip addresses TreeSet<String> ipAddresses = vmwareViJavaAccess.getHostSystemIpAddresses(hostSystem); // create the new node... RequisitionNode node = createRequisitionNode(ipAddresses, hostSystem, apiVersion, vmwareViJavaAccess); // ...and add it to the requisition if (node != null && m_persistHosts) { m_requisition.insertNode(node); } } } } } /** * Iterates through the virtual machines and adds them to the requisition object. 
* * @param vmwareViJavaAccess the access/connection to use * @throws RemoteException */ private void iterateVirtualMachines(VmwareViJavaAccess vmwareViJavaAccess, int apiVersion) throws RemoteException { ManagedEntity[] virtualMachines; // search for all virtual machines virtualMachines = vmwareViJavaAccess.searchManagedEntities("VirtualMachine"); if (virtualMachines != null) { // check for correct key/value-pair for (ManagedEntity managedEntity : virtualMachines) { VirtualMachine virtualMachine = (VirtualMachine) managedEntity; // import only when the specified attributes is set if (checkVMPowerState(virtualMachine) && checkForAttribute(virtualMachine)) { logger.debug("Adding Virtual Machine '{}'", virtualMachine.getName()); LinkedHashSet<String> ipAddresses = new LinkedHashSet<String>(); // add the Ip address reported by VMware tools, this should be primary ipAddresses.add(virtualMachine.getGuest().getIpAddress()); // if possible, iterate over all virtual networks networks and add interface Ip addresses if (virtualMachine.getGuest().getNet() != null) { for (GuestNicInfo guestNicInfo : virtualMachine.getGuest().getNet()) { if (guestNicInfo.getIpAddress() != null) { for (String ipAddress : guestNicInfo.getIpAddress()) { ipAddresses.add(ipAddress); } } } } // create the new node... RequisitionNode node = createRequisitionNode(ipAddresses, virtualMachine, apiVersion, vmwareViJavaAccess); // add the operating system if (virtualMachine.getGuest().getGuestFullName() != null) { RequisitionAsset requisitionAsset = new RequisitionAsset("operatingSystem", virtualMachine.getGuest().getGuestFullName()); node.putAsset(requisitionAsset); } // ...and add it to the requisition if (node != null && m_persistVMs) { m_requisition.insertNode(node); } } } } } /** * Checks whether an attribute/value is defined by a managed entity. * * <p>The old implementation allows the user to specify only one parameter.</p> * <p>The new implementation allows the user to use a regular expression for the value:</p> * <ul><li>key=location&value=~North.*</li></ul> * <p>As an alternative, now it is possible to specify several parameters on the query. * The rule is to add an underscore character ('_') before the patameter's name and use similar rules for the value:</p> * <ul><li>_location=~North.*</li></ul> * <p>With the new parameter specification, it is possible to pass several attributes. The managed entity must match * all of them to be accepted.</p> * <p>The new specification will take precedence over the old specification. If the new specification is not being used, * the old one will be processed. Otherwise, the new one will be processed, and the old one will be ignored. 
There is no * way to use both at the same time.</p> * * @param managedEntity the managed entity to check * @return true if present and value is equal, false otherwise * @throws RemoteException */ private boolean checkForAttribute(ManagedEntity managedEntity) throws RemoteException { Map<String,String> attribMap = getCustomAttributes(managedEntity); Set<String> keySet = new TreeSet<String>(); for (String k : m_args.keySet()) { if (k.startsWith("_")) { keySet.add(k); } } if (!keySet.isEmpty()) { boolean ok = true; for (String keyName : keySet) { String attribValue = attribMap.get(StringUtils.removeStart(keyName, "_")); if (attribValue == null) { ok = false; } else { String keyValue = m_args.get(keyName); if (keyValue.startsWith("~")) { ok = ok && attribValue.matches(StringUtils.removeStart(keyValue, "~")); } else { ok = ok && attribValue.equals(keyValue); } } } return ok; } String key = m_args.get("key"); String value = m_args.get("value"); // if key/value is not set, return true if (key == null && value == null) { return true; } // if only key or value is set, return false if (key == null || value == null) { return false; } // now search for the correct key/value pair String attribValue = attribMap.get(key); if (attribValue != null) { if (value.startsWith("~")) { return attribValue.matches(StringUtils.removeStart(value, "~")); } else { return attribValue.equals(value); } } return false; } /** * Gets the custom attributes. * * @param entity the entity * @return the custom attributes * @throws RemoteException the remote exception */ private Map<String,String> getCustomAttributes(ManagedEntity entity) throws RemoteException { final Map<String,String> attributes = new TreeMap<String,String>(); CustomFieldDef[] defs = entity.getAvailableField(); CustomFieldValue[] values = entity.getCustomValue(); for (int i = 0; defs != null && i < defs.length; i++) { String key = defs[i].getName(); int targetIndex = defs[i].getKey(); for (int j = 0; values != null && j < values.length; j++) { if (targetIndex == values[j].getKey()) { attributes.put(key, ((CustomFieldStringValue) values[j]).getValue()); } } } return attributes; } /** * {@inheritDoc} * <p/> * Creates a ByteArrayInputStream implementation of InputStream of the XML * marshaled version of the Requisition class. Calling close on this stream * is safe. */ @Override public InputStream getInputStream() throws IOException { InputStream stream = null; try { Requisition curReq = null; try { ForeignSourceRepository repository = BeanUtils.getBean("daoContext", "deployedForeignSourceRepository", ForeignSourceRepository.class); if (repository != null) { curReq = repository.getRequisition(m_foreignSource); } } catch (Exception e) { logger.warn("Can't retrieve requisition {}", m_foreignSource); } Requisition newReq = buildVMwareRequisition(); if (curReq == null) { if (newReq == null) { // FIXME Is this correct ? This is the old behavior newReq = new Requisition(m_foreignSource); } } else { if (newReq == null) { // If there is a requisition and the vCenter is not responding for some reason, it is better to use the old requisition, // instead of returning an empty one, which can cause the lost of all the nodes from the DB. newReq = curReq; } else { // If there is already a requisition, retrieve the custom assets and categories from the old one, and put them on the new one. // The VMWare related assets and categories will be preserved. 
for (RequisitionNode newNode : newReq.getNodes()) { for (RequisitionNode curNode : curReq.getNodes()) { if (newNode.getForeignId().equals(curNode.getForeignId())) { // Add existing custom assets for (RequisitionAsset asset : curNode.getAssets()) { if (!asset.getName().startsWith("vmware")) { newNode.putAsset(asset); } } // Add existing custom categories for (RequisitionCategory cat : curNode.getCategories()) { if (!cat.getName().startsWith("VMWare")) { newNode.putCategory(cat); } } // Add existing custom services /* * For each interface on the new requisition, * - Retrieve the list of custom services from the corresponding interface on the existing requisition, * matching the interface by the IP address * - If the list of services is not empty, add them to the new interface */ for (RequisitionInterface intf : curNode.getInterfaces()) { List<RequisitionMonitoredService> services = getManualyConfiguredServices(intf); if (!services.isEmpty()) { RequisitionInterface newIntf = getRequisitionInterface(newNode, intf.getIpAddr()); if (newIntf != null) { newIntf.getMonitoredServices().addAll(services); } } } } } } } } stream = new ByteArrayInputStream(jaxBMarshal(newReq).getBytes()); } catch (Throwable e) { logger.warn("Problem getting input stream: '{}'", e); throw new IOExceptionWithCause("Problem getting input stream: " + e, e); } return stream; } private RequisitionInterface getRequisitionInterface(RequisitionNode node, String ipAddr) { for (RequisitionInterface intf : node.getInterfaces()) { if (ipAddr.equals(intf.getIpAddr())) { return intf; } } return null; } private List<RequisitionMonitoredService> getManualyConfiguredServices(RequisitionInterface intf) { List<RequisitionMonitoredService> services = new ArrayList<RequisitionMonitoredService>(); for (RequisitionMonitoredService svc : intf.getMonitoredServices()) { boolean found = false; for (String svcName : m_hostSystemServices) { if (svcName.trim().equals(svc.getServiceName())) { found = true; continue; } } for (String svcName : m_virtualMachineServices) { if (svcName.trim().equals(svc.getServiceName())) { found = true; continue; } } if (!found) { services.add(svc); } } return services; } /** * Utility to marshal the Requisition class into XML. * * @param r the requisition object * @return a String of XML encoding the Requisition class * @throws javax.xml.bind.JAXBException */ private String jaxBMarshal(Requisition r) throws JAXBException { return JaxbUtils.marshal(r); } }
integrations/opennms-vmware/src/main/java/org/opennms/netmgt/provision/service/vmware/VmwareRequisitionUrlConnection.java
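The constructor of the VmwareRequisitionUrlConnection shown above documents its URL scheme in the MalformedURLException message (vmware://username:password@host[/foreign-source]?keyA=valueA;keyB=valueB;...) and drives its behaviour entirely from the query arguments. The sketch below only assembles example URLs from parameter names that appear in that class; the vCenter host name, credentials, foreign-source value and custom attribute names ("location", "stage") are placeholders, and actually opening such a URL presumably requires OpenNMS to register its vmware protocol handler, which is not part of this file.

// Illustrative only: example requisition URLs built from query parameters that
// VmwareRequisitionUrlConnection recognises. Host, credentials, "vcenter-lab" and the
// custom attribute names are placeholders, not values taken from the code above.
public class VmwareRequisitionUrlExamples {
    public static void main(String[] args) {
        // Defaults: import powered-on hosts and VMs only; with an empty path the
        // foreign source falls back to "vmware-" + hostname.
        String basic = "vmware://admin:secret@vcenter.example.org";

        // Explicit foreign source, import hosts in every power state, skip virtual machines.
        String hostsOnly = "vmware://admin:secret@vcenter.example.org/vcenter-lab"
                + "?importHostAll=true;importHostOnly=true";

        // Old-style attribute filter: a single key/value pair; a leading '~' switches
        // the comparison from string equality to a regular-expression match.
        String keyValueFilter = "vmware://admin:secret@vcenter.example.org/vcenter-lab"
                + "?key=location;value=~North.*";

        // New-style filters: every parameter starting with '_' names a custom attribute
        // and all of them must match; service lists can also be overridden per entity type.
        String multiFilter = "vmware://admin:secret@vcenter.example.org/vcenter-lab"
                + "?_location=~North.*;_stage=production;virtualMachineServices=ICMP,SNMP";

        for (String url : new String[]{basic, hostsOnly, keyValueFilter, multiFilter}) {
            System.out.println(url);
        }
    }
}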
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2012 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <[email protected]> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.netmgt.provision.service.vmware; import com.vmware.vim25.*; import com.vmware.vim25.mo.*; import org.apache.commons.io.IOExceptionWithCause; import org.apache.commons.lang.StringUtils; import org.apache.http.conn.util.InetAddressUtils; import org.exolab.castor.xml.MarshalException; import org.exolab.castor.xml.ValidationException; import org.opennms.core.utils.BeanUtils; import org.opennms.core.utils.url.GenericURLConnection; import org.opennms.core.xml.JaxbUtils; import org.opennms.netmgt.model.PrimaryType; import org.opennms.netmgt.provision.persist.ForeignSourceRepository; import org.opennms.netmgt.provision.persist.requisition.*; import org.opennms.protocols.vmware.VmwareViJavaAccess; import org.sblim.wbem.cim.CIMException; import org.sblim.wbem.cim.CIMObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.xml.bind.JAXBException; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.*; import java.rmi.RemoteException; import java.util.*; /** * The Class VmwareRequisitionUrlConnection * * <p>This class is used for the automtic requisition of Vmware related entities.</p> * * @author Christian Pape <[email protected]> * @author Alejandro Galue <[email protected]> */ public class VmwareRequisitionUrlConnection extends GenericURLConnection { /** * the logger */ private Logger logger = LoggerFactory.getLogger(VmwareRequisitionUrlConnection.class); private static final String VMWARE_HOSTSYSTEM_SERVICES = "hostSystemServices"; private static final String VMWARE_VIRTUALMACHINE_SERVICES = "virtualMachineServices"; private String[] m_hostSystemServices; private String[] m_virtualMachineServices; private String m_hostname = null; private String m_username = null; private String m_password = null; private String m_foreignSource = null; private boolean m_importVMPoweredOn = true; private boolean m_importVMPoweredOff = false; private boolean m_importVMSuspended = false; private boolean m_importHostPoweredOn = true; private boolean m_importHostPoweredOff = false; private boolean m_importHostStandBy = false; private boolean m_importHostUnknown = false; private boolean m_persistIPv4 = true; private boolean m_persistIPv6 = true; private boolean m_persistVMs = true; private boolean m_persistHosts = true; /* * Host system managedObjectId to name mapping 
*/ private Map<String, String> m_hostSystemMap = new HashMap<String, String>(); /** * the query arguments */ private Map<String, String> m_args = null; /** * requisition object */ private Requisition m_requisition = null; /** * Constructor for creating an instance of this class. * * @param url the URL to use * @throws MalformedURLException * @throws RemoteException */ public VmwareRequisitionUrlConnection(URL url) throws MalformedURLException, RemoteException { super(url); m_hostname = url.getHost(); m_username = getUsername(); m_password = getPassword(); m_args = getQueryArgs(); boolean importVMOnly = queryParameter("importVMOnly", false); boolean importHostOnly = queryParameter("importHostOnly", false); if (importHostOnly && importVMOnly) { throw new MalformedURLException("importHostOnly and importVMOnly can't be true simultaneously"); } if (importHostOnly) { m_persistVMs = false; } if (importVMOnly) { m_persistHosts = false; } boolean importIPv4Only = queryParameter("importIPv4Only", false); boolean importIPv6Only = queryParameter("importIPv6Only", false); if (importIPv4Only && importIPv6Only) { throw new MalformedURLException("importIPv4Only and importIPv6Only can't be true simultaneously"); } if (importIPv4Only) { m_persistIPv6 = false; } if (importIPv6Only) { m_persistIPv4 = false; } m_importVMPoweredOn = queryParameter("importVMPoweredOn", true); m_importVMPoweredOff = queryParameter("importVMPoweredOff", false); m_importVMSuspended = queryParameter("importVMSuspended", false); m_importHostPoweredOn = queryParameter("importHostPoweredOn", true); m_importHostPoweredOff = queryParameter("importHostPoweredOff", false); m_importHostStandBy = queryParameter("importHostStandBy", false); m_importHostUnknown = queryParameter("importHostUnknown", false); if (queryParameter("importHostAll", false)) { m_importHostPoweredOn = true; m_importHostPoweredOff = true; m_importHostStandBy = true; m_importHostUnknown = true; } if (queryParameter("importVMAll", false)) { m_importVMPoweredOff = true; m_importVMPoweredOn = true; m_importVMSuspended = true; } String path = url.getPath(); path = path.replaceAll("^/", ""); path = path.replaceAll("/$", ""); String pathElements[] = path.split("/"); if (pathElements.length == 1) { if ("".equals(pathElements[0])) { m_foreignSource = "vmware-" + m_hostname; } else { m_foreignSource = pathElements[0]; } } else { throw new MalformedURLException("Error processing path element of URL (vmware://username:password@host[/foreign-source]?keyA=valueA;keyB=valueB;...)"); } } /** * Returns a boolean representation for a given on/off parameter. * * @param key the parameter's name * @param defaultValue the default value to use * @return the boolean value */ private boolean queryParameter(String key, boolean defaultValue) { if (m_args.get(key) == null) { return defaultValue; } else { String value = m_args.get(key).toLowerCase(); return ("yes".equals(value) || "true".equals(value) || "on".equals(value) || "1".equals(value)); } } @Override public void connect() throws IOException { // To change body of implemented methods use File | Settings | File // Templates. 
} private boolean reachableCimService(VmwareViJavaAccess vmwareViJavaAccess, HostSystem hostSystem, String ipAddress) { if (!vmwareViJavaAccess.setTimeout(3000)) { logger.warn("Error setting connection timeout"); } List<CIMObject> cimObjects = null; try { cimObjects = vmwareViJavaAccess.queryCimObjects(hostSystem, "CIM_NumericSensor", ipAddress); } catch (ConnectException e) { return false; } catch (RemoteException e) { return false; } catch (CIMException e) { return false; } return cimObjects != null; } /** * Creates a requisition node for the given managed entity and type. * * @param ipAddresses the set of Ip addresses * @param managedEntity the managed entity * @return the generated requisition node */ private RequisitionNode createRequisitionNode(Set<String> ipAddresses, ManagedEntity managedEntity, int apiVersion, VmwareViJavaAccess vmwareViJavaAccess) { RequisitionNode requisitionNode = new RequisitionNode(); // Setting the node label requisitionNode.setNodeLabel(managedEntity.getName()); // Foreign Id consisting of managed entity Id requisitionNode.setForeignId(managedEntity.getMOR().getVal()); /* * Original version: * * Foreign Id consisting of VMware management server's hostname and managed entity id * * requisitionNode.setForeignId(m_hostname + "/" + managedEntity.getMOR().getVal()); */ if (managedEntity instanceof VirtualMachine) { boolean firstInterface = true; // add all given interfaces for (String ipAddress : ipAddresses) { try { if ((m_persistIPv4 && InetAddressUtils.isIPv4Address(ipAddress)) || (m_persistIPv6 && InetAddressUtils.isIPv6Address(ipAddress))) { InetAddress inetAddress = InetAddress.getByName(ipAddress); if (!inetAddress.isLoopbackAddress()) { RequisitionInterface requisitionInterface = new RequisitionInterface(); requisitionInterface.setIpAddr(ipAddress); // the first one will be primary if (firstInterface) { requisitionInterface.setSnmpPrimary(PrimaryType.PRIMARY); for (String service : m_virtualMachineServices) { requisitionInterface.insertMonitoredService(new RequisitionMonitoredService(service.trim())); } firstInterface = false; } else { requisitionInterface.setSnmpPrimary(PrimaryType.SECONDARY); } requisitionInterface.setManaged(Boolean.TRUE); requisitionInterface.setStatus(Integer.valueOf(1)); requisitionNode.putInterface(requisitionInterface); } } } catch (UnknownHostException unknownHostException) { logger.warn("Invalid IP address '{}'", unknownHostException.getMessage()); } } } else { if (managedEntity instanceof HostSystem) { boolean reachableInterfaceFound = false, firstInterface = true; List<RequisitionInterface> requisitionInterfaceList = new ArrayList<RequisitionInterface>(); RequisitionInterface primaryInterfaceCandidate = null; // add all given interfaces for (String ipAddress : ipAddresses) { try { if ((m_persistIPv4 && InetAddressUtils.isIPv4Address(ipAddress)) || (m_persistIPv6 && InetAddressUtils.isIPv6Address(ipAddress))) { InetAddress inetAddress = InetAddress.getByName(ipAddress); if (!inetAddress.isLoopbackAddress()) { RequisitionInterface requisitionInterface = new RequisitionInterface(); requisitionInterface.setIpAddr(ipAddress); if (firstInterface) { primaryInterfaceCandidate = requisitionInterface; firstInterface = false; } if (!reachableInterfaceFound && reachableCimService(vmwareViJavaAccess, (HostSystem) managedEntity, ipAddress)) { primaryInterfaceCandidate = requisitionInterface; reachableInterfaceFound = true; } requisitionInterface.setManaged(Boolean.TRUE); requisitionInterface.setStatus(Integer.valueOf(1)); 
requisitionInterface.setSnmpPrimary(PrimaryType.SECONDARY); requisitionInterfaceList.add(requisitionInterface); } } } catch (UnknownHostException unknownHostException) { logger.warn("Invalid IP address '{}'", unknownHostException.getMessage()); } } if (primaryInterfaceCandidate != null) { if (reachableInterfaceFound) { logger.warn("Found reachable primary interface '{}'", primaryInterfaceCandidate.getIpAddr()); } else { logger.warn("Only non-reachable interfaces found, using first one for primary interface '{}'", primaryInterfaceCandidate.getIpAddr()); } primaryInterfaceCandidate.setSnmpPrimary(PrimaryType.PRIMARY); for (String service : m_hostSystemServices) { if (reachableInterfaceFound || !"VMwareCim-HostSystem".equals(service)) { primaryInterfaceCandidate.insertMonitoredService(new RequisitionMonitoredService(service.trim())); } } } else { logger.warn("No primary interface found"); } for (RequisitionInterface requisitionInterface : requisitionInterfaceList) { requisitionNode.putInterface(requisitionInterface); } } else { logger.error("Undefined type of managedEntity '{}'", managedEntity.getMOR().getType()); return null; } } /* * For now we use displaycategory, notifycategory and pollercategory for storing * the vcenter Ip address, the username and the password */ String powerState = "unknown"; StringBuffer vmwareTopologyInfo = new StringBuffer(); // putting parents to topology information ManagedEntity parentEntity = managedEntity.getParent(); // TODO: Is this the best algorithm to build the topology info ? // TODO: How to deal with a big list of networks on the ESX Hosts ? do { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { if (parentEntity != null && parentEntity.getMOR() != null) { vmwareTopologyInfo.append(parentEntity.getMOR().getVal() + "/" + URLEncoder.encode(parentEntity.getName(), "UTF-8")); } else { logger.warn("Can't add topologyInformation because either the parentEntity or the MOR is null for " + managedEntity.getName()); } } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } parentEntity = parentEntity == null ? 
null : parentEntity.getParent(); } while (parentEntity != null); if (managedEntity instanceof HostSystem) { HostSystem hostSystem = (HostSystem) managedEntity; HostRuntimeInfo hostRuntimeInfo = hostSystem.getRuntime(); if (hostRuntimeInfo == null) { logger.debug("hostRuntimeInfo=null"); } else { HostSystemPowerState hostSystemPowerState = hostRuntimeInfo.getPowerState(); if (hostSystemPowerState == null) { logger.debug("hostSystemPowerState=null"); } else { powerState = hostSystemPowerState.toString(); } } try { for (Datastore datastore : hostSystem.getDatastores()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(datastore.getMOR().getVal() + "/" + URLEncoder.encode(datastore.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve datastores for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } try { for (Network network : hostSystem.getNetworks()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(network.getMOR().getVal() + "/" + URLEncoder.encode(network.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve networks for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } } else { if (managedEntity instanceof VirtualMachine) { VirtualMachine virtualMachine = (VirtualMachine) managedEntity; VirtualMachineRuntimeInfo virtualMachineRuntimeInfo = virtualMachine.getRuntime(); if (virtualMachineRuntimeInfo == null) { logger.debug("virtualMachineRuntimeInfo=null"); } else { VirtualMachinePowerState virtualMachinePowerState = virtualMachineRuntimeInfo.getPowerState(); if (virtualMachinePowerState == null) { logger.debug("virtualMachinePowerState=null"); } else { powerState = virtualMachinePowerState.toString(); } } try { for (Datastore datastore : virtualMachine.getDatastores()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(datastore.getMOR().getVal() + "/" + URLEncoder.encode(datastore.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve datastores for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } try { for (Network network : virtualMachine.getNetworks()) { if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(network.getMOR().getVal() + "/" + URLEncoder.encode(network.getSummary().getName(), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } } catch (RemoteException e) { logger.warn("Cannot retrieve networks for managedEntity '{}': '{}'", managedEntity.getMOR().getVal(), e.getMessage()); } if (vmwareTopologyInfo.length() > 0) { vmwareTopologyInfo.append(", "); } try { vmwareTopologyInfo.append(virtualMachine.getRuntime().getHost().getVal() + "/" + URLEncoder.encode(m_hostSystemMap.get(virtualMachine.getRuntime().getHost().getVal()), "UTF-8")); } catch (UnsupportedEncodingException e) { logger.warn("Unsupported encoding '{}'", e.getMessage()); } } else { logger.error("Undefined type of 
managedEntity '{}'", managedEntity.getMOR().getType()); return null; } } RequisitionAsset requisitionAssetHostname = new RequisitionAsset("vmwareManagementServer", m_hostname); requisitionNode.putAsset(requisitionAssetHostname); RequisitionAsset requisitionAssetType = new RequisitionAsset("vmwareManagedEntityType", (managedEntity instanceof HostSystem ? "HostSystem" : "VirtualMachine")); requisitionNode.putAsset(requisitionAssetType); RequisitionAsset requisitionAssetId = new RequisitionAsset("vmwareManagedObjectId", managedEntity.getMOR().getVal()); requisitionNode.putAsset(requisitionAssetId); RequisitionAsset requisitionAssetTopologyInfo = new RequisitionAsset("vmwareTopologyInfo", vmwareTopologyInfo.toString()); requisitionNode.putAsset(requisitionAssetTopologyInfo); RequisitionAsset requisitionAssetState = new RequisitionAsset("vmwareState", powerState); requisitionNode.putAsset(requisitionAssetState); requisitionNode.putCategory(new RequisitionCategory("VMware" + apiVersion)); return requisitionNode; } /** * Builds the complete requisition object. * * @return the requisition object */ private Requisition buildVMwareRequisition() { VmwareViJavaAccess vmwareViJavaAccess = null; // for now, set the foreign source to the specified vcenter host m_requisition = new Requisition(m_foreignSource); if ((m_username == null || "".equals(m_username)) || (m_password == null || "".equals(m_password))) { try { vmwareViJavaAccess = new VmwareViJavaAccess(m_hostname); } catch (MarshalException e) { logger.warn("Error initialising VMware connection to '{}': '{}'", m_hostname, e.getMessage()); return null; } catch (ValidationException e) { logger.warn("Error initialising VMware connection to '{}': '{}'", m_hostname, e.getMessage()); return null; } catch (IOException e) { logger.warn("Error initialising VMware connection to '{}': '{}'", m_hostname, e.getMessage()); return null; } } else { vmwareViJavaAccess = new VmwareViJavaAccess(m_hostname, m_username, m_password); } try { vmwareViJavaAccess.connect(); } catch (MalformedURLException e) { logger.warn("Error connecting VMware management server '{}': '{}'", m_hostname, e.getMessage()); return null; } catch (RemoteException e) { logger.warn("Error connecting VMware management server '{}': '{}'", m_hostname, e.getMessage()); return null; } try { int apiVersion = vmwareViJavaAccess.getMajorApiVersion(); // get services to be added to host systems // m_hostSystemServices = getHostSystemServices(apiVersion); if (m_args != null && m_args.get(VMWARE_HOSTSYSTEM_SERVICES) != null) { m_hostSystemServices = m_args.get(VMWARE_HOSTSYSTEM_SERVICES).split(","); } else { m_hostSystemServices = new String[]{"VMware-ManagedEntity", "VMware-HostSystem", "VMwareCim-HostSystem"}; } // get services to be added to virtual machines // m_virtualMachineServices = getVirtualMachineServices(apiVersion); if (m_args != null && m_args.get(VMWARE_VIRTUALMACHINE_SERVICES) != null) { m_virtualMachineServices = m_args.get(VMWARE_VIRTUALMACHINE_SERVICES).split(","); } else { m_virtualMachineServices = new String[]{"VMware-ManagedEntity", "VMware-VirtualMachine"}; } iterateHostSystems(vmwareViJavaAccess, apiVersion); iterateVirtualMachines(vmwareViJavaAccess, apiVersion); } catch (RemoteException e) { logger.warn("Error retrieving managed objects from VMware management server '{}': '{}'", m_hostname, e.getMessage()); return null; } finally { vmwareViJavaAccess.disconnect(); } return m_requisition; } /** * Checks whether the host system should be imported into the requisition. 
* * @param hostSystem the system to check * @return true for import, false otherwise */ private boolean checkHostPowerState(HostSystem hostSystem) { String powerState = hostSystem.getRuntime().getPowerState().toString(); if ("poweredOn".equals(powerState) && m_importHostPoweredOn) { return true; } if ("poweredOff".equals(powerState) && m_importHostPoweredOff) { return true; } if ("standBy".equals(powerState) && m_importHostStandBy) { return true; } if ("unknown".equals(powerState) && m_importHostUnknown) { return true; } return false; } /** * Checks whether the virtual machine should be imported into the requisition. * * @param virtualMachine the system to check * @return true for import, false otherwise */ private boolean checkVMPowerState(VirtualMachine virtualMachine) { String powerState = virtualMachine.getRuntime().getPowerState().toString(); if ("poweredOn".equals(powerState) && m_importVMPoweredOn) { return true; } if ("poweredOff".equals(powerState) && m_importVMPoweredOff) { return true; } if ("suspended".equals(powerState) && m_importVMSuspended) { return true; } return false; } /** * Iterates through the host systems and adds them to the requisition object. * * @param vmwareViJavaAccess the access/connection to use * @throws RemoteException */ private void iterateHostSystems(VmwareViJavaAccess vmwareViJavaAccess, int apiVersion) throws RemoteException { ManagedEntity[] hostSystems; // search for host systems (esx hosts) hostSystems = vmwareViJavaAccess.searchManagedEntities("HostSystem"); if (hostSystems != null) { for (ManagedEntity managedEntity : hostSystems) { HostSystem hostSystem = (HostSystem) managedEntity; m_hostSystemMap.put(hostSystem.getMOR().getVal(), hostSystem.getName()); // check for correct key/value-pair if (checkHostPowerState(hostSystem) && checkForAttribute(hostSystem)) { logger.debug("Adding Host System '{}'", hostSystem.getName()); // iterate over all service console networks and add interface Ip addresses TreeSet<String> ipAddresses = vmwareViJavaAccess.getHostSystemIpAddresses(hostSystem); // create the new node... RequisitionNode node = createRequisitionNode(ipAddresses, hostSystem, apiVersion, vmwareViJavaAccess); // ...and add it to the requisition if (node != null && m_persistHosts) { m_requisition.insertNode(node); } } } } } /** * Iterates through the virtual machines and adds them to the requisition object. 
* * @param vmwareViJavaAccess the access/connection to use * @throws RemoteException */ private void iterateVirtualMachines(VmwareViJavaAccess vmwareViJavaAccess, int apiVersion) throws RemoteException { ManagedEntity[] virtualMachines; // search for all virtual machines virtualMachines = vmwareViJavaAccess.searchManagedEntities("VirtualMachine"); if (virtualMachines != null) { // check for correct key/value-pair for (ManagedEntity managedEntity : virtualMachines) { VirtualMachine virtualMachine = (VirtualMachine) managedEntity; // import only when the specified attributes is set if (checkVMPowerState(virtualMachine) && checkForAttribute(virtualMachine)) { logger.debug("Adding Virtual Machine '{}'", virtualMachine.getName()); LinkedHashSet<String> ipAddresses = new LinkedHashSet<String>(); // add the Ip address reported by VMware tools, this should be primary ipAddresses.add(virtualMachine.getGuest().getIpAddress()); // if possible, iterate over all virtual networks networks and add interface Ip addresses if (virtualMachine.getGuest().getNet() != null) { for (GuestNicInfo guestNicInfo : virtualMachine.getGuest().getNet()) { if (guestNicInfo.getIpAddress() != null) { for (String ipAddress : guestNicInfo.getIpAddress()) { ipAddresses.add(ipAddress); } } } } // create the new node... RequisitionNode node = createRequisitionNode(ipAddresses, virtualMachine, apiVersion, vmwareViJavaAccess); // add the operating system if (virtualMachine.getGuest().getGuestFullName() != null) { RequisitionAsset requisitionAsset = new RequisitionAsset("operatingSystem", virtualMachine.getGuest().getGuestFullName()); node.putAsset(requisitionAsset); } // ...and add it to the requisition if (node != null && m_persistVMs) { m_requisition.insertNode(node); } } } } } /** * Checks whether an attribute/value is defined by a managed entity. * * <p>The old implementation allows the user to specify only one parameter.</p> * <p>The new implementation allows the user to use a regular expression for the value:</p> * <ul><li>key=location&value=~North.*</li></ul> * <p>As an alternative, now it is possible to specify several parameters on the query. * The rule is to add an underscore character ('_') before the patameter's name and use similar rules for the value:</p> * <ul><li>_location=~North.*</li></ul> * <p>With the new parameter specification, it is possible to pass several attributes. The managed entity must match * all of them to be accepted.</p> * <p>The new specification will take precedence over the old specification. If the new specification is not being used, * the old one will be processed. Otherwise, the new one will be processed, and the old one will be ignored. 
There is no * way to use both at the same time.</p> * * @param managedEntity the managed entity to check * @return true if present and value is equal, false otherwise * @throws RemoteException */ private boolean checkForAttribute(ManagedEntity managedEntity) throws RemoteException { Map<String,String> attribMap = getCustomAttributes(managedEntity); Set<String> keySet = new TreeSet<String>(); for (String k : m_args.keySet()) { if (k.startsWith("_")) { keySet.add(k); } } if (!keySet.isEmpty()) { boolean ok = true; for (String keyName : keySet) { String attribValue = attribMap.get(StringUtils.removeStart(keyName, "_")); if (attribValue == null) { ok = false; } else { String keyValue = m_args.get(keyName); if (keyValue.startsWith("~")) { ok = ok && attribValue.matches(StringUtils.removeStart(keyValue, "~")); } else { ok = ok && attribValue.equals(keyValue); } } } return ok; } String key = m_args.get("key"); String value = m_args.get("value"); // if key/value is not set, return true if (key == null && value == null) { return true; } // if only key or value is set, return false if (key == null || value == null) { return false; } // now search for the correct key/value pair String attribValue = attribMap.get(key); if (attribValue != null) { if (value.startsWith("~")) { return attribValue.matches(StringUtils.removeStart(value, "~")); } else { return attribValue.equals(value); } } return false; } /** * Gets the custom attributes. * * @param entity the entity * @return the custom attributes * @throws RemoteException the remote exception */ private Map<String,String> getCustomAttributes(ManagedEntity entity) throws RemoteException { final Map<String,String> attributes = new TreeMap<String,String>(); CustomFieldDef[] defs = entity.getAvailableField(); CustomFieldValue[] values = entity.getCustomValue(); for (int i = 0; defs != null && i < defs.length; i++) { String key = defs[i].getName(); int targetIndex = defs[i].getKey(); for (int j = 0; values != null && j < values.length; j++) { if (targetIndex == values[j].getKey()) { attributes.put(key, ((CustomFieldStringValue) values[j]).getValue()); } } } return attributes; } /** * {@inheritDoc} * <p/> * Creates a ByteArrayInputStream implementation of InputStream of the XML * marshaled version of the Requisition class. Calling close on this stream * is safe. */ @Override public InputStream getInputStream() throws IOException { InputStream stream = null; try { Requisition curReq = null; try { ForeignSourceRepository repository = BeanUtils.getBean("daoContext", "deployedForeignSourceRepository", ForeignSourceRepository.class); if (repository != null) { curReq = repository.getRequisition(m_foreignSource); } } catch (Exception e) { logger.warn("Can't retrieve requisition {}", m_foreignSource); } Requisition newReq = buildVMwareRequisition(); if (curReq == null) { if (newReq == null) { // FIXME Is this correct ? This is the old behavior newReq = new Requisition(m_foreignSource); } } else { if (newReq == null) { // If there is a requisition and the vCenter is not responding for some reason, it is better to use the old requisition, // instead of returning an empty one, which can cause the lost of all the nodes from the DB. newReq = curReq; } else { // If there is already a requisition, retrieve the custom assets and categories from the old one, and put them on the new one. // The VMWare related assets and categories will be preserved. 
for (RequisitionNode newNode : newReq.getNodes()) { for (RequisitionNode curNode : curReq.getNodes()) { if (newNode.getForeignId().equals(curNode.getForeignId())) { for (RequisitionAsset asset : curNode.getAssets()) { if (!asset.getName().startsWith("vmware")) { newNode.putAsset(asset); } } for (RequisitionCategory cat : curNode.getCategories()) { if (!cat.getName().startsWith("VMWare")) { newNode.putCategory(cat); } } } } } } } stream = new ByteArrayInputStream(jaxBMarshal(newReq).getBytes()); } catch (Throwable e) { logger.warn("Problem getting input stream: '{}'", e); throw new IOExceptionWithCause("Problem getting input stream: " + e, e); } return stream; } /** * Utility to marshal the Requisition class into XML. * * @param r the requisition object * @return a String of XML encoding the Requisition class * @throws javax.xml.bind.JAXBException */ private String jaxBMarshal(Requisition r) throws JAXBException { return JaxbUtils.marshal(r); } }
Fix for NMS-6139 - merge existing services on interfaces (VMWare)
integrations/opennms-vmware/src/main/java/org/opennms/netmgt/provision/service/vmware/VmwareRequisitionUrlConnection.java
Fix for NMS-6139 - merge existing services on interfaces (VMWare)
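The NMS-6139 change named in the message and subject above adds the merge step in getInputStream(): services an operator added by hand to an interface of the previously deployed requisition are carried over to the matching interface (matched by IP address) of the freshly generated requisition, while the services the importer provisions itself (hostSystemServices / virtualMachineServices) are simply regenerated. Below is only a condensed, illustrative restatement of that idea using the requisition model calls visible in the file; the class name, method name and the way the auto-provisioned service names are supplied are made up for the sketch.

// Illustrative condensation of the NMS-6139 merge step; not the committed code.
// "autoProvisioned" stands for the union of hostSystemServices and
// virtualMachineServices that the importer provisions on its own.
import java.util.Set;

import org.opennms.netmgt.provision.persist.requisition.RequisitionInterface;
import org.opennms.netmgt.provision.persist.requisition.RequisitionMonitoredService;

public final class ManualServiceMerger {

    public static void mergeManualServices(RequisitionInterface oldIntf,
                                           RequisitionInterface newIntf,
                                           Set<String> autoProvisioned) {
        for (RequisitionMonitoredService svc : oldIntf.getMonitoredServices()) {
            // keep only the services an operator added by hand on the old interface
            if (!autoProvisioned.contains(svc.getServiceName())) {
                newIntf.getMonitoredServices().add(svc);
            }
        }
    }
}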
Java
lgpl-2.1
be08b029367f13b64ce809c2d50ddbe363c4a51a
0
jfdenise/wildfly-core,darranl/wildfly-core,darranl/wildfly-core,aloubyansky/wildfly-core,soul2zimate/wildfly-core,jfdenise/wildfly-core,darranl/wildfly-core,yersan/wildfly-core,JiriOndrusek/wildfly-core,JiriOndrusek/wildfly-core,luck3y/wildfly-core,JiriOndrusek/wildfly-core,bstansberry/wildfly-core,jamezp/wildfly-core,jamezp/wildfly-core,ivassile/wildfly-core,aloubyansky/wildfly-core,luck3y/wildfly-core,yersan/wildfly-core,yersan/wildfly-core,bstansberry/wildfly-core,luck3y/wildfly-core,jfdenise/wildfly-core,bstansberry/wildfly-core,jamezp/wildfly-core,soul2zimate/wildfly-core,ivassile/wildfly-core,soul2zimate/wildfly-core,ivassile/wildfly-core,aloubyansky/wildfly-core
/* * JBoss, Home of Professional Open Source. * Copyright 2010, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.server.mgmt; import java.net.InetSocketAddress; import java.util.concurrent.ExecutorService; import org.jboss.as.controller.ModelController; import org.jboss.as.domain.http.server.ManagementHttpServer; import org.jboss.as.domain.management.security.SecurityRealmService; import org.jboss.as.server.services.net.NetworkInterfaceBinding; import org.jboss.msc.inject.Injector; import org.jboss.msc.service.Service; import org.jboss.msc.service.ServiceName; import org.jboss.msc.service.StartContext; import org.jboss.msc.service.StartException; import org.jboss.msc.service.StopContext; import org.jboss.msc.value.InjectedValue; /** * A service which launches the domain HTTP API and serverManagement. * * @author Jason T. Greene */ public class HttpManagementService implements Service<HttpManagementService> { public static final ServiceName SERVICE_NAME = ServiceName.JBOSS.append("serverManagement", "controller", "management", "http"); private final InjectedValue<ModelController> modelControllerValue = new InjectedValue<ModelController>(); private final InjectedValue<NetworkInterfaceBinding> interfaceBindingValue = new InjectedValue<NetworkInterfaceBinding>(); private final InjectedValue<Integer> portValue = new InjectedValue<Integer>(); private final InjectedValue<Integer> securePortValue = new InjectedValue<Integer>(); private final InjectedValue<ExecutorService> executorServiceValue = new InjectedValue<ExecutorService>(); private final InjectedValue<String> tempDirValue = new InjectedValue<String>(); private final InjectedValue<SecurityRealmService> securityRealmServiceValue = new InjectedValue<SecurityRealmService>(); private InetSocketAddress bindAddress; private InetSocketAddress secureBindAddress; private ManagementHttpServer serverManagement; /** * Starts the service. 
* * @param context The start context * @throws StartException If any errors occur */ public synchronized void start(StartContext context) throws StartException { final ModelController modelController = modelControllerValue.getValue(); final ExecutorService executorService = executorServiceValue.getValue(); final NetworkInterfaceBinding interfaceBinding = interfaceBindingValue.getValue(); final int port = portValue.getOptionalValue(); if (port > 0) { bindAddress = new InetSocketAddress(interfaceBinding.getAddress(), port); } final int securePort = securePortValue.getOptionalValue(); if (securePort > 0) { secureBindAddress = new InetSocketAddress(interfaceBinding.getAddress(), securePort); } final SecurityRealmService securityRealmService = securityRealmServiceValue.getOptionalValue(); try { serverManagement = ManagementHttpServer.create(bindAddress, secureBindAddress, 50, modelController, executorService, securityRealmService); serverManagement.start(); } catch (Exception e) { throw new StartException("Failed to start serverManagement socket", e); } } /** * Stops the service. * * @param context The stop context */ public synchronized void stop(StopContext context) { if (serverManagement != null) { serverManagement.stop(); } } /** * {@inheritDoc} */ public HttpManagementService getValue() throws IllegalStateException { return this; } /** * Get the interface binding injector. * * @return The injector */ public Injector<NetworkInterfaceBinding> getInterfaceInjector() { return interfaceBindingValue; } /** * Get the executor service injector. * * @return The injector */ public Injector<ExecutorService> getExecutorServiceInjector() { return executorServiceValue; } /** * Get the management port injector. * * @return The injector */ public Injector<Integer> getPortInjector() { return portValue; } /** * Get the management secure port injector. * * @return The injector */ public Injector<Integer> getSecurePortInjector() { return securePortValue; } /** * Get the model controller injector to dispatch management requests to * * @return the injector */ public Injector<ModelController> getModelControllerInjector() { return modelControllerValue; } /** * Get the temp dir injector. * * @return the tempDirValue */ public InjectedValue<String> getTempDirInjector() { return tempDirValue; } /** * Get the security realm injector. * * @return the securityRealmServiceValue */ public InjectedValue<SecurityRealmService> getSecurityRealmInjector() { return securityRealmServiceValue; } public InetSocketAddress getBindAddress() { return bindAddress; } public InetSocketAddress getSecureBindAddress() { return secureBindAddress; } }
server/src/main/java/org/jboss/as/server/mgmt/HttpManagementService.java
/* * JBoss, Home of Professional Open Source. * Copyright 2010, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.server.mgmt; import java.net.InetSocketAddress; import java.util.concurrent.ExecutorService; import org.jboss.as.controller.ModelController; import org.jboss.as.domain.http.server.ManagementHttpServer; import org.jboss.as.domain.management.security.SecurityRealmService; import org.jboss.as.server.services.net.NetworkInterfaceBinding; import org.jboss.msc.inject.Injector; import org.jboss.msc.service.Service; import org.jboss.msc.service.ServiceName; import org.jboss.msc.service.StartContext; import org.jboss.msc.service.StartException; import org.jboss.msc.service.StopContext; import org.jboss.msc.value.InjectedValue; /** * A service which launches the domain HTTP API and serverManagement. * * @author Jason T. Greene */ public class HttpManagementService implements Service<HttpManagementService> { public static final ServiceName SERVICE_NAME = ServiceName.JBOSS.append("serverManagement", "controller", "management", "http"); private final InjectedValue<ModelController> modelControllerValue = new InjectedValue<ModelController>(); private final InjectedValue<NetworkInterfaceBinding> interfaceBindingValue = new InjectedValue<NetworkInterfaceBinding>(); private final InjectedValue<Integer> portValue = new InjectedValue<Integer>(); private final InjectedValue<Integer> securePortValue = new InjectedValue<Integer>(); private final InjectedValue<ExecutorService> executorServiceValue = new InjectedValue<ExecutorService>(); private final InjectedValue<String> tempDirValue = new InjectedValue<String>(); private final InjectedValue<SecurityRealmService> securityRealmServiceValue = new InjectedValue<SecurityRealmService>(); private ManagementHttpServer serverManagement; /** * Starts the service. 
* * @param context The start context * @throws StartException If any errors occur */ public synchronized void start(StartContext context) throws StartException { final ModelController modelController = modelControllerValue.getValue(); final ExecutorService executorService = executorServiceValue.getValue(); final NetworkInterfaceBinding interfaceBinding = interfaceBindingValue.getValue(); final int port = portValue.getOptionalValue(); InetSocketAddress bindAddress = null; if (port > 0) { bindAddress = new InetSocketAddress(interfaceBinding.getAddress(), port); } final int securePort = securePortValue.getOptionalValue(); InetSocketAddress secureBindAddress = null; if (securePort > 0) { secureBindAddress = new InetSocketAddress(interfaceBinding.getAddress(), securePort); } final SecurityRealmService securityRealmService = securityRealmServiceValue.getOptionalValue(); try { serverManagement = ManagementHttpServer.create(bindAddress, secureBindAddress, 50, modelController, executorService, securityRealmService); serverManagement.start(); } catch (Exception e) { throw new StartException("Failed to start serverManagement socket", e); } } /** * Stops the service. * * @param context The stop context */ public synchronized void stop(StopContext context) { if (serverManagement != null) { serverManagement.stop(); } } /** * {@inheritDoc} */ public HttpManagementService getValue() throws IllegalStateException { return this; } /** * Get the interface binding injector. * * @return The injector */ public Injector<NetworkInterfaceBinding> getInterfaceInjector() { return interfaceBindingValue; } /** * Get the executor service injector. * * @return The injector */ public Injector<ExecutorService> getExecutorServiceInjector() { return executorServiceValue; } /** * Get the management port injector. * * @return The injector */ public Injector<Integer> getPortInjector() { return portValue; } /** * Get the management secure port injector. * * @return The injector */ public Injector<Integer> getSecurePortInjector() { return securePortValue; } /** * Get the model controller injector to dispatch management requests to * * @return the injector */ public Injector<ModelController> getModelControllerInjector() { return modelControllerValue; } /** * Get the temp dir injector. * * @return the tempDirValue */ public InjectedValue<String> getTempDirInjector() { return tempDirValue; } /** * Get the security realm injector. * * @return the securityRealmServiceValue */ public InjectedValue<SecurityRealmService> getSecurityRealmInjector() { return securityRealmServiceValue; } }
AS7-997, simplify welcome page and add redirect to admin console was: 7d62c78a6627affe67c7760e3605b7973b363108
server/src/main/java/org/jboss/as/server/mgmt/HttpManagementService.java
AS7-997, simplify welcome page and add redirect to admin console
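The HttpManagementService change above promotes the computed bind addresses to fields and exposes them through getBindAddress() and getSecureBindAddress(), so other services, such as the welcome-page redirect mentioned in the commit message, can discover where the HTTP management interface is listening. The sketch below only illustrates how a service with these injector accessors is typically installed with the jboss-msc ServiceBuilder API of that era; the dependency service names, port numbers and executor are placeholders rather than values from this commit, and optional dependencies such as the security realm and temp dir are omitted.

// Illustrative wiring sketch, not code from the commit. Assumes the jboss-msc 1.x
// ServiceBuilder API; NETWORK_INTERFACE_NAME, MODEL_CONTROLLER_NAME and the port
// numbers are placeholders.
import java.util.concurrent.Executors;

import org.jboss.as.controller.ModelController;
import org.jboss.as.server.mgmt.HttpManagementService;
import org.jboss.as.server.services.net.NetworkInterfaceBinding;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceName;
import org.jboss.msc.service.ServiceTarget;

public final class HttpManagementServiceInstaller {

    // placeholder dependency names for the sketch
    private static final ServiceName NETWORK_INTERFACE_NAME =
            ServiceName.JBOSS.append("network", "management");
    private static final ServiceName MODEL_CONTROLLER_NAME =
            ServiceName.JBOSS.append("server", "controller", "model", "controller");

    public static void install(ServiceTarget target) {
        HttpManagementService service = new HttpManagementService();
        target.addService(HttpManagementService.SERVICE_NAME, service)
                .addDependency(NETWORK_INTERFACE_NAME,
                        NetworkInterfaceBinding.class, service.getInterfaceInjector())
                .addDependency(MODEL_CONTROLLER_NAME,
                        ModelController.class, service.getModelControllerInjector())
                .addInjection(service.getPortInjector(), 9990)
                .addInjection(service.getSecurePortInjector(), -1) // <= 0 disables the secure binding
                .addInjection(service.getExecutorServiceInjector(), Executors.newCachedThreadPool())
                .setInitialMode(ServiceController.Mode.ACTIVE)
                .install();
        // Once started, the new accessors expose the effective addresses:
        // service.getBindAddress(); service.getSecureBindAddress();
    }
}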
Java
apache-2.0
4a8047b31c80042b26a12387bdc782da7ea089ef
0
mikosik/smooth-build
package org.smoothbuild.acceptance.lang; import static com.google.common.truth.Truth.assertThat; import static java.util.regex.Pattern.DOTALL; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.jupiter.api.Test; import org.smoothbuild.acceptance.AcceptanceTestCase; import org.smoothbuild.acceptance.testing.AddElementOfWrongTypeToArray; import org.smoothbuild.acceptance.testing.BrokenIdentity; import org.smoothbuild.acceptance.testing.DifferentJavaName; import org.smoothbuild.acceptance.testing.EmptyStringArray; import org.smoothbuild.acceptance.testing.FileParameter; import org.smoothbuild.acceptance.testing.IllegalName; import org.smoothbuild.acceptance.testing.NonPublicMethod; import org.smoothbuild.acceptance.testing.NonStaticMethod; import org.smoothbuild.acceptance.testing.OneStringParameter; import org.smoothbuild.acceptance.testing.ReportError; import org.smoothbuild.acceptance.testing.ReportTwoErrors; import org.smoothbuild.acceptance.testing.ReportWarningAndReturnNull; import org.smoothbuild.acceptance.testing.ReturnNull; import org.smoothbuild.acceptance.testing.SameName; import org.smoothbuild.acceptance.testing.SameName2; import org.smoothbuild.acceptance.testing.ThrowException; import org.smoothbuild.acceptance.testing.ThrowRandomException; import org.smoothbuild.acceptance.testing.WithoutContainer; import org.smoothbuild.lang.object.base.Array; import org.smoothbuild.lang.object.base.Blob; import org.smoothbuild.lang.object.base.SString; import org.smoothbuild.lang.object.base.Struct; import org.smoothbuild.lang.plugin.NativeApi; public class NativeFunctionTest extends AcceptanceTestCase { @Test public void native_can_return_passed_argument() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter(String string); ", " result = oneStringParameter('token'); "); whenSmoothBuild("result"); thenFinishedWithSuccess(); assertThat(artifactContent("result")) .isEqualTo("token"); } @Test public void native_declaration_without_native_implementation_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String function; ", " result = function; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'function' is native but does not have native implementation.\n"); } @Test public void native_jar_with_two_functions_with_same_name_causes_error() throws Exception { givenNativeJar(SameName.class, SameName2.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains(invalidFunctionProvidedBy(SameName2.class) + ".sameName: " + "Function with the same name is also provided by " + SameName.class.getCanonicalName() + ".sameName.\n"); } @Test public void native_with_illegal_name_causes_error() throws Exception { givenNativeJar(IllegalName.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains(invalidFunctionProvidedBy(IllegalName.class) + ".illegalName$: Name 'illegalName$' is illegal.\n"); } @Test public void native_name_is_taken_from_annotation_not_java_method_name() throws Exception { givenNativeJar(DifferentJavaName.class); givenScript( " String annotationName(); ", " result = annotationName(); "); whenSmoothBuild("result"); thenFinishedWithSuccess(); assertThat(artifactContent("result")) .isEqualTo("abc"); } @Test public void native_provided_by_non_public_method_causes_error() throws Exception { givenNativeJar(NonPublicMethod.class); 
givenScript( " String oneStringParameter; ", " result = oneStringParameter; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains(invalidFunctionProvidedBy(NonPublicMethod.class) + ".function: Providing method must be public.\n"); } @Test public void native_provided_by_non_static_method_causes_error() throws Exception { givenNativeJar(NonStaticMethod.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains(invalidFunctionProvidedBy(NonStaticMethod.class) + ".function: Providing method must be static.\n"); } @Test public void native_without_declared_result_type_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " oneStringParameter; ", " result = oneStringParameter; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function 'oneStringParameter' is native so should have declared result type.\n"); } @Test public void native_with_different_result_type_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " File oneStringParameter(String string); ", " result = oneStringParameter('abc'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'oneStringParameter' has result type 'File' " + "so its native implementation result type must be " + Struct.class.getCanonicalName() + " but it is " + SString.class.getCanonicalName() + ".\n"); } @Test public void native_without_container_parameter_causes_error() throws Exception { givenNativeJar(WithoutContainer.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains(invalidFunctionProvidedBy(WithoutContainer.class) + ".function: Providing method should have first parameter of type " + NativeApi.class.getCanonicalName() + ".\n"); } @Test public void native_with_too_many_parameters_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter; ", " result = oneStringParameter; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function 'oneStringParameter' has 0 parameter(s) but its native implementation " + "has 1 parameter(s).\n"); } @Test public void native_with_too_few_parameters_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter(String a, String b); ", " result = oneStringParameter(a='abc', b='abc'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function 'oneStringParameter' has 2 parameter(s) but its native implementation " + "has 1 parameter(s).\n"); } @Test public void native_with_different_parameter_type_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter([String] string); ", " result = oneStringParameter([]); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'oneStringParameter' parameter 'string' has type [String] " + "so its native implementation type must be " + Array.class.getCanonicalName() + " but it is " + SString.class.getCanonicalName() + ".\n"); } @Test public void native_with_parameter_type_that_is_subtype_of_declared_causes_error() throws Exception { givenNativeJar(FileParameter.class); givenScript( " File fileParameter(Blob file); ", " result = fileParameter(file(toBlob('abc'), 'file.txt')); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'fileParameter' parameter 
'file' has type Blob " + "so its native implementation type must be " + Blob.class.getCanonicalName() + " but it is " + Struct.class.getCanonicalName() + ".\n"); } @Test public void exception_from_native_is_reported_as_error() throws Exception { givenNativeJar(ThrowException.class); givenScript( " Nothing throwException(); ", " result = throwException; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function throwException threw java exception from its native code."); thenSysOutContains("java.lang.UnsupportedOperationException"); } @Test public void error_thrown_as_exception_from_native_is_reported_along_errors_logged_via_native_api() throws Exception { givenNativeJar(ReportTwoErrors.class); givenScript( " String reportTwoErrors(String message1, String message2); ", " result = reportTwoErrors(message1='first error', message2='second error'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("first error\n"); thenSysOutContains("second error\n"); } @Test public void error_wrapping_exception_from_native_is_not_cached() throws Exception { givenNativeJar(ThrowRandomException.class); givenScript( " String throwRandomException(); ", " result = throwRandomException; "); whenSmoothBuild("result"); thenFinishedWithError(); String timestamp1 = fetchTimestamp(sysOut()); whenSmoothBuild("result"); thenFinishedWithError(); String timestamp2 = fetchTimestamp(sysOut()); assertThat(timestamp1) .isNotEqualTo(timestamp2); } private static String fetchTimestamp(String text) { Pattern pattern = Pattern.compile(".*java.lang.UnsupportedOperationException: ([0-9]*).*", DOTALL); Matcher matcher = pattern.matcher(text); matcher.matches(); return matcher.group(1); } @Test public void error_reported_is_logged() throws Exception { givenNativeJar(ReportError.class); givenScript( " Nothing reportError(String message); ", " result = reportError('error_reported_is_logged'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("error_reported_is_logged"); } @Test public void returning_null_without_logging_error_causes_error() throws Exception { givenNativeJar(ReturnNull.class); givenScript( " String returnNull(); ", " result = returnNull(); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function returnNull has faulty native implementation: " + "it returned 'null' but logged no error."); } @Test public void returning_null_and_logs_only_warning_causes_error() throws Exception { givenNativeJar(ReportWarningAndReturnNull.class); givenScript( " String reportWarning(String message); ", " result = reportWarning('test message'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function reportWarning has faulty native implementation: " + "it returned 'null' but logged no error."); } @Test public void native_that_adds_element_of_wrong_type_to_array_causes_error() throws Exception { givenNativeJar(AddElementOfWrongTypeToArray.class); givenScript( " [Blob] addElementOfWrongTypeToArray(); ", " result = addElementOfWrongTypeToArray; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function addElementOfWrongTypeToArray threw java exception from its native code."); thenSysOutContains("Element type must be Blob but was String."); } @Test public void native_that_returns_array_of_wrong_type_causes_error() throws Exception { givenNativeJar(EmptyStringArray.class); givenScript( " [Blob] emptyStringArray(); ", " result = emptyStringArray; "); whenSmoothBuild("result"); 
thenFinishedWithError(); thenSysOutContains("Function emptyStringArray has faulty native implementation: " + "Its actual result type is [Blob] but it returned object of type [String]."); } @Test public void native_that_returns_object_of_wrong_type_causes_error() throws Exception { givenNativeJar(BrokenIdentity.class); givenScript( " A brokenIdentity(A value); ", " result = brokenIdentity(value=[]); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function brokenIdentity has faulty native implementation: " + "Its actual result type is [Nothing] but it returned object of type String."); } private String invalidFunctionProvidedBy(Class<?> clazz) { return "Invalid function native implementation in " + projectDir() + "/build.jar provided by " + clazz.getCanonicalName(); } }
src/acceptance/org/smoothbuild/acceptance/lang/NativeFunctionTest.java
package org.smoothbuild.acceptance.lang; import static com.google.common.truth.Truth.assertThat; import static java.util.regex.Pattern.DOTALL; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.jupiter.api.Test; import org.smoothbuild.acceptance.AcceptanceTestCase; import org.smoothbuild.acceptance.testing.AddElementOfWrongTypeToArray; import org.smoothbuild.acceptance.testing.BrokenIdentity; import org.smoothbuild.acceptance.testing.DifferentJavaName; import org.smoothbuild.acceptance.testing.EmptyStringArray; import org.smoothbuild.acceptance.testing.FileParameter; import org.smoothbuild.acceptance.testing.IllegalName; import org.smoothbuild.acceptance.testing.NonPublicMethod; import org.smoothbuild.acceptance.testing.NonStaticMethod; import org.smoothbuild.acceptance.testing.OneStringParameter; import org.smoothbuild.acceptance.testing.ReportError; import org.smoothbuild.acceptance.testing.ReportTwoErrors; import org.smoothbuild.acceptance.testing.ReportWarningAndReturnNull; import org.smoothbuild.acceptance.testing.ReturnNull; import org.smoothbuild.acceptance.testing.SameName; import org.smoothbuild.acceptance.testing.SameName2; import org.smoothbuild.acceptance.testing.ThrowException; import org.smoothbuild.acceptance.testing.ThrowRandomException; import org.smoothbuild.acceptance.testing.WithoutContainer; import org.smoothbuild.lang.object.base.Array; import org.smoothbuild.lang.object.base.Blob; import org.smoothbuild.lang.object.base.SString; import org.smoothbuild.lang.object.base.Struct; import org.smoothbuild.lang.plugin.NativeApi; public class NativeFunctionTest extends AcceptanceTestCase { @Test public void native_can_return_passed_argument() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter(String string); ", " result = oneStringParameter('token'); "); whenSmoothBuild("result"); thenFinishedWithSuccess(); assertThat(artifactContent("result")) .isEqualTo("token"); } @Test public void native_declaration_without_native_implementation_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String function; ", " result = function; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'function' is native but does not have native implementation.\n"); } @Test public void native_jar_with_two_functions_with_same_name_causes_error() throws Exception { givenNativeJar(SameName.class, SameName2.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Invalid function native implementation in build.jar provided by " + SameName2.class.getCanonicalName() + ".sameName: " + "Function with the same name is also provided by " + SameName.class.getCanonicalName() + ".sameName.\n"); } @Test public void native_with_illegal_name_causes_error() throws Exception { givenNativeJar(IllegalName.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Invalid function native implementation in build.jar provided by " + IllegalName.class.getCanonicalName() + ".illegalName$: Name 'illegalName$' is illegal.\n"); } @Test public void native_name_is_taken_from_annotation_not_java_method_name() throws Exception { givenNativeJar(DifferentJavaName.class); givenScript( " String annotationName(); ", " result = annotationName(); "); whenSmoothBuild("result"); thenFinishedWithSuccess(); assertThat(artifactContent("result")) .isEqualTo("abc"); } @Test public 
void native_provided_by_non_public_method_causes_error() throws Exception { givenNativeJar(NonPublicMethod.class); givenScript( " String oneStringParameter; ", " result = oneStringParameter; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Invalid function native implementation in build.jar provided by " + NonPublicMethod.class.getCanonicalName() + ".function: Providing method must be public.\n"); } @Test public void native_provided_by_non_static_method_causes_error() throws Exception { givenNativeJar(NonStaticMethod.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Invalid function native implementation in build.jar provided by " + NonStaticMethod.class.getCanonicalName() + ".function: Providing method must be static.\n"); } @Test public void native_without_declared_result_type_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " oneStringParameter; ", " result = oneStringParameter; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function 'oneStringParameter' is native so should have declared result type.\n"); } @Test public void native_with_different_result_type_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " File oneStringParameter(String string); ", " result = oneStringParameter('abc'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'oneStringParameter' has result type 'File' " + "so its native implementation result type must be " + Struct.class.getCanonicalName() + " but it is " + SString.class.getCanonicalName() + ".\n"); } @Test public void native_without_container_parameter_causes_error() throws Exception { givenNativeJar(WithoutContainer.class); givenScript( " result = 'abc'; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Invalid function native implementation in build.jar provided by " + WithoutContainer.class.getCanonicalName() + ".function: Providing method should have first parameter of type " + NativeApi.class.getCanonicalName() + ".\n"); } @Test public void native_with_too_many_parameters_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter; ", " result = oneStringParameter; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function 'oneStringParameter' has 0 parameter(s) but its native implementation " + "has 1 parameter(s).\n"); } @Test public void native_with_too_few_parameters_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter(String a, String b); ", " result = oneStringParameter(a='abc', b='abc'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function 'oneStringParameter' has 2 parameter(s) but its native implementation " + "has 1 parameter(s).\n"); } @Test public void native_with_different_parameter_type_causes_error() throws Exception { givenNativeJar(OneStringParameter.class); givenScript( " String oneStringParameter([String] string); ", " result = oneStringParameter([]); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'oneStringParameter' parameter 'string' has type [String] " + "so its native implementation type must be " + Array.class.getCanonicalName() + " but it is " + SString.class.getCanonicalName() + ".\n"); } @Test public void 
native_with_parameter_type_that_is_subtype_of_declared_causes_error() throws Exception { givenNativeJar(FileParameter.class); givenScript( " File fileParameter(Blob file); ", " result = fileParameter(file(toBlob('abc'), 'file.txt')); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function 'fileParameter' parameter 'file' has type Blob " + "so its native implementation type must be " + Blob.class.getCanonicalName() + " but it is " + Struct.class.getCanonicalName() + ".\n"); } @Test public void exception_from_native_is_reported_as_error() throws Exception { givenNativeJar(ThrowException.class); givenScript( " Nothing throwException(); ", " result = throwException; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function throwException threw java exception from its native code."); thenSysOutContains("java.lang.UnsupportedOperationException"); } @Test public void error_thrown_as_exception_from_native_is_reported_along_errors_logged_via_native_api() throws Exception { givenNativeJar(ReportTwoErrors.class); givenScript( " String reportTwoErrors(String message1, String message2); ", " result = reportTwoErrors(message1='first error', message2='second error'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("first error\n"); thenSysOutContains("second error\n"); } @Test public void error_wrapping_exception_from_native_is_not_cached() throws Exception { givenNativeJar(ThrowRandomException.class); givenScript( " String throwRandomException(); ", " result = throwRandomException; "); whenSmoothBuild("result"); thenFinishedWithError(); String timestamp1 = fetchTimestamp(sysOut()); whenSmoothBuild("result"); thenFinishedWithError(); String timestamp2 = fetchTimestamp(sysOut()); assertThat(timestamp1) .isNotEqualTo(timestamp2); } private static String fetchTimestamp(String text) { Pattern pattern = Pattern.compile(".*java.lang.UnsupportedOperationException: ([0-9]*).*", DOTALL); Matcher matcher = pattern.matcher(text); matcher.matches(); return matcher.group(1); } @Test public void error_reported_is_logged() throws Exception { givenNativeJar(ReportError.class); givenScript( " Nothing reportError(String message); ", " result = reportError('error_reported_is_logged'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("error_reported_is_logged"); } @Test public void returning_null_without_logging_error_causes_error() throws Exception { givenNativeJar(ReturnNull.class); givenScript( " String returnNull(); ", " result = returnNull(); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function returnNull has faulty native implementation: " + "it returned 'null' but logged no error."); } @Test public void returning_null_and_logs_only_warning_causes_error() throws Exception { givenNativeJar(ReportWarningAndReturnNull.class); givenScript( " String reportWarning(String message); ", " result = reportWarning('test message'); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function reportWarning has faulty native implementation: " + "it returned 'null' but logged no error."); } @Test public void native_that_adds_element_of_wrong_type_to_array_causes_error() throws Exception { givenNativeJar(AddElementOfWrongTypeToArray.class); givenScript( " [Blob] addElementOfWrongTypeToArray(); ", " result = addElementOfWrongTypeToArray; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains( "Function addElementOfWrongTypeToArray threw java exception 
from its native code."); thenSysOutContains("Element type must be Blob but was String."); } @Test public void native_that_returns_array_of_wrong_type_causes_error() throws Exception { givenNativeJar(EmptyStringArray.class); givenScript( " [Blob] emptyStringArray(); ", " result = emptyStringArray; "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function emptyStringArray has faulty native implementation: " + "Its actual result type is [Blob] but it returned object of type [String]."); } @Test public void native_that_returns_object_of_wrong_type_causes_error() throws Exception { givenNativeJar(BrokenIdentity.class); givenScript( " A brokenIdentity(A value); ", " result = brokenIdentity(value=[]); "); whenSmoothBuild("result"); thenFinishedWithError(); thenSysOutContains("Function brokenIdentity has faulty native implementation: " + "Its actual result type is [Nothing] but it returned object of type String."); } }
Refactored NativeFunctionTest: extracted the invalidFunctionProvidedBy(Class) helper so the expected "Invalid function native implementation ..." messages are built in one place instead of being duplicated in each test.
src/acceptance/org/smoothbuild/acceptance/lang/NativeFunctionTest.java
Refactored NativeFunctionTest: extracted invalidFunctionProvidedBy() helper for expected error messages
Java
apache-2.0
c8b48fbf79acb553cb028a0cf55003fb2abefe65
0
IWSDevelopers/iws,IWSDevelopers/iws
/* * Licensed to IAESTE A.s.b.l. (IAESTE) under one or more contributor * license agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. The Authors * (See the AUTHORS file distributed with this work) licenses this file to * You under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.iaeste.iws.core.services; import static net.iaeste.iws.core.transformers.ExchangeTransformer.transform; import net.iaeste.iws.api.constants.IWSErrors; import net.iaeste.iws.api.dtos.File; import net.iaeste.iws.api.dtos.exchange.Student; import net.iaeste.iws.api.dtos.exchange.StudentApplication; import net.iaeste.iws.api.enums.exchange.ApplicationStatus; import net.iaeste.iws.api.enums.exchange.OfferState; import net.iaeste.iws.api.exceptions.IWSException; import net.iaeste.iws.api.requests.student.FetchStudentApplicationsRequest; import net.iaeste.iws.api.requests.student.FetchStudentsRequest; import net.iaeste.iws.api.requests.student.ProcessStudentApplicationsRequest; import net.iaeste.iws.api.requests.student.StudentApplicationRequest; import net.iaeste.iws.api.requests.student.StudentRequest; import net.iaeste.iws.api.responses.student.FetchStudentApplicationsResponse; import net.iaeste.iws.api.responses.student.FetchStudentsResponse; import net.iaeste.iws.api.responses.student.StudentApplicationResponse; import net.iaeste.iws.api.responses.student.StudentResponse; import net.iaeste.iws.api.util.DateTime; import net.iaeste.iws.common.configuration.Settings; import net.iaeste.iws.common.exceptions.NotImplementedException; import net.iaeste.iws.common.exceptions.VerificationException; import net.iaeste.iws.core.transformers.StorageTransformer; import net.iaeste.iws.core.transformers.ViewTransformer; import net.iaeste.iws.persistence.AccessDao; import net.iaeste.iws.persistence.Authentication; import net.iaeste.iws.persistence.ExchangeDao; import net.iaeste.iws.persistence.StudentDao; import net.iaeste.iws.persistence.ViewsDao; import net.iaeste.iws.persistence.entities.AttachmentEntity; import net.iaeste.iws.persistence.entities.CountryEntity; import net.iaeste.iws.persistence.entities.FileEntity; import net.iaeste.iws.persistence.entities.GroupEntity; import net.iaeste.iws.persistence.entities.UserEntity; import net.iaeste.iws.persistence.entities.exchange.ApplicationEntity; import net.iaeste.iws.persistence.entities.exchange.OfferEntity; import net.iaeste.iws.persistence.entities.exchange.OfferGroupEntity; import net.iaeste.iws.persistence.entities.exchange.StudentEntity; import net.iaeste.iws.persistence.views.ApplicationView; import net.iaeste.iws.persistence.views.StudentView; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; /** * @author Kim Jensen / last $Author:$ * @version $Revision:$ / $Date:$ * @since IWS 1.0 */ public final class StudentService extends CommonService<StudentDao> { private static final String STUDENT_APPLICATION_TABLE = "student_applications"; private final AccessDao accessDao; private final ExchangeDao 
exchangeDao; private final ViewsDao viewsDao; public StudentService(final Settings settings, final AccessDao accessDao, final ExchangeDao exchangeDao, final StudentDao studentDao, final ViewsDao viewsDao) { super(settings, studentDao); this.accessDao = accessDao; this.exchangeDao = exchangeDao; this.viewsDao = viewsDao; } public StudentResponse processStudent(final Authentication authentication, final StudentRequest request) { final StudentEntity studentEntity = processStudent(authentication, request.getStudent()); return new StudentResponse(transform(studentEntity)); } private StudentEntity processStudent(final Authentication authentication, final Student student) { final GroupEntity memberGroup = accessDao.findMemberGroup(authentication.getUser()); final UserEntity user = accessDao.findUserByExternalId(student.getStudentId()); final StudentEntity newEntity = transform(student); newEntity.setUser(user); final StudentEntity existingEntity = dao.findStudentByExternal(memberGroup.getId(), student.getStudentId()); if (existingEntity != null) { dao.persist(authentication, existingEntity, newEntity); } else { throw new VerificationException("The student with id '" + student.getStudentId() + "' was not found."); } return existingEntity; } public FetchStudentsResponse fetchStudents(final Authentication authentication, final FetchStudentsRequest request) { final List<StudentView> found = viewsDao.findStudentsForMemberGroup(authentication.getGroup().getParentId(), request.getPage()); final List<Student> result = new ArrayList<>(found.size()); for (final StudentView view : found) { result.add(ViewTransformer.transform(view)); } return new FetchStudentsResponse(result); } public StudentApplicationResponse processStudentApplication(final Authentication authentication, final ProcessStudentApplicationsRequest request) { final ApplicationEntity entity = processStudentApplication(authentication, request.getStudentApplication()); final List<AttachmentEntity> attachments = processAttachments(authentication, entity, request.getStudentApplication().getAttachments()); final StudentApplication application = transform(entity, attachments); return new StudentApplicationResponse(application); } private ApplicationEntity processStudentApplication(final Authentication authentication, final StudentApplication application) { final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser()); final String externalId = application.getApplicationId(); final ApplicationEntity applicationEntity = dao.findApplicationByExternalId(externalId); final ApplicationEntity processed; if ((applicationEntity == null) || applicationEntity.getOfferGroup().getGroup().getId().equals(nationalGroup.getId())) { //application owner processed = processStudentApplicationByApplicationOwner(authentication, application, applicationEntity); } else { final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final OfferEntity offer = sharedOfferGroup.getOffer(); if (offer.getEmployer().getGroup().getId().equals(nationalGroup.getId())) { //offer owner processed = processStudentApplicationByOfferOwner(authentication, application, applicationEntity); } else { throw new IWSException(IWSErrors.PROCESSING_FAILURE, "Cannot process student application"); } } return processed; } private ApplicationEntity processStudentApplicationByApplicationOwner(final Authentication authentication, final StudentApplication application, final ApplicationEntity existingApplication) { final GroupEntity nationalGroup = 
accessDao.findNationalGroup(authentication.getUser()); final OfferGroupEntity sharedOfferGroup; if (existingApplication == null) { sharedOfferGroup = exchangeDao.findInfoForSharedOffer(authentication.getGroup(), application.getOfferId()); } else { sharedOfferGroup = existingApplication.getOfferGroup(); } if ((sharedOfferGroup == null) || !sharedOfferGroup.getGroup().getId().equals(nationalGroup.getId())) { final String offerId = application.getOfferId(); throw new VerificationException("The offer with Id '" + offerId + "' is not shared to the group '" + authentication.getGroup().getGroupName() + "'."); } if (EnumSet.of(OfferState.CLOSED, OfferState.COMPLETED, OfferState.NEW).contains(sharedOfferGroup.getOffer().getStatus())) { throw new VerificationException("It is not possible to create/update application for the offer with status '" + sharedOfferGroup.getOffer().getStatus() + "'."); } final GroupEntity memberGroup = accessDao.findMemberGroup(authentication.getUser()); final StudentEntity student = dao.findStudentByExternal(memberGroup.getId(), application.getStudent().getUser().getUserId()); CountryEntity nationality = null; if (application.getNationality() != null) { nationality = dao.findCountry(application.getNationality().getCountryCode()); } final ApplicationEntity processed; if (existingApplication == null) { processed = createNewApplication(authentication, application, sharedOfferGroup, student, nationality); } else { processed = updateExistingApplication(authentication, application, existingApplication, nationality); } return processed; } private ApplicationEntity createNewApplication(final Authentication authentication, final StudentApplication application, final OfferGroupEntity sharedOfferGroup, final StudentEntity student, final CountryEntity nationality) { final ApplicationEntity processed = transform(application); processed.setOfferGroup(sharedOfferGroup); processAddress(authentication, processed.getHomeAddress()); processAddress(authentication, processed.getAddressDuringTerms()); processed.setNationality(nationality); processed.setStudent(student); dao.persist(authentication, processed); boolean updateOfferGroup = false; if (sharedOfferGroup.getStatus() == OfferState.SHARED) { sharedOfferGroup.setStatus(OfferState.APPLICATIONS); updateOfferGroup = true; } if (!sharedOfferGroup.getHasApplication()) { sharedOfferGroup.setHasApplication(true); updateOfferGroup = true; } if (updateOfferGroup) { dao.persist(sharedOfferGroup); } return processed; } private ApplicationEntity updateExistingApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity, final CountryEntity nationality) { final ApplicationEntity updated = transform(application); updated.setNationality(nationality); //using OfferGroup from found entity since this field can't be updated updated.setOfferGroup(applicationEntity.getOfferGroup()); //do not allow to change status updated.setStatus(applicationEntity.getStatus()); processAddress(authentication, applicationEntity.getHomeAddress(), application.getHomeAddress()); processAddress(authentication, applicationEntity.getAddressDuringTerms(), application.getAddressDuringTerms()); dao.persist(authentication, applicationEntity, updated); return applicationEntity; } private ApplicationEntity processStudentApplicationByOfferOwner(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { final GroupEntity nationalGroup = 
accessDao.findNationalGroup(authentication.getUser()); final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final OfferEntity offer = sharedOfferGroup.getOffer(); if (!offer.getEmployer().getGroup().getId().equals(nationalGroup.getId())) { throw new VerificationException("The group with '" + authentication.getGroup().getGroupName() + "' does not own the offer with id '" + offer.getExternalId() + "'."); } //Offer owner can change only acceptance final StudentApplication updatedApplication = transform(applicationEntity); updatedApplication.setAcceptance(application.getAcceptance()); final ApplicationEntity newEntity = transform(updatedApplication); dao.persist(authentication, applicationEntity, newEntity); return applicationEntity; } private List<AttachmentEntity> processAttachments(final Authentication authentication, final ApplicationEntity applicationEntity, final List<File> files) { final List<AttachmentEntity> attachments = new ArrayList<>(files.size()); for (final File file : files) { final FileEntity fileEntity = processFile(authentication, file); final AttachmentEntity attachmentEntity = processAttachment(authentication, applicationEntity, fileEntity); attachments.add(attachmentEntity); } return attachments; } private AttachmentEntity processAttachment(final Authentication authentication, final ApplicationEntity applicationEntity, final FileEntity fileEntity) { AttachmentEntity attachmentEntity = dao.findAttachment(STUDENT_APPLICATION_TABLE, applicationEntity.getId(), fileEntity.getId()); if (attachmentEntity == null) { attachmentEntity = new AttachmentEntity(); attachmentEntity.setTable(STUDENT_APPLICATION_TABLE); attachmentEntity.setRecord(applicationEntity.getId()); attachmentEntity.setFile(fileEntity); dao.persist(authentication, attachmentEntity); } return attachmentEntity; } public FetchStudentApplicationsResponse fetchStudentApplications(final Authentication authentication, final FetchStudentApplicationsRequest request) { final String offerExternalId = request.getOfferId(); final OfferEntity ownedOffer = exchangeDao.findOfferByExternalId(authentication, offerExternalId); final List<ApplicationView> found; if ((ownedOffer != null) && ownedOffer.getEmployer().getGroup().equals(authentication.getGroup())) { found = dao.findForeignApplicationsForOffer(offerExternalId, authentication.getGroup().getId()); } else { found = dao.findDomesticApplicationsForOffer(offerExternalId, authentication.getGroup().getId()); } final List<StudentApplication> applications = new ArrayList<>(found.size()); for (final ApplicationView entity : found) { final StudentApplication application = ViewTransformer.transform(entity); final List<File> attachments = findAndTransformAttachments(entity); application.setAttachments(attachments); applications.add(application); } return new FetchStudentApplicationsResponse(applications); } private List<File> findAndTransformAttachments(final ApplicationView view) { final List<AttachmentEntity> attachments = dao.findAttachments(STUDENT_APPLICATION_TABLE, view.getId()); final List<File> files = new ArrayList<>(attachments.size()); for (final AttachmentEntity entity : attachments) { final File file = StorageTransformer.transform(entity.getFile()); files.add(file); } return files; } public StudentApplicationResponse processApplicationStatus(final Authentication authentication, final StudentApplicationRequest request) { final ApplicationEntity found = dao.findApplicationByExternalId(request.getApplicationId()); if (found == null) { throw new 
VerificationException("The application with id '" + request.getApplicationId() + "' was not found."); } final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser()); final OfferEntity offer = found.getOfferGroup().getOffer(); if (found.getOfferGroup().getGroup().getId().equals(nationalGroup.getId())) { //application owner processApplicationStatusByApplicationOwner(authentication, request, found); } else if (offer.getEmployer().getGroup().getId().equals(nationalGroup.getId())) { //offer owner processApplicationStatusByOfferOwner(authentication, request, found); } else { throw new IWSException(IWSErrors.PROCESSING_FAILURE, "Cannot process student application status."); } return new StudentApplicationResponse(transform(found)); } private void processApplicationStatusByOfferOwner(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity applicationEntity) { final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final StudentApplication studentApplication = transform(applicationEntity); verifyOfferAcceptNewApplicationStatus(sharedOfferGroup.getStatus(), request.getStatus()); verifyApplicationStatusTransition(studentApplication.getStatus(), request.getStatus()); switch (request.getStatus()) { case REJECTED: rejectApplication(authentication, request, applicationEntity); break; case FORWARDED_TO_EMPLOYER: forwardToEmployer(authentication, studentApplication, applicationEntity); break; case ACCEPTED: acceptApplication(authentication, studentApplication, applicationEntity); break; default: throw new NotImplementedException("Action '" + request.getStatus() + "' pending implementation."); } } private void processApplicationStatusByApplicationOwner(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity applicationEntity) { final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final StudentApplication studentApplication = transform(applicationEntity); verifyOfferAcceptNewApplicationStatus(sharedOfferGroup.getStatus(), request.getStatus()); verifyApplicationStatusTransition(studentApplication.getStatus(), request.getStatus()); switch (request.getStatus()) { case NOMINATED: nominateApplication(authentication, studentApplication, applicationEntity); break; case CANCELLED: cancelApplication(authentication, studentApplication, applicationEntity); break; case APPLIED: applyApplication(authentication, studentApplication, applicationEntity); break; case REJECTED_BY_SENDING_COUNTRY: rejectApplicationByApplicationOwner(authentication, request, applicationEntity); break; default: throw new NotImplementedException("Action '" + request.getStatus() + "' pending implementation."); } } private void applyApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { application.setStatus(ApplicationStatus.APPLIED); final ApplicationEntity updated = transform(application); updated.setOfferGroup(applicationEntity.getOfferGroup()); updated.setNationality(applicationEntity.getNationality()); dao.persist(authentication, applicationEntity, updated); } private void forwardToEmployer(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { application.setStatus(ApplicationStatus.FORWARDED_TO_EMPLOYER); final ApplicationEntity updated = transform(application); updated.setOfferGroup(applicationEntity.getOfferGroup()); 
updated.setNationality(applicationEntity.getNationality()); dao.persist(authentication, applicationEntity, updated); //update status for OfferGroup updateOfferGroupStatus(applicationEntity.getOfferGroup(), OfferState.AT_EMPLOYER); //update status for Offer updateOfferStatus(applicationEntity.getOfferGroup().getOffer(), OfferState.AT_EMPLOYER); } private void acceptApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { application.setStatus(ApplicationStatus.ACCEPTED); final ApplicationEntity updated = transform(application); updated.setOfferGroup(applicationEntity.getOfferGroup()); updated.setNationality(applicationEntity.getNationality()); dao.persist(authentication, applicationEntity, updated); //update status for OfferGroup updateOfferGroupStatus(applicationEntity.getOfferGroup(), OfferState.ACCEPTED); //update status for Offer updateOfferStatus(applicationEntity.getOfferGroup().getOffer(), OfferState.ACCEPTED); } private void nominateApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity storedApplication) { application.setNominatedAt(new DateTime()); application.setStatus(ApplicationStatus.NOMINATED); final ApplicationEntity updated = transform(application); //using OfferGroup from found entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); //update status for OfferGroup //it can be done either when there are applications (APPLICATIONS) or the nominated application was rejected or cancelled previously if (EnumSet.of(OfferState.APPLICATIONS, OfferState.SHARED).contains(storedApplication.getOfferGroup().getStatus())) { updateOfferGroupStatus(storedApplication.getOfferGroup(), OfferState.NOMINATIONS); } //update status for Offer if (storedApplication.getOfferGroup().getOffer().getStatus() == OfferState.SHARED) { updateOfferStatus(storedApplication.getOfferGroup().getOffer(), OfferState.NOMINATIONS); } } private void rejectApplication(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity storedApplication) { final StudentApplication application = transform(storedApplication); application.setStatus(ApplicationStatus.REJECTED); application.setRejectByEmployerReason(request.getRejectByEmployerReason()); application.setRejectDescription(request.getRejectDescription()); application.setRejectInternalComment(request.getRejectInternalComment()); final ApplicationEntity updated = transform(application); //using OfferGroup from stored entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); final OfferState newOfferGroupState = doUpdateOfferGroupStatus(storedApplication.getOfferGroup().getId(), storedApplication.getOfferGroup().getStatus()); if (newOfferGroupState != null) { updateOfferGroupStatus(storedApplication.getOfferGroup(), newOfferGroupState); } if (doUpdateOfferStatusToShared(storedApplication.getOfferGroup().getOffer().getId())) { updateOfferStatus(storedApplication.getOfferGroup().getOffer(), OfferState.SHARED); } } private void rejectApplicationByApplicationOwner(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity storedApplication) { 
//Application owner is allowed to reject only application in state Applied so we don't need to care if Offer status should be changed final StudentApplication application = transform(storedApplication); application.setStatus(ApplicationStatus.REJECTED_BY_SENDING_COUNTRY); application.setRejectDescription(request.getRejectDescription()); application.setRejectInternalComment(request.getRejectInternalComment()); final ApplicationEntity updated = transform(application); //using OfferGroup from stored entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); final OfferState newOfferGroupState = doUpdateOfferGroupStatus(storedApplication.getOfferGroup().getId(), storedApplication.getOfferGroup().getStatus()); if (newOfferGroupState != null) { updateOfferGroupStatus(storedApplication.getOfferGroup(), newOfferGroupState); } } private void cancelApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity storedApplication) { application.setStatus(ApplicationStatus.CANCELLED); final ApplicationEntity updated = transform(application); //using OfferGroup from stored entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); final OfferState newOfferGroupState = doUpdateOfferGroupStatus(storedApplication.getOfferGroup().getId(), storedApplication.getOfferGroup().getStatus()); if (newOfferGroupState != null) { updateOfferGroupStatus(storedApplication.getOfferGroup(), newOfferGroupState); } if (doUpdateOfferStatusToShared(storedApplication.getOfferGroup().getOffer().getId())) { updateOfferStatus(storedApplication.getOfferGroup().getOffer(), OfferState.SHARED); } } private boolean doUpdateOfferStatusToShared(final Long offerId) { return !dao.otherOfferGroupWithCertainStatus(offerId, EnumSet.of(OfferState.NOMINATIONS, OfferState.AT_EMPLOYER, OfferState.ACCEPTED)); } private OfferState doUpdateOfferGroupStatus(final Long offerGroupId, final OfferState offerGroupState) { OfferState newStatus = null; if (offerGroupState != OfferState.CLOSED) { if (!dao.otherDomesticApplicationsWithCertainStatus(offerGroupId, EnumSet.of(ApplicationStatus.NOMINATED, ApplicationStatus.FORWARDED_TO_EMPLOYER, ApplicationStatus.ACCEPTED, ApplicationStatus.APPLIED))) { newStatus = OfferState.SHARED; } else if (dao.otherDomesticApplicationsWithCertainStatus(offerGroupId, EnumSet.of(ApplicationStatus.APPLIED))) { newStatus = OfferState.APPLICATIONS; } } return newStatus; } private void updateOfferGroupStatus(final OfferGroupEntity offerGroup, final OfferState state) { offerGroup.setStatus(state); dao.persist(offerGroup); } private void updateOfferStatus(final OfferEntity offer, final OfferState state) { offer.setStatus(state); dao.persist(offer); } private static void verifyOfferAcceptNewApplicationStatus(final OfferState offerState, final ApplicationStatus applicationStatus) { final boolean allowChanges; switch (offerState) { case COMPLETED: allowChanges = checkStateNewStateForCompleted(applicationStatus); break; case CLOSED: allowChanges = applicationStatus == ApplicationStatus.REJECTED_BY_SENDING_COUNTRY; break; default: allowChanges = true; } if (!allowChanges) { throw new VerificationException("Offer with status '" + offerState + "' does not accept new application 
status '" + applicationStatus + '\''); } } private static boolean checkStateNewStateForCompleted(final ApplicationStatus applicationStatus) { final boolean result; switch (applicationStatus) { case REJECTED: case CANCELLED: result = true; break; default: result = false; } return result; } private static void verifyApplicationStatusTransition(final ApplicationStatus oldStatus, final ApplicationStatus newStatus) { final boolean allowChanges; switch (oldStatus) { case ACCEPTED: allowChanges = newStatus == ApplicationStatus.CANCELLED; break; case APPLIED: allowChanges = checkStateNewStateForApplied(newStatus); break; case FORWARDED_TO_EMPLOYER: allowChanges = checkStateNewStateForForwardedToEmployer(newStatus); break; case REJECTED: allowChanges = newStatus == ApplicationStatus.NOMINATED; break; case CANCELLED: allowChanges = checkStateNewStateForCancelled(newStatus); break; case NOMINATED: allowChanges = checkStateNewStateForNominated(newStatus); break; default: throw new VerificationException("Unsupported Status '" + oldStatus + "'."); } if (!allowChanges) { throw new VerificationException("Unsupported transition from '" + oldStatus + "' to " + newStatus); } } private static boolean checkStateNewStateForApplied(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case CANCELLED: case NOMINATED: case REJECTED_BY_SENDING_COUNTRY: result = true; break; default: result = false; } return result; } private static boolean checkStateNewStateForForwardedToEmployer(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case ACCEPTED: case CANCELLED: case REJECTED: result = true; break; default: result = false; } return result; } private static boolean checkStateNewStateForCancelled(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case APPLIED: case NOMINATED: result = true; break; default: result = false; } return result; } private static boolean checkStateNewStateForNominated(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case CANCELLED: case FORWARDED_TO_EMPLOYER: case REJECTED: result = true; break; default: result = false; } return result; } }
iws-core/src/main/java/net/iaeste/iws/core/services/StudentService.java
/* * Licensed to IAESTE A.s.b.l. (IAESTE) under one or more contributor * license agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. The Authors * (See the AUTHORS file distributed with this work) licenses this file to * You under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.iaeste.iws.core.services; import static net.iaeste.iws.core.transformers.ExchangeTransformer.transform; import net.iaeste.iws.api.constants.IWSErrors; import net.iaeste.iws.api.dtos.File; import net.iaeste.iws.api.dtos.exchange.Student; import net.iaeste.iws.api.dtos.exchange.StudentApplication; import net.iaeste.iws.api.enums.exchange.ApplicationStatus; import net.iaeste.iws.api.enums.exchange.OfferState; import net.iaeste.iws.api.exceptions.IWSException; import net.iaeste.iws.api.requests.student.FetchStudentApplicationsRequest; import net.iaeste.iws.api.requests.student.FetchStudentsRequest; import net.iaeste.iws.api.requests.student.ProcessStudentApplicationsRequest; import net.iaeste.iws.api.requests.student.StudentApplicationRequest; import net.iaeste.iws.api.requests.student.StudentRequest; import net.iaeste.iws.api.responses.student.FetchStudentApplicationsResponse; import net.iaeste.iws.api.responses.student.FetchStudentsResponse; import net.iaeste.iws.api.responses.student.StudentApplicationResponse; import net.iaeste.iws.api.responses.student.StudentResponse; import net.iaeste.iws.api.util.DateTime; import net.iaeste.iws.common.configuration.Settings; import net.iaeste.iws.common.exceptions.NotImplementedException; import net.iaeste.iws.common.exceptions.VerificationException; import net.iaeste.iws.core.transformers.StorageTransformer; import net.iaeste.iws.core.transformers.ViewTransformer; import net.iaeste.iws.persistence.AccessDao; import net.iaeste.iws.persistence.Authentication; import net.iaeste.iws.persistence.ExchangeDao; import net.iaeste.iws.persistence.StudentDao; import net.iaeste.iws.persistence.ViewsDao; import net.iaeste.iws.persistence.entities.AttachmentEntity; import net.iaeste.iws.persistence.entities.CountryEntity; import net.iaeste.iws.persistence.entities.FileEntity; import net.iaeste.iws.persistence.entities.GroupEntity; import net.iaeste.iws.persistence.entities.UserEntity; import net.iaeste.iws.persistence.entities.exchange.ApplicationEntity; import net.iaeste.iws.persistence.entities.exchange.OfferEntity; import net.iaeste.iws.persistence.entities.exchange.OfferGroupEntity; import net.iaeste.iws.persistence.entities.exchange.StudentEntity; import net.iaeste.iws.persistence.views.ApplicationView; import net.iaeste.iws.persistence.views.StudentView; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; /** * @author Kim Jensen / last $Author:$ * @version $Revision:$ / $Date:$ * @since IWS 1.0 */ public final class StudentService extends CommonService<StudentDao> { private final AccessDao accessDao; private final ExchangeDao exchangeDao; private final ViewsDao viewsDao; public StudentService(final Settings 
settings, final AccessDao accessDao, final ExchangeDao exchangeDao, final StudentDao studentDao, final ViewsDao viewsDao) { super(settings, studentDao); this.accessDao = accessDao; this.exchangeDao = exchangeDao; this.viewsDao = viewsDao; } public StudentResponse processStudent(final Authentication authentication, final StudentRequest request) { final StudentEntity studentEntity = processStudent(authentication, request.getStudent()); return new StudentResponse(transform(studentEntity)); } public StudentEntity processStudent(final Authentication authentication, final Student student) { final GroupEntity memberGroup = accessDao.findMemberGroup(authentication.getUser()); final UserEntity user = accessDao.findUserByExternalId(student.getStudentId()); final StudentEntity newEntity = transform(student); newEntity.setUser(user); final StudentEntity existingEntity = dao.findStudentByExternal(memberGroup.getId(), student.getStudentId()); if (existingEntity != null) { dao.persist(authentication, existingEntity, newEntity); } else { throw new VerificationException("The student with id '" + student.getStudentId() + "' was not found."); } return existingEntity; } public FetchStudentsResponse fetchStudents(final Authentication authentication, final FetchStudentsRequest request) { final List<StudentView> found = viewsDao.findStudentsForMemberGroup(authentication.getGroup().getParentId(), request.getPage()); final List<Student> result = new ArrayList<>(found.size()); for (final StudentView view : found) { result.add(ViewTransformer.transform(view)); } return new FetchStudentsResponse(result); } public StudentApplicationResponse processStudentApplication(final Authentication authentication, final ProcessStudentApplicationsRequest request) { final ApplicationEntity entity = processStudentApplication(authentication, request.getStudentApplication()); final List<AttachmentEntity> attachments = processAttachments(authentication, entity, request.getStudentApplication().getAttachments()); final StudentApplication application = transform(entity, attachments); return new StudentApplicationResponse(application); } private ApplicationEntity processStudentApplication(final Authentication authentication, final StudentApplication application) { final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser()); final String externalId = application.getApplicationId(); final ApplicationEntity applicationEntity = dao.findApplicationByExternalId(externalId); if ((applicationEntity == null) || applicationEntity.getOfferGroup().getGroup().getId().equals(nationalGroup.getId())) { //application owner return processStudentApplicationByApplicationOwner(authentication, application, applicationEntity); } else { final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final OfferEntity offer = sharedOfferGroup.getOffer(); if (offer.getEmployer().getGroup().getId().equals(nationalGroup.getId())) { //offer owner return processStudentApplicationByOfferOwner(authentication, application, applicationEntity); } } throw new IWSException(IWSErrors.PROCESSING_FAILURE, "Cannot process student application"); } private ApplicationEntity processStudentApplicationByApplicationOwner(final Authentication authentication, final StudentApplication application, ApplicationEntity applicationEntity) { final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser()); final OfferGroupEntity sharedOfferGroup; if (applicationEntity == null) { sharedOfferGroup = 
exchangeDao.findInfoForSharedOffer(authentication.getGroup(), application.getOfferId()); } else { sharedOfferGroup = applicationEntity.getOfferGroup(); } if ((sharedOfferGroup == null) || !sharedOfferGroup.getGroup().getId().equals(nationalGroup.getId())) { final String offerId = application.getOfferId(); throw new VerificationException("The offer with Id '" + offerId + "' is not shared to the group '" + authentication.getGroup().getGroupName() + "'."); } if (EnumSet.of(OfferState.CLOSED, OfferState.COMPLETED, OfferState.NEW).contains(sharedOfferGroup.getOffer().getStatus())) { throw new VerificationException("It is not possible to create/update application for the offer with status '" + sharedOfferGroup.getOffer().getStatus() + "'."); } final GroupEntity memberGroup = accessDao.findMemberGroup(authentication.getUser()); final StudentEntity student = dao.findStudentByExternal(memberGroup.getId(), application.getStudent().getUser().getUserId()); CountryEntity nationality = null; if (application.getNationality() != null) { nationality = dao.findCountry(application.getNationality().getCountryCode()); } if (applicationEntity == null) { applicationEntity = transform(application); applicationEntity.setOfferGroup(sharedOfferGroup); processAddress(authentication, applicationEntity.getHomeAddress()); processAddress(authentication, applicationEntity.getAddressDuringTerms()); applicationEntity.setNationality(nationality); applicationEntity.setStudent(student); dao.persist(authentication, applicationEntity); boolean updateOfferGroup = false; //TODO complete status list from which we should change the status if (sharedOfferGroup.getStatus() == OfferState.SHARED) { sharedOfferGroup.setStatus(OfferState.APPLICATIONS); updateOfferGroup = true; } if (!sharedOfferGroup.getHasApplication()) { sharedOfferGroup.setHasApplication(true); updateOfferGroup = true; } if (updateOfferGroup) { dao.persist(sharedOfferGroup); } } else { final ApplicationEntity updated = transform(application); updated.setNationality(nationality); //using OfferGroup from found entity since this field can't be updated updated.setOfferGroup(applicationEntity.getOfferGroup()); //do not allow to change status updated.setStatus(applicationEntity.getStatus()); processAddress(authentication, applicationEntity.getHomeAddress(), application.getHomeAddress()); processAddress(authentication, applicationEntity.getAddressDuringTerms(), application.getAddressDuringTerms()); dao.persist(authentication, applicationEntity, updated); } return applicationEntity; } private ApplicationEntity processStudentApplicationByOfferOwner(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser()); final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final OfferEntity offer = sharedOfferGroup.getOffer(); if (!offer.getEmployer().getGroup().getId().equals(nationalGroup.getId())) { throw new VerificationException("The group with '" + authentication.getGroup().getGroupName() + "' does not own the offer with id '" + offer.getExternalId() + "'."); } //Offer owner can change only acceptance final StudentApplication updatedApplication = transform(applicationEntity); updatedApplication.setAcceptance(application.getAcceptance()); final ApplicationEntity newEntity = transform(updatedApplication); dao.persist(authentication, applicationEntity, newEntity); return applicationEntity; } private 
List<AttachmentEntity> processAttachments(final Authentication authentication, final ApplicationEntity applicationEntity, final List<File> files) { final List<AttachmentEntity> attachments = new ArrayList<>(files.size()); for (final File file : files) { final FileEntity fileEntity = processFile(authentication, file); final AttachmentEntity attachmentEntity = processAttachment(authentication, applicationEntity, fileEntity); attachments.add(attachmentEntity); } return attachments; } private AttachmentEntity processAttachment(final Authentication authentication, final ApplicationEntity applicationEntity, final FileEntity fileEntity) { AttachmentEntity attachmentEntity = dao.findAttachment("student_applications", applicationEntity.getId(), fileEntity.getId()); if (attachmentEntity == null) { attachmentEntity = new AttachmentEntity(); attachmentEntity.setTable("student_applications"); attachmentEntity.setRecord(applicationEntity.getId()); attachmentEntity.setFile(fileEntity); dao.persist(authentication, attachmentEntity); } return attachmentEntity; } public FetchStudentApplicationsResponse fetchStudentApplications(final Authentication authentication, final FetchStudentApplicationsRequest request) { final String offerExternalId = request.getOfferId(); final OfferEntity ownedOffer = exchangeDao.findOfferByExternalId(authentication, offerExternalId); final List<ApplicationView> found; if ((ownedOffer != null) && ownedOffer.getEmployer().getGroup().equals(authentication.getGroup())) { found = dao.findForeignApplicationsForOffer(offerExternalId, authentication.getGroup().getId()); } else { found = dao.findDomesticApplicationsForOffer(offerExternalId, authentication.getGroup().getId()); } final List<StudentApplication> applications = new ArrayList<>(found.size()); for (final ApplicationView entity : found) { final StudentApplication application = ViewTransformer.transform(entity); final List<File> attachments = findAndTransformAttachments(entity); application.setAttachments(attachments); applications.add(application); } return new FetchStudentApplicationsResponse(applications); } private List<File> findAndTransformAttachments(final ApplicationView view) { final List<AttachmentEntity> attachments = dao.findAttachments("student_applications", view.getId()); final List<File> files = new ArrayList<>(attachments.size()); for (final AttachmentEntity entity : attachments) { final File file = StorageTransformer.transform(entity.getFile()); files.add(file); } return files; } public StudentApplicationResponse processApplicationStatus(final Authentication authentication, final StudentApplicationRequest request) { final ApplicationEntity found = dao.findApplicationByExternalId(request.getApplicationId()); if (found == null) { throw new VerificationException("The application with id '" + request.getApplicationId() + "' was not found."); } final GroupEntity nationalGroup = accessDao.findNationalGroup(authentication.getUser()); final OfferEntity offer = found.getOfferGroup().getOffer(); if (found.getOfferGroup().getGroup().getId().equals(nationalGroup.getId())) { //application owner processApplicationStatusByApplicationOwner(authentication, request, found); } else if (offer.getEmployer().getGroup().getId().equals(nationalGroup.getId())) { //offer owner processApplicationStatusByOfferOwner(authentication, request, found); } else { throw new IWSException(IWSErrors.PROCESSING_FAILURE, "Cannot process student application status."); } return new StudentApplicationResponse(transform(found)); } private void 
processApplicationStatusByOfferOwner(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity applicationEntity) { final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final StudentApplication studentApplication = transform(applicationEntity); verifyOfferAcceptNewApplicationStatus(sharedOfferGroup.getStatus(), request.getStatus()); verifyApplicationStatusTransition(studentApplication.getStatus(), request.getStatus()); switch (request.getStatus()) { case REJECTED: rejectApplication(authentication, request, applicationEntity); break; case FORWARDED_TO_EMPLOYER: forwardToEmployer(authentication, studentApplication, applicationEntity); break; case ACCEPTED: acceptApplication(authentication, studentApplication, applicationEntity); break; default: throw new NotImplementedException("Action '" + request.getStatus() + "' pending implementation."); } } private void processApplicationStatusByApplicationOwner(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity applicationEntity) { final OfferGroupEntity sharedOfferGroup = applicationEntity.getOfferGroup(); final StudentApplication studentApplication = transform(applicationEntity); verifyOfferAcceptNewApplicationStatus(sharedOfferGroup.getStatus(), request.getStatus()); verifyApplicationStatusTransition(studentApplication.getStatus(), request.getStatus()); //TODO - see #526 //TODO - when application status affects also offer status, change it accordingly switch (request.getStatus()) { case NOMINATED: nominateApplication(authentication, studentApplication, applicationEntity); break; case CANCELLED: cancelApplication(authentication, studentApplication, applicationEntity); break; case APPLIED: applyApplication(authentication, studentApplication, applicationEntity); break; case REJECTED_BY_SENDING_COUNTRY: rejectApplicationByApplicationOwner(authentication, request, applicationEntity); break; default: throw new NotImplementedException("Action '" + request.getStatus() + "' pending implementation."); } } private void applyApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { application.setStatus(ApplicationStatus.APPLIED); final ApplicationEntity updated = transform(application); updated.setOfferGroup(applicationEntity.getOfferGroup()); updated.setNationality(applicationEntity.getNationality()); dao.persist(authentication, applicationEntity, updated); } private void forwardToEmployer(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { application.setStatus(ApplicationStatus.FORWARDED_TO_EMPLOYER); final ApplicationEntity updated = transform(application); updated.setOfferGroup(applicationEntity.getOfferGroup()); updated.setNationality(applicationEntity.getNationality()); dao.persist(authentication, applicationEntity, updated); //update status for OfferGroup updateOfferGroupStatus(applicationEntity.getOfferGroup(), OfferState.AT_EMPLOYER); //update status for Offer updateOfferStatus(applicationEntity.getOfferGroup().getOffer(), OfferState.AT_EMPLOYER); } private void acceptApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity applicationEntity) { application.setStatus(ApplicationStatus.ACCEPTED); final ApplicationEntity updated = transform(application); updated.setOfferGroup(applicationEntity.getOfferGroup()); 
updated.setNationality(applicationEntity.getNationality()); dao.persist(authentication, applicationEntity, updated); //update status for OfferGroup updateOfferGroupStatus(applicationEntity.getOfferGroup(), OfferState.ACCEPTED); //update status for Offer updateOfferStatus(applicationEntity.getOfferGroup().getOffer(), OfferState.ACCEPTED); } private void nominateApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity storedApplication) { application.setNominatedAt(new DateTime()); application.setStatus(ApplicationStatus.NOMINATED); final ApplicationEntity updated = transform(application); //using OfferGroup from found entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); //update status for OfferGroup //it can be done either when there are applications (APPLICATIONS) or the nominated application was rejected or cancelled previously if (EnumSet.of(OfferState.APPLICATIONS, OfferState.SHARED).contains(storedApplication.getOfferGroup().getStatus())) { updateOfferGroupStatus(storedApplication.getOfferGroup(), OfferState.NOMINATIONS); } //update status for Offer if (storedApplication.getOfferGroup().getOffer().getStatus() == OfferState.SHARED) { updateOfferStatus(storedApplication.getOfferGroup().getOffer(), OfferState.NOMINATIONS); } } private void rejectApplication(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity storedApplication) { final StudentApplication application = transform(storedApplication); application.setStatus(ApplicationStatus.REJECTED); application.setRejectByEmployerReason(request.getRejectByEmployerReason()); application.setRejectDescription(request.getRejectDescription()); application.setRejectInternalComment(request.getRejectInternalComment()); final ApplicationEntity updated = transform(application); //using OfferGroup from stored entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); final OfferState newOfferGroupState = doUpdateOfferGroupStatus(storedApplication.getOfferGroup().getId(), storedApplication.getOfferGroup().getStatus()); if (newOfferGroupState != null) { updateOfferGroupStatus(storedApplication.getOfferGroup(), newOfferGroupState); } if (doUpdateOfferStatusToShared(storedApplication.getOfferGroup().getOffer().getId())) { updateOfferStatus(storedApplication.getOfferGroup().getOffer(), OfferState.SHARED); } } private void rejectApplicationByApplicationOwner(final Authentication authentication, final StudentApplicationRequest request, final ApplicationEntity storedApplication) { //Application owner is allowed to reject only application in state Applied so we don't need to care if Offer status should be changed final StudentApplication application = transform(storedApplication); application.setStatus(ApplicationStatus.REJECTED_BY_SENDING_COUNTRY); application.setRejectDescription(request.getRejectDescription()); application.setRejectInternalComment(request.getRejectInternalComment()); final ApplicationEntity updated = transform(application); //using OfferGroup from stored entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); 
dao.persist(authentication, storedApplication, updated); final OfferState newOfferGroupState = doUpdateOfferGroupStatus(storedApplication.getOfferGroup().getId(), storedApplication.getOfferGroup().getStatus()); if (newOfferGroupState != null) { updateOfferGroupStatus(storedApplication.getOfferGroup(), newOfferGroupState); } } private void cancelApplication(final Authentication authentication, final StudentApplication application, final ApplicationEntity storedApplication) { application.setStatus(ApplicationStatus.CANCELLED); final ApplicationEntity updated = transform(application); //using OfferGroup from stored entity since this field can't be updated updated.setOfferGroup(storedApplication.getOfferGroup()); updated.setNationality(storedApplication.getNationality()); dao.persist(authentication, storedApplication, updated); final OfferState newOfferGroupState = doUpdateOfferGroupStatus(storedApplication.getOfferGroup().getId(), storedApplication.getOfferGroup().getStatus()); if (newOfferGroupState != null) { updateOfferGroupStatus(storedApplication.getOfferGroup(), newOfferGroupState); } if (doUpdateOfferStatusToShared(storedApplication.getOfferGroup().getOffer().getId())) { updateOfferStatus(storedApplication.getOfferGroup().getOffer(), OfferState.SHARED); } } private boolean doUpdateOfferStatusToShared(final Long offerId) { return !dao.otherOfferGroupWithCertainStatus(offerId, EnumSet.of(OfferState.NOMINATIONS, OfferState.AT_EMPLOYER, OfferState.ACCEPTED)); } private OfferState doUpdateOfferGroupStatus(final Long offerGroupId, final OfferState offerGroupState) { OfferState newStatus = null; if (offerGroupState != OfferState.CLOSED) { if (!dao.otherDomesticApplicationsWithCertainStatus(offerGroupId, EnumSet.of(ApplicationStatus.NOMINATED, ApplicationStatus.FORWARDED_TO_EMPLOYER, ApplicationStatus.ACCEPTED, ApplicationStatus.APPLIED))) { newStatus = OfferState.SHARED; } else if (dao.otherDomesticApplicationsWithCertainStatus(offerGroupId, EnumSet.of(ApplicationStatus.APPLIED))) { newStatus = OfferState.APPLICATIONS; } } return newStatus; } private void updateOfferGroupStatus(final OfferGroupEntity offerGroup, final OfferState state) { offerGroup.setStatus(state); dao.persist(offerGroup); } private void updateOfferStatus(final OfferEntity offer, final OfferState state) { offer.setStatus(state); dao.persist(offer); } private static void verifyOfferAcceptNewApplicationStatus(final OfferState offerState, final ApplicationStatus applicationStatus) { final boolean allowChanges; switch (offerState) { case COMPLETED: allowChanges = checkStateNewStateForCompleted(applicationStatus); break; case CLOSED: allowChanges = applicationStatus == ApplicationStatus.REJECTED_BY_SENDING_COUNTRY; break; default: allowChanges = true; } if (!allowChanges) { throw new VerificationException("Offer with status '" + offerState + "' does not accept new application status '" + applicationStatus + '\''); } } private static boolean checkStateNewStateForCompleted(final ApplicationStatus applicationStatus) { final boolean result; switch (applicationStatus) { case REJECTED: case CANCELLED: result = true; break; default: result = false; } return result; } private static void verifyApplicationStatusTransition(final ApplicationStatus oldStatus, final ApplicationStatus newStatus) { final boolean allowChanges; switch (oldStatus) { case ACCEPTED: allowChanges = newStatus == ApplicationStatus.CANCELLED; break; case APPLIED: allowChanges = checkStateNewStateForApplied(newStatus); break; case FORWARDED_TO_EMPLOYER: allowChanges = 
checkStateNewStateForForwardedToEmployer(newStatus); break; case REJECTED: allowChanges = newStatus == ApplicationStatus.NOMINATED; break; case CANCELLED: allowChanges = checkStateNewStateForCancelled(newStatus); break; case NOMINATED: allowChanges = checkStateNewStateForNominated(newStatus); break; default: throw new VerificationException("Unsupported Status '" + oldStatus + "'."); } if (!allowChanges) { throw new VerificationException("Unsupported transition from '" + oldStatus + "' to " + newStatus); } } private static boolean checkStateNewStateForApplied(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case CANCELLED: case NOMINATED: case REJECTED_BY_SENDING_COUNTRY: result = true; break; default: result = false; } return result; } private static boolean checkStateNewStateForForwardedToEmployer(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case ACCEPTED: case CANCELLED: case REJECTED: result = true; break; default: result = false; } return result; } private static boolean checkStateNewStateForCancelled(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case APPLIED: case NOMINATED: result = true; break; default: result = false; } return result; } private static boolean checkStateNewStateForNominated(final ApplicationStatus newStatus) { final boolean result; switch (newStatus) { case CANCELLED: case FORWARDED_TO_EMPLOYER: case REJECTED: result = true; break; default: result = false; } return result; } }
#24: Rewrote the method processStudentApplicationByApplicationOwner so that it is clearer. The logic is still far from pretty and not as well tested as it could be, but the rewrite was a pure refactoring: no logical changes were made, only structural ones.
iws-core/src/main/java/net/iaeste/iws/core/services/StudentService.java
#24: Rewrote the method processStudentApplicationByApplicationOwner so that it is clearer. The logic is still far from pretty and not as well tested as it could be, but the rewrite was a pure refactoring: no logical changes were made, only structural ones.
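The status-transition rules that the service above encodes as nested switch statements can also be expressed as a lookup table, which makes the allowed moves easier to review at a glance. The sketch below is not part of the IWS codebase: the ApplicationStatus constants are copied from the service code, and the table only restates the transitions visible in verifyApplicationStatusTransition and its helper methods.

import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;

// Hypothetical sketch: a table-driven version of the transition rules from
// verifyApplicationStatusTransition above. The enum mirrors the constants used in the
// service; it is not the actual IWS enum, just enough to make the example compile.
public class ApplicationTransitions {

    enum ApplicationStatus {
        APPLIED, NOMINATED, FORWARDED_TO_EMPLOYER, ACCEPTED,
        REJECTED, REJECTED_BY_SENDING_COUNTRY, CANCELLED
    }

    // Allowed transitions, copied from the switch statements in the service code.
    private static final Map<ApplicationStatus, EnumSet<ApplicationStatus>> ALLOWED =
            new EnumMap<>(ApplicationStatus.class);

    static {
        ALLOWED.put(ApplicationStatus.ACCEPTED,
                EnumSet.of(ApplicationStatus.CANCELLED));
        ALLOWED.put(ApplicationStatus.APPLIED,
                EnumSet.of(ApplicationStatus.CANCELLED, ApplicationStatus.NOMINATED,
                           ApplicationStatus.REJECTED_BY_SENDING_COUNTRY));
        ALLOWED.put(ApplicationStatus.FORWARDED_TO_EMPLOYER,
                EnumSet.of(ApplicationStatus.ACCEPTED, ApplicationStatus.CANCELLED,
                           ApplicationStatus.REJECTED));
        ALLOWED.put(ApplicationStatus.REJECTED,
                EnumSet.of(ApplicationStatus.NOMINATED));
        ALLOWED.put(ApplicationStatus.CANCELLED,
                EnumSet.of(ApplicationStatus.APPLIED, ApplicationStatus.NOMINATED));
        ALLOWED.put(ApplicationStatus.NOMINATED,
                EnumSet.of(ApplicationStatus.CANCELLED, ApplicationStatus.FORWARDED_TO_EMPLOYER,
                           ApplicationStatus.REJECTED));
    }

    static boolean isAllowed(ApplicationStatus from, ApplicationStatus to) {
        EnumSet<ApplicationStatus> targets = ALLOWED.get(from);
        return targets != null && targets.contains(to);
    }

    public static void main(String[] args) {
        // APPLIED -> NOMINATED is allowed, ACCEPTED -> REJECTED is not.
        System.out.println(isAllowed(ApplicationStatus.APPLIED, ApplicationStatus.NOMINATED));  // true
        System.out.println(isAllowed(ApplicationStatus.ACCEPTED, ApplicationStatus.REJECTED));  // false
    }
}

A table like this also makes it cheap to unit-test exactly which status pairs are permitted.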
Java
apache-2.0
70925e492a97208fa3b8c47c353cabef5cc37f67
0
JCTools/JCTools
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jctools.queues; import org.jctools.util.InternalAPI; /** * A note to maintainers on index assumptions: in a single threaded world it would seem intuitive to assume: * <pre> * <code>producerIndex >= consumerIndex</code> * </pre> * As an invariant, but in a concurrent, long running settings all of the following need to be considered: * <ul> * <li> <code>consumerIndex > producerIndex</code> : due to counter overflow (unlikey with longs, but easy to reason) * <li> <code>consumerIndex > producerIndex</code> : due to consumer FastFlow like implementation discovering the * element before the counter is updated. * <li> <code>producerIndex - consumerIndex < 0</code> : due to above. * <li> <code>producerIndex - consumerIndex > Integer.MAX_VALUE</code> : as linked buffers allow constructing queues * with more than <code>Integer.MAX_VALUE</code> elements. * * </ul> */ @InternalAPI public final class IndexedQueueSizeUtil { public static final int PLAIN_DIVISOR = 1; public static final int IGNORE_PARITY_DIVISOR = 2; public static int size(IndexedQueue iq, int divisor) { /* * It is possible for a thread to be interrupted or reschedule between the reads of the producer and * consumer indices. It is also for the indices to be updated in a `weakly` visible way. It follows that * the size value needs to be sanitized to match a valid range. */ long after = iq.lvConsumerIndex(); long size; while (true) { final long before = after; // pIndex read is "sandwiched" between 2 cIndex reads final long currentProducerIndex = iq.lvProducerIndex(); after = iq.lvConsumerIndex(); if (before == after) { size = (currentProducerIndex - after) / divisor; break; } } return sanitizedSize(iq.capacity(), size); } public static int sanitizedSize(int capacity, long size) { // Concurrent updates to cIndex and pIndex may lag behind other progress enablers (e.g. FastFlow), so we need // to check bounds [0,capacity] if (size < 0) { return 0; } if (capacity != MessagePassingQueue.UNBOUNDED_CAPACITY && size > capacity) { return capacity; } // Integer overflow is possible for the unbounded indexed queues. if (size > Integer.MAX_VALUE) { return Integer.MAX_VALUE; } return (int) size; } public static boolean isEmpty(IndexedQueue iq) { // Order matters! // Loading consumer before producer allows for producer increments after consumer index is read. // This ensures this method is conservative in it's estimate. Note that as this is an MPMC there is // nothing we can do to make this an exact method. return (iq.lvConsumerIndex() >= iq.lvProducerIndex()); } @InternalAPI public interface IndexedQueue { long lvConsumerIndex(); long lvProducerIndex(); int capacity(); } }
jctools-core/src/main/java/org/jctools/queues/IndexedQueueSizeUtil.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jctools.queues; import org.jctools.util.InternalAPI; /** * A note to maintainers on index assumptions: in a single threaded world it would seem intuitive to assume: * <pre> * <code>producerIndex >= consumerIndex</code> * </pre> * As an invariant, but in a concurrent, long running settings all of the following need to be considered: * <ul> * <li> <code>consumerIndex > producerIndex</code> : due to counter overflow (unlikey with longs, but easy to reason) * <li> <code>consumerIndex > producerIndex</code> : due to consumer FastFlow like implementation discovering the * element before the counter is updated. * <li> <code>producerIndex - consumerIndex < 0</code> : due to above. * <li> <code>producerIndex - consumerIndex > Integer.MAX_VALUE</code> : as linked buffers allow constructing queues * with more than <code>Integer.MAX_VALUE</code> elements. * * </ul> */ @InternalAPI public final class IndexedQueueSizeUtil { public static final int PLAIN_DIVISOR = 1; public static final int IGNORE_PARITY_DIVISOR = 2; public static int size(IndexedQueue iq, int divisor) { /* * It is possible for a thread to be interrupted or reschedule between the reads of the producer and * consumer indices. It is also for the indices to be updated in a `weakly` visible way. It follows that * the size value needs to be sanitized to match a valid range. */ long after = iq.lvConsumerIndex(); long size; while (true) { final long before = after; // pIndex read is "sandwiched" between 2 cIndex reads final long currentProducerIndex = iq.lvProducerIndex(); after = iq.lvConsumerIndex(); if (before == after) { size = (currentProducerIndex - after) / divisor; break; } } return sanitizedSize(iq.capacity(), size); } public static int sanitizedSize(int capacity, long size) { // Long overflow is impossible here, so size is always positive. Integer overflow is possible for the unbounded // indexed queues. if (size > Integer.MAX_VALUE) { return Integer.MAX_VALUE; } // Concurrent updates to cIndex and pIndex may lag behind other progress enablers (e.g. FastFlow), so we need // to check bounds [0,capacity] else if (size < 0) { return 0; } else if (capacity != MessagePassingQueue.UNBOUNDED_CAPACITY && size > capacity) { return capacity; } return (int) size; } public static boolean isEmpty(IndexedQueue iq) { // Order matters! // Loading consumer before producer allows for producer increments after consumer index is read. // This ensures this method is conservative in it's estimate. Note that as this is an MPMC there is // nothing we can do to make this an exact method. return (iq.lvConsumerIndex() >= iq.lvProducerIndex()); } @InternalAPI public interface IndexedQueue { long lvConsumerIndex(); long lvProducerIndex(); int capacity(); } }
Re-order if, fixes potential (but very unlikely) `size` bug. Fixes a case where `size` is > `MAX_INT` and the queue has limited capacity. This could hypothetically happen if, say, the capacity is 2^31 and, through incredibly poor observation timing, the size was estimated as larger than `MAX_INT` (2^31-1).
jctools-core/src/main/java/org/jctools/queues/IndexedQueueSizeUtil.java
Re-order if, fixes potential (but very unlikely) `size` bug
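The re-ordering described in the commit message above can be illustrated in isolation. The sketch below is not JCTools code; it re-implements the two check orders with a stand-in UNBOUNDED sentinel (the real constant is MessagePassingQueue.UNBOUNDED_CAPACITY) to show how a transiently over-estimated size could leak Integer.MAX_VALUE past the capacity clamp when the overflow check runs first.

public class SizeSanitizerDemo {

    // Stand-in for MessagePassingQueue.UNBOUNDED_CAPACITY; an assumption for this demo only.
    static final int UNBOUNDED = -1;

    // Old order: overflow check first, capacity clamp second (the variant fixed by the commit).
    static int sanitizeOldOrder(int capacity, long size) {
        if (size > Integer.MAX_VALUE) {
            return Integer.MAX_VALUE;          // returns before the capacity clamp can run
        } else if (size < 0) {
            return 0;
        } else if (capacity != UNBOUNDED && size > capacity) {
            return capacity;
        }
        return (int) size;
    }

    // New order: clamp to [0, capacity] first, then guard against int overflow.
    static int sanitizeNewOrder(int capacity, long size) {
        if (size < 0) {
            return 0;
        }
        if (capacity != UNBOUNDED && size > capacity) {
            return capacity;
        }
        if (size > Integer.MAX_VALUE) {
            return Integer.MAX_VALUE;
        }
        return (int) size;
    }

    public static void main(String[] args) {
        int capacity = 1 << 20;                      // a bounded queue
        long observedSize = 1L + Integer.MAX_VALUE;  // transient over-estimate from racy index reads
        System.out.println(sanitizeOldOrder(capacity, observedSize)); // 2147483647 (exceeds capacity)
        System.out.println(sanitizeNewOrder(capacity, observedSize)); // 1048576 (clamped to capacity)
    }
}

Running main prints Integer.MAX_VALUE for the old order but the capacity for the new one, which is exactly the bounded-capacity case the commit message describes.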
Java
apache-2.0
70ecc2f5acedb431aef931f4da4bacb5676ee05c
0
iqrfsdk/jsimply,iqrfsdk/jsimply
/* * Copyright 2016 MICRORISC s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microrisc.opengateway.mqtt; import com.microrisc.opengateway.apps.automation.OpenGatewayAppStd; import com.microrisc.opengateway.dpa.DPA_Request; import com.microrisc.opengateway.dpa.DPA_Result; import com.microrisc.opengateway.dpa.ResponseData; import com.microrisc.opengateway.web.WebRequestParser; import com.microrisc.opengateway.web.WebRequestParserException; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.Certificate; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; import java.sql.Timestamp; import java.util.logging.Level; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken; import org.eclipse.paho.client.mqttv3.MqttCallback; import org.eclipse.paho.client.mqttv3.MqttClient; import org.eclipse.paho.client.mqttv3.MqttConnectOptions; import org.eclipse.paho.client.mqttv3.MqttException; import org.eclipse.paho.client.mqttv3.MqttMessage; import org.eclipse.paho.client.mqttv3.persist.MqttDefaultFilePersistence; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Rostislav Spinar */ public class MqttCommunicator implements MqttCallback { // Private instance variables private MqttClient client; private String brokerUrl; private boolean quietMode; private MqttConnectOptions conOpt; private boolean clean; private String password; private String userName; private String certFile; // time between consecutive attempts to reconnection [in ms] private static final int DEFAULT_RECONNECTION_SLEEP_TIME = 3000; private Runnable reconnectionRunnable = new Runnable() { @Override public void run() { while ( (client != null) && !(client.isConnected()) ) { // Connect to the MQTT server log("Reconnecting to " + brokerUrl + " with client ID " + client.getClientId()); try { client.connect(conOpt); } catch ( MqttException ex ) { log( "Reconnecting to " + brokerUrl + " with client " + "ID " + client.getClientId() + " failed: " + ex.getMessage() ); } if ( !client.isConnected() ) { try { Thread.sleep(DEFAULT_RECONNECTION_SLEEP_TIME); } catch ( InterruptedException ex ) { log.warn(ex.toString()); } } } log("Connected"); } }; private Thread reconnectionThread; private static final Logger log = LoggerFactory.getLogger(MqttCommunicator.class); // sets connection options private void setConnectionOptions( boolean isCleanSession, String password, String userName, String certFile ) throws CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException, KeyManagementException { conOpt.setCleanSession(isCleanSession); if ( !password.isEmpty() ) { 
conOpt.setPassword(password.toCharArray()); } if ( !userName.isEmpty() ) { conOpt.setUserName(userName); } if ( !certFile.isEmpty() ) { CertificateFactory cf = CertificateFactory.getInstance("X.509"); InputStream certFileInputStream = fullStream(certFile); Certificate ca = cf.generateCertificate(certFileInputStream); KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); keyStore.load(null); keyStore.setCertificateEntry("ca", ca); TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); trustManagerFactory.init(keyStore); SSLContext sslContext = SSLContext.getInstance("TLSv1"); sslContext.init(null, trustManagerFactory.getTrustManagers(), new SecureRandom()); conOpt.setSocketFactory(sslContext.getSocketFactory()); } } /** * Constructs an instance of the sample client wrapper * * @param MQTTConfig the configuration params of the server to connect to * @throws MqttException */ public MqttCommunicator(MqttConfiguration mqttConfig) throws MqttException { String brokerUrl = mqttConfig.getProtocol() + mqttConfig.getBroker() + ":" + mqttConfig.getPort(); this.brokerUrl = brokerUrl; this.quietMode = mqttConfig.isQuiteMode(); this.clean = mqttConfig.isCleanSession(); this.certFile = mqttConfig.getCertFilePath(); this.userName = mqttConfig.getUsername(); this.password = mqttConfig.getPassword(); //This sample stores in a temporary directory... where messages temporarily // stored until the message has been delivered to the server. //..a real application ought to store them somewhere // where they are not likely to get deleted or tampered with String tmpDir = System.getProperty("java.io.tmpdir"); MqttDefaultFilePersistence dataStore = new MqttDefaultFilePersistence(tmpDir); try { // Construct the connection options object that contains connection parameters // such as cleanSession and LWT conOpt = new MqttConnectOptions(); setConnectionOptions(clean, password, userName, certFile); // Construct an MQTT blocking mode client client = new MqttClient(this.brokerUrl, mqttConfig.getClientId(), dataStore); // Set this wrapper as the callback handler client.setCallback(this); // Connect to the MQTT server log("Connecting to " + brokerUrl + " with client ID " + client.getClientId()); client.connect(conOpt); log("Connected"); } catch (MqttException e) { e.printStackTrace(); log("Unable to set up client: " + e.toString()); System.exit(1); } catch (CertificateException e) { e.printStackTrace(); log("Unable to set up client - certificate exception: " + e.toString()); System.exit(1); } catch (IOException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in input stream: " + e.toString()); System.exit(1); } catch (KeyStoreException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in key store: " + e.toString()); System.exit(1); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in loading key store: " + e.toString()); System.exit(1); } catch (KeyManagementException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in ssl context: " + e.toString()); System.exit(1); } } /** * Publish / send a message to an MQTT server * * @param topicName the name of the topic to publish to * @param qos the quality of service to delivery the message at (0,1,2) * @param payload the set of bytes to send to the MQTT server * @throws MqttException */ public synchronized void publish(String topicName, int qos, byte[] 
payload) throws MqttException { // Connect to the MQTT server //log("Connecting to " + brokerUrl + " with client ID " + client.getClientId()); //client.connect(conOpt); //log("Connected"); String time = new Timestamp(System.currentTimeMillis()).toString(); log("Publishing at: " + time + " to topic \"" + topicName + "\" qos " + qos); // Create and configure a message MqttMessage message = new MqttMessage(payload); message.setQos(qos); // Send the message to the server, control is not returned until // it has been delivered to the server meeting the specified // quality of service. client.publish(topicName, message); // Disconnect the client //client.disconnect(); //log("Disconnected"); } /** * Subscribe to a topic on an MQTT server. Once subscribed this method waits * for the messages to arrive from the server that match the subscription. * It continues listening for messages until the enter key is pressed. * * @param topicName to subscribe to (can be wild carded) * @param qos the maximum quality of service to receive messages at for this * subscription * @throws MqttException */ public void subscribe(String topicName, int qos) throws MqttException { // Connect to the MQTT server //client.connect(conOpt); //log("Connected to " + brokerUrl + " with client ID " + client.getClientId()); // Subscribe to the requested topic // The QoS specified is the maximum level that messages will be sent to the client at. // For instance if QoS 1 is specified, any messages originally published at QoS 2 will // be downgraded to 1 when delivering to the client but messages published at 1 and 0 // will be received at the same level they were published at. log("Subscribing to topic \"" + topicName + "\" qos " + qos); client.subscribe(topicName, qos); // Disconnect the client from the server //client.disconnect(); //log("Disconnected"); } /** * Utility method to handle logging. If 'quietMode' is set, this method does * nothing * * @param message the message to log */ private void log(String message) { if (!quietMode) { System.out.println(message); } } /** * @see MqttCallback#connectionLost(Throwable) */ public void connectionLost(Throwable cause) { log.debug("connectionLost - start: cause=" + cause.getMessage()); // Called when the connection to the server has been lost. // An application may choose to implement reconnection // logic at this point. This sample simply exits. log("Connection to " + brokerUrl + " lost! " + cause); reconnectionThread = new Thread(reconnectionRunnable); reconnectionThread.start(); log.debug("connectionLost - end"); } /** * @see MqttCallback#deliveryComplete(IMqttDeliveryToken) */ public void deliveryComplete(IMqttDeliveryToken token) { // Called when a message has been delivered to the // server. The token passed in here is the same one // that was passed to or returned from the original call to publish. // This allows applications to perform asynchronous // delivery without blocking until delivery completes. // // This sample demonstrates asynchronous deliver and // uses the token.waitForCompletion() call in the main thread which // blocks until the delivery has completed. // Additionally the deliveryComplete method will be called if // the callback is set on the client // // If the connection to the server breaks before delivery has completed // delivery of a message will complete after the client has re-connected. // The getPendingTokens method will provide tokens for any messages // that are still to be delivered. 
} /** * @see MqttCallback#messageArrived(String, MqttMessage) */ public void messageArrived(String topic, MqttMessage message) throws MqttException { // Called when a message arrives from the server that matches any // subscription made by the client String time = new Timestamp(System.currentTimeMillis()).toString(); System.out.println("Time:\t" + time + " Topic:\t" + topic + " Message:\t" + new String(message.getPayload()) + " QoS:\t" + message.getQos()); // message data final String messageData = new String(message.getPayload()); DPA_Request dpaRequest = null; DPA_Result result = null; try { dpaRequest = WebRequestParser.parse( messageData ); result = OpenGatewayAppStd.sendWebRequestToDpaNetwork(dpaRequest, topic); } catch ( WebRequestParserException ex ) { System.err.println("Error while parsing web request: " + ex); return; } // no result - usually in the case of error if ( result == null ) { System.err.println("Null result from DPA."); return; } // creating data of response to publish ResponseData responseData = createResponseData(dpaRequest, result); // converting DPA result into web response form String webResponse = MqttFormatter.formatResponseData(responseData); try { publish(topic, 2, webResponse.getBytes()); } catch ( MqttException ex ) { System.err.println("Error while publishing web response message: " + ex); } } // creates response data for publishing private ResponseData createResponseData(DPA_Request request, DPA_Result result) { return new ResponseData( request.getN(), request.getSv(), String.valueOf(request.getPid()), result.getRequest().getNadr(), String.valueOf(result.getRequest().getPnum()), result.getRequest().getPcmd(), String.valueOf(result.getDpaAddInfo().getHwProfile()), String.valueOf(result.getDpaAddInfo().getResponseCode()), String.valueOf(result.getDpaAddInfo().getDPA_Value()), result.getRequest().getModuleId() ); } /** * <p>Creates an InputStream from a file, and fills it with the complete * file. Thus, available() on the returned InputStream will return the * full number of bytes the file contains</p> * @param fname The filename * @return The filled InputStream * @exception IOException, if the Streams couldn't be created. **/ private InputStream fullStream ( String fname ) throws IOException { InputStream is = this.getClass().getResourceAsStream(fname); //FileInputStream fis = new FileInputStream(fname); DataInputStream dis = new DataInputStream(is); byte[] bytes = new byte[dis.available()]; dis.readFully(bytes); ByteArrayInputStream bais = new ByteArrayInputStream(bytes); return bais; } }
simply-modules/simply-demos/open-gateway/src/main/java/com/microrisc/opengateway/mqtt/MqttCommunicator.java
/* * Copyright 2016 MICRORISC s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microrisc.opengateway.mqtt; import com.microrisc.opengateway.apps.automation.OpenGatewayAppStd; import com.microrisc.opengateway.dpa.DPA_Request; import com.microrisc.opengateway.dpa.DPA_Result; import com.microrisc.opengateway.dpa.ResponseData; import com.microrisc.opengateway.web.WebRequestParser; import com.microrisc.opengateway.web.WebRequestParserException; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.Certificate; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; import java.sql.Timestamp; import java.util.logging.Level; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken; import org.eclipse.paho.client.mqttv3.MqttCallback; import org.eclipse.paho.client.mqttv3.MqttClient; import org.eclipse.paho.client.mqttv3.MqttConnectOptions; import org.eclipse.paho.client.mqttv3.MqttException; import org.eclipse.paho.client.mqttv3.MqttMessage; import org.eclipse.paho.client.mqttv3.persist.MqttDefaultFilePersistence; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Rostislav Spinar */ public class MqttCommunicator implements MqttCallback { // Private instance variables private MqttClient client; private String brokerUrl; private boolean quietMode; private MqttConnectOptions conOpt; private boolean clean; private String password; private String userName; private String certFile; // time between consecutive attempts to reconnection [in ms] private static final int DEFAULT_RECONNECTION_SLEEP_TIME = 3000; private Runnable reconnectionRunnable = new Runnable() { @Override public void run() { while ( (client != null) && !(client.isConnected()) ) { // Connect to the MQTT server log("Reconnecting to" + brokerUrl + "with client ID " + client.getClientId()); conOpt = new MqttConnectOptions(); try { setConnectionOptions(false, userName, password, certFile); } catch ( Exception ex ) { System.err.println("Error while reconnecting: " + ex); System.exit(1); } try { client.connect(conOpt); } catch ( MqttException ex ) { log( "Reconnecting to" + brokerUrl + "with client " + "ID " + client.getClientId() + "failed: " + ex.getMessage() ); } if ( !client.isConnected() ) { try { Thread.sleep(DEFAULT_RECONNECTION_SLEEP_TIME); } catch ( InterruptedException ex ) { log.warn(ex.toString()); } } } log("Connected"); } }; private Thread reconnectionThread; private static final Logger log = LoggerFactory.getLogger(MqttCommunicator.class); // sets connection options private void setConnectionOptions( boolean isCleanSession, String password, String userName, String certFile ) throws 
CertificateException, IOException, KeyStoreException, NoSuchAlgorithmException, KeyManagementException { conOpt.setCleanSession(isCleanSession); if ( !password.isEmpty() ) { conOpt.setPassword(password.toCharArray()); } if ( !userName.isEmpty() ) { conOpt.setUserName(userName); } if ( !certFile.isEmpty() ) { CertificateFactory cf = CertificateFactory.getInstance("X.509"); InputStream certFileInputStream = fullStream(certFile); Certificate ca = cf.generateCertificate(certFileInputStream); KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); keyStore.load(null); keyStore.setCertificateEntry("ca", ca); TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); trustManagerFactory.init(keyStore); SSLContext sslContext = SSLContext.getInstance("TLSv1"); sslContext.init(null, trustManagerFactory.getTrustManagers(), new SecureRandom()); conOpt.setSocketFactory(sslContext.getSocketFactory()); } } /** * Constructs an instance of the sample client wrapper * * @param MQTTConfig the configuration params of the server to connect to * @throws MqttException */ public MqttCommunicator(MqttConfiguration mqttConfig) throws MqttException { String brokerUrl = mqttConfig.getProtocol() + mqttConfig.getBroker() + ":" + mqttConfig.getPort(); this.brokerUrl = brokerUrl; this.quietMode = mqttConfig.isQuiteMode(); this.clean = mqttConfig.isCleanSession(); this.certFile = mqttConfig.getCertFilePath(); this.userName = mqttConfig.getUsername(); this.password = mqttConfig.getPassword(); //This sample stores in a temporary directory... where messages temporarily // stored until the message has been delivered to the server. //..a real application ought to store them somewhere // where they are not likely to get deleted or tampered with String tmpDir = System.getProperty("java.io.tmpdir"); MqttDefaultFilePersistence dataStore = new MqttDefaultFilePersistence(tmpDir); try { // Construct the connection options object that contains connection parameters // such as cleanSession and LWT conOpt = new MqttConnectOptions(); setConnectionOptions(clean, password, userName, certFile); // Construct an MQTT blocking mode client client = new MqttClient(this.brokerUrl, mqttConfig.getClientId(), dataStore); // Set this wrapper as the callback handler client.setCallback(this); // Connect to the MQTT server log("Connecting to " + brokerUrl + " with client ID " + client.getClientId()); client.connect(conOpt); log("Connected"); } catch (MqttException e) { e.printStackTrace(); log("Unable to set up client: " + e.toString()); System.exit(1); } catch (CertificateException e) { e.printStackTrace(); log("Unable to set up client - certificate exception: " + e.toString()); System.exit(1); } catch (IOException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in input stream: " + e.toString()); System.exit(1); } catch (KeyStoreException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in key store: " + e.toString()); System.exit(1); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in loading key store: " + e.toString()); System.exit(1); } catch (KeyManagementException e) { e.printStackTrace(); log("Unable to set up client - certificate exception in ssl context: " + e.toString()); System.exit(1); } } /** * Publish / send a message to an MQTT server * * @param topicName the name of the topic to publish to * @param qos the quality of service to delivery the 
message at (0,1,2) * @param payload the set of bytes to send to the MQTT server * @throws MqttException */ public synchronized void publish(String topicName, int qos, byte[] payload) throws MqttException { // Connect to the MQTT server //log("Connecting to " + brokerUrl + " with client ID " + client.getClientId()); //client.connect(conOpt); //log("Connected"); String time = new Timestamp(System.currentTimeMillis()).toString(); log("Publishing at: " + time + " to topic \"" + topicName + "\" qos " + qos); // Create and configure a message MqttMessage message = new MqttMessage(payload); message.setQos(qos); // Send the message to the server, control is not returned until // it has been delivered to the server meeting the specified // quality of service. client.publish(topicName, message); // Disconnect the client //client.disconnect(); //log("Disconnected"); } /** * Subscribe to a topic on an MQTT server. Once subscribed this method waits * for the messages to arrive from the server that match the subscription. * It continues listening for messages until the enter key is pressed. * * @param topicName to subscribe to (can be wild carded) * @param qos the maximum quality of service to receive messages at for this * subscription * @throws MqttException */ public void subscribe(String topicName, int qos) throws MqttException { // Connect to the MQTT server //client.connect(conOpt); //log("Connected to " + brokerUrl + " with client ID " + client.getClientId()); // Subscribe to the requested topic // The QoS specified is the maximum level that messages will be sent to the client at. // For instance if QoS 1 is specified, any messages originally published at QoS 2 will // be downgraded to 1 when delivering to the client but messages published at 1 and 0 // will be received at the same level they were published at. log("Subscribing to topic \"" + topicName + "\" qos " + qos); client.subscribe(topicName, qos); // Disconnect the client from the server //client.disconnect(); //log("Disconnected"); } /** * Utility method to handle logging. If 'quietMode' is set, this method does * nothing * * @param message the message to log */ private void log(String message) { if (!quietMode) { System.out.println(message); } } /** * @see MqttCallback#connectionLost(Throwable) */ public void connectionLost(Throwable cause) { log.debug("connectionLost - start: cause=" + cause.getMessage()); // Called when the connection to the server has been lost. // An application may choose to implement reconnection // logic at this point. This sample simply exits. log("Connection to " + brokerUrl + " lost! " + cause); reconnectionThread = new Thread(reconnectionRunnable); reconnectionThread.start(); log.debug("connectionLost - end"); } /** * @see MqttCallback#deliveryComplete(IMqttDeliveryToken) */ public void deliveryComplete(IMqttDeliveryToken token) { // Called when a message has been delivered to the // server. The token passed in here is the same one // that was passed to or returned from the original call to publish. // This allows applications to perform asynchronous // delivery without blocking until delivery completes. // // This sample demonstrates asynchronous deliver and // uses the token.waitForCompletion() call in the main thread which // blocks until the delivery has completed. 
// Additionally the deliveryComplete method will be called if // the callback is set on the client // // If the connection to the server breaks before delivery has completed // delivery of a message will complete after the client has re-connected. // The getPendingTokens method will provide tokens for any messages // that are still to be delivered. } /** * @see MqttCallback#messageArrived(String, MqttMessage) */ public void messageArrived(String topic, MqttMessage message) throws MqttException { // Called when a message arrives from the server that matches any // subscription made by the client String time = new Timestamp(System.currentTimeMillis()).toString(); System.out.println("Time:\t" + time + " Topic:\t" + topic + " Message:\t" + new String(message.getPayload()) + " QoS:\t" + message.getQos()); // message data final String messageData = new String(message.getPayload()); DPA_Request dpaRequest = null; DPA_Result result = null; try { dpaRequest = WebRequestParser.parse( messageData ); result = OpenGatewayAppStd.sendWebRequestToDpaNetwork(dpaRequest, topic); } catch ( WebRequestParserException ex ) { System.err.println("Error while parsing web request: " + ex); return; } // no result - usually in the case of error if ( result == null ) { System.err.println("Null result from DPA."); return; } // creating data of response to publish ResponseData responseData = createResponseData(dpaRequest, result); // converting DPA result into web response form String webResponse = MqttFormatter.formatResponseData(responseData); try { publish(topic, 2, webResponse.getBytes()); } catch ( MqttException ex ) { System.err.println("Error while publishing web response message: " + ex); } } // creates response data for publishing private ResponseData createResponseData(DPA_Request request, DPA_Result result) { return new ResponseData( request.getN(), request.getSv(), String.valueOf(request.getPid()), result.getRequest().getNadr(), String.valueOf(result.getRequest().getPnum()), result.getRequest().getPcmd(), String.valueOf(result.getDpaAddInfo().getHwProfile()), String.valueOf(result.getDpaAddInfo().getResponseCode()), String.valueOf(result.getDpaAddInfo().getDPA_Value()), result.getRequest().getModuleId() ); } /** * <p>Creates an InputStream from a file, and fills it with the complete * file. Thus, available() on the returned InputStream will return the * full number of bytes the file contains</p> * @param fname The filename * @return The filled InputStream * @exception IOException, if the Streams couldn't be created. **/ private InputStream fullStream ( String fname ) throws IOException { InputStream is = this.getClass().getResourceAsStream(fname); //FileInputStream fis = new FileInputStream(fname); DataInputStream dis = new DataInputStream(is); byte[] bytes = new byte[dis.available()]; dis.readFully(bytes); ByteArrayInputStream bais = new ByteArrayInputStream(bytes); return bais; } }
Fix in reconnect logic
simply-modules/simply-demos/open-gateway/src/main/java/com/microrisc/opengateway/mqtt/MqttCommunicator.java
Fix in reconnect logic
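The fix in this record keeps the MqttConnectOptions that were prepared at start-up and simply retries client.connect() until it succeeds, instead of rebuilding the options (and calling System.exit) inside the reconnect thread. A minimal, standalone version of that retry loop might look like the sketch below; it assumes an already constructed Paho MqttClient and MqttConnectOptions and reuses the 3-second pause from the code above.

import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;

// Hypothetical helper: retries the connection with the options prepared at start-up,
// instead of rebuilding them (and exiting) inside the reconnect thread.
public final class ReconnectLoop {

    private static final int SLEEP_BETWEEN_ATTEMPTS_MS = 3000;

    public static void reconnect(MqttClient client, MqttConnectOptions options) {
        while (client != null && !client.isConnected()) {
            try {
                client.connect(options);
            } catch (MqttException ex) {
                System.err.println("Reconnect attempt for client "
                        + client.getClientId() + " failed: " + ex.getMessage());
            }
            if (!client.isConnected()) {
                try {
                    Thread.sleep(SLEEP_BETWEEN_ATTEMPTS_MS);
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();  // stop retrying if the thread is interrupted
                    return;
                }
            }
        }
    }

    private ReconnectLoop() {
    }
}

Restoring the interrupt flag and returning keeps the loop cooperative if the owning application later decides to shut the reconnect thread down.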
Java
apache-2.0
3eea1a8cdf387b54684ee3c5eb123b1d8e2577b3
0
jfbreault/hawtio,samkeeleyong/hawtio,telefunken/hawtio,andytaylor/hawtio,stalet/hawtio,fortyrunner/hawtio,padmaragl/hawtio,jfbreault/hawtio,uguy/hawtio,skarsaune/hawtio,tadayosi/hawtio,telefunken/hawtio,stalet/hawtio,samkeeleyong/hawtio,jfbreault/hawtio,fortyrunner/hawtio,grgrzybek/hawtio,rajdavies/hawtio,hawtio/hawtio,andytaylor/hawtio,rajdavies/hawtio,samkeeleyong/hawtio,tadayosi/hawtio,mposolda/hawtio,Fatze/hawtio,Fatze/hawtio,voipme2/hawtio,skarsaune/hawtio,voipme2/hawtio,grgrzybek/hawtio,samkeeleyong/hawtio,hawtio/hawtio,skarsaune/hawtio,andytaylor/hawtio,padmaragl/hawtio,uguy/hawtio,grgrzybek/hawtio,hawtio/hawtio,Fatze/hawtio,telefunken/hawtio,stalet/hawtio,tadayosi/hawtio,fortyrunner/hawtio,skarsaune/hawtio,mposolda/hawtio,mposolda/hawtio,stalet/hawtio,hawtio/hawtio,hawtio/hawtio,stalet/hawtio,telefunken/hawtio,grgrzybek/hawtio,padmaragl/hawtio,padmaragl/hawtio,telefunken/hawtio,voipme2/hawtio,uguy/hawtio,rajdavies/hawtio,jfbreault/hawtio,rajdavies/hawtio,andytaylor/hawtio,jfbreault/hawtio,uguy/hawtio,tadayosi/hawtio,samkeeleyong/hawtio,Fatze/hawtio,rajdavies/hawtio,voipme2/hawtio,grgrzybek/hawtio,skarsaune/hawtio,padmaragl/hawtio,voipme2/hawtio,uguy/hawtio,mposolda/hawtio,Fatze/hawtio,fortyrunner/hawtio,mposolda/hawtio,andytaylor/hawtio,tadayosi/hawtio
package io.hawt.maven; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; @Mojo(name = "run", defaultPhase = LifecyclePhase.TEST_COMPILE, requiresDependencyResolution = ResolutionScope.RUNTIME) public class RunMojo extends BaseMojo { @Parameter(property = "hawtio.port", defaultValue = "8080") int port; @Parameter(property = "hawtio.context", defaultValue = "hawtio") String context; @Parameter(property = "hawtio.mainClass") String mainClass; @Parameter(property = "hawtio.arguments") String[] arguments; @Parameter(property = "hawtio.systemProperties") Map<String, String> systemProperties; @Override public void execute() throws MojoExecutionException, MojoFailureException { // use hawtio-app extendedPluginDependencyArtifactId = "hawtio-app"; try { doPrepareArguments(); doBeforeExecute(); doExecute(); doAfterExecute(); } catch (Exception e) { throw new MojoExecutionException("Error executing", e); } } protected void doPrepareArguments() throws Exception { List<String> args = new ArrayList<String>(); addCustomArguments(args); if (arguments != null) { args.addAll(Arrays.asList(arguments)); } arguments = new String[args.size()]; args.toArray(arguments); if (getLog().isDebugEnabled()) { StringBuilder msg = new StringBuilder("Invoking: "); msg.append(mainClass); msg.append(".main("); for (int i = 0; i < arguments.length; i++) { if (i > 0) { msg.append(", "); } msg.append(arguments[i]); } msg.append(")"); getLog().debug(msg); } } /** * To add any custom arguments * * @param args the arguments */ protected void addCustomArguments(List<String> args) throws Exception { // noop } protected void doExecute() throws Exception { if (mainClass == null) { throw new IllegalArgumentException("Option mainClass must be specified"); } if (systemProperties != null && !systemProperties.isEmpty()) { for (Map.Entry<String, String> entry : systemProperties.entrySet()) { System.setProperty(entry.getKey(), entry.getValue()); } getLog().info("Adding system properties: " + systemProperties); } final IsolatedThreadGroup threadGroup = new IsolatedThreadGroup(this, mainClass); final Thread bootstrapThread = new Thread(threadGroup, new Runnable() { public void run() { try { beforeBootstrapHawtio(); getLog().info("Starting hawtio ..."); getLog().info("*************************************"); Method hawtioMain = Thread.currentThread().getContextClassLoader().loadClass("io.hawt.app.App") .getMethod("main", String[].class); String[] args = new String[]{"--context", context, "--port", "" + port, "--join", "false"}; hawtioMain.invoke(null, new Object[]{args}); afterBootstrapHawtio(); beforeBootstrapMain(); getLog().info("Starting " + mainClass + "..."); getLog().info("*************************************"); Method main = Thread.currentThread().getContextClassLoader().loadClass(mainClass) .getMethod("main", String[].class); main.invoke(main, new Object[] {arguments}); afterBootstrapMain(); } catch (Exception e) { // just pass it on // let it be printed so end users can see the exception on the console getLog().error("*************************************"); 
getLog().error("Error occurred while running main from: " + mainClass); getLog().error(e); getLog().error("*************************************"); Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(), e); } } }, mainClass + ".main()"); // resolve artifacts to be used Set<Artifact> artifacts = resolveArtifacts(); resolvedArtifacts(artifacts); bootstrapThread.setContextClassLoader(getClassLoader(artifacts)); bootstrapThread.start(); joinNonDaemonThreads(threadGroup); try { terminateThreads(threadGroup); threadGroup.destroy(); } catch (IllegalThreadStateException e) { getLog().warn("Cannot destroy thread group " + threadGroup, e); } if (threadGroup.getUncaughtException() != null) { throw new MojoExecutionException("Uncaught exception", threadGroup.getUncaughtException()); } } protected void beforeBootstrapMain() { // noop } protected void afterBootstrapMain() { // noop } protected void beforeBootstrapHawtio() { // noop } protected void afterBootstrapHawtio() { // noop } }
hawtio-maven-plugin/src/main/java/io/hawt/maven/RunMojo.java
package io.hawt.maven; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; @Mojo(name = "run", defaultPhase = LifecyclePhase.TEST_COMPILE, requiresDependencyResolution = ResolutionScope.RUNTIME) public class RunMojo extends BaseMojo { @Parameter(property = "hawtio.port", defaultValue = "8080") int port; @Parameter(property = "hawtio.context", defaultValue = "hawtio") String context; @Parameter(property = "hawtio.mainClass") String mainClass; @Parameter(property = "hawtio.arguments") String[] arguments; @Override public void execute() throws MojoExecutionException, MojoFailureException { // use hawtio-app extendedPluginDependencyArtifactId = "hawtio-app"; try { doPrepareArguments(); doBeforeExecute(); doExecute(); doAfterExecute(); } catch (Exception e) { throw new MojoExecutionException("Error executing", e); } } protected void doPrepareArguments() throws Exception { List<String> args = new ArrayList<String>(); addCustomArguments(args); if (arguments != null) { args.addAll(Arrays.asList(arguments)); } arguments = new String[args.size()]; args.toArray(arguments); if (getLog().isDebugEnabled()) { StringBuilder msg = new StringBuilder("Invoking: "); msg.append(mainClass); msg.append(".main("); for (int i = 0; i < arguments.length; i++) { if (i > 0) { msg.append(", "); } msg.append(arguments[i]); } msg.append(")"); getLog().debug(msg); } } /** * To add any custom arguments * * @param args the arguments */ protected void addCustomArguments(List<String> args) throws Exception { // noop } protected void doExecute() throws Exception { if (mainClass == null) { throw new IllegalArgumentException("Option mainClass must be specified"); } final IsolatedThreadGroup threadGroup = new IsolatedThreadGroup(this, mainClass); final Thread bootstrapThread = new Thread(threadGroup, new Runnable() { public void run() { try { beforeBootstrapHawtio(); getLog().info("Starting hawtio ..."); getLog().info("*************************************"); Method hawtioMain = Thread.currentThread().getContextClassLoader().loadClass("io.hawt.app.App") .getMethod("main", String[].class); String[] args = new String[]{"--context", context, "--port", "" + port, "--join", "false"}; hawtioMain.invoke(null, new Object[]{args}); afterBootstrapHawtio(); beforeBootstrapMain(); getLog().info("Starting " + mainClass + "..."); getLog().info("*************************************"); Method main = Thread.currentThread().getContextClassLoader().loadClass(mainClass) .getMethod("main", String[].class); main.invoke(main, new Object[] {arguments}); afterBootstrapMain(); } catch (Exception e) { // just pass it on // let it be printed so end users can see the exception on the console getLog().error("*************************************"); getLog().error("Error occurred while running main from: " + mainClass); getLog().error(e); getLog().error("*************************************"); Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(), e); } } }, mainClass + ".main()"); // resolve artifacts to be used Set<Artifact> artifacts = resolveArtifacts(); 
resolvedArtifacts(artifacts); bootstrapThread.setContextClassLoader(getClassLoader(artifacts)); bootstrapThread.start(); joinNonDaemonThreads(threadGroup); try { terminateThreads(threadGroup); threadGroup.destroy(); } catch (IllegalThreadStateException e) { getLog().warn("Cannot destroy thread group " + threadGroup, e); } if (threadGroup.getUncaughtException() != null) { throw new MojoExecutionException("Uncaught exception", threadGroup.getUncaughtException()); } } protected void beforeBootstrapMain() { // noop } protected void afterBootstrapMain() { // noop } protected void beforeBootstrapHawtio() { // noop } protected void afterBootstrapHawtio() { // noop } }
#782: Added support for system properties.
hawtio-maven-plugin/src/main/java/io/hawt/maven/RunMojo.java
#782: Added support for system properties.
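With this change, every entry of the mojo's systemProperties map is pushed into the JVM through System.setProperty before mainClass is invoked, so the launched application reads it like any other -D property. The tiny main class below is hypothetical (the property name demo.greeting is invented for the example) and only shows what the launched side of that handshake could look like.

// Hypothetical main class started by the hawtio-maven-plugin run goal; it reads a property
// that the <systemProperties> configuration of the mojo is expected to have set beforehand.
public class DemoMain {
    public static void main(String[] args) {
        // Falls back to a default when the property was not configured in the plugin.
        String greeting = System.getProperty("demo.greeting", "no greeting configured");
        System.out.println("started by hawtio-maven-plugin; demo.greeting=" + greeting);
    }
}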
Java
apache-2.0
5a9659a36e3debe15d2435134d28f919e905ad46
0
gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa
/* * Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * For further details of the Gene Expression Atlas project, including source code, * downloads and documentation, please see: * * http://gxa.github.com/gxa */ package uk.ac.ebi.gxa; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.core.CoreContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.bridge.SLF4JBridgeHandler; import org.xml.sax.SAXException; import uk.ac.ebi.gxa.dao.AtlasDAOTestCase; import uk.ac.ebi.gxa.efo.Efo; import uk.ac.ebi.gxa.efo.EfoImpl; import uk.ac.ebi.gxa.index.SolrContainerFactory; import uk.ac.ebi.gxa.index.builder.DefaultIndexBuilder; import uk.ac.ebi.gxa.index.builder.IndexAllCommand; import uk.ac.ebi.gxa.index.builder.IndexBuilderException; import uk.ac.ebi.gxa.index.builder.listener.IndexBuilderEvent; import uk.ac.ebi.gxa.index.builder.listener.IndexBuilderListener; import uk.ac.ebi.gxa.index.builder.service.ExperimentAtlasIndexBuilderService; import uk.ac.ebi.gxa.index.builder.service.GeneAtlasIndexBuilderService; import uk.ac.ebi.gxa.netcdf.generator.NetCDFCreatorException; import uk.ac.ebi.gxa.netcdf.reader.AtlasNetCDFDAO; import uk.ac.ebi.gxa.properties.AtlasProperties; import uk.ac.ebi.gxa.properties.ResourceFileStorage; import uk.ac.ebi.gxa.utils.FileUtil; import javax.xml.parsers.ParserConfigurationException; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.logging.LogManager; /** * Test case that creates Solr indices and NetCDFs from DB unit test. 
*/ public abstract class AbstractIndexNetCDFTestCase extends AtlasDAOTestCase { private File indexLocation; private SolrServer exptServer; private SolrServer atlasServer; private DefaultIndexBuilder indexBuilder; private CoreContainer coreContainer; private File netCDFRepoLocation; private AtlasNetCDFDAO atlasNetCDFDAO; private boolean solrBuildFinished; private final Logger log = LoggerFactory.getLogger(getClass()); protected void setUp() throws Exception { super.setUp(); try { LogManager.getLogManager() .readConfiguration(this.getClass().getClassLoader().getResourceAsStream("logging.properties")); } catch (Exception e) { e.printStackTrace(); } SLF4JBridgeHandler.install(); buildSolrIndexes(); generateNetCDFs(); } private void generateNetCDFs() throws NetCDFCreatorException, InterruptedException { final File classPath = new File(this.getClass().getClassLoader().getResource("").getPath()); netCDFRepoLocation = new File(classPath, "netcdfs"); atlasNetCDFDAO = new AtlasNetCDFDAO(); atlasNetCDFDAO.setAtlasDataRepo(netCDFRepoLocation); } protected void tearDown() throws Exception { super.tearDown(); // delete the repo if (netCDFRepoLocation.exists()) FileUtil.deleteDirectory(netCDFRepoLocation); netCDFRepoLocation = null; // shutdown the indexBuilder and coreContainer if its not already been done indexBuilder.shutdown(); if (coreContainer != null) { coreContainer.shutdown(); } // delete the index if (indexLocation.exists()) FileUtil.deleteDirectory(indexLocation); indexLocation = null; indexBuilder = null; } public AtlasNetCDFDAO getNetCDFDAO() { return atlasNetCDFDAO; } public SolrServer getSolrServerExpt() { return exptServer; } public SolrServer getSolrServerAtlas() { return atlasServer; } private void buildSolrIndexes() throws InterruptedException, IndexBuilderException, URISyntaxException, IOException, SAXException, ParserConfigurationException { indexLocation = new File(new File("target", "test"), "index"); log.debug("Extracting index to " + indexLocation.getAbsolutePath()); createSOLRServers(); ExperimentAtlasIndexBuilderService eaibs = new ExperimentAtlasIndexBuilderService(); eaibs.setAtlasDAO(getAtlasDAO()); eaibs.setSolrServer(exptServer); GeneAtlasIndexBuilderService gaibs = new GeneAtlasIndexBuilderService(); gaibs.setAtlasDAO(getAtlasDAO()); gaibs.setSolrServer(atlasServer); AtlasProperties atlasProperties = new AtlasProperties(); ResourceFileStorage storage = new ResourceFileStorage(); storage.setResourcePath("atlas.properties"); atlasProperties.setStorage(storage); gaibs.setAtlasProperties(atlasProperties); Efo efo = new EfoImpl(); efo.setUri(new URI("resource:META-INF/efo.owl")); //efo.load(); gaibs.setEfo(efo); indexBuilder = new DefaultIndexBuilder(); indexBuilder.setIncludeIndexes(Arrays.asList("experiments", "genes")); indexBuilder.setServices(Arrays.asList(eaibs, gaibs)); indexBuilder.startup(); indexBuilder.doCommand(new IndexAllCommand(), new IndexBuilderListener() { public void buildSuccess() { solrBuildFinished = true; } public void buildError(IndexBuilderEvent event) { solrBuildFinished = true; for (Throwable t : event.getErrors()) { t.printStackTrace(); } fail("Failed to build Solr Indexes: " + event.getErrors()); } public void buildProgress(String progressStatus) { } }); while (!solrBuildFinished) { synchronized (this) { wait(100); } } } private void createSOLRServers() throws IOException, SAXException, ParserConfigurationException { SolrContainerFactory solrContainerFactory = new SolrContainerFactory(); solrContainerFactory.setAtlasIndex(indexLocation); 
solrContainerFactory.setTemplatePath("solr"); coreContainer = solrContainerFactory.createContainer(); // create an embedded solr server for experiments and genes from this container exptServer = new EmbeddedSolrServer(coreContainer, "expt"); atlasServer = new EmbeddedSolrServer(coreContainer, "atlas"); } }
atlas-test/src/test/java/uk/ac/ebi/gxa/AbstractIndexNetCDFTestCase.java
/* * Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * For further details of the Gene Expression Atlas project, including source code, * downloads and documentation, please see: * * http://gxa.github.com/gxa */ package uk.ac.ebi.gxa; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.core.CoreContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.bridge.SLF4JBridgeHandler; import org.xml.sax.SAXException; import uk.ac.ebi.gxa.dao.AtlasDAOTestCase; import uk.ac.ebi.gxa.efo.Efo; import uk.ac.ebi.gxa.index.SolrContainerFactory; import uk.ac.ebi.gxa.index.builder.DefaultIndexBuilder; import uk.ac.ebi.gxa.index.builder.IndexAllCommand; import uk.ac.ebi.gxa.index.builder.IndexBuilderException; import uk.ac.ebi.gxa.index.builder.listener.IndexBuilderEvent; import uk.ac.ebi.gxa.index.builder.listener.IndexBuilderListener; import uk.ac.ebi.gxa.index.builder.service.ExperimentAtlasIndexBuilderService; import uk.ac.ebi.gxa.index.builder.service.GeneAtlasIndexBuilderService; import uk.ac.ebi.gxa.netcdf.generator.NetCDFCreatorException; import uk.ac.ebi.gxa.netcdf.reader.AtlasNetCDFDAO; import uk.ac.ebi.gxa.properties.AtlasProperties; import uk.ac.ebi.gxa.properties.ResourceFileStorage; import uk.ac.ebi.gxa.utils.FileUtil; import javax.xml.parsers.ParserConfigurationException; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Arrays; import java.util.logging.LogManager; /** * Test case that creates Solr indices and NetCDFs from DB unit test. 
*/ public abstract class AbstractIndexNetCDFTestCase extends AtlasDAOTestCase { private File indexLocation; private SolrServer exptServer; private SolrServer atlasServer; private DefaultIndexBuilder indexBuilder; private CoreContainer coreContainer; private File netCDFRepoLocation; private AtlasNetCDFDAO atlasNetCDFDAO; private boolean solrBuildFinished; private final Logger log = LoggerFactory.getLogger(getClass()); protected void setUp() throws Exception { super.setUp(); try { LogManager.getLogManager() .readConfiguration(this.getClass().getClassLoader().getResourceAsStream("logging.properties")); } catch (Exception e) { e.printStackTrace(); } SLF4JBridgeHandler.install(); buildSolrIndexes(); generateNetCDFs(); } private void generateNetCDFs() throws NetCDFCreatorException, InterruptedException { final File classPath = new File(this.getClass().getClassLoader().getResource("").getPath()); netCDFRepoLocation = new File(classPath, "netcdfs"); atlasNetCDFDAO = new AtlasNetCDFDAO(); atlasNetCDFDAO.setAtlasDataRepo(netCDFRepoLocation); } protected void tearDown() throws Exception { super.tearDown(); // delete the repo if (netCDFRepoLocation.exists()) FileUtil.deleteDirectory(netCDFRepoLocation); netCDFRepoLocation = null; // shutdown the indexBuilder and coreContainer if its not already been done indexBuilder.shutdown(); if (coreContainer != null) { coreContainer.shutdown(); } // delete the index if (indexLocation.exists()) FileUtil.deleteDirectory(indexLocation); indexLocation = null; indexBuilder = null; } public AtlasNetCDFDAO getNetCDFDAO() { return atlasNetCDFDAO; } public SolrServer getSolrServerExpt() { return exptServer; } public SolrServer getSolrServerAtlas() { return atlasServer; } private void buildSolrIndexes() throws InterruptedException, IndexBuilderException, URISyntaxException, IOException, SAXException, ParserConfigurationException { indexLocation = new File(new File("target", "test"), "index"); log.debug("Extracting index to " + indexLocation.getAbsolutePath()); createSOLRServers(); ExperimentAtlasIndexBuilderService eaibs = new ExperimentAtlasIndexBuilderService(); eaibs.setAtlasDAO(getAtlasDAO()); eaibs.setSolrServer(exptServer); GeneAtlasIndexBuilderService gaibs = new GeneAtlasIndexBuilderService(); gaibs.setAtlasDAO(getAtlasDAO()); gaibs.setSolrServer(atlasServer); AtlasProperties atlasProperties = new AtlasProperties(); ResourceFileStorage storage = new ResourceFileStorage(); storage.setResourcePath("atlas.properties"); atlasProperties.setStorage(storage); gaibs.setAtlasProperties(atlasProperties); Efo efo = new Efo(); efo.setUri(new URI("resource:META-INF/efo.owl")); //efo.load(); gaibs.setEfo(efo); indexBuilder = new DefaultIndexBuilder(); indexBuilder.setIncludeIndexes(Arrays.asList("experiments", "genes")); indexBuilder.setServices(Arrays.asList(eaibs, gaibs)); indexBuilder.startup(); indexBuilder.doCommand(new IndexAllCommand(), new IndexBuilderListener() { public void buildSuccess() { solrBuildFinished = true; } public void buildError(IndexBuilderEvent event) { solrBuildFinished = true; for (Throwable t : event.getErrors()) { t.printStackTrace(); } fail("Failed to build Solr Indexes: " + event.getErrors()); } public void buildProgress(String progressStatus) { } }); while (!solrBuildFinished) { synchronized (this) { wait(100); } } } private void createSOLRServers() throws IOException, SAXException, ParserConfigurationException { SolrContainerFactory solrContainerFactory = new SolrContainerFactory(); solrContainerFactory.setAtlasIndex(indexLocation); 
solrContainerFactory.setTemplatePath("solr"); coreContainer = solrContainerFactory.createContainer(); // create an embedded solr server for experiments and genes from this container exptServer = new EmbeddedSolrServer(coreContainer, "expt"); atlasServer = new EmbeddedSolrServer(coreContainer, "atlas"); } }
Reflect the change of Efo from a class to an interface
atlas-test/src/test/java/uk/ac/ebi/gxa/AbstractIndexNetCDFTestCase.java
Reflect the change of Efo from a class to an interface
Java
apache-2.0
43fbad81d6b8aa272b59a99733389579204dc654
0
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
package uk.ac.ebi.quickgo.annotation.service.statistics; import java.util.List; import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; /** * A source of {@link RequiredStatistics} instances. * * @author Tony Wardell * Date: 19/12/2017 * Time: 11:04 * Created with IntelliJ IDEA. */ public class RequiredStatisticsProvider { private final RequiredStatistics standardUsage; private final RequiredStatistics downloadUsage; private final RequiredStatistics standardUsageWithGeneProductFiltering; private final RequiredStatistics downloadUsageWithGeneProductFiltering; public RequiredStatisticsProvider(StatisticsTypeConfigurer standardConfiguration, StatisticsTypeConfigurer downloadConfiguration) { checkArgument(Objects.nonNull(standardConfiguration), "The standard StatisticsTypeConfigurer instance cannot" + " be null"); checkArgument(Objects.nonNull(downloadConfiguration), "The download StatisticsTypeConfigurer instance cannot" + " be null"); standardUsage = new RequiredStatistics(standardConfiguration); downloadUsage = new RequiredStatistics(downloadConfiguration); standardUsageWithGeneProductFiltering = new RequiredStatisticsWithGeneProduct(standardConfiguration); downloadUsageWithGeneProductFiltering = new RequiredStatisticsWithGeneProduct(downloadConfiguration); } public List<RequiredStatistic> getStandardUsage() { return standardUsage.getRequiredStatistics(); } public List<RequiredStatistic> getDownloadUsage() { return downloadUsage.getRequiredStatistics(); } public List<RequiredStatistic> getStandardUsageWithGeneProductFiltering() { return standardUsageWithGeneProductFiltering.getRequiredStatistics(); } public List<RequiredStatistic> getDownloadUsageWithGeneProductFiltering() { return downloadUsageWithGeneProductFiltering.getRequiredStatistics(); } }
annotation-rest/src/main/java/uk/ac/ebi/quickgo/annotation/service/statistics/RequiredStatisticsProvider.java
package uk.ac.ebi.quickgo.annotation.service.statistics; import java.util.List; import java.util.Objects; import static com.google.common.base.Preconditions.checkArgument; /** * A source of {@link RequiredStatistics} instances. * * @author Tony Wardell * Date: 19/12/2017 * Time: 11:04 * Created with IntelliJ IDEA. */ public class RequiredStatisticsProvider { private final RequiredStatistics standardUsage; private final RequiredStatistics downloadUsage; private final RequiredStatistics standardUsageWithGeneProductFiltering; private final RequiredStatistics downloadUsageWithGeneProductFiltering; public RequiredStatisticsProvider(StatisticsTypeConfigurer standardConfiguration, StatisticsTypeConfigurer downloadConfiguration) { checkArgument(Objects.nonNull(standardConfiguration), "The standard StatisticsTypeConfigurer instance cannot" + " be null"); checkArgument(Objects.nonNull(standardConfiguration), "The download StatisticsTypeConfigurer instance cannot" + " be null"); standardUsage = new RequiredStatistics(standardConfiguration); downloadUsage = new RequiredStatistics(downloadConfiguration); standardUsageWithGeneProductFiltering = new RequiredStatisticsWithGeneProduct(standardConfiguration); downloadUsageWithGeneProductFiltering = new RequiredStatisticsWithGeneProduct(downloadConfiguration); } public List<RequiredStatistic> getStandardUsage() { return standardUsage.getRequiredStatistics(); } public List<RequiredStatistic> getDownloadUsage() { return downloadUsage.getRequiredStatistics(); } public List<RequiredStatistic> getStandardUsageWithGeneProductFiltering() { return standardUsageWithGeneProductFiltering.getRequiredStatistics(); } public List<RequiredStatistic> getDownloadUsageWithGeneProductFiltering() { return downloadUsageWithGeneProductFiltering.getRequiredStatistics(); } }
Fix cut-and-paste error: the argument check validated standardConfiguration twice instead of downloadConfiguration.
annotation-rest/src/main/java/uk/ac/ebi/quickgo/annotation/service/statistics/RequiredStatisticsProvider.java
Fix cut-and-paste error: the argument check validated standardConfiguration twice instead of downloadConfiguration.
Java
apache-2.0
63fb7d95099e734d04266890a01b05352824e59e
0
wreckJ/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,diorcety/intellij-community,amith01994/intellij-community,FHannes/intellij-community,holmes/intellij-community,semonte/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,allotria/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,izonder/intellij-community,petteyg/intellij-community,slisson/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,samthor/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,izonder/intellij-community,apixandru/intellij-community,robovm/robovm-studio,kdwink/intellij-community,ahb0327/intellij-community,allotria/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,holmes/intellij-community,supersven/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,samthor/intellij-community,gnuhub/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,hurricup/intellij-community,diorcety/intellij-community,izonder/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,da1z/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,diorcety/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,signed/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,TangHao198
7/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,signed/intellij-community,allotria/intellij-community,amith01994/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,izonder/intellij-community,slisson/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,semonte/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,retomerz/intellij-community,hurricup/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,gnuhub/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,fnouama/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,da1z/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,FHannes/intellij-community,kdwink/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,fnouama/intellij-community,blademainer/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,dslomov/intellij-community,jagguli/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,robovm/robovm-studio,vladmm/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,ibinti/intellij-community,ryano144/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,semonte/intellij-community,Lekanich/intellij-community,slisson/intellij-community,robov
m/robovm-studio,gnuhub/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,semonte/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,blademainer/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,da1z/intellij-community,robovm/robovm-studio,robovm/robovm-studio,alphafoobar/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,signed/intellij-community,adedayo/intellij-community,slisson/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,signed/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,vvv1559/intellij-community,supersven/intellij-community,amith01994/intellij-community,holmes/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,kdwink/intellij-community,kool79/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,slisson/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,izonder/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,fnouama/intellij-community,pwoodworth/inte
llij-community,da1z/intellij-community,dslomov/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,petteyg/intellij-community,clumsy/intellij-community,asedunov/intellij-community,allotria/intellij-community,holmes/intellij-community,signed/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,caot/intellij-community,ibinti/intellij-community,hurricup/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,ibinti/intellij-community,adedayo/intellij-community,fitermay/intellij-community,jagguli/intellij-community,kool79/intellij-community,ibinti/intellij-community,caot/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,kool79/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,blademainer/intellij-community,clumsy/intellij-community,signed/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,fitermay/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,fitermay/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,blademainer/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,izonder/intellij-community,da1z/intellij-community,dslomov/intellij-community,kool79/intellij-community,allotria/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,caot/i
ntellij-community,MichaelNedzelsky/intellij-community,supersven/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,allotria/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,vladmm/intellij-community,fnouama/intellij-community,signed/intellij-community,retomerz/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,da1z/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,signed/intellij-community,youdonghai/intellij-community,samthor/intellij-community,dslomov/intellij-community,ryano144/intellij-community,caot/intellij-community,diorcety/intellij-community,allotria/intellij-community,caot/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,supersven/intellij-community,clumsy/intellij-community,blademainer/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,vladmm/intellij-community,supersven/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,ryano144/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,signed/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,semonte/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,fitermay/intellij-community,kool79/intellij-community,fnouama/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,holmes/intellij-community,signed/intellij-community,izonder/intellij-community,xfournet/intellij-community,dslomov/intellij-community,samthor/intellij-community,xfournet/intellij-community,ryano144/intellij-community,xfournet/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,caot/intellij-community,clumsy/intellij-community,diorcety/intellij-community,slisson/intellij-community,allotria/intellij-community,wreckJ/intellij-community,
ivan-fedorov/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,clumsy/intellij-community,slisson/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,apixandru/intellij-community,apixandru/intellij-community,blademainer/intellij-community,semonte/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,signed/intellij-community,caot/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,blademainer/intellij-community,semonte/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,slisson/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,ibinti/intellij-community,hurricup/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,amith01994/intellij-community,semonte/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,allotria/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,kool79/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,da1z/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,apixandru/intellij-community,hurricup/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,orekyuu/intellij-community,semonte/intellij-community,amith01994/intellij-community,caot/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,diorcety/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,amith01994/intellij-community,blademainer/intellij-community,clumsy/intellij-community,ryano144/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,slisson/intellij-community
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.profile.codeInspection.ui.inspectionsTree; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.daemon.HighlightDisplayKey; import com.intellij.codeInspection.ex.InspectionProfileImpl; import com.intellij.codeInspection.ex.ScopeToolState; import com.intellij.codeInspection.ex.ToolsImpl; import com.intellij.ide.IdeTooltip; import com.intellij.ide.IdeTooltipManager; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.profile.codeInspection.ui.InspectionsAggregationUtil; import com.intellij.profile.codeInspection.ui.SingleInspectionProfilePanel; import com.intellij.profile.codeInspection.ui.table.ScopesAndSeveritiesTable; import com.intellij.profile.codeInspection.ui.table.ThreeStateCheckBoxRenderer; import com.intellij.ui.DoubleClickListener; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.ArrayUtil; import com.intellij.util.containers.*; import com.intellij.util.containers.HashSet; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableColumn; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreeNode; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.*; import java.util.*; import java.util.HashMap; import java.util.List; /** * @author Dmitry Batkovich */ public class InspectionsConfigTreeTable extends TreeTable { private final static Logger LOG = Logger.getInstance(InspectionsConfigTreeTable.class); private final static int TREE_COLUMN = 0; private final static int SEVERITIES_COLUMN = 1; private final static int IS_ENABLED_COLUMN = 2; public InspectionsConfigTreeTable(final InspectionsConfigTreeTableSettings settings) { super(new InspectionsConfigTreeTableModel(settings)); final TableColumn severitiesColumn = getColumnModel().getColumn(SEVERITIES_COLUMN); severitiesColumn.setMaxWidth(20); final TableColumn isEnabledColumn = getColumnModel().getColumn(IS_ENABLED_COLUMN); isEnabledColumn.setMaxWidth(20); isEnabledColumn.setCellRenderer(new ThreeStateCheckBoxRenderer()); isEnabledColumn.setCellEditor(new ThreeStateCheckBoxRenderer()); addMouseMotionListener(new MouseAdapter() { @Override public void mouseMoved(final MouseEvent e) { final Point point = e.getPoint(); final int column = columnAtPoint(point); if (column != SEVERITIES_COLUMN) { return; } final int row = rowAtPoint(point); final Object maybeIcon = getModel().getValueAt(row, column); if (maybeIcon instanceof MultiScopeSeverityIcon) { final MultiScopeSeverityIcon icon = 
(MultiScopeSeverityIcon)maybeIcon; final LinkedHashMap<String, HighlightDisplayLevel> scopeToAverageSeverityMap = icon.getScopeToAverageSeverityMap(); final JComponent component; if (scopeToAverageSeverityMap.size() == 1 && icon.getDefaultScopeName().equals(ContainerUtil.getFirstItem(scopeToAverageSeverityMap.keySet()))) { final HighlightDisplayLevel level = ContainerUtil.getFirstItem(scopeToAverageSeverityMap.values()); final JLabel label = new JLabel(); label.setIcon(level.getIcon()); label.setText(SingleInspectionProfilePanel.renderSeverity(level.getSeverity())); component = label; } else { component = new ScopesAndSeveritiesHintTable(scopeToAverageSeverityMap, icon.getDefaultScopeName()); } IdeTooltipManager.getInstance().show( new IdeTooltip(InspectionsConfigTreeTable.this, point, component), false); } } }); new DoubleClickListener() { @Override protected boolean onDoubleClick(MouseEvent event) { final TreePath path = getTree().getPathForRow(getTree().getLeadSelectionRow()); if (path != null) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)path.getLastPathComponent(); if (node.isLeaf()) { swapInspectionEnableState(); } } return true; } }.installOn(this); registerKeyboardAction(new ActionListener() { public void actionPerformed(ActionEvent e) { swapInspectionEnableState(); updateUI(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), JComponent.WHEN_FOCUSED); getEmptyText().setText("No enabled inspections available"); } private void swapInspectionEnableState() { for (int selectedRow : getSelectedRows()) { final Object value = getValueAt(selectedRow, IS_ENABLED_COLUMN); final boolean newValue = !Boolean.TRUE.equals(value); setValueAt(newValue, selectedRow, IS_ENABLED_COLUMN); } } public abstract static class InspectionsConfigTreeTableSettings { private final TreeNode myRoot; private final Project myProject; public InspectionsConfigTreeTableSettings(final TreeNode root, final Project project) { myRoot = root; myProject = project; } public TreeNode getRoot() { return myRoot; } public Project getProject() { return myProject; } protected abstract InspectionProfileImpl getInspectionProfile(); protected abstract void onChanged(InspectionConfigTreeNode node); } private static class InspectionsConfigTreeTableModel extends DefaultTreeModel implements TreeTableModel { private final InspectionsConfigTreeTableSettings mySettings; private TreeTable myTreeTable; public InspectionsConfigTreeTableModel(final InspectionsConfigTreeTableSettings settings) { super(settings.getRoot()); mySettings = settings; } @Override public int getColumnCount() { return 3; } @Nullable @Override public String getColumnName(final int column) { return null; } @Override public Class getColumnClass(final int column) { switch (column) { case TREE_COLUMN: return TreeTableModel.class; case SEVERITIES_COLUMN: return Icon.class; case IS_ENABLED_COLUMN: return Boolean.class; } throw new IllegalArgumentException(); } @Nullable @Override public Object getValueAt(final Object node, final int column) { if (column == TREE_COLUMN) { return null; } final InspectionConfigTreeNode treeNode = (InspectionConfigTreeNode)node; final List<HighlightDisplayKey> inspectionsKeys = InspectionsAggregationUtil.getInspectionsKeys(treeNode); if (column == SEVERITIES_COLUMN) { final MultiColoredHighlightSeverityIconSink sink = new MultiColoredHighlightSeverityIconSink(); for (final HighlightDisplayKey selectedInspectionsNode : inspectionsKeys) { final String toolId = selectedInspectionsNode.toString(); if 
(mySettings.getInspectionProfile().getTools(toolId, mySettings.getProject()).isEnabled()) { sink.put(mySettings.getInspectionProfile().getToolDefaultState(toolId, mySettings.getProject()), mySettings.getInspectionProfile().getNonDefaultTools(toolId, mySettings.getProject())); } } return sink.constructIcon(mySettings.getInspectionProfile()); } else if (column == IS_ENABLED_COLUMN) { return isEnabled(inspectionsKeys); } throw new IllegalArgumentException(); } @Nullable private Boolean isEnabled(final List<HighlightDisplayKey> selectedInspectionsNodes) { Boolean isPreviousEnabled = null; for (final HighlightDisplayKey key : selectedInspectionsNodes) { final ToolsImpl tools = mySettings.getInspectionProfile().getTools(key.toString(), mySettings.getProject()); for (final ScopeToolState state : tools.getTools()) { final boolean enabled = state.isEnabled(); if (isPreviousEnabled == null) { isPreviousEnabled = enabled; } else if (!isPreviousEnabled.equals(enabled)) { return null; } } } return isPreviousEnabled; } @Override public boolean isCellEditable(final Object node, final int column) { return column == IS_ENABLED_COLUMN; } @Override public void setValueAt(final Object aValue, final Object node, final int column) { LOG.assertTrue(column == IS_ENABLED_COLUMN); LOG.assertTrue(aValue != null, "node = " + node); final boolean doEnable = (Boolean) aValue; final InspectionProfileImpl profile = mySettings.getInspectionProfile(); for (final InspectionConfigTreeNode aNode : InspectionsAggregationUtil.getInspectionsNodes((InspectionConfigTreeNode) node)) { final String toolId = aNode.getKey().toString(); if (doEnable) { profile.enableTool(toolId, mySettings.getProject()); } else { profile.disableTool(toolId, mySettings.getProject()); } for (ScopeToolState state : profile.getTools(toolId, mySettings.getProject()).getTools()) { state.setEnabled(doEnable); } aNode.dropCache(); mySettings.onChanged(aNode); } if (myTreeTable != null) { UIUtil.invokeLaterIfNeeded(new Runnable() { public void run() { ((AbstractTableModel)myTreeTable.getModel()).fireTableDataChanged(); } }); } } @Override public void setTree(final JTree tree) { myTreeTable = ((TreeTableTree)tree).getTreeTable(); } } private static class SeverityAndOccurrences { private HighlightSeverity myPrimarySeverity; private final Map<String, HighlightSeverity> myOccurrences = new HashMap<String, HighlightSeverity>(); public void setSeverityToMixed() { myPrimarySeverity = ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY; } public SeverityAndOccurrences incOccurrences(final String toolName, final HighlightSeverity severity) { if (myPrimarySeverity == null) { myPrimarySeverity = severity; } else if (!Comparing.equal(severity, myPrimarySeverity)) { myPrimarySeverity = ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY; } myOccurrences.put(toolName, severity); return this; } public HighlightSeverity getPrimarySeverity() { return myPrimarySeverity; } public int getOccurrencesSize() { return myOccurrences.size(); } public Map<String, HighlightSeverity> getOccurrences() { return myOccurrences; } } private static class MultiColoredHighlightSeverityIconSink { private final Map<String, SeverityAndOccurrences> myScopeToAverageSeverityMap = new HashMap<String, SeverityAndOccurrences>(); private String myDefaultScopeName; public Icon constructIcon(final InspectionProfileImpl inspectionProfile) { final Map<String, HighlightSeverity> computedSeverities = computeSeverities(inspectionProfile); if (computedSeverities == null) { return null; } boolean allScopesHasMixedSeverity = true; 
for (HighlightSeverity severity : computedSeverities.values()) { if (!severity.equals(ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY)) { allScopesHasMixedSeverity = false; break; } } return allScopesHasMixedSeverity ? ScopesAndSeveritiesTable.MIXED_FAKE_LEVEL.getIcon() : new MultiScopeSeverityIcon(computedSeverities, myDefaultScopeName, inspectionProfile); } @Nullable private Map<String, HighlightSeverity> computeSeverities(final InspectionProfileImpl inspectionProfile) { if (myScopeToAverageSeverityMap.isEmpty()) { return null; } final Map<String, HighlightSeverity> result = new HashMap<String, HighlightSeverity>(); final Map.Entry<String, SeverityAndOccurrences> entry = ContainerUtil.getFirstItem(myScopeToAverageSeverityMap.entrySet()); result.put(entry.getKey(), entry.getValue().getPrimarySeverity()); if (myScopeToAverageSeverityMap.size() == 1) { return result; } final SeverityAndOccurrences defaultSeveritiesAndOccurrences = myScopeToAverageSeverityMap.get(myDefaultScopeName); if (defaultSeveritiesAndOccurrences == null) { for (Map.Entry<String, SeverityAndOccurrences> e: myScopeToAverageSeverityMap.entrySet()) { final HighlightSeverity primarySeverity = e.getValue().getPrimarySeverity(); if (primarySeverity != null) { result.put(e.getKey(), primarySeverity); } } return result; } final int allInspectionsCount = defaultSeveritiesAndOccurrences.getOccurrencesSize(); final Map<String, HighlightSeverity> allScopes = defaultSeveritiesAndOccurrences.getOccurrences(); for (String currentScope : myScopeToAverageSeverityMap.keySet()) { final SeverityAndOccurrences currentSeverityAndOccurrences = myScopeToAverageSeverityMap.get(currentScope); if (currentSeverityAndOccurrences == null) { continue; } final HighlightSeverity currentSeverity = currentSeverityAndOccurrences.getPrimarySeverity(); if (currentSeverity == ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY || currentSeverityAndOccurrences.getOccurrencesSize() == allInspectionsCount || myDefaultScopeName.equals(currentScope)) { result.put(currentScope, currentSeverity); } else { Set<String> toolsToCheck = ContainerUtil.newHashSet(allScopes.keySet()); toolsToCheck.removeAll(currentSeverityAndOccurrences.getOccurrences().keySet()); boolean doContinue = false; final Map<String, HighlightSeverity> lowerScopeOccurrences = myScopeToAverageSeverityMap.get(myDefaultScopeName).getOccurrences(); for (String toolName : toolsToCheck) { final HighlightSeverity currentToolSeverity = lowerScopeOccurrences.get(toolName); if (currentToolSeverity != null) { if (!currentSeverity.equals(currentToolSeverity)) { result.put(currentScope, ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY); doContinue = true; break; } } } if (doContinue) { continue; } result.put(currentScope, currentSeverity); } } return result; } public void put(@NotNull final ScopeToolState defaultState, @NotNull final List<ScopeToolState> nonDefault) { putOne(defaultState); if (myDefaultScopeName == null) { myDefaultScopeName = defaultState.getScopeName(); } for (final ScopeToolState scopeToolState : nonDefault) { putOne(scopeToolState); } } private void putOne(final ScopeToolState state) { if (!state.isEnabled()) { return; } final Icon icon = state.getLevel().getIcon(); final String scopeName = state.getScopeName(); if (icon instanceof HighlightDisplayLevel.SingleColorIconWithMask) { final SeverityAndOccurrences severityAndOccurrences = myScopeToAverageSeverityMap.get(scopeName); final String inspectionName = state.getTool().getShortName(); if (severityAndOccurrences == null) { 
myScopeToAverageSeverityMap.put(scopeName, new SeverityAndOccurrences().incOccurrences(inspectionName, state.getLevel().getSeverity())); } else { severityAndOccurrences.incOccurrences(inspectionName, state.getLevel().getSeverity()); } } } } }
platform/lang-impl/src/com/intellij/profile/codeInspection/ui/inspectionsTree/InspectionsConfigTreeTable.java
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.profile.codeInspection.ui.inspectionsTree; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.daemon.HighlightDisplayKey; import com.intellij.codeInspection.ex.InspectionProfileImpl; import com.intellij.codeInspection.ex.ScopeToolState; import com.intellij.codeInspection.ex.ToolsImpl; import com.intellij.ide.IdeTooltip; import com.intellij.ide.IdeTooltipManager; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.profile.codeInspection.ui.InspectionsAggregationUtil; import com.intellij.profile.codeInspection.ui.SingleInspectionProfilePanel; import com.intellij.profile.codeInspection.ui.table.ScopesAndSeveritiesTable; import com.intellij.profile.codeInspection.ui.table.ThreeStateCheckBoxRenderer; import com.intellij.ui.DoubleClickListener; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.ArrayUtil; import com.intellij.util.containers.*; import com.intellij.util.containers.HashSet; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableColumn; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreeNode; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.*; import java.util.*; import java.util.HashMap; import java.util.List; /** * @author Dmitry Batkovich */ public class InspectionsConfigTreeTable extends TreeTable { private final static Logger LOG = Logger.getInstance(InspectionsConfigTreeTable.class); private final static int TREE_COLUMN = 0; private final static int SEVERITIES_COLUMN = 1; private final static int IS_ENABLED_COLUMN = 2; public InspectionsConfigTreeTable(final InspectionsConfigTreeTableSettings settings) { super(new InspectionsConfigTreeTableModel(settings)); final TableColumn severitiesColumn = getColumnModel().getColumn(SEVERITIES_COLUMN); severitiesColumn.setMaxWidth(20); final TableColumn isEnabledColumn = getColumnModel().getColumn(IS_ENABLED_COLUMN); isEnabledColumn.setMaxWidth(20); isEnabledColumn.setCellRenderer(new ThreeStateCheckBoxRenderer()); isEnabledColumn.setCellEditor(new ThreeStateCheckBoxRenderer()); addMouseMotionListener(new MouseAdapter() { @Override public void mouseMoved(final MouseEvent e) { final Point point = e.getPoint(); final int column = columnAtPoint(point); if (column != SEVERITIES_COLUMN) { return; } final int row = rowAtPoint(point); final Object maybeIcon = getModel().getValueAt(row, column); if (maybeIcon instanceof MultiScopeSeverityIcon) { final MultiScopeSeverityIcon icon = 
(MultiScopeSeverityIcon)maybeIcon; final LinkedHashMap<String, HighlightDisplayLevel> scopeToAverageSeverityMap = icon.getScopeToAverageSeverityMap(); final JComponent component; if (scopeToAverageSeverityMap.size() == 1 && icon.getDefaultScopeName().equals(ContainerUtil.getFirstItem(scopeToAverageSeverityMap.keySet()))) { final HighlightDisplayLevel level = ContainerUtil.getFirstItem(scopeToAverageSeverityMap.values()); final JLabel label = new JLabel(); label.setIcon(level.getIcon()); label.setText(SingleInspectionProfilePanel.renderSeverity(level.getSeverity())); component = label; } else { component = new ScopesAndSeveritiesHintTable(scopeToAverageSeverityMap, icon.getDefaultScopeName()); } IdeTooltipManager.getInstance().show( new IdeTooltip(InspectionsConfigTreeTable.this, point, component), false); } } }); new DoubleClickListener() { @Override protected boolean onDoubleClick(MouseEvent event) { final TreePath path = getTree().getPathForRow(getTree().getLeadSelectionRow()); if (path != null) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)path.getLastPathComponent(); if (node.isLeaf()) { swapInspectionEnableState(); } } return true; } }.installOn(this); registerKeyboardAction(new ActionListener() { public void actionPerformed(ActionEvent e) { swapInspectionEnableState(); updateUI(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), JComponent.WHEN_FOCUSED); getEmptyText().setText("No enabled inspections available"); } private void swapInspectionEnableState() { for (int selectedRow : getSelectedRows()) { final Object value = getValueAt(selectedRow, IS_ENABLED_COLUMN); final boolean newValue = !Boolean.TRUE.equals(value); setValueAt(newValue, selectedRow, IS_ENABLED_COLUMN); } } public abstract static class InspectionsConfigTreeTableSettings { private final TreeNode myRoot; private final Project myProject; public InspectionsConfigTreeTableSettings(final TreeNode root, final Project project) { myRoot = root; myProject = project; } public TreeNode getRoot() { return myRoot; } public Project getProject() { return myProject; } protected abstract InspectionProfileImpl getInspectionProfile(); protected abstract void onChanged(InspectionConfigTreeNode node); } private static class InspectionsConfigTreeTableModel extends DefaultTreeModel implements TreeTableModel { private final InspectionsConfigTreeTableSettings mySettings; private TreeTable myTreeTable; public InspectionsConfigTreeTableModel(final InspectionsConfigTreeTableSettings settings) { super(settings.getRoot()); mySettings = settings; } @Override public int getColumnCount() { return 3; } @Nullable @Override public String getColumnName(final int column) { return null; } @Override public Class getColumnClass(final int column) { switch (column) { case TREE_COLUMN: return TreeTableModel.class; case SEVERITIES_COLUMN: return Icon.class; case IS_ENABLED_COLUMN: return Boolean.class; } throw new IllegalArgumentException(); } @Nullable @Override public Object getValueAt(final Object node, final int column) { if (column == TREE_COLUMN) { return null; } final InspectionConfigTreeNode treeNode = (InspectionConfigTreeNode)node; final List<HighlightDisplayKey> inspectionsKeys = InspectionsAggregationUtil.getInspectionsKeys(treeNode); if (column == SEVERITIES_COLUMN) { final MultiColoredHighlightSeverityIconSink sink = new MultiColoredHighlightSeverityIconSink(); for (final HighlightDisplayKey selectedInspectionsNode : inspectionsKeys) { final String toolId = selectedInspectionsNode.toString(); if 
(mySettings.getInspectionProfile().getTools(toolId, mySettings.getProject()).isEnabled()) { sink.put(mySettings.getInspectionProfile().getToolDefaultState(toolId, mySettings.getProject()), mySettings.getInspectionProfile().getNonDefaultTools(toolId, mySettings.getProject())); } } return sink.constructIcon(mySettings.getInspectionProfile()); } else if (column == IS_ENABLED_COLUMN) { return isEnabled(inspectionsKeys); } throw new IllegalArgumentException(); } @Nullable private Boolean isEnabled(final List<HighlightDisplayKey> selectedInspectionsNodes) { Boolean isPreviousEnabled = null; for (final HighlightDisplayKey key : selectedInspectionsNodes) { final ToolsImpl tools = mySettings.getInspectionProfile().getTools(key.toString(), mySettings.getProject()); for (final ScopeToolState state : tools.getTools()) { final boolean enabled = state.isEnabled(); if (isPreviousEnabled == null) { isPreviousEnabled = enabled; } else if (!isPreviousEnabled.equals(enabled)) { return null; } } } return isPreviousEnabled; } @Override public boolean isCellEditable(final Object node, final int column) { return column == IS_ENABLED_COLUMN; } @Override public void setValueAt(final Object aValue, final Object node, final int column) { LOG.assertTrue(column == IS_ENABLED_COLUMN); LOG.assertTrue(aValue != null, "node = " + node); final boolean doEnable = (Boolean) aValue; final InspectionProfileImpl profile = mySettings.getInspectionProfile(); for (final InspectionConfigTreeNode aNode : InspectionsAggregationUtil.getInspectionsNodes((InspectionConfigTreeNode) node)) { final String toolId = aNode.getKey().toString(); if (doEnable) { profile.enableTool(toolId, mySettings.getProject()); } else { profile.disableTool(toolId, mySettings.getProject()); } for (ScopeToolState state : profile.getTools(toolId, mySettings.getProject()).getTools()) { state.setEnabled(doEnable); } aNode.dropCache(); mySettings.onChanged(aNode); } if (myTreeTable != null) { UIUtil.invokeLaterIfNeeded(new Runnable() { public void run() { ((AbstractTableModel)myTreeTable.getModel()).fireTableDataChanged(); } }); } } @Override public void setTree(final JTree tree) { myTreeTable = ((TreeTableTree)tree).getTreeTable(); } } private static class SeverityAndOccurrences { private HighlightSeverity myPrimarySeverity; private final Map<String, HighlightSeverity> myOccurrences = new HashMap<String, HighlightSeverity>(); public void setSeverityToMixed() { myPrimarySeverity = ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY; } public SeverityAndOccurrences incOccurrences(final String toolName, final HighlightSeverity severity) { if (myPrimarySeverity == null) { myPrimarySeverity = severity; } else if (!Comparing.equal(severity, myPrimarySeverity)) { myPrimarySeverity = ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY; } myOccurrences.put(toolName, severity); return this; } public HighlightSeverity getPrimarySeverity() { return myPrimarySeverity; } public int getOccurrencesSize() { return myOccurrences.size(); } public Map<String, HighlightSeverity> getOccurrences() { return myOccurrences; } } private static class MultiColoredHighlightSeverityIconSink { private final Map<String, SeverityAndOccurrences> myScopeToAverageSeverityMap = new HashMap<String, SeverityAndOccurrences>(); private String myDefaultScopeName; public Icon constructIcon(final InspectionProfileImpl inspectionProfile) { final Map<String, HighlightSeverity> computedSeverities = computeSeverities(inspectionProfile); if (computedSeverities == null) { return null; } boolean allScopesHasMixedSeverity = true; 
for (HighlightSeverity severity : computedSeverities.values()) { if (!severity.equals(ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY)) { allScopesHasMixedSeverity = false; break; } } return allScopesHasMixedSeverity ? ScopesAndSeveritiesTable.MIXED_FAKE_LEVEL.getIcon() : new MultiScopeSeverityIcon(computedSeverities, myDefaultScopeName, inspectionProfile); } @Nullable private Map<String, HighlightSeverity> computeSeverities(final InspectionProfileImpl inspectionProfile) { if (myScopeToAverageSeverityMap.isEmpty()) { return null; } final Map<String, HighlightSeverity> result = new HashMap<String, HighlightSeverity>(); final Map.Entry<String, SeverityAndOccurrences> entry = ContainerUtil.getFirstItem(myScopeToAverageSeverityMap.entrySet()); result.put(entry.getKey(), entry.getValue().getPrimarySeverity()); if (myScopeToAverageSeverityMap.size() == 1) { return result; } String[] scopesOrder = inspectionProfile.getScopesOrder(); if (scopesOrder == null || scopesOrder.length == 0) { final ArrayList<String> scopesList = new ArrayList<String>(myScopeToAverageSeverityMap.keySet()); scopesList.remove(myDefaultScopeName); ContainerUtil.sort(scopesList); scopesOrder = ArrayUtil.toStringArray(scopesList); } final SeverityAndOccurrences defaultSeveritiesAndOccurrences = myScopeToAverageSeverityMap.get(myDefaultScopeName); if (defaultSeveritiesAndOccurrences == null) { for (Map.Entry<String, SeverityAndOccurrences> e: myScopeToAverageSeverityMap.entrySet()) { final HighlightSeverity primarySeverity = e.getValue().getPrimarySeverity(); if (primarySeverity != null) { result.put(e.getKey(), primarySeverity); } } return result; } final int allInspectionsCount = defaultSeveritiesAndOccurrences.getOccurrencesSize(); final Map<String, HighlightSeverity> allScopes = defaultSeveritiesAndOccurrences.getOccurrences(); String[] reversedScopesOrder = ArrayUtil.reverseArray(scopesOrder); for (String currentScope : reversedScopesOrder) { final SeverityAndOccurrences currentSeverityAndOccurrences = myScopeToAverageSeverityMap.get(currentScope); if (currentSeverityAndOccurrences == null) { continue; } final HighlightSeverity currentSeverity = currentSeverityAndOccurrences.getPrimarySeverity(); if (currentSeverity == ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY || currentSeverityAndOccurrences.getOccurrencesSize() == allInspectionsCount) { result.put(currentScope, currentSeverity); } else { Set<String> toolsToCheck = ContainerUtil.newHashSet(allScopes.keySet()); toolsToCheck.removeAll(currentSeverityAndOccurrences.getOccurrences().keySet()); boolean doContinue = false; final Map<String, HighlightSeverity> lowerScopeOccurrences = myScopeToAverageSeverityMap.get(myDefaultScopeName).getOccurrences(); for (String toolName : toolsToCheck) { final HighlightSeverity currentToolSeverity = lowerScopeOccurrences.get(toolName); if (currentToolSeverity != null) { if (!currentSeverity.equals(currentToolSeverity)) { result.put(currentScope, ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY); doContinue = true; break; } } } if (doContinue) { continue; } result.put(currentScope, currentSeverity); } } return result; } public void put(@NotNull final ScopeToolState defaultState, @NotNull final List<ScopeToolState> nonDefault) { putOne(defaultState); if (myDefaultScopeName == null) { myDefaultScopeName = defaultState.getScopeName(); } for (final ScopeToolState scopeToolState : nonDefault) { putOne(scopeToolState); } } private void putOne(final ScopeToolState state) { if (!state.isEnabled()) { return; } final Icon icon = state.getLevel().getIcon(); 
final String scopeName = state.getScopeName(); if (icon instanceof HighlightDisplayLevel.SingleColorIconWithMask) { final SeverityAndOccurrences severityAndOccurrences = myScopeToAverageSeverityMap.get(scopeName); final String inspectionName = state.getTool().getShortName(); if (severityAndOccurrences == null) { myScopeToAverageSeverityMap.put(scopeName, new SeverityAndOccurrences().incOccurrences(inspectionName, state.getLevel().getSeverity())); } else { severityAndOccurrences.incOccurrences(inspectionName, state.getLevel().getSeverity()); } } } } }
multi-scope severity icon fixed: the previously forgotten 'all' scope is now added when the selected group contains several scopes
platform/lang-impl/src/com/intellij/profile/codeInspection/ui/inspectionsTree/InspectionsConfigTreeTable.java
multi-scope severity icon fixed: the previously forgotten 'all' scope is now added when the selected group contains several scopes
Java
apache-2.0
721feca785c292cc26f973bd02934bd69d13a897
0
yangfuhai/jboot,yangfuhai/jboot
package io.jboot.test.codegen;

import com.jfinal.kit.PathKit;
import io.jboot.app.JbootApplication;
import io.jboot.codegen.model.JbootBaseModelGenerator;
import io.jboot.codegen.model.JbootModelGenerator;
import io.jboot.codegen.service.JbootServiceImplGenerator;
import io.jboot.codegen.service.JbootServiceInterfaceGenerator;

public class GenTester {

    public static void main(String[] args) {

        JbootApplication.setBootArg("jboot.datasource.url", "jdbc:mysql://127.0.0.1:3306/jbootdemo");
        JbootApplication.setBootArg("jboot.datasource.user", "root");
        JbootApplication.setBootArg("jboot.datasource.password", "123456");

        String modelPackage = "io.jboot.test.codegen.model";
        String baseModelPackage = modelPackage + ".base";

        String modelDir = PathKit.getWebRootPath() + "/src/test/java/" + modelPackage.replace(".", "/");
        String baseModelDir = PathKit.getWebRootPath() + "/src/test/java/" + baseModelPackage.replace(".", "/");

        System.out.println("start generate...");
        System.out.println("generate dir:" + modelDir);

        new JbootBaseModelGenerator(baseModelPackage, baseModelDir).setGenerateRemarks(true).generate();
        new JbootModelGenerator(modelPackage, baseModelPackage, modelDir).generate();

        String servicePackage = "io.jboot.test.codegen.service";
        String serviceImplPackage = "io.jboot.test.codegen.service.provider";

        String serviceOutputDir = PathKit.getWebRootPath() + "/src/test/java/" + servicePackage.replace(".", "/");
        String serviceImplOutputDir = PathKit.getWebRootPath() + "/src/test/java/" + serviceImplPackage.replace(".", "/");

        new JbootServiceInterfaceGenerator(servicePackage, serviceOutputDir, modelPackage).generate();
        new JbootServiceImplGenerator(servicePackage, serviceImplPackage, serviceImplOutputDir, modelPackage).setImplName("provider").generate();
    }
}
src/test/java/io/jboot/test/codegen/GenTester.java
package io.jboot.test.codegen;

import com.jfinal.kit.PathKit;
import io.jboot.app.JbootApplication;
import io.jboot.codegen.model.JbootBaseModelGenerator;
import io.jboot.codegen.model.JbootModelGenerator;
import io.jboot.codegen.service.JbootServiceImplGenerator;
import io.jboot.codegen.service.JbootServiceInterfaceGenerator;

public class GenTester {

    public static void main(String[] args) {

        JbootApplication.setBootArg("jboot.datasource.url", "jdbc:mysql://127.0.0.1:3306/jbootdemo");
        JbootApplication.setBootArg("jboot.datasource.user", "root");
        JbootApplication.setBootArg("jboot.datasource.password", "123456");

        String modelPackage = "io.jboot.test.codegen.model";
        String baseModelPackage = modelPackage + ".base";

        String modelDir = PathKit.getWebRootPath() + "/src/test/java/" + modelPackage.replace(".", "/");
        String baseModelDir = PathKit.getWebRootPath() + "/src/test/java/" + baseModelPackage.replace(".", "/");

        System.out.println("start generate...");
        System.out.println("generate dir:" + modelDir);

        new JbootBaseModelGenerator(baseModelPackage, baseModelDir).setGenerateRemarks(true).generate();
        new JbootModelGenerator(modelPackage, baseModelPackage, modelDir).generate();

        String servicePackage = "io.jboot.test.codegen.service";
        String serviceImplPackage = "io.jboot.test.codegen.service.provider";

        String serviceOutputDir = PathKit.getWebRootPath() + "/src/test/java/" + servicePackage.replace(".", "/");
        String serviceImplOutputDir = PathKit.getWebRootPath() + "/src/test/java/" + serviceImplPackage.replace(".", "/");

        new JbootServiceInterfaceGenerator(servicePackage, serviceOutputDir, modelPackage).generate();
        new JbootServiceImplGenerator(servicePackage, serviceImplPackage,serviceImplOutputDir, modelPackage).setImplName("provider").generate();
    }
}
optimize GenTester
src/test/java/io/jboot/test/codegen/GenTester.java
optimize GenTester
Java
apache-2.0
92b0f22b41d8dee46c6cce10ed02e64cd6aaf8dc
0
brandt/GridSphere,brandt/GridSphere
src/org/gridlab/gridsphere/portletcontainer/descriptor/ConfigParam.java
/*
 * @author <a href="mailto:[email protected]">Jason Novotny</a>
 * @version $Id$
 */
package org.gridlab.gridsphere.portletcontainer.descriptor;

public class ConfigParam {

    private String ParamName = "";
    private String ParamValue = "";

    public ConfigParam() {}

    public ConfigParam(String ParamName, String ParamValue) {
        this.ParamName = ParamName;
        this.ParamValue = ParamValue;
    }

    /**
     * Returns the parameter name
     *
     * @returns ParamName
     */
    public String getParamName() {
        return ParamName;
    }

    /**
     * Sets the parameter name
     *
     * @param ParamName the parameter name
     */
    public void setParamName(String ParamName) {
        this.ParamName = ParamName;
    }

    /**
     * Returns the parameter value
     *
     * @returns ParamValue
     */
    public String getParamValue() {
        return ParamValue;
    }

    /**
     * Sets the parameter value
     *
     * @param ParamValue the parameter value
     */
    public void setParamValue(String ParamValue) {
        this.ParamValue = ParamValue;
    }
}
*** empty log message *** git-svn-id: 616481d960d639df1c769687dde8737486ca2a9a@504 9c99c85f-4d0c-0410-8460-a9a1c48a3a7f
src/org/gridlab/gridsphere/portletcontainer/descriptor/ConfigParam.java
*** empty log message ***
Java
apache-2.0
b750327fe679dfa5a506f0a5ba510ee07c3c1473
0
buttermilk-crypto/buttermilk
/* * This file is part of Buttermilk * Copyright 2011-2014 David R. Smith All Rights Reserved. * */ package com.cryptoregistry.formats; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import com.cryptoregistry.MapData; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; public class MapDataFormatter { private List<MapData> mapData; public MapDataFormatter() { super(); mapData = new ArrayList<MapData>(); } public MapDataFormatter(List<MapData> mapData) { super(); this.mapData = mapData; } public void add(MapData ld){ mapData.add(ld); } public void format(JsonGenerator g, Writer writer) throws JsonGenerationException, IOException{ Iterator<MapData>iter = mapData.iterator(); while(iter.hasNext()){ MapData c = iter.next(); g.writeObjectFieldStart(c.uuid); Iterator<String> inner = c.data.keySet().iterator(); while(inner.hasNext()){ String key = inner.next(); g.writeStringField(key, String.valueOf(c.data.get(key))); } g.writeEndObject(); } } /** * format an output like this: * * "uuid0": { * "Key0": "Item0", * "Key1: "Item1" * }, * "uuid1": { * "Key0": "Item0", * "Key1: "Item1" * } * ... * * @return */ public String formatAsFragment(){ StringBuffer buf = new StringBuffer(); int overallCount = 1; for(MapData data: mapData){ buf.append(quote(data.uuid)); buf.append(": {\n"); int size = data.data.size(); int count = 1; Iterator<String> iter = data.data.keySet().iterator(); while(iter.hasNext()){ String key = iter.next(); String value = data.data.get(key); buf.append(" "); if(count < size) buf.append(keyValuePair(key, value, true)); else buf.append(keyValuePair(key, value, false)); count++; } if(overallCount<mapData.size()) buf.append("},"); else buf.append(" }"); overallCount++; } return buf.toString(); } private String quote(String in){ StringBuffer buf = new StringBuffer(); buf.append(QUOTE); buf.append(in); buf.append(QUOTE); return buf.toString(); } private String keyValuePair(String key, String value, boolean comma){ StringBuffer buf = new StringBuffer(); buf.append(quote(key)); buf.append(":"); buf.append(quote(value)); if(comma) buf.append(","); buf.append("\n"); return buf.toString(); } private static final char QUOTE = '"'; }
buttermilk-core/src/main/java/com/cryptoregistry/formats/MapDataFormatter.java
/* * This file is part of Buttermilk * Copyright 2011-2014 David R. Smith All Rights Reserved. * */ package com.cryptoregistry.formats; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import com.cryptoregistry.MapData; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.JsonGenerator; public class MapDataFormatter { private List<MapData> mapData; public MapDataFormatter() { super(); mapData = new ArrayList<MapData>(); } public MapDataFormatter(List<MapData> mapData) { super(); this.mapData = mapData; } public void add(MapData ld){ mapData.add(ld); } public void format(JsonGenerator g, Writer writer) throws JsonGenerationException, IOException{ Iterator<MapData>iter = mapData.iterator(); while(iter.hasNext()){ MapData c = iter.next(); g.writeObjectFieldStart(c.uuid); Iterator<String> inner = c.data.keySet().iterator(); while(inner.hasNext()){ String key = inner.next(); g.writeStringField(key, String.valueOf(c.data.get(key))); } g.writeEndObject(); } } }
continued refinement on dialogs
buttermilk-core/src/main/java/com/cryptoregistry/formats/MapDataFormatter.java
continued refinement on dialogs
Java
apache-2.0
6757889d98e0ea66e197732452fb2adb1d291840
0
michalkurka/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,h2oai/h2o-3,mathemage/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,spennihana/h2o-3,mathemage/h2o-3,spennihana/h2o-3
package hex.createframe; import hex.createframe.recipes.OriginalCreateFrameRecipe; import org.junit.BeforeClass; import org.junit.Test; import water.TestUtil; import water.api.schemas4.input.CreateFrameOriginalIV4; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; import static org.junit.Assert.*; /** * Test for the {@link OriginalCreateFrameRecipe} class (and the overall {@link CreateFrameExecutor} mechanism). */ public class OriginalCreateFrameRecipeTest extends TestUtil { @BeforeClass() public static void setup() { stall_till_cloudsize(1); } /** * Simple initial test: verify that the random frame can be created, that it has the correct * dimensions and column names (response, C1, C2, C3, ...) */ @Test public void basicTest() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = (int)(Math.random() * 200) + 50; s.cols = (int)(Math.random() * 10) + 5; s.categorical_fraction = 0.1; s.integer_fraction = 1 - s.categorical_fraction; s.binary_fraction = 0; s.factors = 4; s.response_factors = 2; s.positive_response = false; s.has_response = true; s.seed = 1234; OriginalCreateFrameRecipe cf = s.createAndFillImpl(); Frame frame = cf.exec().get(); assertNotNull(frame); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); assertEquals("response", frame.name(0)); for (int i = 1; i < s.cols; i++) assertEquals("C" + i, frame.name(i)); Log.info(frame.toString()); frame.delete(); } /** * Creates frame with binary columns, and test that the <code>binary_ones_fraction</code> setting is respected. * This test is non-deterministic and may fail with probability 0.001%. */ @Test public void binaryFrameTest() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 25000; s.cols = 6; s.categorical_fraction = 0; s.integer_fraction = 0; s.binary_fraction = 1; s.binary_ones_fraction = 0.2; s.missing_fraction = 0; s.has_response = true; s.response_factors = 2; // binomial response Frame frame = s.createAndFillImpl().exec().get(); assertNotNull(frame); assertEquals("response", frame.name(0)); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); long totalCount = 0; for (int i = 0; i < s.cols + 1; i++) { assertTrue(frame.vec(i).isBinary()); if (i > 0) // response column is skipped because its proportion of 1s is always 0.5 totalCount += Math.round(s.rows * frame.vec(i).mean()); } double N = s.rows * s.cols; double p = s.binary_ones_fraction; double ttest = Math.abs(totalCount - N * p) / Math.sqrt(N * p * (1 - p)); assertTrue("Count of 1s is more than 4.417 sigmas away from the expected value: t = " + ttest, ttest < 4.417); frame.delete(); } /** * Test that the produced number of missing values is the same as requested. 
*/ @Test public void missingValuesTest() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 25000; s.cols = 4; s.categorical_fraction = 0; s.integer_fraction = 0; s.binary_fraction = 0; s.string_fraction = 0; s.time_fraction = 0; s.missing_fraction = 0.1; s.has_response = true; s.response_factors = 1; Frame frame = s.createAndFillImpl().exec().get(); assertNotNull(frame); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); long missingCount = 0; for (int i = 0; i < s.cols + 1; i++) { missingCount += frame.vec(i).naCnt(); } double N = s.rows * (s.cols + 1); double p = s.missing_fraction; double ttest = Math.abs(missingCount - N * p) / Math.sqrt(N * p * (1 - p)); assertTrue("Count of NAs is more than 4.417 sigmas away from the expected value: t = " + ttest, ttest < 4.417); frame.delete(); } /** * Test that columns of all types can be created, and that there is the correct number of each * in the resulting frame. */ @Test public void testAllColumnTypes() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 100; s.cols = 100; s.categorical_fraction = 0.10000000000001; s.integer_fraction = 0.099999999999998; s.binary_fraction = 0.10000000000003; s.time_fraction = 0.1200045762024587; s.string_fraction = 0.16000204587202; s.binary_ones_fraction = 0.1; s.factors = 5; s.response_factors = 5; // response is also categorical s.positive_response = false; s.has_response = true; s.seed = 1234567; Frame frame = s.createAndFillImpl().exec().get(); assertNotNull(frame); assertEquals("response", frame.name(0)); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); assertEquals(Math.round(s.cols * s.categorical_fraction) + 1, countVecsOfType(frame, "enum")); assertEquals(Math.round(s.cols * s.time_fraction), countVecsOfType(frame, "time")); assertEquals(Math.round(s.cols * s.string_fraction), countVecsOfType(frame, "str")); assertEquals(Math.round(s.cols * s.integer_fraction), countVecsOfType(frame, "int")); assertEquals(Math.round(s.cols * s.binary_fraction), countVecsOfType(frame, "bool")); Log.info(frame.toString()); frame.delete(); } /** * This test attempts to create the same dataset twice starting from the same seed, and then checks that * the result came out exactly the same both times. * We also verify that the test frame has multiple chunks, since most of the breakages will happen because of * nondeterministic chunk execution. 
*/ @Test public void testReproducibility() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 5000; s.cols = 20; s.time_fraction = 0.1; s.categorical_fraction = 0.2; s.integer_fraction = 0.2; s.binary_fraction = 0.2; s.string_fraction = 0.1; s.missing_fraction = 0.05; s.has_response = false; s.seed = (long)(Math.random() * 100000000000L); Log.info("Using seed " + s.seed); Frame frame1 = s.createAndFillImpl().exec().get(); assertNotNull(frame1); Log.info(frame1.toString()); assertTrue("Please adjust test parameters to have more than 1 chunk in the frame", frame1.vec(0).nChunks() > 1); Frame frame2 = s.createAndFillImpl().exec().get(); assertNotNull(frame2); assertTrue(isBitIdentical(frame1, frame2)); frame1.delete(); frame2.delete(); } private static int countVecsOfType(Frame fr, String type) { int count = 0; for (Vec v : fr.vecs()) { boolean test = false; switch (type) { case "enum": test = v.isCategorical(); break; case "time": test = v.isTime(); break; case "str": test = v.isString(); break; case "int": test = v.isInt() && !v.isTime() && !v.isCategorical() && !v.isBinary(); break; case "bool": test = v.isBinary(); break; } if (test) count++; } return count; } }
h2o-core/src/test/java/hex/createframe/OriginalCreateFrameRecipeTest.java
package hex.createframe; import hex.createframe.recipes.OriginalCreateFrameRecipe; import org.junit.BeforeClass; import org.junit.Test; import water.TestUtil; import water.api.schemas4.input.CreateFrameOriginalIV4; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; import static org.junit.Assert.*; /** * Test for the {@link OriginalCreateFrameRecipe} class (and the overall {@link CreateFrameExecutor} mechanism). */ public class OriginalCreateFrameRecipeTest extends TestUtil { @BeforeClass() public static void setup() { stall_till_cloudsize(1); } /** * Simple initial test: verify that the random frame can be created, that it has the correct * dimensions and column names (response, C1, C2, C3, ...) */ @Test public void basicTest() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = (int)(Math.random() * 200) + 50; s.cols = (int)(Math.random() * 10) + 5; s.categorical_fraction = 0.1; s.integer_fraction = 1 - s.categorical_fraction; s.binary_fraction = 0; s.factors = 4; s.response_factors = 2; s.positive_response = false; s.has_response = true; s.seed = 1234; OriginalCreateFrameRecipe cf = s.createAndFillImpl(); Frame frame = cf.exec().get(); assertNotNull(frame); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); assertEquals("response", frame.name(0)); for (int i = 1; i < s.cols; i++) assertEquals("C" + i, frame.name(i)); Log.info(frame.toString()); frame.delete(); } /** * Creates frame with binary columns, and test that the <code>binary_ones_fraction</code> setting is respected. * This test is non-deterministic and may fail with probability 0.3%. */ @Test public void binaryFrameTest() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 25000; s.cols = 6; s.categorical_fraction = 0; s.integer_fraction = 0; s.binary_fraction = 1; s.binary_ones_fraction = 0.2; s.missing_fraction = 0; s.has_response = true; s.response_factors = 2; // binomial response Frame frame = s.createAndFillImpl().exec().get(); assertNotNull(frame); assertEquals("response", frame.name(0)); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); long totalCount = 0; for (int i = 0; i < s.cols + 1; i++) { assertTrue(frame.vec(i).isBinary()); if (i > 0) // response column is skipped because its proportion of 1s is always 0.5 totalCount += Math.round(s.rows * frame.vec(i).mean()); } double N = s.rows * s.cols; double p = s.binary_ones_fraction; double ttest = Math.abs(totalCount - N * p) / Math.sqrt(N * p * (1 - p)); assertTrue("Count of 1s is more than 3 sigmas away from the expected value: t = " + ttest, ttest < 3); frame.delete(); } /** * Test that the produced number of missing values is the same as requested. 
*/ @Test public void missingValuesTest() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 25000; s.cols = 4; s.categorical_fraction = 0; s.integer_fraction = 0; s.binary_fraction = 0; s.string_fraction = 0; s.time_fraction = 0; s.missing_fraction = 0.1; s.has_response = true; s.response_factors = 1; Frame frame = s.createAndFillImpl().exec().get(); assertNotNull(frame); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); long missingCount = 0; for (int i = 0; i < s.cols + 1; i++) { missingCount += frame.vec(i).naCnt(); } double N = s.rows * (s.cols + 1); double p = s.missing_fraction; double ttest = Math.abs(missingCount - N * p) / Math.sqrt(N * p * (1 - p)); assertTrue("Count of NAs is more than 3 sigmas away from the expected value", ttest < 3); frame.delete(); } /** * Test that columns of all types can be created, and that there is the correct number of each * in the resulting frame. */ @Test public void testAllColumnTypes() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 100; s.cols = 100; s.categorical_fraction = 0.10000000000001; s.integer_fraction = 0.099999999999998; s.binary_fraction = 0.10000000000003; s.time_fraction = 0.1200045762024587; s.string_fraction = 0.16000204587202; s.binary_ones_fraction = 0.1; s.factors = 5; s.response_factors = 5; // response is also categorical s.positive_response = false; s.has_response = true; s.seed = 1234567; Frame frame = s.createAndFillImpl().exec().get(); assertNotNull(frame); assertEquals("response", frame.name(0)); assertEquals(s.cols + 1, frame.numCols()); assertEquals(s.rows, frame.numRows()); assertEquals(Math.round(s.cols * s.categorical_fraction) + 1, countVecsOfType(frame, "enum")); assertEquals(Math.round(s.cols * s.time_fraction), countVecsOfType(frame, "time")); assertEquals(Math.round(s.cols * s.string_fraction), countVecsOfType(frame, "str")); assertEquals(Math.round(s.cols * s.integer_fraction), countVecsOfType(frame, "int")); assertEquals(Math.round(s.cols * s.binary_fraction), countVecsOfType(frame, "bool")); Log.info(frame.toString()); frame.delete(); } /** * This test attempts to create the same dataset twice starting from the same seed, and then checks that * the result came out exactly the same both times. * We also verify that the test frame has multiple chunks, since most of the breakages will happen because of * nondeterministic chunk execution. 
*/ @Test public void testReproducibility() { CreateFrameOriginalIV4 s = new CreateFrameOriginalIV4().fillFromImpl(); s.rows = 5000; s.cols = 20; s.time_fraction = 0.1; s.categorical_fraction = 0.2; s.integer_fraction = 0.2; s.binary_fraction = 0.2; s.string_fraction = 0.1; s.missing_fraction = 0.05; s.has_response = false; s.seed = (long)(Math.random() * 100000000000L); Log.info("Using seed " + s.seed); Frame frame1 = s.createAndFillImpl().exec().get(); assertNotNull(frame1); Log.info(frame1.toString()); assertTrue("Please adjust test parameters to have more than 1 chunk in the frame", frame1.vec(0).nChunks() > 1); Frame frame2 = s.createAndFillImpl().exec().get(); assertNotNull(frame2); assertTrue(isBitIdentical(frame1, frame2)); frame1.delete(); frame2.delete(); } private static int countVecsOfType(Frame fr, String type) { int count = 0; for (Vec v : fr.vecs()) { boolean test = false; switch (type) { case "enum": test = v.isCategorical(); break; case "time": test = v.isTime(); break; case "str": test = v.isString(); break; case "int": test = v.isInt() && !v.isTime() && !v.isCategorical() && !v.isBinary(); break; case "bool": test = v.isBinary(); break; } if (test) count++; } return count; } }
Make test in OriginalCreateFrameRecipeTest more robust (but less powerful) by tweaking the threshold value
h2o-core/src/test/java/hex/createframe/OriginalCreateFrameRecipeTest.java
Make test in OriginalCreateFrameRecipeTest more robust (but less powerful) by tweaking the threshold value
Java
apache-2.0
32c4fe63a3030f964f41750e564a017694b6cd2c
0
baszero/yanel,baszero/yanel,wyona/yanel,baszero/yanel,wyona/yanel,baszero/yanel,wyona/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,wyona/yanel
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.cmdl.communication; import java.io.BufferedReader; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.Principal; import java.util.Enumeration; import java.util.Locale; import java.util.Map; import javax.servlet.RequestDispatcher; import javax.servlet.ServletInputStream; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.log4j.Category; /** * Not implemented yet. */ public class CommandLineRequest implements HttpServletRequest { private static Category log = Category.getInstance(CommandLineRequest.class); protected String url; /** * */ private class ParameterNames implements Enumeration { private java.util.Vector names; public ParameterNames(Enumeration enum) { names = new java.util.Vector(); while (enum.hasMoreElements()) { names.add(enum.nextElement()); } } public Object nextElement() { String name = (String) names.elementAt(0); names.removeElementAt(0); return name; } public boolean hasMoreElements() { if (names.size() > 0) return true; return false; } } ParameterNames parameterNames; public CommandLineRequest(String url) { this.url = url; } public CommandLineRequest(HttpServletRequest request) { parameterNames = new ParameterNames(request.getParameterNames()); } public StringBuffer getRequestURL() { return new StringBuffer(url); } public String getParameter(String name) { return "not implemented yet"; } public Object getAttribute(String arg0) { // TODO Auto-generated method stub return null; } public Enumeration getAttributeNames() { // TODO Auto-generated method stub return null; } public String getCharacterEncoding() { // TODO Auto-generated method stub return null; } public int getContentLength() { // TODO Auto-generated method stub return 0; } public String getContentType() { // TODO Auto-generated method stub return null; } public ServletInputStream getInputStream() throws IOException { // TODO Auto-generated method stub return null; } public Locale getLocale() { // TODO Auto-generated method stub return null; } public Enumeration getLocales() { // TODO Auto-generated method stub return null; } public Map getParameterMap() { // TODO Auto-generated method stub return null; } /** * */ public Enumeration getParameterNames() { log.error("DEBUG: Use cloned parameter names!"); return parameterNames; } public String[] getParameterValues(String arg0) { // TODO Auto-generated method stub return null; } public String getProtocol() { // TODO Auto-generated method stub return null; } public BufferedReader getReader() throws IOException { // TODO Auto-generated method stub return null; } public String getRealPath(String arg0) { // TODO Auto-generated method stub return null; } public String getRemoteAddr() { // TODO Auto-generated method stub return null; } public String getRemoteHost() { // TODO Auto-generated method stub return null; } public RequestDispatcher 
getRequestDispatcher(String arg0) { // TODO Auto-generated method stub return null; } public String getScheme() { // TODO Auto-generated method stub return null; } public String getServerName() { // TODO Auto-generated method stub return null; } public int getServerPort() { // TODO Auto-generated method stub return 0; } public boolean isSecure() { // TODO Auto-generated method stub return false; } public void removeAttribute(String arg0) { // TODO Auto-generated method stub } public void setAttribute(String arg0, Object arg1) { // TODO Auto-generated method stub } public void setCharacterEncoding(String arg0) throws UnsupportedEncodingException { // TODO Auto-generated method stub } public String getAuthType() { // TODO Auto-generated method stub return null; } public String getContextPath() { // TODO Auto-generated method stub return null; } public Cookie[] getCookies() { // TODO Auto-generated method stub return null; } public long getDateHeader(String arg0) { // TODO Auto-generated method stub return 0; } public String getHeader(String arg0) { // TODO Auto-generated method stub return null; } public Enumeration getHeaderNames() { // TODO Auto-generated method stub return null; } public Enumeration getHeaders(String arg0) { // TODO Auto-generated method stub return null; } public int getIntHeader(String arg0) { // TODO Auto-generated method stub return 0; } public String getMethod() { // TODO Auto-generated method stub return null; } public String getPathInfo() { // TODO Auto-generated method stub return null; } public String getPathTranslated() { // TODO Auto-generated method stub return null; } public String getQueryString() { // TODO Auto-generated method stub return null; } public String getRemoteUser() { // TODO Auto-generated method stub return null; } public String getRequestedSessionId() { // TODO Auto-generated method stub return null; } public String getRequestURI() { // TODO Auto-generated method stub return null; } public String getServletPath() { // TODO Auto-generated method stub return null; } public HttpSession getSession() { // TODO Auto-generated method stub return null; } public HttpSession getSession(boolean arg0) { // TODO Auto-generated method stub return null; } public Principal getUserPrincipal() { // TODO Auto-generated method stub return null; } public boolean isRequestedSessionIdFromCookie() { // TODO Auto-generated method stub return false; } public boolean isRequestedSessionIdFromUrl() { // TODO Auto-generated method stub return false; } public boolean isRequestedSessionIdFromURL() { // TODO Auto-generated method stub return false; } public boolean isRequestedSessionIdValid() { // TODO Auto-generated method stub return false; } public boolean isUserInRole(String arg0) { // TODO Auto-generated method stub return false; } }
src/core/java/org/wyona/yanel/cmdl/communication/CommandLineRequest.java
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.cmdl.communication; import java.io.BufferedReader; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.Principal; import java.util.Enumeration; import java.util.Locale; import java.util.Map; import javax.servlet.RequestDispatcher; import javax.servlet.ServletInputStream; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; import org.apache.log4j.Category; /** * Not implemented yet. */ public class CommandLineRequest implements HttpServletRequest { private static Category log = Category.getInstance(CommandLineRequest.class); protected String url; /** * */ private class ParameterNames implements Enumeration { private java.util.Vector names; public ParameterNames() { } public Object nextElement() { return null; } public boolean hasMoreElements() { return false; } } ParameterNames parameterNames; public CommandLineRequest(String url) { this.url = url; } public CommandLineRequest(HttpServletRequest request) { //parameterNames = (Enumeration) request.getParameterNames().clone(); } public StringBuffer getRequestURL() { return new StringBuffer(url); } public String getParameter(String name) { return "not implemented yet"; } public Object getAttribute(String arg0) { // TODO Auto-generated method stub return null; } public Enumeration getAttributeNames() { // TODO Auto-generated method stub return null; } public String getCharacterEncoding() { // TODO Auto-generated method stub return null; } public int getContentLength() { // TODO Auto-generated method stub return 0; } public String getContentType() { // TODO Auto-generated method stub return null; } public ServletInputStream getInputStream() throws IOException { // TODO Auto-generated method stub return null; } public Locale getLocale() { // TODO Auto-generated method stub return null; } public Enumeration getLocales() { // TODO Auto-generated method stub return null; } public Map getParameterMap() { // TODO Auto-generated method stub return null; } /** * */ public Enumeration getParameterNames() { log.error("DEBUG: Use cloned parameter names!"); return parameterNames; } public String[] getParameterValues(String arg0) { // TODO Auto-generated method stub return null; } public String getProtocol() { // TODO Auto-generated method stub return null; } public BufferedReader getReader() throws IOException { // TODO Auto-generated method stub return null; } public String getRealPath(String arg0) { // TODO Auto-generated method stub return null; } public String getRemoteAddr() { // TODO Auto-generated method stub return null; } public String getRemoteHost() { // TODO Auto-generated method stub return null; } public RequestDispatcher getRequestDispatcher(String arg0) { // TODO Auto-generated method stub return null; } public String getScheme() { // TODO Auto-generated method stub return null; } public String getServerName() { // TODO Auto-generated method stub 
return null; } public int getServerPort() { // TODO Auto-generated method stub return 0; } public boolean isSecure() { // TODO Auto-generated method stub return false; } public void removeAttribute(String arg0) { // TODO Auto-generated method stub } public void setAttribute(String arg0, Object arg1) { // TODO Auto-generated method stub } public void setCharacterEncoding(String arg0) throws UnsupportedEncodingException { // TODO Auto-generated method stub } public String getAuthType() { // TODO Auto-generated method stub return null; } public String getContextPath() { // TODO Auto-generated method stub return null; } public Cookie[] getCookies() { // TODO Auto-generated method stub return null; } public long getDateHeader(String arg0) { // TODO Auto-generated method stub return 0; } public String getHeader(String arg0) { // TODO Auto-generated method stub return null; } public Enumeration getHeaderNames() { // TODO Auto-generated method stub return null; } public Enumeration getHeaders(String arg0) { // TODO Auto-generated method stub return null; } public int getIntHeader(String arg0) { // TODO Auto-generated method stub return 0; } public String getMethod() { // TODO Auto-generated method stub return null; } public String getPathInfo() { // TODO Auto-generated method stub return null; } public String getPathTranslated() { // TODO Auto-generated method stub return null; } public String getQueryString() { // TODO Auto-generated method stub return null; } public String getRemoteUser() { // TODO Auto-generated method stub return null; } public String getRequestedSessionId() { // TODO Auto-generated method stub return null; } public String getRequestURI() { // TODO Auto-generated method stub return null; } public String getServletPath() { // TODO Auto-generated method stub return null; } public HttpSession getSession() { // TODO Auto-generated method stub return null; } public HttpSession getSession(boolean arg0) { // TODO Auto-generated method stub return null; } public Principal getUserPrincipal() { // TODO Auto-generated method stub return null; } public boolean isRequestedSessionIdFromCookie() { // TODO Auto-generated method stub return false; } public boolean isRequestedSessionIdFromUrl() { // TODO Auto-generated method stub return false; } public boolean isRequestedSessionIdFromURL() { // TODO Auto-generated method stub return false; } public boolean isRequestedSessionIdValid() { // TODO Auto-generated method stub return false; } public boolean isUserInRole(String arg0) { // TODO Auto-generated method stub return false; } }
parameter names cloned
src/core/java/org/wyona/yanel/cmdl/communication/CommandLineRequest.java
parameter names cloned
Java
apache-2.0
3f93527db20a3a8cef631f0dd86b5ae6f7ef327f
0
torakiki/sambox,torakiki/sambox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sejda.sambox.pdmodel.graphics.image; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.RenderingHints; import java.awt.image.BufferedImage; import java.awt.image.WritableRaster; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; import javax.imageio.ImageIO; import org.apache.commons.io.IOUtils; import org.sejda.sambox.cos.COSArray; import org.sejda.sambox.cos.COSBase; import org.sejda.sambox.cos.COSName; import org.sejda.sambox.cos.COSStream; import org.sejda.sambox.pdmodel.PDDocument; import org.sejda.sambox.pdmodel.PDResources; import org.sejda.sambox.pdmodel.common.PDMetadata; import org.sejda.sambox.pdmodel.common.PDStream; import org.sejda.sambox.pdmodel.graphics.PDXObject; import org.sejda.sambox.pdmodel.graphics.color.PDColorSpace; import org.sejda.sambox.pdmodel.graphics.color.PDDeviceGray; import org.sejda.sambox.util.filetypedetector.FileType; import org.sejda.sambox.util.filetypedetector.FileTypeDetector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An Image XObject. * * @author John Hewson * @author Ben Litchfield */ public final class PDImageXObject extends PDXObject implements PDImage { private static final Logger LOG = LoggerFactory.getLogger(PDImageXObject.class); private BufferedImage cachedImage; private PDColorSpace colorSpace; private PDResources resources; // current resource dictionary (has color spaces) /** * Creates a thumbnail Image XObject from the given COSBase and name. * * @param cosStream the COS stream * @return an XObject * @throws IOException if there is an error creating the XObject. */ public static PDImageXObject createThumbnail(COSStream cosStream) throws IOException { // thumbnails are special, any non-null subtype is treated as being "Image" PDStream pdStream = new PDStream(cosStream); return new PDImageXObject(pdStream, null); } /** * Creates an Image XObject in the given document. * * @param document the current document * @throws java.io.IOException if there is an error creating the XObject. */ public PDImageXObject() throws IOException { this(new PDStream(), null); } /** * Creates an Image XObject in the given document using the given filtered stream. * * @param document the current document * @param filteredStream a filtered stream of image data * @param cosFilter the filter or a COSArray of filters * @param width the image width * @param height the image height * @param bitsPerComponent the bits per component * @param initColorSpace the color space * @throws IOException if there is an error creating the XObject. 
*/ public PDImageXObject(InputStream encodedStream, COSBase cosFilter, int width, int height, int bitsPerComponent, PDColorSpace initColorSpace) throws IOException { super(createRawStream(encodedStream), COSName.IMAGE); getCOSObject().setItem(COSName.FILTER, cosFilter); resources = null; colorSpace = null; setBitsPerComponent(bitsPerComponent); setWidth(width); setHeight(height); setColorSpace(initColorSpace); } /** * Creates a COS stream from raw (encoded) data. */ private static COSStream createRawStream(InputStream rawInput) throws IOException { COSStream stream = new COSStream(); try (OutputStream output = stream.createFilteredStream()) { IOUtils.copy(rawInput, output); } return stream; } /** * Creates an Image XObject with the given stream as its contents and current color spaces. * * @param stream the XObject stream to read * @param resources the current resources * @throws java.io.IOException if there is an error creating the XObject. */ public PDImageXObject(PDStream stream, PDResources resources) throws IOException { super(stream, COSName.IMAGE); stream.getCOSObject().addAll(stream.getCOSObject().getDecodeResult().getParameters()); this.resources = resources; this.colorSpace = stream.getCOSObject().getDecodeResult().getJPXColorSpace(); } /** * Create a PDImageXObject from an image file, see {@link #createFromFile(File, PDDocument)} for more details. * * @param imagePath the image file path. * @param doc the document that shall use this PDImageXObject. * @return a PDImageXObject. * @throws IOException if there is an error when reading the file or creating the PDImageXObject, or if the image * type is not supported. */ public static PDImageXObject createFromFile(String imagePath) throws IOException { return createFromFileByExtension(new File(imagePath)); } /** * Create a PDImageXObject from an image file. The file format is determined by the file name suffix. The following * suffixes are supported: jpg, jpeg, tif, tiff, gif, bmp and png. This is a convenience method that calls * {@link JPEGFactory#createFromStream}, {@link CCITTFactory#createFromFile} or {@link ImageIO#read} combined with * {@link LosslessFactory#createFromImage}. (The later can also be used to create a PDImageXObject from a * BufferedImage). * * Image type is also detected based on the first bytes in the file, for PNG and JPEG. Takes precedence over * extension. Eg: Solves issues with JPEG with .png extension * * @param file the image file. * @param doc the document that shall use this PDImageXObject. * @return a PDImageXObject. * @throws IOException if there is an error when reading the file or creating the PDImageXObject. * @throws IllegalArgumentException if the image type is not supported. 
*/ public static PDImageXObject createFromFileByExtension(File file) throws IOException { String name = file.getName(); int dot = file.getName().lastIndexOf('.'); String ext = "jpg"; if (dot != -1) { ext = name.substring(dot + 1).toLowerCase(); } else { LOG.warn("Unknown extension for image file {}, assuming .jpg", file.getName()); } // Do some basic checks to see if the first bytes match the extension // Eg: a JPEG extension on a PNG image file byte[] jpegFirstBytes = new byte[] { (byte) 0xFF, (byte) 0xD8 }; byte[] pngFirstBytes = new byte[] { (byte) 0x89, (byte) 0x50 }; byte[] tiffLittleEndianFirstBytes = new byte[] { (byte) 0x49, (byte) 0x49 }; byte[] tiffBigEndianFirstBytes = new byte[] { (byte) 0x4D, (byte) 0x4D }; byte[] firstBytes = new byte[2]; try (FileInputStream fin = new FileInputStream(file)) { fin.read(firstBytes); } if (Arrays.equals(firstBytes, jpegFirstBytes)) { ext = "jpg"; } if (Arrays.equals(firstBytes, pngFirstBytes)) { ext = "png"; } if (Arrays.equals(firstBytes, tiffLittleEndianFirstBytes) || Arrays.equals(firstBytes, tiffBigEndianFirstBytes)) { ext = "tiff"; } if ("jpg".equals(ext) || "jpeg".equals(ext)) { return JPEGFactory.createFromFile(file); } if ("tif".equals(ext) || "tiff".equals(ext)) { return CCITTFactory.createFromFile(file); } if ("gif".equals(ext) || "bmp".equals(ext) || "png".equals(ext)) { BufferedImage bim = ImageIO.read(file); return LosslessFactory.createFromImage(bim); } throw new IllegalArgumentException("Image type not supported: " + name); } /** * Create a PDImageXObject from an image file. The file format is determined by the file content. The following file * types are supported: jpg, jpeg, tif, tiff, gif, bmp and png. This is a convenience method that calls * {@link JPEGFactory#createFromStream}, {@link CCITTFactory#createFromFile} or {@link ImageIO#read} combined with * {@link LosslessFactory#createFromImage}. (The later can also be used to create a PDImageXObject from a * BufferedImage). * * @param file the image file. * @param doc the document that shall use this PDImageXObject. * @return a PDImageXObject. * @throws IOException if there is an error when reading the file or creating the PDImageXObject. * @throws IllegalArgumentException if the image type is not supported. */ public static PDImageXObject createFromFileByContent(File file) throws IOException { FileType fileType = null; try (BufferedInputStream bufferedInputStream = new BufferedInputStream( new FileInputStream(file))) { fileType = FileTypeDetector.detectFileType(bufferedInputStream); } catch (IOException e) { throw new IOException("Could not determine file type: " + file.getName(), e); } if (fileType == null) { throw new IllegalArgumentException("Image type not supported: " + file.getName()); } if (fileType.equals(FileType.JPEG)) { return JPEGFactory.createFromFile(file); } if (fileType.equals(FileType.TIFF)) { return CCITTFactory.createFromFile(file); } if (fileType.equals(FileType.BMP) || fileType.equals(FileType.GIF) || fileType.equals(FileType.PNG)) { BufferedImage bim = ImageIO.read(file); return LosslessFactory.createFromImage(bim); } throw new IllegalArgumentException("Image type not supported: " + file.getName()); } /** * Returns the metadata associated with this XObject, or null if there is none. * * @return the metadata associated with this object. 
*/ public PDMetadata getMetadata() { COSStream cosStream = getCOSObject().getDictionaryObject(COSName.METADATA, COSStream.class); if (cosStream != null) { return new PDMetadata(cosStream); } return null; } /** * Sets the metadata associated with this XObject, or null if there is none. * * @param meta the metadata associated with this object */ public void setMetadata(PDMetadata meta) { getCOSObject().setItem(COSName.METADATA, meta); } /** * Returns the key of this XObject in the structural parent tree. * * @return this object's key the structural parent tree */ public int getStructParent() { return getCOSObject().getInt(COSName.STRUCT_PARENT, 0); } /** * Sets the key of this XObject in the structural parent tree. * * @param key the new key for this XObject */ public void setStructParent(int key) { getCOSObject().setInt(COSName.STRUCT_PARENT, key); } /** * {@inheritDoc} The returned images are cached for the lifetime of this XObject. */ @Override public BufferedImage getImage() throws IOException { if (cachedImage != null) { return cachedImage; } // get image as RGB BufferedImage image = SampledImageReader.getRGBImage(this, getColorKeyMask()); // soft mask (overrides explicit mask) PDImageXObject softMask = getSoftMask(); if (softMask != null) { image = applyMask(image, softMask.getOpaqueImage(), true); } else { // explicit mask - to be applied only if /ImageMask true PDImageXObject mask = getMask(); if (mask != null && mask.isStencil()) { image = applyMask(image, mask.getOpaqueImage(), false); } } cachedImage = image; return image; } /** * * @return the image without mask applied. The image is not cached * @throws IOException */ public BufferedImage getImageWithoutMasks() throws IOException { return SampledImageReader.getRGBImage(this, getColorKeyMask()); } /** * {@inheritDoc} The returned images are not cached. */ @Override public BufferedImage getStencilImage(Paint paint) throws IOException { if (!isStencil()) { throw new IllegalStateException("Image is not a stencil"); } return SampledImageReader.getStencilImage(this, paint); } /** * Returns an RGB buffered image containing the opaque image stream without any masks applied. If this Image XObject * is a mask then the buffered image will contain the raw mask. 
* * @return the image without any masks applied * @throws IOException if the image cannot be read */ public BufferedImage getOpaqueImage() throws IOException { return SampledImageReader.getRGBImage(this, null); } // explicit mask: RGB + Binary -> ARGB // soft mask: RGB + Gray -> ARGB private BufferedImage applyMask(BufferedImage image, BufferedImage mask, boolean isSoft) { if (mask == null) { return image; } int width = image.getWidth(); int height = image.getHeight(); // scale mask to fit image, or image to fit mask, whichever is larger if (mask.getWidth() < width || mask.getHeight() < height) { mask = scaleImage(mask, width, height); } else if (mask.getWidth() > width || mask.getHeight() > height) { width = mask.getWidth(); height = mask.getHeight(); image = scaleImage(image, width, height); } // compose to ARGB BufferedImage masked = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); WritableRaster src = image.getRaster(); WritableRaster dest = masked.getRaster(); WritableRaster alpha = mask.getRaster(); float[] rgb = new float[4]; float[] rgba = new float[4]; float[] alphaPixel = null; for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { src.getPixel(x, y, rgb); rgba[0] = rgb[0]; rgba[1] = rgb[1]; rgba[2] = rgb[2]; alphaPixel = alpha.getPixel(x, y, alphaPixel); if (isSoft) { rgba[3] = alphaPixel[0]; } else { rgba[3] = 255 - alphaPixel[0]; } dest.setPixel(x, y, rgba); } } return masked; } /** * High-quality image scaling. */ private BufferedImage scaleImage(BufferedImage image, int width, int height) { BufferedImage image2 = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); Graphics2D g = image2.createGraphics(); g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); g.drawImage(image, 0, 0, width, height, 0, 0, image.getWidth(), image.getHeight(), null); g.dispose(); return image2; } /** * Returns the Mask Image XObject associated with this image, or null if there is none. * * @return Mask Image XObject */ public PDImageXObject getMask() throws IOException { COSStream cosStream = getCOSObject().getDictionaryObject(COSName.MASK, COSStream.class); if (cosStream != null) { // always DeviceGray return new PDImageXObject(new PDStream(cosStream), null); } return null; } /** * Returns the color key mask array associated with this image, or null if there is none. * * @return Mask Image XObject */ public COSArray getColorKeyMask() { COSBase mask = getCOSObject().getDictionaryObject(COSName.MASK); if (mask instanceof COSArray) { return (COSArray) mask; } return null; } /** * Returns the Soft Mask Image XObject associated with this image, or null if there is none. * * @return the SMask Image XObject, or null. 
*/ public PDImageXObject getSoftMask() throws IOException { COSStream cosStream = getCOSObject().getDictionaryObject(COSName.SMASK, COSStream.class); if (cosStream != null) { // always DeviceGray return new PDImageXObject(new PDStream(cosStream), null); } return null; } @Override public int getBitsPerComponent() { if (isStencil()) { return 1; } return getCOSObject().getInt(COSName.BITS_PER_COMPONENT, COSName.BPC); } @Override public void setBitsPerComponent(int bpc) { getCOSObject().setInt(COSName.BITS_PER_COMPONENT, bpc); } @Override public PDColorSpace getColorSpace() throws IOException { if (colorSpace == null) { COSBase cosBase = getCOSObject().getDictionaryObject(COSName.COLORSPACE, COSName.CS); if (cosBase != null) { colorSpace = PDColorSpace.create(cosBase, resources); } else if (isStencil()) { // stencil mask color space must be gray, it is often missing return PDDeviceGray.INSTANCE; } else { // an image without a color space is always broken throw new IOException("could not determine color space"); } } return colorSpace; } @Override public InputStream createInputStream() throws IOException { return getStream().createInputStream(); } @Override public ByteBuffer asByteBuffer() throws IOException { return getStream().getCOSObject().getUnfilteredByteBuffer(); } @Override public boolean isEmpty() throws IOException { return getStream().getCOSObject().isEmpty(); } @Override public void setColorSpace(PDColorSpace cs) { getCOSObject().setItem(COSName.COLORSPACE, cs != null ? cs.getCOSObject() : null); } @Override public int getHeight() { return getCOSObject().getInt(COSName.HEIGHT); } @Override public void setHeight(int h) { getCOSObject().setInt(COSName.HEIGHT, h); } @Override public int getWidth() { return getCOSObject().getInt(COSName.WIDTH); } @Override public void setWidth(int w) { getCOSObject().setInt(COSName.WIDTH, w); } @Override public boolean getInterpolate() { return getCOSObject().getBoolean(COSName.INTERPOLATE, false); } @Override public void setInterpolate(boolean value) { getCOSObject().setBoolean(COSName.INTERPOLATE, value); } @Override public void setDecode(COSArray decode) { getCOSObject().setItem(COSName.DECODE, decode); } @Override public COSArray getDecode() { COSBase decode = getCOSObject().getDictionaryObject(COSName.DECODE); if (decode instanceof COSArray) { return (COSArray) decode; } return null; } @Override public boolean isStencil() { return getCOSObject().getBoolean(COSName.IMAGE_MASK, false); } @Override public void setStencil(boolean isStencil) { getCOSObject().setBoolean(COSName.IMAGE_MASK, isStencil); } /** * This will get the suffix for this image type, e.g. jpg/png. * * @return The image suffix or null if not available. */ @Override public String getSuffix() { List<COSName> filters = getStream().getFilters(); if (filters == null) { return "png"; } else if (filters.contains(COSName.DCT_DECODE)) { return "jpg"; } else if (filters.contains(COSName.JPX_DECODE)) { return "jpx"; } else if (filters.contains(COSName.CCITTFAX_DECODE)) { return "tiff"; } else if (filters.contains(COSName.FLATE_DECODE) || filters.contains(COSName.LZW_DECODE) || filters.contains(COSName.RUN_LENGTH_DECODE)) { return "png"; } else { LOG.warn("getSuffix() returns null, filters: " + filters); // TODO more... return null; } } }
src/main/java/org/sejda/sambox/pdmodel/graphics/image/PDImageXObject.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sejda.sambox.pdmodel.graphics.image; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.RenderingHints; import java.awt.image.BufferedImage; import java.awt.image.WritableRaster; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; import javax.imageio.ImageIO; import org.apache.commons.io.IOUtils; import org.sejda.sambox.cos.COSArray; import org.sejda.sambox.cos.COSBase; import org.sejda.sambox.cos.COSName; import org.sejda.sambox.cos.COSStream; import org.sejda.sambox.pdmodel.PDDocument; import org.sejda.sambox.pdmodel.PDResources; import org.sejda.sambox.pdmodel.common.PDMetadata; import org.sejda.sambox.pdmodel.common.PDStream; import org.sejda.sambox.pdmodel.graphics.PDXObject; import org.sejda.sambox.pdmodel.graphics.color.PDColorSpace; import org.sejda.sambox.pdmodel.graphics.color.PDDeviceGray; import org.sejda.sambox.util.filetypedetector.FileType; import org.sejda.sambox.util.filetypedetector.FileTypeDetector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An Image XObject. * * @author John Hewson * @author Ben Litchfield */ public final class PDImageXObject extends PDXObject implements PDImage { private static final Logger LOG = LoggerFactory.getLogger(PDImageXObject.class); private BufferedImage cachedImage; private BufferedImage cachedImageWithoutMasks; private PDColorSpace colorSpace; private PDResources resources; // current resource dictionary (has color spaces) /** * Creates a thumbnail Image XObject from the given COSBase and name. * * @param cosStream the COS stream * @return an XObject * @throws IOException if there is an error creating the XObject. */ public static PDImageXObject createThumbnail(COSStream cosStream) throws IOException { // thumbnails are special, any non-null subtype is treated as being "Image" PDStream pdStream = new PDStream(cosStream); return new PDImageXObject(pdStream, null); } /** * Creates an Image XObject in the given document. * * @param document the current document * @throws java.io.IOException if there is an error creating the XObject. */ public PDImageXObject() throws IOException { this(new PDStream(), null); } /** * Creates an Image XObject in the given document using the given filtered stream. 
* * @param document the current document * @param filteredStream a filtered stream of image data * @param cosFilter the filter or a COSArray of filters * @param width the image width * @param height the image height * @param bitsPerComponent the bits per component * @param initColorSpace the color space * @throws IOException if there is an error creating the XObject. */ public PDImageXObject(InputStream encodedStream, COSBase cosFilter, int width, int height, int bitsPerComponent, PDColorSpace initColorSpace) throws IOException { super(createRawStream(encodedStream), COSName.IMAGE); getCOSObject().setItem(COSName.FILTER, cosFilter); resources = null; colorSpace = null; setBitsPerComponent(bitsPerComponent); setWidth(width); setHeight(height); setColorSpace(initColorSpace); } /** * Creates a COS stream from raw (encoded) data. */ private static COSStream createRawStream(InputStream rawInput) throws IOException { COSStream stream = new COSStream(); try (OutputStream output = stream.createFilteredStream()) { IOUtils.copy(rawInput, output); } return stream; } /** * Creates an Image XObject with the given stream as its contents and current color spaces. * * @param stream the XObject stream to read * @param resources the current resources * @throws java.io.IOException if there is an error creating the XObject. */ public PDImageXObject(PDStream stream, PDResources resources) throws IOException { super(stream, COSName.IMAGE); stream.getCOSObject().addAll(stream.getCOSObject().getDecodeResult().getParameters()); this.resources = resources; this.colorSpace = stream.getCOSObject().getDecodeResult().getJPXColorSpace(); } /** * Create a PDImageXObject from an image file, see {@link #createFromFile(File, PDDocument)} for more details. * * @param imagePath the image file path. * @param doc the document that shall use this PDImageXObject. * @return a PDImageXObject. * @throws IOException if there is an error when reading the file or creating the PDImageXObject, or if the image * type is not supported. */ public static PDImageXObject createFromFile(String imagePath) throws IOException { return createFromFileByExtension(new File(imagePath)); } /** * Create a PDImageXObject from an image file. The file format is determined by the file name suffix. The following * suffixes are supported: jpg, jpeg, tif, tiff, gif, bmp and png. This is a convenience method that calls * {@link JPEGFactory#createFromStream}, {@link CCITTFactory#createFromFile} or {@link ImageIO#read} combined with * {@link LosslessFactory#createFromImage}. (The later can also be used to create a PDImageXObject from a * BufferedImage). * * Image type is also detected based on the first bytes in the file, for PNG and JPEG. Takes precedence over * extension. Eg: Solves issues with JPEG with .png extension * * @param file the image file. * @param doc the document that shall use this PDImageXObject. * @return a PDImageXObject. * @throws IOException if there is an error when reading the file or creating the PDImageXObject. * @throws IllegalArgumentException if the image type is not supported. 
*/ public static PDImageXObject createFromFileByExtension(File file) throws IOException { String name = file.getName(); int dot = file.getName().lastIndexOf('.'); String ext = "jpg"; if (dot != -1) { ext = name.substring(dot + 1).toLowerCase(); } else { LOG.warn("Unknown extension for image file {}, assuming .jpg", file.getName()); } // Do some basic checks to see if the first bytes match the extension // Eg: a JPEG extension on a PNG image file byte[] jpegFirstBytes = new byte[] { (byte) 0xFF, (byte) 0xD8 }; byte[] pngFirstBytes = new byte[] { (byte) 0x89, (byte) 0x50 }; byte[] tiffLittleEndianFirstBytes = new byte[] { (byte) 0x49, (byte) 0x49 }; byte[] tiffBigEndianFirstBytes = new byte[] { (byte) 0x4D, (byte) 0x4D }; byte[] firstBytes = new byte[2]; try (FileInputStream fin = new FileInputStream(file)) { fin.read(firstBytes); } if (Arrays.equals(firstBytes, jpegFirstBytes)) { ext = "jpg"; } if (Arrays.equals(firstBytes, pngFirstBytes)) { ext = "png"; } if (Arrays.equals(firstBytes, tiffLittleEndianFirstBytes) || Arrays.equals(firstBytes, tiffBigEndianFirstBytes)) { ext = "tiff"; } if ("jpg".equals(ext) || "jpeg".equals(ext)) { return JPEGFactory.createFromFile(file); } if ("tif".equals(ext) || "tiff".equals(ext)) { return CCITTFactory.createFromFile(file); } if ("gif".equals(ext) || "bmp".equals(ext) || "png".equals(ext)) { BufferedImage bim = ImageIO.read(file); return LosslessFactory.createFromImage(bim); } throw new IllegalArgumentException("Image type not supported: " + name); } /** * Create a PDImageXObject from an image file. The file format is determined by the file content. The following file * types are supported: jpg, jpeg, tif, tiff, gif, bmp and png. This is a convenience method that calls * {@link JPEGFactory#createFromStream}, {@link CCITTFactory#createFromFile} or {@link ImageIO#read} combined with * {@link LosslessFactory#createFromImage}. (The later can also be used to create a PDImageXObject from a * BufferedImage). * * @param file the image file. * @param doc the document that shall use this PDImageXObject. * @return a PDImageXObject. * @throws IOException if there is an error when reading the file or creating the PDImageXObject. * @throws IllegalArgumentException if the image type is not supported. */ public static PDImageXObject createFromFileByContent(File file) throws IOException { FileType fileType = null; try (BufferedInputStream bufferedInputStream = new BufferedInputStream( new FileInputStream(file))) { fileType = FileTypeDetector.detectFileType(bufferedInputStream); } catch (IOException e) { throw new IOException("Could not determine file type: " + file.getName(), e); } if (fileType == null) { throw new IllegalArgumentException("Image type not supported: " + file.getName()); } if (fileType.equals(FileType.JPEG)) { return JPEGFactory.createFromFile(file); } if (fileType.equals(FileType.TIFF)) { return CCITTFactory.createFromFile(file); } if (fileType.equals(FileType.BMP) || fileType.equals(FileType.GIF) || fileType.equals(FileType.PNG)) { BufferedImage bim = ImageIO.read(file); return LosslessFactory.createFromImage(bim); } throw new IllegalArgumentException("Image type not supported: " + file.getName()); } /** * Returns the metadata associated with this XObject, or null if there is none. * * @return the metadata associated with this object. 
*/ public PDMetadata getMetadata() { COSStream cosStream = getCOSObject().getDictionaryObject(COSName.METADATA, COSStream.class); if (cosStream != null) { return new PDMetadata(cosStream); } return null; } /** * Sets the metadata associated with this XObject, or null if there is none. * * @param meta the metadata associated with this object */ public void setMetadata(PDMetadata meta) { getCOSObject().setItem(COSName.METADATA, meta); } /** * Returns the key of this XObject in the structural parent tree. * * @return this object's key the structural parent tree */ public int getStructParent() { return getCOSObject().getInt(COSName.STRUCT_PARENT, 0); } /** * Sets the key of this XObject in the structural parent tree. * * @param key the new key for this XObject */ public void setStructParent(int key) { getCOSObject().setInt(COSName.STRUCT_PARENT, key); } /** * {@inheritDoc} The returned images are cached for the lifetime of this XObject. */ @Override public BufferedImage getImage() throws IOException { if (cachedImage != null) { return cachedImage; } // get image as RGB BufferedImage image = SampledImageReader.getRGBImage(this, getColorKeyMask()); // soft mask (overrides explicit mask) PDImageXObject softMask = getSoftMask(); if (softMask != null) { image = applyMask(image, softMask.getOpaqueImage(), true); } else { // explicit mask - to be applied only if /ImageMask true PDImageXObject mask = getMask(); if (mask != null && mask.isStencil()) { image = applyMask(image, mask.getOpaqueImage(), false); } } cachedImage = image; return image; } /** * {@inheritDoc} The returned images are cached for the lifetime of this XObject. */ public BufferedImage getImageWithoutMasks() throws IOException { if (cachedImageWithoutMasks != null) { return cachedImageWithoutMasks; } // get image as RGB BufferedImage image = SampledImageReader.getRGBImage(this, getColorKeyMask()); cachedImageWithoutMasks = image; return image; } /** * {@inheritDoc} The returned images are not cached. */ @Override public BufferedImage getStencilImage(Paint paint) throws IOException { if (!isStencil()) { throw new IllegalStateException("Image is not a stencil"); } return SampledImageReader.getStencilImage(this, paint); } /** * Returns an RGB buffered image containing the opaque image stream without any masks applied. If this Image XObject * is a mask then the buffered image will contain the raw mask. 
* * @return the image without any masks applied * @throws IOException if the image cannot be read */ public BufferedImage getOpaqueImage() throws IOException { return SampledImageReader.getRGBImage(this, null); } // explicit mask: RGB + Binary -> ARGB // soft mask: RGB + Gray -> ARGB private BufferedImage applyMask(BufferedImage image, BufferedImage mask, boolean isSoft) { if (mask == null) { return image; } int width = image.getWidth(); int height = image.getHeight(); // scale mask to fit image, or image to fit mask, whichever is larger if (mask.getWidth() < width || mask.getHeight() < height) { mask = scaleImage(mask, width, height); } else if (mask.getWidth() > width || mask.getHeight() > height) { width = mask.getWidth(); height = mask.getHeight(); image = scaleImage(image, width, height); } // compose to ARGB BufferedImage masked = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); WritableRaster src = image.getRaster(); WritableRaster dest = masked.getRaster(); WritableRaster alpha = mask.getRaster(); float[] rgb = new float[4]; float[] rgba = new float[4]; float[] alphaPixel = null; for (int y = 0; y < height; y++) { for (int x = 0; x < width; x++) { src.getPixel(x, y, rgb); rgba[0] = rgb[0]; rgba[1] = rgb[1]; rgba[2] = rgb[2]; alphaPixel = alpha.getPixel(x, y, alphaPixel); if (isSoft) { rgba[3] = alphaPixel[0]; } else { rgba[3] = 255 - alphaPixel[0]; } dest.setPixel(x, y, rgba); } } return masked; } /** * High-quality image scaling. */ private BufferedImage scaleImage(BufferedImage image, int width, int height) { BufferedImage image2 = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); Graphics2D g = image2.createGraphics(); g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); g.drawImage(image, 0, 0, width, height, 0, 0, image.getWidth(), image.getHeight(), null); g.dispose(); return image2; } /** * Returns the Mask Image XObject associated with this image, or null if there is none. * * @return Mask Image XObject */ public PDImageXObject getMask() throws IOException { COSStream cosStream = getCOSObject().getDictionaryObject(COSName.MASK, COSStream.class); if (cosStream != null) { // always DeviceGray return new PDImageXObject(new PDStream(cosStream), null); } return null; } /** * Returns the color key mask array associated with this image, or null if there is none. * * @return Mask Image XObject */ public COSArray getColorKeyMask() { COSBase mask = getCOSObject().getDictionaryObject(COSName.MASK); if (mask instanceof COSArray) { return (COSArray) mask; } return null; } /** * Returns the Soft Mask Image XObject associated with this image, or null if there is none. * * @return the SMask Image XObject, or null. 
*/ public PDImageXObject getSoftMask() throws IOException { COSStream cosStream = getCOSObject().getDictionaryObject(COSName.SMASK, COSStream.class); if (cosStream != null) { // always DeviceGray return new PDImageXObject(new PDStream(cosStream), null); } return null; } @Override public int getBitsPerComponent() { if (isStencil()) { return 1; } return getCOSObject().getInt(COSName.BITS_PER_COMPONENT, COSName.BPC); } @Override public void setBitsPerComponent(int bpc) { getCOSObject().setInt(COSName.BITS_PER_COMPONENT, bpc); } @Override public PDColorSpace getColorSpace() throws IOException { if (colorSpace == null) { COSBase cosBase = getCOSObject().getDictionaryObject(COSName.COLORSPACE, COSName.CS); if (cosBase != null) { colorSpace = PDColorSpace.create(cosBase, resources); } else if (isStencil()) { // stencil mask color space must be gray, it is often missing return PDDeviceGray.INSTANCE; } else { // an image without a color space is always broken throw new IOException("could not determine color space"); } } return colorSpace; } @Override public InputStream createInputStream() throws IOException { return getStream().createInputStream(); } @Override public ByteBuffer asByteBuffer() throws IOException { return getStream().getCOSObject().getUnfilteredByteBuffer(); } @Override public boolean isEmpty() throws IOException { return getStream().getCOSObject().isEmpty(); } @Override public void setColorSpace(PDColorSpace cs) { getCOSObject().setItem(COSName.COLORSPACE, cs != null ? cs.getCOSObject() : null); } @Override public int getHeight() { return getCOSObject().getInt(COSName.HEIGHT); } @Override public void setHeight(int h) { getCOSObject().setInt(COSName.HEIGHT, h); } @Override public int getWidth() { return getCOSObject().getInt(COSName.WIDTH); } @Override public void setWidth(int w) { getCOSObject().setInt(COSName.WIDTH, w); } @Override public boolean getInterpolate() { return getCOSObject().getBoolean(COSName.INTERPOLATE, false); } @Override public void setInterpolate(boolean value) { getCOSObject().setBoolean(COSName.INTERPOLATE, value); } @Override public void setDecode(COSArray decode) { getCOSObject().setItem(COSName.DECODE, decode); } @Override public COSArray getDecode() { COSBase decode = getCOSObject().getDictionaryObject(COSName.DECODE); if (decode instanceof COSArray) { return (COSArray) decode; } return null; } @Override public boolean isStencil() { return getCOSObject().getBoolean(COSName.IMAGE_MASK, false); } @Override public void setStencil(boolean isStencil) { getCOSObject().setBoolean(COSName.IMAGE_MASK, isStencil); } /** * This will get the suffix for this image type, e.g. jpg/png. * * @return The image suffix or null if not available. */ @Override public String getSuffix() { List<COSName> filters = getStream().getFilters(); if (filters == null) { return "png"; } else if (filters.contains(COSName.DCT_DECODE)) { return "jpg"; } else if (filters.contains(COSName.JPX_DECODE)) { return "jpx"; } else if (filters.contains(COSName.CCITTFAX_DECODE)) { return "tiff"; } else if (filters.contains(COSName.FLATE_DECODE) || filters.contains(COSName.LZW_DECODE) || filters.contains(COSName.RUN_LENGTH_DECODE)) { return "png"; } else { LOG.warn("getSuffix() returns null, filters: " + filters); // TODO more... return null; } } }
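The accessors above distinguish three masking mechanisms: a /SMask soft mask, an explicit /Mask stream (honoured only when it is a stencil), and a /Mask colour key array. A small diagnostic sketch of that precedence, assuming a PDImageXObject already in hand; the helper class and its name are illustrative.

import java.io.IOException;

import org.sejda.sambox.cos.COSArray;
import org.sejda.sambox.pdmodel.graphics.image.PDImageXObject;

final class ImageMaskReport
{
    // mirrors the precedence applied by getImage(): soft mask first,
    // then an explicit stencil mask, then a colour key mask array
    static String describe(PDImageXObject image) throws IOException
    {
        if (image.getSoftMask() != null)
        {
            return "soft mask (/SMask): gray levels become the alpha channel";
        }
        PDImageXObject mask = image.getMask();
        if (mask != null && mask.isStencil())
        {
            return "explicit stencil mask (/Mask stream): samples are inverted into alpha";
        }
        COSArray colorKey = image.getColorKeyMask();
        if (colorKey != null)
        {
            return "color key mask (/Mask array with " + colorKey.size() + " entries)";
        }
        return "no mask";
    }
}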
don't cache unmasked images
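The message suggests the unmasked variant is no longer stored in a field. As an illustration only (the replacement body is not fully visible in this excerpt), a non-caching form of the accessor inside the class could simply re-run the sampled read on every call:

// illustrative method body, assumed to live inside PDImageXObject; it mirrors
// the old getImageWithoutMasks() above minus the cachedImageWithoutMasks field
public BufferedImage getImageWithoutMasks() throws IOException
{
    // decode to RGB honouring a colour key mask, but skip /SMask and /Mask
    return SampledImageReader.getRGBImage(this, getColorKeyMask());
}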
src/main/java/org/sejda/sambox/pdmodel/graphics/image/PDImageXObject.java
don't cache unmasked images
Java
apache-2.0
e32628bb5c4f5e36f98b47e647fafbbab8090711
0
osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi
/* * Copyright (c) OSGi Alliance (2011). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.osgi.service.subsystem; import java.io.InputStream; import java.util.Collection; import java.util.Locale; import java.util.Map; import org.osgi.framework.BundleContext; import org.osgi.framework.Version; import org.osgi.framework.resource.Resource; /** * A subsystem is a collection of resources constituting a logical, possibly * isolated, unit of functionality. There are three types of standard resources. * <ul> * <li>Bundle - A bundle that is not a fragment. * </li> * <li>Fragment - A fragment bundle. * </li> * <li>Subsystem - A subsystem defined by this specification. * </li> * </ul> * A scoped subsystem is isolated according to its share policy, which may be * implicit or explicit. An unscoped subsystem is not isolated and, therefore, * has no share policy. There are three standard types of subsystems. * <ul> * <li>Application - An implicitly scoped subsystem. Nothing is exported. * Imports are computed based on any requirements not satisfied by the * constituents. * </li> * <li>Composite - An explicitly scoped subsystem. The share policy is * defined by metadata within the subsystem archive. * </li> * <li>Feature - An unscoped subsystem. * </li> * </ul> * A subsystem may have children and, unless it's the root, must have at least * one parent. A subsystem becomes a child of the installing subsystem. An * unscoped subsystem may have more than one parent if, for example, it is a * constituent of more than one subsystem within the same region. A scoped * subsystem always has only one parent. * <p/> * A subsystem has several unique identifiers that may or may not equate to the * same instance. * <ul> * <li>Location - An identifier specified by the client as part of * installation. It is guaranteed to be unique within the same * framework. Subsystems with the same location are the same instance. * </li> * <li>ID - An identifier generated by the implementation as part of * installation. It is guaranteed to be unique within the same * framework. Subsystems with the same ID are the same instance. * </li> * <li>Symbolic Name/Version - The combination of symbolic name and version * uniquely identifies a subsystem but not necessarily the same * instance. * </li> * </ul> * The resolution of a subsystem may be mandatory or optional for a given * resource. Optional resources do not prevent the subsystem from installing, * although they may prevent it from resolving. * <p/> * A subsystem may either accept or reject transitive resources as part of its * provision policy. A transitive resource provides a capability (called a * transitive dependency) for a content resource's requirement that was not * satisfied by any content resource in the subsystem. A transitive resource * becomes a constituent of the subsystem with a provision policy of accept * transitive and that lies on the longest path between the subsystem and the * root subsystem, inclusively. 
* <p/> * Conceptually, a subsystem may be thought of as existing in a region isolated * by a share policy. Each region has one and only one scoped subsystem, which * dictates the sharing policy. The region may, however, have many unscoped * subsystems. It is therefore possible to have shared constituents across * multiple subsystems within a region. * <p/> * The subsystem graph may be though of as is an acyclic digraph with the root * subsystem as the sole source vertex. The edges have the child as the head and * parent as the tail. * <p/> * A subsystem archive is a ZIP file having an SSA extension and containing * metadata describing the subsystem. The form of the metadata may be a * subsystem or deployment manifest, as well as any resource files constituting * the subsystem. The manifests are optional and will be computed if not * present. The subsystem manifest headers may be retrieved using the default or * a specified locale. * <p/> * A subsystem is installed using one of the two install methods. Because a * subsystem must be used to install other subsystems, a root subsystem is * provided as a starting point. A subsystem may be obtained by invoking one of * the install methods or through the service registry. Every installed * subsystem has a corresponding service registration. A bundle requesting a * subsystem service will receive the subsystem of which it is a constituent. * <p/> * The root subsystem has the following characteristics. * <ul> * <li>An ID of 0.</li> * <li>A symbolic name of org.osgi.service.subsystem.root.</li> * <li>A version of 1.0.0.</li> * <li>Has no parent.</li> * <li>All existing bundles, including the system and subsystems implementation * bundles, are constituents.</li> * <li>A scoped subsystem with a provision policy of accept transitive.</li> * </ul> * A subsystem service has the following properties. * <ul> * <li>subsystem.id</li> * <li>subsystem.symbolic.name</li> * <li>subsystem.version</li> * <li>subsystem.type</li> * <li>subsystem.state</li> * </ul> * <p/> * Each subsystem has an associated bundle providing the region context within * which its constituents operate. All subsystems within the same region have * the same context. This context may be used, for example, to monitor framework * and service events affecting the constituents of subsystems within the * region. A region context bundle has the following characteristics. * <ul> * <li>A symbolic name of * org.osgi.service.subsystem.region.context.&lt;subsystem id&gt;. * </li> * <li>A version of 1.0.0. * </li> * <li>A location string of &lt;subsystem bundle location&gt;/&lt;subsystem * bundle id&gt; * </li> * </ul> * * @ThreadSafe * @noimplement */ public interface Subsystem { /** * Identifies the category a resource falls under for the purpose of * filtering the results when {@link Subsystem#getResources( * ResourceCategory...) retrieving} resources associated with this * subsystem. * <p/> * Resource categories may be compatible or incompatible. A resource may * be in more than one compatible category but never in more than one * incompatible category. In the following table, incompatible categories * are marked with an "X". 
* <p/> * <table border="1"> * <tr align="center"> * <th>&nbsp;</th> * <th>CONTENT</th> * <th>TRANSITIVE_INTRINSIC</th> * <th>TRANSITIVE_EXTRINSIC</th> * <th>SHARED</th> * </tr> * <tr align="center"> * <th>CONTENT</th> * <td>&nbsp;</td> * <td>X</td> * <td>X</td> * <td>&nbsp;</td> * </tr> * <tr align="center"> * <th>TRANSITIVE_INTRINSIC</th> * <td>X</td> * <td>&nbsp;</td> * <td>X</td> * <td>X</td> * </tr> * <tr align="center"> * <th>TRANSITIVE_EXTRINSIC</th> * <td>X</td> * <td>X</td> * <td>&nbsp;</td> * <td>X</td> * </tr> * <tr align="center"> * <th>SHARED</th> * <td>&nbsp;</td> * <td>X</td> * <td>X</td> * <td>&nbsp;</td> * </tr> * </table> */ public static enum ResourceCategory { /** * A resource contained by this subsystem that was specified in the * Subsystem-Content manifest header or included in the subsystem * archive when the Subsystem-Content header was omitted. */ CONTENT, /** * A transitive resource provisioned on behalf of this subsystem. It may * or may not be contained by this subsystem. */ TRANSITIVE_INTRINSIC, /** * A transitive resource contained by this subsystem but provisioned on * behalf of another subsystem. Only subsystems with a provision policy * of accept transitive may contain this type of resource. */ TRANSITIVE_EXTRINSIC, /** * A content resource contained by this subsystem and at least one other * subsystem. Equivalently, a content resource contained by this * subsystem whose reference count is greater than one. */ SHARED } /** * The states of a subsystem in the framework. These states match those of * a Bundle and are derived using the same rules as CompositeBundles. As * such, they are more a reflection of what content bundles are permitted * to do rather than an aggregation of the content bundle states. */ public static enum State { /** * A subsystem is in the INSTALLING state when it is initially created. */ INSTALLING, /** * A subsystem is in the INSTALLED state when all resources are * successfully installed. */ INSTALLED, /** * A subsystem is in the INSTALL_FAILED state when an unrecoverable * error occurred during installation. */ INSTALL_FAILED, /** *  A subsystem in the RESOLVING is allowed to have its content bundles * resolved. */ RESOLVING, /** *  A subsystem is in the RESOLVED state when all resources are * resolved. */ RESOLVED, /** * A subsystem is in the STARTING state when all its content bundles * are enabled for activation. */ STARTING, /** * A subsystem is in the ACTIVE state when it has reached the beginning * start-level (for starting it's contents), and all its persistently * started content bundles that are resolved and have had their * start-levels met have completed, or failed, their activator start * method. */ ACTIVE, /** *  A subsystem in the STOPPING state is in the process of taking its * its active start level to zero, stopping all the content bundles. */ STOPPING, /** * A subsystem in the UNINSTALLING state is in the process of * uninstalling its constituent resources. */ UNINSTALLING, /** * A subsystem is in the UNINSTALLED state when all its content bundles * and uninstalled and its system bundle context is invalidated. */ UNINSTALLED } /** * Returns the bundle context of the region within which this subsystem * resides. * <p/> * The bundle context offers the same perspective of any resource contained * by a subsystem within the region. It may be used, for example, to monitor * events internal to the region as well as external events visible to the * region. All subsystems within the same region have the same bundle * context. 
If this subsystem is in a state where the bundle context would * be invalid, null is returned. * * @return The bundle context of the region within which this subsystem * resides or null if this subsystem's state is in {INSTALL_FAILED, * UNINSTALLED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,CONTEXT], and the runtime supports * permissions. */ public BundleContext getBundleContext(); /** * Returns the child subsystems of this subsystem. * <p/> * The returned collection is an immutable snapshot of all subsystems that * are installed in this subsystem. The collection will be empty if no * subsystems are installed in this subsystem. * * @return The child subsystems of this subsystem. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLED}. */ public Collection<Subsystem> getChildren(); /** * Returns the headers from this subsystem's manifest. * <p/> * The returned map is unmodifiable and contains headers from the main * section of the manifest only. Each map key is a header name, and each map * value is the corresponding header value. Because header names are case- * insensitive, the methods of the map must treat them in a case-insensitive * manner. If the manifest was omitted or contained no main section, the map * will be empty. * <p/> * The header values are translated according to the specified locale. If * the specified locale is null or not supported, the raw values are * returned. If the translation for a particular header is not found, the * raw value is returned. * * @param locale The locale for which translations are desired. * @return The headers from this subsystem's manifest. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,METADATA], and the runtime supports * permissions. */ public Map<String, String> getHeaders(Locale locale); /** * Returns the location identifier of this subsystem. * <p/> * The location identifier is the {@code location} that was passed to the * {@link #install(String, InputStream) install} method of the {@link * #getParents() parent} subsystem. * * @return The location identifier of this subsystem. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,METADATA], and the runtime supports * permissions. */ public String getLocation(); /** * Returns the parent subsystems of this subsystem. * <p/> * The returned collection is an unmodifiable snapshot of all parent * subsystems of this subsystem at the time this method was invoked. If this * is the root subsystem, an empty collection is returned. Otherwise, the * collection will contain at least one parent. More than one parent will be * present if this subsystem is a constituent of multiple subsystems within * the same region. * <p/> * A subsystem becomes a parent of this subsystem in one of two ways. * <ol> * <li>This subsystem is installed into the parent subsystem by * invoking one of the parent subsystem's install methods. * </li> * <li>This subsystem is nested within the parent subsystem. A * subsystem is nested within another subsystem when specified as * part of the Subsystem-Content header of the other subsystem's * manifest or included in the other subsystem's archive when the * Subsystem-Content header was omitted. * </li> * </ol> * <p/> * @return The parent subsystems of this subsystem or an empty collection if * this is the root subsystem. 
* @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, UNINSTALLED}. */ public Collection<Subsystem> getParents(); /** * Returns the resources associated with this subsystem according to the * specified categories. * <p/> * Resources are {@link ResourceCategory categorized} based on their * relationship with the subsystem. The returned collection is unmodifiable * and represents a snapshot of all resources associated with this subsystem * that fell under one or more of the specified categories. Resources that * fall under more than one specified category will not appear more than * once in the returned collection. If the specified categories parameter is * null or an empty array, resources from all categories are returned. * <p/> * This method will block if this subsystem's state is in {INSTALLING} until * a state transition occurs. Implementations should be sensitive to the * potential for long running operations and periodically check the current * thread for interruption. An interrupted thread should result in a * SubsystemException being thrown with an InterruptedException as the * cause. * <p/> * @param categories The categories for which resources are desired or null * or an empty array for resources from all categories. * @return The resources associated with this subsystem according to the * specified categories. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, UNINSTALLED}. * @throws SubsystemException If the current thread is interrupted while * this subsystem's state is in {INSTALLING}. */ public Collection<Resource> getResources(ResourceCategory...categories); /** * Returns the current state of this subsystem. * <p/> * @return The current state of this subsystem. */ public State getState(); /** * Returns the identifier of this subsystem. * <p/> * The identifier is a monotonically increasing, non-negative integer * automatically generated at installation time and guaranteed to be unique * within the framework. The identifier of the root subsystem is zero. * <p/> * @return The identifier of this subsystem. */ public long getSubsystemId(); /** * Returns the symbolic name of this subsystem. * <p/> * The subsystem symbolic name conforms to the same grammar rules as the * bundle symbolic name and is derived from one of the following, in order. * <ul> * <li>The value of the Subsystem-Content header, if specified. * </li> * <li>The subsystem URI if passed as the location along with the * content to the install method. * </li> * <li>Optionally generated in an implementation specific way. * </li> * </ul> * The combination of symbolic name and version is unique within a region. * The symbolic name of the root subsystem is {@code * org.osgi.service.subsystem.root}. * <p/> * @return The symbolic name of this subsystem. */ public String getSymbolicName(); /** * Returns the version of this subsystem. * <p/> * The subsystem version conforms to the same grammar rules as the bundle * version and is derived from one of the following, in order. * <ul> * <li>The value of the Subsystem-Version header, if specified. * </li> * <li>The subsystem URI if passed as the location along with the * content to the install method. * </li> * <li>Defaults to {@code 0.0.0}. * </li> * </ul> * The combination of symbolic name and version is unique within a region. * The version of the root subsystem is {@code 1.0.0}. * <p/> * @return The version of this subsystem. 
*/ public Version getVersion(); /** * Installs a subsystem from the specified <code>location</code> identifier. * <p/> * This method performs the same function as calling {@link #install(String, * InputStream)} with the specified <code>location</code> identifier and a * <code>null</code> InputStream. * * @param location - The location identifier of the subsystem to install. * @return The installed subsystem. * @throws SubsystemException If the installation failed. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[installed subsystem,LIFECYCLE], and the Java * Runtime Environment supports permissions. * @see #install(String, InputStream) */ public Subsystem install(String location) throws SubsystemException; /** * Installs a subsystem from the specified content. * <p/> * If the specified content is null, a new input stream must be created from * which to read the subsystem by interpreting, in an implementation * dependent manner, the specified location. * <p/> * The specified location will be used as an identifier of the subsystem. * Every installed subsystem is uniquely identified by its location, which * is typically in the form of a URI. * <p/> * A subsystem installation must be persistent. That is, an installed * subsystem must remain installed across Framework and VM restarts. * <p/> * The following table shows which actions are associated with each state. * An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1""> * <tr> * <th>State</td> * <th>Action</td> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Install</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>Install</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Install</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>IllegalStateException</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * All installation failure flows include the following. * <ul> * <li>A state change to INSTALL_FAILED. * </li> * <li>A SubsystemException being thrown, sometimes with a specified * cause. * </li> * <li>Unregistering the subsystem service. * </li> * <li>All resources installed as part of this operation are * uninstalled. * </li> * <li>Uninstalling the region context bundle. * </li> * </ul> * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption. An * interrupted thread should be treated as an installation failure with an * InterruptedException as the cause of the SubsystemException. * <p/> * The following steps are required to install a subsystem. 
* <ol> * <li>If an installed subsystem with the specified location identifier * already exists, return the installed subsystem. * </li> * <li>Read the specified content in order to determine the symbolic name, * version, and type of the installing subsystem. If an error occurs * while reading the content, an installation failure results. * </li> * <li>If an installed subsystem with the same symbolic name and version * already exists within this subsystem's region, complete the * installation with one of the following. * <ul> * <li>If the installing and installed subsystems' types are not equal, * an installation failure results. * </li> * <li>If the installing and installed subsystems' types are equal, and * the installed subsystem is already a constituent of this * subsystem, return the installed subsystem. * </li> * <li>If the installing and installed subsystems' types are equal, and * the installed subsystem is not already a constituent of this * subsystem, add the installed subsystem as a constituent of this * subsystem, increment the installed subsystem's reference count by * one, and return the installed subsystem. * </li> * </ul> * </li> * <li>Create a new subsystem based on the specified location and content. * </li> * <li>If the subsystem is scoped, install and activate a new region context * bundle. * </li> * <li>Change the state to INSTALLING and register a new subsystem service. * </li> * <li>Discover the subsystem's content resources. If any mandatory resource * is missing, an installation failure results. * </li> * <li>Discover the transitive resources required by the content resources. * If any transitive resource is missing, an installation failure results. * </li> * <li>Disable runtime resolution for the constituent and transitive resources * that are about to be installed. * </li> * <li>Install any transitive resources. A transitive resource becomes a * constituent of the subsystem with a provision policy of accept * transitive and that lies on the longest path between the root subsystem * and this subsystem, inclusively. If any transitive resource fails * to install, an installation failure results. * </li> * <li>Install the content resources. If any content resource fails to * install, an installation failure results. * </li> * <li>If the subsystem is scoped, set up the import sharing policy. * </li> * <li>Enable runtime resolution for the transitive and constituent resources * that got installed. * </li> * <li>Change the state of the subsystem to INSTALLED. * </li> * <li>Return the new subsystem. * </li> * </ol> * * @param location - The location identifier of the subsystem to be * installed. * @param content - The input stream from which this subsystem will be read * or null to indicate the input stream must be created from the * specified location identifier. The input stream will always be * closed when this method completes, even if an exception is thrown. * @return The installed subsystem. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, UNINSTALLED}. * @throws SubsystemException If the installation failed. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[installed subsystem,LIFECYCLE], and the runtime * supports permissions. * @see #install(String) */ public Subsystem install(String location, InputStream content) throws SubsystemException; /** * Starts this subsystem. * <p/> * The following table shows which actions are associated with each state. 
* An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1""> * <tr> * <th>State</td> * <th>Action</td> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Resolve, Start</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>Start</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Return</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>IllegalStateException</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * All start failure flows include the following. * <ul> * <li>A change to some specified state. * </li> * <li>A SubsystemException being thrown, sometimes with a specified * cause. * </li> * <li>All resources started as part of this operation are stopped. * </li> * </ul> * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption. An * interrupted thread should be treated as a start failure with an * InterruptedException as the cause of the SubsystemException. * <p/> * The following steps are required to start this subsystem. * <p/> * <ol> * <li>Set this subsystem's autostart setting to started. That is, a started * subsystem must be restarted across Framework and VM restarts. * </li> * <li>If this subsystem is in the INSTALLED state, change the state to * RESOLVING and proceed to step 3. Otherwise, proceed to step 5. * <li>Resolve the content resources. A resolution failure results in * a start failure with a state of INSTALLED. * </li> * <li>If the resolution succeeded, change the state to RESOLVED and * if the subsystem is a scoped subsystem enable the export sharing * policy. * </li> * <li>Change the state to STARTING. * </li> * <li>Start all transitive resources that require starting. Any * resource that fails to start results in a start failure with a * state of RESOLVED. * </li> * <li>Start all content resources that require starting according to * the specified start order, if any. Any resource that fails to * start results in a start failure with a state of RESOLVED. * <li>If none of the eligible resources failed to start, change the * state to ACTIVE. * </li> * </ol> * <p/> * @throws SubsystemException If this subsystem fails to start. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, or UNINSTALLED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,EXECUTE], and the runtime supports * permissions. */ public void start() throws SubsystemException; /** * Stops this subsystem. * <p/> * The following table shows which actions are associated with each state. 
* An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1""> * <tr> * <th>State</td> * <th>Action</td> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Return</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Return</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>If this subsystem is in the process of being<br/> * started, Wait. Otherwise, Return.</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Stop</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>IllegalStateException</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * All stop failure flows include the following. * <ul> * <li>Persistently stop all remaining eligible resources, and log any * subsequent errors. * </li> * <li>Change the state to RESOLVED. * </li> * <li>Throw a SubsystemException with the initial error as the cause. * </li> * </ul> * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption. An * interrupted thread should be treated as an installation failure with an * InterruptedException as the cause of the SubsystemException. * <p/> * The following steps are required to stop this subsystem. * <p/> * <ol> * <li>Change the state to STOPPING. * </li> * <li>Persistently stop all eligible resources except for the region * context bundle. If an error occurs while stopping any resource, * a stop failure results. * </li> * <li>Change the state to RESOLVED. * </li> * </ol> * @throws SubsystemException If this subsystem fails to start. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, or UNINSTALLED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,EXECUTE], and the runtime supports * permissions. */ public void stop() throws SubsystemException; /** * Uninstalls this subsystem. * <p/> * The following table shows which actions are associated with each state. * An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1""> * <tr> * <th>State</td> * <th>Action</td> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Uninstall</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>If this subsystem is in the process of being<br/> * started, Wait. 
Otherwise, Uninstall.</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Stop, Uninstall</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>Return</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption, in * which case a SubsystemException with an InterruptedException as the cause * should be thrown. If an interruption occurs while waiting, this method * should terminate immediately. Once the transition to the UNINSTALLING * state has occurred, however, this method must not terminate due to an * interruption until the uninstall process has completed. * <p/> * The following steps are required to uninstall this subsystem. * <p/> * <ol> * <li>Change the state to UNINSTALLING. * </li> * <li>Uninstall each content resource. * </li> * <li>Uninstall each transitive resource. * </li> * <li>Change the state to UNINSTALLED. * </li> * <li>Unregister the subsystem service. * </li> * <li>Uninstall the region context bundle. * </li> * </ol> * With regard to error handling, once this subsystem has transitioned to * the UNINSTALLING state, every part of each of the above steps must be * attempted. Errors subsequent to the first should be logged. Once the * uninstall process has completed, a SubsystemException must be thrown with * the first error as the cause. * <p/> * @throws SubsystemException If this subsystem fails to uninstall without * error. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,LIFECYCLE] and the Java Runtime * Environment supports permissions. */ public void uninstall() throws SubsystemException; }
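A minimal lifecycle sketch against the interface above. It assumes the calling bundle is a constituent of some subsystem, so the service registry hands back that subsystem, and that "file:child.ssa" resolves to a valid subsystem archive; those values, and the class name, are illustrative rather than part of the specification text.

import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.subsystem.Subsystem;
import org.osgi.service.subsystem.SubsystemException;

public class SubsystemLifecycleExample implements BundleActivator
{
    public void start(BundleContext context) throws Exception
    {
        // a bundle requesting the Subsystem service receives the subsystem
        // of which it is a constituent
        ServiceReference<Subsystem> ref = context.getServiceReference(Subsystem.class);
        Subsystem parent = context.getService(ref);
        try
        {
            // install blocks while the parent is INSTALLING, RESOLVING, STARTING or STOPPING
            Subsystem child = parent.install("file:child.ssa");
            child.start();      // INSTALLED -> RESOLVING -> RESOLVED -> STARTING -> ACTIVE
            System.out.println(child.getSymbolicName() + " " + child.getVersion()
                    + " is " + child.getState());
            child.stop();       // ACTIVE -> STOPPING -> RESOLVED
            child.uninstall();  // RESOLVED -> UNINSTALLING -> UNINSTALLED
        }
        catch (SubsystemException e)
        {
            // install, start, stop and uninstall all signal failures through SubsystemException
            e.printStackTrace();
        }
        finally
        {
            context.ungetService(ref);
        }
    }

    public void stop(BundleContext context)
    {
    }
}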
org.osgi.service.subsystem/src/org/osgi/service/subsystem/Subsystem.java
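A sketch of the resource category filtering described for getResources(), assuming a Subsystem instance already in hand; the helper class is illustrative. Because a subsystem may have more than one parent, the recursive walk below can visit the same child more than once.

import java.util.Collection;

import org.osgi.framework.resource.Resource;
import org.osgi.service.subsystem.Subsystem;
import org.osgi.service.subsystem.Subsystem.ResourceCategory;

final class ResourceCategoryReport
{
    static void print(Subsystem subsystem)
    {
        // content resources versus transitive resources provisioned on the subsystem's behalf;
        // getResources() may block while the subsystem is still INSTALLING
        Collection<Resource> content = subsystem.getResources(ResourceCategory.CONTENT);
        Collection<Resource> transitive = subsystem.getResources(
                ResourceCategory.TRANSITIVE_INTRINSIC, ResourceCategory.TRANSITIVE_EXTRINSIC);

        System.out.println(subsystem.getSymbolicName() + " [" + subsystem.getState() + "]");
        System.out.println("  content resources:    " + content.size());
        System.out.println("  transitive resources: " + transitive.size());

        for (Subsystem child : subsystem.getChildren())
        {
            print(child);
        }
    }
}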
/* * Copyright (c) OSGi Alliance (2011). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.osgi.service.subsystem; import java.io.InputStream; import java.util.Collection; import java.util.Locale; import java.util.Map; import org.osgi.framework.BundleContext; import org.osgi.framework.Version; import org.osgi.framework.resource.Resource; /** * A subsystem is a collection of resources constituting a logical, possibly * isolated, unit of functionality. There are three types of standard resources. * <ul> * <li>Bundle - A bundle that is not a fragment. * </li> * <li>Fragment - A fragment bundle. * </li> * <li>Subsystem - A subsystem defined by this specification. * </li> * </ul> * A scoped subsystem is isolated according to its share policy, which may be * implicit or explicit. An unscoped subsystem is not isolated and, therefore, * has no share policy. There are three standard types of subsystems. * <ul> * <li>Application - An implicitly scoped subsystem. Nothing is exported. * Imports are computed based on any requirements not satisfied by the * constituents. * </li> * <li>Composite - An explicitly scoped subsystem. The share policy is * defined by metadata within the subsystem archive. * </li> * <li>Feature - An unscoped subsystem. * </li> * </ul> * A subsystem may have children and, unless it's the root, must have at least * one parent. A subsystem becomes a child of the installing subsystem. An * unscoped subsystem may have more than one parent if, for example, it is a * constituent of more than one subsystem within the same region. A scoped * subsystem always has only one parent. * <p/> * A subsystem has several unique identifiers that may or may not equate to the * same instance. * <ul> * <li>Location - An identifier specified by the client as part of * installation. It is guaranteed to be unique within the same * framework. Subsystems with the same location are the same instance. * </li> * <li>ID - An identifier generated by the implementation as part of * installation. It is guaranteed to be unique within the same * framework. Subsystems with the same ID are the same instance. * </li> * <li>Symbolic Name/Version - The combination of symbolic name and version * uniquely identifies a subsystem but not necessarily the same * instance. * </li> * </ul> * The resolution of a subsystem may be mandatory or optional for a given * resource. Optional resources do not prevent the subsystem from installing, * although they may prevent it from resolving. * <p/> * A subsystem may either accept or reject transitive resources as part of its * provision policy. A transitive resource provides a capability (called a * transitive dependency) for a content resource's requirement that was not * satisfied by any content resource in the subsystem. A transitive resource * becomes a constituent of the subsystem with a provision policy of accept * transitive and that lies on the longest path between the subsystem and the * root subsystem, inclusively. 
* <p/> * Conceptually, a subsystem may be thought of as existing in a region isolated * by a share policy. Each region has one and only one scoped subsystem, which * dictates the sharing policy. The region may, however, have many unscoped * subsystems. It is therefore possible to have shared constituents across * multiple subsystems within a region. * <p/> * The subsystem graph may be though of as is an acyclic digraph with the root * subsystem as the sole source vertex. The edges have the child as the head and * parent as the tail. * <p/> * A subsystem archive is a ZIP file having an SSA extension and containing * metadata describing the subsystem. The form of the metadata may be a * subsystem or deployment manifest, as well as any resource files constituting * the subsystem. The manifests are optional and will be computed if not * present. The subsystem manifest headers may be retrieved using the default or * a specified locale. * <p/> * A subsystem is installed using one of the two install methods. Because a * subsystem must be used to install other subsystems, a root subsystem is * provided as a starting point. A subsystem may be obtained by invoking one of * the install methods or through the service registry. Every installed * subsystem has a corresponding service registration. A bundle requesting a * subsystem service will receive the subsystem of which it is a constituent. * <p/> * The root subsystem has the following characteristics. * <ul> * <li>An ID of 0.</li> * <li>A symbolic name of org.osgi.service.subsystem.root.</li> * <li>A version of 1.0.0.</li> * <li>Has no parent.</li> * <li>All existing bundles, including the system and subsystems implementation * bundles, are constituents.</li> * <li>A scoped subsystem with a provision policy of accept transitive.</li> * </ul> * A subsystem service has the following properties. * <ul> * <li>subsystem.id</li> * <li>subsystem.symbolic.name</li> * <li>subsystem.version</li> * <li>subsystem.type</li> * <li>subsystem.state</li> * </ul> * <p/> * Each subsystem has an associated bundle providing the region context within * which its constituents operate. All subsystems within the same region have * the same context. This context may be used, for example, to monitor framework * and service events affecting the constituents of subsystems within the * region. A region context bundle has the following characteristics. * <ul> * <li>A symbolic name of * org.osgi.service.subsystem.region.context.&lt;subsystem id&gt;. * </li> * <li>A version of 1.0.0. * </li> * <li>A location string of &lt;subsystem bundle location&gt;/&lt;subsystem * bundle id&gt; * </li> * </ul> * * @ThreadSafe * @noimplement */ public interface Subsystem { /** * Identifies the category a resource falls under for the purpose of * filtering the results when {@link Subsystem#getResources( * ResourceCategory...) retrieving} resources associated with this * subsystem. * <p/> * Resource categories may be compatible or incompatible. A resource may * be in more than one compatible category but never in more than one * incompatible category. In the following table, incompatible categories * are marked with an "X". 
* <p/> * <table border="1"> * <tr align="center"> * <th>&nbsp;</th> * <th>CONTENT</th> * <th>TRANSITIVE_INTRINSIC</th> * <th>TRANSITIVE_EXTRINSIC</th> * <th>SHARED</th> * </tr> * <tr align="center"> * <th>CONTENT</th> * <td>&nbsp;</td> * <td>X</td> * <td>X</td> * <td>&nbsp;</td> * </tr> * <tr align="center"> * <th>TRANSITIVE_INTRINSIC</th> * <td>X</td> * <td>&nbsp;</td> * <td>X</td> * <td>X</td> * </tr> * <tr align="center"> * <th>TRANSITIVE_EXTRINSIC</th> * <td>X</td> * <td>X</td> * <td>&nbsp;</td> * <td>X</td> * </tr> * <tr align="center"> * <th>SHARED</th> * <td>&nbsp;</td> * <td>X</td> * <td>X</td> * <td>&nbsp;</td> * </tr> * </table> */ public static enum ResourceCategory { /** * A resource contained by this subsystem that was specified in the * Subsystem-Content manifest header or included in the subsystem * archive when the Subsystem-Content header was omitted. */ CONTENT, /** * A transitive resource provisioned on behalf of this subsystem. It may * or may not be contained by this subsystem. */ TRANSITIVE_INTRINSIC, /** * A transitive resource contained by this subsystem but provisioned on * behalf of another subsystem. Only subsystems with a provision policy * of accept transitive may contain this type of resource. */ TRANSITIVE_EXTRINSIC, /** * A content resource contained by this subsystem and at least one other * subsystem. Equivalently, a content resource contained by this * subsystem whose reference count is greater than one. */ SHARED } /** * The states of a subsystem in the framework. These states match those of * a Bundle and are derived using the same rules as CompositeBundles. As * such, they are more a reflection of what content bundles are permitted * to do rather than an aggregation of the content bundle states. */ public static enum State { /** * A subsystem is in the INSTALLING state when it is initially created. */ INSTALLING, /** * A subsystem is in the INSTALLED state when all resources are * successfully installed. */ INSTALLED, /** * A subsystem is in the INSTALL_FAILED state when an unrecoverable * error occurred during installation. */ INSTALL_FAILED, /** * A subsystem in the RESOLVING state is allowed to have its content bundles * resolved. */ RESOLVING, /** * A subsystem is in the RESOLVED state when all resources are * resolved. */ RESOLVED, /** * A subsystem is in the STARTING state when all its content bundles * are enabled for activation. */ STARTING, /** * A subsystem is in the ACTIVE state when it has reached the beginning * start-level (for starting its contents), and all its persistently * started content bundles that are resolved and have had their * start-levels met have completed, or failed, their activator start * method. */ ACTIVE, /** * A subsystem in the STOPPING state is in the process of taking its * active start level to zero, stopping all the content bundles. */ STOPPING, /** * A subsystem in the UNINSTALLING state is in the process of * uninstalling its constituent resources. */ UNINSTALLING, /** * A subsystem is in the UNINSTALLED state when all its content bundles * are uninstalled and its system bundle context is invalidated. */ UNINSTALLED } /** * Returns the bundle context of the region within which this subsystem * resides. * <p/> * The bundle context offers the same perspective of any resource contained * by a subsystem within the region. It may be used, for example, to monitor * events internal to the region as well as external events visible to the * region. All subsystems within the same region have the same bundle * context.
If this subsystem is in a state where the bundle context would * be invalid, null is returned. * * @return The bundle context of the region within which this subsystem * resides or null if this subsystem's state is in {INSTALL_FAILED, * UNINSTALLED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,CONTEXT], and the runtime supports * permissions. */ public BundleContext getBundleContext(); /** * Returns the child subsystems of this subsystem. * <p/> * The returned collection is an immutable snapshot of all subsystems that * are installed in this subsystem. The collection will be empty if no * subsystems are installed in this subsystem. * * @return The child subsystems of this subsystem. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLED}. */ public Collection<Subsystem> getChildren(); /** * Returns the headers from this subsystem's manifest. * <p/> * The returned map is unmodifiable and contains headers from the main * section of the manifest only. Each map key is a header name, and each map * value is the corresponding header value. Because header names are case- * insensitive, the methods of the map must treat them in a case-insensitive * manner. If the manifest was omitted or contained no main section, the map * will be empty. * <p/> * The header values are translated according to the specified locale. If * the specified locale is null or not supported, the raw values are * returned. If the translation for a particular header is not found, the * raw value is returned. * * @param locale The locale for which translations are desired. * @return The headers from this subsystem's manifest. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,METADATA], and the runtime supports * permissions. */ public Map<String, String> getHeaders(Locale locale); /** * Returns the location identifier of this subsystem. * <p/> * The location identifier is the {@code location} that was passed to {@link * #install(String) one} of the {@link #install(String, InputStream) two} * install methods of the {@link #getParents() parent} subsystem. * <p/> * @return The location identifier of this subsystem. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,METADATA], and the runtime supports * permissions. */ public String getLocation(); /** * Returns the parent subsystems of this subsystem. * <p/> * The returned collection is an unmodifiable snapshot of all parent * subsystems of this subsystem at the time this method was invoked. If this * is the root subsystem, an empty collection is returned. Otherwise, the * collection will contain at least one parent. More than one parent will be * present if this subsystem is a constituent of multiple subsystems within * the same region. * <p/> * A subsystem becomes a parent of this subsystem in one of two ways. * <ol> * <li>This subsystem is installed into the parent subsystem by * invoking one of the parent subsystem's install methods. * </li> * <li>This subsystem is nested within the parent subsystem. A * subsystem is nested within another subsystem when specified as * part of the Subsystem-Content header of the other subsystem's * manifest or included in the other subsystem's archive when the * Subsystem-Content header was omitted. * </li> * </ol> * <p/> * @return The parent subsystems of this subsystem or an empty collection if * this is the root subsystem. 
* @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, UNINSTALLED}. */ public Collection<Subsystem> getParents(); /** * Returns the resources associated with this subsystem according to the * specified categories. * <p/> * Resources are {@link ResourceCategory categorized} based on their * relationship with the subsystem. The returned collection is unmodifiable * and represents a snapshot of all resources associated with this subsystem * that fell under one or more of the specified categories. Resources that * fall under more than one specified category will not appear more than * once in the returned collection. If the specified categories parameter is * null or an empty array, resources from all categories are returned. * <p/> * This method will block if this subsystem's state is in {INSTALLING} until * a state transition occurs. Implementations should be sensitive to the * potential for long running operations and periodically check the current * thread for interruption. An interrupted thread should result in a * SubsystemException being thrown with an InterruptedException as the * cause. * <p/> * @param categories The categories for which resources are desired or null * or an empty array for resources from all categories. * @return The resources associated with this subsystem according to the * specified categories. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, UNINSTALLED}. * @throws SubsystemException If the current thread is interrupted while * this subsystem's state is in {INSTALLING}. */ public Collection<Resource> getResources(ResourceCategory...categories); /** * Returns the current state of this subsystem. * <p/> * @return The current state of this subsystem. */ public State getState(); /** * Returns the identifier of this subsystem. * <p/> * The identifier is a monotonically increasing, non-negative integer * automatically generated at installation time and guaranteed to be unique * within the framework. The identifier of the root subsystem is zero. * <p/> * @return The identifier of this subsystem. */ public long getSubsystemId(); /** * Returns the symbolic name of this subsystem. * <p/> * The subsystem symbolic name conforms to the same grammar rules as the * bundle symbolic name and is derived from one of the following, in order. * <ul> * <li>The value of the Subsystem-Content header, if specified. * </li> * <li>The subsystem URI if passed as the location along with the * content to the install method. * </li> * <li>Optionally generated in an implementation specific way. * </li> * </ul> * The combination of symbolic name and version is unique within a region. * The symbolic name of the root subsystem is {@code * org.osgi.service.subsystem.root}. * <p/> * @return The symbolic name of this subsystem. */ public String getSymbolicName(); /** * Returns the version of this subsystem. * <p/> * The subsystem version conforms to the same grammar rules as the bundle * version and is derived from one of the following, in order. * <ul> * <li>The value of the Subsystem-Version header, if specified. * </li> * <li>The subsystem URI if passed as the location along with the * content to the install method. * </li> * <li>Defaults to {@code 0.0.0}. * </li> * </ul> * The combination of symbolic name and version is unique within a region. * The version of the root subsystem is {@code 1.0.0}. * <p/> * @return The version of this subsystem. 
*/ public Version getVersion(); /** * Installs a subsystem from the specified <code>location</code> identifier. * <p/> * This method performs the same function as calling {@link #install(String, * InputStream)} with the specified <code>location</code> identifier and a * <code>null</code> InputStream. * * @param location - The location identifier of the subsystem to install. * @return The installed subsystem. * @throws SubsystemException If the installation failed. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[installed subsystem,LIFECYCLE], and the Java * Runtime Environment supports permissions. * @see #install(String, InputStream) */ public Subsystem install(String location) throws SubsystemException; /** * Installs a subsystem from the specified content. * <p/> * If the specified content is null, a new input stream must be created from * which to read the subsystem by interpreting, in an implementation * dependent manner, the specified location. * <p/> * The specified location will be used as an identifier of the subsystem. * Every installed subsystem is uniquely identified by its location, which * is typically in the form of a URI. * <p/> * A subsystem installation must be persistent. That is, an installed * subsystem must remain installed across Framework and VM restarts. * <p/> * The following table shows which actions are associated with each state. * An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1""> * <tr> * <th>State</td> * <th>Action</td> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Install</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>Install</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Install</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>IllegalStateException</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * All installation failure flows include the following. * <ul> * <li>A state change to INSTALL_FAILED. * </li> * <li>A SubsystemException being thrown, sometimes with a specified * cause. * </li> * <li>Unregistering the subsystem service. * </li> * <li>All resources installed as part of this operation are * uninstalled. * </li> * <li>Uninstalling the region context bundle. * </li> * </ul> * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption. An * interrupted thread should be treated as an installation failure with an * InterruptedException as the cause of the SubsystemException. * <p/> * The following steps are required to install a subsystem. 
* <ol> * <li>If an installed subsystem with the specified location identifier * already exists, return the installed subsystem. * </li> * <li>Read the specified content in order to determine the symbolic name, * version, and type of the installing subsystem. If an error occurs * while reading the content, an installation failure results. * </li> * <li>If an installed subsystem with the same symbolic name and version * already exists within this subsystem's region, complete the * installation with one of the following. * <ul> * <li>If the installing and installed subsystems' types are not equal, * an installation failure results. * </li> * <li>If the installing and installed subsystems' types are equal, and * the installed subsystem is already a constituent of this * subsystem, return the installed subsystem. * </li> * <li>If the installing and installed subsystems' types are equal, and * the installed subsystem is not already a constituent of this * subsystem, add the installed subsystem as a constituent of this * subsystem, increment the installed subsystem's reference count by * one, and return the installed subsystem. * </li> * </ul> * </li> * <li>Create a new subsystem based on the specified location and content. * </li> * <li>If the subsystem is scoped, install and activate a new region context * bundle. * </li> * <li>Change the state to INSTALLING and register a new subsystem service. * </li> * <li>Discover the subsystem's content resources. If any mandatory resource * is missing, an installation failure results. * </li> * <li>Discover the transitive resources required by the content resources. * If any transitive resource is missing, an installation failure results. * </li> * <li>Disable runtime resolution for the constituent and transitive resources * that are about to be installed. * </li> * <li>Install any transitive resources. A transitive resource becomes a * constituent of the subsystem with a provision policy of accept * transitive and that lies on the longest path between the root subsystem * and this subsystem, inclusively. If any transitive resource fails * to install, an installation failure results. * </li> * <li>Install the content resources. If any content resource fails to * install, an installation failure results. * </li> * <li>If the subsystem is scoped, set up the import sharing policy. * </li> * <li>Enable runtime resolution for the transitive and constituent resources * that got installed. * </li> * <li>Change the state of the subsystem to INSTALLED. * </li> * <li>Return the new subsystem. * </li> * </ol> * * @param location - The location identifier of the subsystem to be * installed. * @param content - The input stream from which this subsystem will be read * or null to indicate the input stream must be created from the * specified location identifier. The input stream will always be * closed when this method completes, even if an exception is thrown. * @return The installed subsystem. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, UNINSTALLED}. * @throws SubsystemException If the installation failed. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[installed subsystem,LIFECYCLE], and the runtime * supports permissions. * @see #install(String) */ public Subsystem install(String location, InputStream content) throws SubsystemException; /** * Starts this subsystem. * <p/> * The following table shows which actions are associated with each state. 
* An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1""> * <tr> * <th>State</td> * <th>Action</td> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Resolve, Start</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>Start</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Return</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>IllegalStateException</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * All start failure flows include the following. * <ul> * <li>A change to some specified state. * </li> * <li>A SubsystemException being thrown, sometimes with a specified * cause. * </li> * <li>All resources started as part of this operation are stopped. * </li> * </ul> * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption. An * interrupted thread should be treated as a start failure with an * InterruptedException as the cause of the SubsystemException. * <p/> * The following steps are required to start this subsystem. * <p/> * <ol> * <li>Set this subsystem's autostart setting to started. That is, a started * subsystem must be restarted across Framework and VM restarts. * </li> * <li>If this subsystem is in the INSTALLED state, change the state to * RESOLVING and proceed to step 3. Otherwise, proceed to step 5. * <li>Resolve the content resources. A resolution failure results in * a start failure with a state of INSTALLED. * </li> * <li>If the resolution succeeded, change the state to RESOLVED and * if the subsystem is a scoped subsystem enable the export sharing * policy. * </li> * <li>Change the state to STARTING. * </li> * <li>Start all transitive resources that require starting. Any * resource that fails to start results in a start failure with a * state of RESOLVED. * </li> * <li>Start all content resources that require starting according to * the specified start order, if any. Any resource that fails to * start results in a start failure with a state of RESOLVED. * <li>If none of the eligible resources failed to start, change the * state to ACTIVE. * </li> * </ol> * <p/> * @throws SubsystemException If this subsystem fails to start. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, or UNINSTALLED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,EXECUTE], and the runtime supports * permissions. */ public void start() throws SubsystemException; /** * Stops this subsystem. * <p/> * The following table shows which actions are associated with each state. 
* An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1"> * <tr> * <th>State</th> * <th>Action</th> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Return</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Return</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>If this subsystem is in the process of being<br/> * started, Wait. Otherwise, Return.</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Stop</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>IllegalStateException</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * All stop failure flows include the following. * <ul> * <li>Persistently stop all remaining eligible resources, and log any * subsequent errors. * </li> * <li>Change the state to RESOLVED. * </li> * <li>Throw a SubsystemException with the initial error as the cause. * </li> * </ul> * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption. An * interrupted thread should be treated as a stop failure with an * InterruptedException as the cause of the SubsystemException. * <p/> * The following steps are required to stop this subsystem. * <p/> * <ol> * <li>Change the state to STOPPING. * </li> * <li>Persistently stop all eligible resources except for the region * context bundle. If an error occurs while stopping any resource, * a stop failure results. * </li> * <li>Change the state to RESOLVED. * </li> * </ol> * @throws SubsystemException If this subsystem fails to stop. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED, UNINSTALLING, or UNINSTALLED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,EXECUTE], and the runtime supports * permissions. */ public void stop() throws SubsystemException; /** * Uninstalls this subsystem. * <p/> * The following table shows which actions are associated with each state. * An action of Wait means this method will block until a state transition * occurs, upon which the new state will be evaluated in order to * determine how to proceed. An action of Return means this method returns * immediately without taking any other action. * <p/> * <table border="1"> * <tr> * <th>State</th> * <th>Action</th> * </tr> * <tr align="center"> * <td>INSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>INSTALLED</td> * <td>Uninstall</td> * </tr> * <tr align="center"> * <td>INSTALL_FAILED</td> * <td>IllegalStateException</td> * </tr> * <tr align="center"> * <td>RESOLVING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>RESOLVED</td> * <td>If this subsystem is in the process of being<br/> * started, Wait.
Otherwise, Uninstall.</td> * </tr> * <tr align="center"> * <td>STARTING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>ACTIVE</td> * <td>Stop, Uninstall</td> * </tr> * <tr align="center"> * <td>STOPPING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLING</td> * <td>Wait</td> * </tr> * <tr align="center"> * <td>UNINSTALLED</td> * <td>Return</td> * </tr> * </table> * <p/> * All references to changing the state of this subsystem include both * changing the state of the subsystem object as well as the state property * of the subsystem service registration. * <p/> * Implementations should be sensitive to the potential for long running * operations and periodically check the current thread for interruption, in * which case a SubsystemException with an InterruptedException as the cause * should be thrown. If an interruption occurs while waiting, this method * should terminate immediately. Once the transition to the UNINSTALLING * state has occurred, however, this method must not terminate due to an * interruption until the uninstall process has completed. * <p/> * The following steps are required to uninstall this subsystem. * <p/> * <ol> * <li>Change the state to UNINSTALLING. * </li> * <li>Uninstall each content resource. * </li> * <li>Uninstall each transitive resource. * </li> * <li>Change the state to UNINSTALLED. * </li> * <li>Unregister the subsystem service. * </li> * <li>Uninstall the region context bundle. * </li> * </ol> * With regard to error handling, once this subsystem has transitioned to * the UNINSTALLING state, every part of each of the above steps must be * attempted. Errors subsequent to the first should be logged. Once the * uninstall process has completed, a SubsystemException must be thrown with * the first error as the cause. * <p/> * @throws SubsystemException If this subsystem fails to uninstall without * error. * @throws IllegalStateException If this subsystem's state is in * {INSTALL_FAILED}. * @throws SecurityException If the caller does not have the appropriate * SubsystemPermission[this,LIFECYCLE] and the Java Runtime * Environment supports permissions. */ public void uninstall() throws SubsystemException; }
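The install/start/stop/uninstall lifecycle documented above is normally driven through the service registry: a bundle looks up the Subsystem service it is a constituent of and installs children into it. The following is a minimal usage sketch, not part of the specification; the archive URL, the location identifier and the error handling are illustrative assumptions.

import java.io.InputStream;
import java.net.URL;

import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.subsystem.Subsystem;

public class SubsystemInstallExample {

    // Installs a child subsystem into the subsystem owning the given bundle
    // context and starts it. The archive URL and location identifier are
    // hypothetical values used only for illustration.
    public Subsystem installAndStart(BundleContext context) throws Exception {
        ServiceReference<Subsystem> ref = context.getServiceReference(Subsystem.class);
        Subsystem parent = context.getService(ref);
        try (InputStream content = new URL("file:/tmp/example.ssa").openStream()) {
            Subsystem child = parent.install("example-subsystem", content);
            child.start();
            // A successful start leaves the subsystem in the ACTIVE state.
            System.out.println(child.getSymbolicName() + " " + child.getVersion()
                    + " is " + child.getState());
            return child;
        } finally {
            context.ungetService(ref);
        }
    }
}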
getLocation javadoc general cleanup.
org.osgi.service.subsystem/src/org/osgi/service/subsystem/Subsystem.java
getLocation javadoc general cleanup.
Java
apache-2.0
97cf2e9677fb2e7f5703fdafb858ff1c7e91ee23
0
jxblum/spring-boot,shangyi0102/spring-boot,bbrouwer/spring-boot,xiaoleiPENG/my-project,jvz/spring-boot,bjornlindstrom/spring-boot,kamilszymanski/spring-boot,bclozel/spring-boot,sebastiankirsch/spring-boot,ihoneymon/spring-boot,felipeg48/spring-boot,mdeinum/spring-boot,tiarebalbi/spring-boot,kdvolder/spring-boot,jvz/spring-boot,mbogoevici/spring-boot,jayarampradhan/spring-boot,michael-simons/spring-boot,mdeinum/spring-boot,philwebb/spring-boot,donhuvy/spring-boot,yhj630520/spring-boot,NetoDevel/spring-boot,spring-projects/spring-boot,royclarkson/spring-boot,bijukunjummen/spring-boot,ilayaperumalg/spring-boot,rweisleder/spring-boot,olivergierke/spring-boot,bclozel/spring-boot,shakuzen/spring-boot,ptahchiev/spring-boot,kdvolder/spring-boot,scottfrederick/spring-boot,sebastiankirsch/spring-boot,ilayaperumalg/spring-boot,mosoft521/spring-boot,ollie314/spring-boot,michael-simons/spring-boot,Nowheresly/spring-boot,lucassaldanha/spring-boot,joshiste/spring-boot,javyzheng/spring-boot,javyzheng/spring-boot,scottfrederick/spring-boot,rweisleder/spring-boot,aahlenst/spring-boot,tiarebalbi/spring-boot,jvz/spring-boot,ptahchiev/spring-boot,wilkinsona/spring-boot,olivergierke/spring-boot,kdvolder/spring-boot,Nowheresly/spring-boot,tiarebalbi/spring-boot,aahlenst/spring-boot,jayarampradhan/spring-boot,eddumelendez/spring-boot,kdvolder/spring-boot,sebastiankirsch/spring-boot,isopov/spring-boot,ihoneymon/spring-boot,jvz/spring-boot,joshthornhill/spring-boot,lburgazzoli/spring-boot,lburgazzoli/spring-boot,bclozel/spring-boot,jayarampradhan/spring-boot,mbenson/spring-boot,deki/spring-boot,vakninr/spring-boot,philwebb/spring-boot,ihoneymon/spring-boot,joshiste/spring-boot,lexandro/spring-boot,candrews/spring-boot,wilkinsona/spring-boot,isopov/spring-boot,vpavic/spring-boot,tiarebalbi/spring-boot,lexandro/spring-boot,ptahchiev/spring-boot,shangyi0102/spring-boot,linead/spring-boot,drumonii/spring-boot,sebastiankirsch/spring-boot,joshiste/spring-boot,donhuvy/spring-boot,jxblum/spring-boot,tsachev/spring-boot,donhuvy/spring-boot,pvorb/spring-boot,ilayaperumalg/spring-boot,mosoft521/spring-boot,bbrouwer/spring-boot,bjornlindstrom/spring-boot,jxblum/spring-boot,mosoft521/spring-boot,kamilszymanski/spring-boot,lexandro/spring-boot,shakuzen/spring-boot,kdvolder/spring-boot,vpavic/spring-boot,royclarkson/spring-boot,mbenson/spring-boot,ollie314/spring-boot,michael-simons/spring-boot,yhj630520/spring-boot,wilkinsona/spring-boot,yangdd1205/spring-boot,isopov/spring-boot,lexandro/spring-boot,chrylis/spring-boot,jvz/spring-boot,chrylis/spring-boot,scottfrederick/spring-boot,htynkn/spring-boot,ihoneymon/spring-boot,htynkn/spring-boot,mdeinum/spring-boot,jxblum/spring-boot,lucassaldanha/spring-boot,rweisleder/spring-boot,drumonii/spring-boot,dreis2211/spring-boot,ptahchiev/spring-boot,jbovet/spring-boot,wilkinsona/spring-boot,habuma/spring-boot,donhuvy/spring-boot,mbenson/spring-boot,joshthornhill/spring-boot,dreis2211/spring-boot,ptahchiev/spring-boot,eddumelendez/spring-boot,royclarkson/spring-boot,felipeg48/spring-boot,tiarebalbi/spring-boot,yhj630520/spring-boot,yangdd1205/spring-boot,olivergierke/spring-boot,yhj630520/spring-boot,hello2009chen/spring-boot,candrews/spring-boot,mosoft521/spring-boot,vpavic/spring-boot,eddumelendez/spring-boot,mbogoevici/spring-boot,hello2009chen/spring-boot,dreis2211/spring-boot,lucassaldanha/spring-boot,jbovet/spring-boot,bijukunjummen/spring-boot,ollie314/spring-boot,bijukunjummen/spring-boot,felipeg48/spring-boot,mdeinum/spring-boot,rweisleder/spring-boot,candrews/spring-boot,jayarampr
adhan/spring-boot,spring-projects/spring-boot,Nowheresly/spring-boot,Buzzardo/spring-boot,yangdd1205/spring-boot,DeezCashews/spring-boot,yhj630520/spring-boot,mdeinum/spring-boot,royclarkson/spring-boot,jbovet/spring-boot,DeezCashews/spring-boot,olivergierke/spring-boot,chrylis/spring-boot,mbogoevici/spring-boot,Buzzardo/spring-boot,chrylis/spring-boot,drumonii/spring-boot,drumonii/spring-boot,shakuzen/spring-boot,xiaoleiPENG/my-project,bijukunjummen/spring-boot,habuma/spring-boot,hello2009chen/spring-boot,felipeg48/spring-boot,scottfrederick/spring-boot,joshthornhill/spring-boot,habuma/spring-boot,jayarampradhan/spring-boot,hello2009chen/spring-boot,spring-projects/spring-boot,javyzheng/spring-boot,eddumelendez/spring-boot,kamilszymanski/spring-boot,royclarkson/spring-boot,Buzzardo/spring-boot,tsachev/spring-boot,lburgazzoli/spring-boot,NetoDevel/spring-boot,wilkinsona/spring-boot,zhanhb/spring-boot,mbenson/spring-boot,kamilszymanski/spring-boot,scottfrederick/spring-boot,linead/spring-boot,chrylis/spring-boot,philwebb/spring-boot,kdvolder/spring-boot,Nowheresly/spring-boot,felipeg48/spring-boot,Buzzardo/spring-boot,NetoDevel/spring-boot,candrews/spring-boot,zhanhb/spring-boot,lburgazzoli/spring-boot,vpavic/spring-boot,ilayaperumalg/spring-boot,donhuvy/spring-boot,bjornlindstrom/spring-boot,aahlenst/spring-boot,spring-projects/spring-boot,shakuzen/spring-boot,chrylis/spring-boot,jbovet/spring-boot,htynkn/spring-boot,bbrouwer/spring-boot,shangyi0102/spring-boot,habuma/spring-boot,vakninr/spring-boot,mosoft521/spring-boot,lburgazzoli/spring-boot,bijukunjummen/spring-boot,xiaoleiPENG/my-project,pvorb/spring-boot,aahlenst/spring-boot,isopov/spring-boot,ilayaperumalg/spring-boot,wilkinsona/spring-boot,tiarebalbi/spring-boot,ollie314/spring-boot,linead/spring-boot,linead/spring-boot,spring-projects/spring-boot,vakninr/spring-boot,DeezCashews/spring-boot,vpavic/spring-boot,jxblum/spring-boot,deki/spring-boot,deki/spring-boot,jbovet/spring-boot,xiaoleiPENG/my-project,joshiste/spring-boot,habuma/spring-boot,drumonii/spring-boot,vakninr/spring-boot,zhanhb/spring-boot,ollie314/spring-boot,felipeg48/spring-boot,zhanhb/spring-boot,shakuzen/spring-boot,pvorb/spring-boot,ilayaperumalg/spring-boot,htynkn/spring-boot,ihoneymon/spring-boot,bclozel/spring-boot,htynkn/spring-boot,shakuzen/spring-boot,rweisleder/spring-boot,Buzzardo/spring-boot,shangyi0102/spring-boot,joshiste/spring-boot,rweisleder/spring-boot,ihoneymon/spring-boot,lucassaldanha/spring-boot,deki/spring-boot,mbogoevici/spring-boot,mbenson/spring-boot,ptahchiev/spring-boot,candrews/spring-boot,mbogoevici/spring-boot,michael-simons/spring-boot,habuma/spring-boot,donhuvy/spring-boot,philwebb/spring-boot,deki/spring-boot,xiaoleiPENG/my-project,isopov/spring-boot,mbenson/spring-boot,bbrouwer/spring-boot,joshthornhill/spring-boot,Nowheresly/spring-boot,hello2009chen/spring-boot,michael-simons/spring-boot,shangyi0102/spring-boot,aahlenst/spring-boot,sebastiankirsch/spring-boot,mdeinum/spring-boot,javyzheng/spring-boot,NetoDevel/spring-boot,DeezCashews/spring-boot,tsachev/spring-boot,linead/spring-boot,lexandro/spring-boot,eddumelendez/spring-boot,bjornlindstrom/spring-boot,philwebb/spring-boot,lucassaldanha/spring-boot,bjornlindstrom/spring-boot,tsachev/spring-boot,bclozel/spring-boot,DeezCashews/spring-boot,michael-simons/spring-boot,Buzzardo/spring-boot,bclozel/spring-boot,joshthornhill/spring-boot,dreis2211/spring-boot,NetoDevel/spring-boot,zhanhb/spring-boot,pvorb/spring-boot,eddumelendez/spring-boot,htynkn/spring-boot,dreis2211/spring-boot,jxblu
m/spring-boot,spring-projects/spring-boot,joshiste/spring-boot,isopov/spring-boot,zhanhb/spring-boot,dreis2211/spring-boot,vakninr/spring-boot,vpavic/spring-boot,javyzheng/spring-boot,aahlenst/spring-boot,scottfrederick/spring-boot,olivergierke/spring-boot,drumonii/spring-boot,tsachev/spring-boot,pvorb/spring-boot,bbrouwer/spring-boot,philwebb/spring-boot,kamilszymanski/spring-boot,tsachev/spring-boot
/* * Copyright 2012-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.gradle; import org.gradle.tooling.ProjectConnection; import org.junit.Rule; import org.junit.Test; import org.springframework.boot.test.rule.OutputCapture; /** * Tests for using the old, deprecated plugin ID. * * @author Andy Wilkinson */ public class DeprecatedPluginTests { private ProjectConnection project; private static final String BOOT_VERSION = Versions.getBootVersion(); @Rule public OutputCapture output = new OutputCapture(); @Test public void deprecatedIdWorks() throws Exception { this.project = new ProjectCreator().createProject("deprecated-plugin"); this.project.newBuild().forTasks("build") .withArguments("-PbootVersion=" + BOOT_VERSION, "--stacktrace").run(); } }
spring-boot-integration-tests/spring-boot-gradle-tests/src/test/java/org/springframework/boot/gradle/DeprecatedPluginTests.java
/* * Copyright 2012-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.gradle; import org.gradle.tooling.ProjectConnection; import org.junit.Rule; import org.junit.Test; import org.springframework.boot.test.rule.OutputCapture; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for using the old, deprecated plugin ID. * * @author Andy Wilkinson */ public class DeprecatedPluginTests { private ProjectConnection project; private static final String BOOT_VERSION = Versions.getBootVersion(); @Rule public OutputCapture output = new OutputCapture(); @Test public void deprecatedIdWorksAndLogsAWarning() throws Exception { this.project = new ProjectCreator().createProject("deprecated-plugin"); this.project.newBuild().forTasks("build") .withArguments("-PbootVersion=" + BOOT_VERSION, "--stacktrace").run(); assertThat(this.output.toString()) .contains("The plugin id 'spring-boot' is deprecated"); } }
Polish new tests for the Gradle plugin's deprecated ID The output capture for the deprecation warning only appears to work when the test is run in isolation. I can't figure out why that's the case, particularly as we have another test class (BootRunResourceTests) that uses OutputCapture and works reliably. I'm cutting my losses and removing the use of OutputCapture and the assertion that the warning is logged. See gh-6997
spring-boot-integration-tests/spring-boot-gradle-tests/src/test/java/org/springframework/boot/gradle/DeprecatedPluginTests.java
Polish new tests for the Gradle plugin's deprecated ID
Java
apache-2.0
2dc56286bb35e7bb5b2c9f3d6d6014a3242c9fec
0
speedment/speedment,speedment/speedment
/* * * Copyright (c) 2006-2019, Speedment, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not * use this file except in compliance with the License. You may obtain a copy of * the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.speedment.runtime.connector.sqlite.internal; import com.speedment.common.injector.annotation.ExecuteBefore; import com.speedment.common.injector.annotation.Inject; import com.speedment.common.injector.annotation.OnlyIfMissing; import com.speedment.common.injector.annotation.WithState; import com.speedment.runtime.core.component.DbmsHandlerComponent; import com.speedment.runtime.core.db.*; import com.speedment.runtime.core.db.metadata.TypeInfoMetaData; import java.util.List; import java.util.Optional; import java.util.Set; import static com.speedment.common.injector.State.CREATED; import static com.speedment.common.injector.State.INITIALIZED; import static java.util.Collections.emptySet; import static java.util.Objects.requireNonNull; /** * Implementation of {@link DbmsType} for the SQLite database type. * * @author Emil Forslund * @since 3.1.10 */ public final class SqliteDbmsType implements DbmsType { private final static String SQLITE = "SQLite"; private final DriverComponent drivers; // Nullable private final SqliteMetadataHandler metadataHandler; private final SqliteOperationHandler operationHandler; @Inject @OnlyIfMissing(DriverComponent.class) SqliteDbmsType(SqliteMetadataHandler metadataHandler, SqliteOperationHandler operationHandler) { this.drivers = null; // Nullable this.metadataHandler = requireNonNull(metadataHandler); this.operationHandler = requireNonNull(operationHandler); } @Inject SqliteDbmsType(DriverComponent drivers, SqliteMetadataHandler metadataHandler, SqliteOperationHandler operationHandler) { this.drivers = requireNonNull(drivers); this.metadataHandler = requireNonNull(metadataHandler); this.operationHandler = requireNonNull(operationHandler); } @ExecuteBefore(INITIALIZED) void install(@WithState(CREATED) DbmsHandlerComponent component) { component.install(this); } @Override public boolean hasSchemaNames() { return false; } @Override public boolean hasDatabaseNames() { return false; } @Override public boolean hasDatabaseUsers() { return false; } @Override public String getName() { return SQLITE; } @Override public String getDriverManagerName() { return "SQLite JDBC Driver"; } @Override public ConnectionType getConnectionType() { return ConnectionType.DBMS_AS_FILE; } @Override public int getDefaultPort() { return 0; // In SQLite, you connect to a file directly. 
} @Override public String getSchemaTableDelimiter() { return ""; } @Override public String getDbmsNameMeaning() { return "The name of the file where data is persisted."; } @Override public Optional<String> getDefaultDbmsName() { return Optional.empty(); } @Override public boolean isSupported() { return drivers != null && drivers.driver(getDriverName()).isPresent(); } @Override public String getDriverName() { return "org.sqlite.JDBC"; } @Override public DatabaseNamingConvention getDatabaseNamingConvention() { return new SqliteNamingConvention(); } @Override public DbmsMetadataHandler getMetadataHandler() { return metadataHandler; } @Override public DbmsOperationHandler getOperationHandler() { return operationHandler; } @Override public DbmsColumnHandler getColumnHandler() { return new SqliteColumnHandler(); } @Override public String getResultSetTableSchema() { throw new UnsupportedOperationException( "SQLite does not have concept of 'schemas', so this method " + "should not be invoked." ); } @Override public ConnectionUrlGenerator getConnectionUrlGenerator() { return new SqliteConnectionUrlGenerator(); } @Override public FieldPredicateView getFieldPredicateView() { return new SqliteFieldPredicateView(); } @Override public Set<TypeInfoMetaData> getDataTypes() { return emptySet(); } @Override public String getInitialQuery() { return "SELECT 1"; } @Override public SkipLimitSupport getSkipLimitSupport() { return SkipLimitSupport.STANDARD; } @Override public String applySkipLimit(String originalSql, List<Object> params, long skip, long limit) { if (skip == 0 && limit == Long.MAX_VALUE) { return originalSql; } final StringBuilder sb = new StringBuilder(originalSql); if (limit == Long.MAX_VALUE) { sb.append(" LIMIT 223372036854775807"); // Some big number that does not overflow } else { sb.append(" LIMIT ?"); params.add(limit); } if (skip > 0) { sb.append(" OFFSET ?"); params.add(skip); } return sb.toString(); } @Override public SubSelectAlias getSubSelectAlias() { return SubSelectAlias.PROHIBITED; } @Override public SortByNullOrderInsertion getSortByNullOrderInsertion() { return SortByNullOrderInsertion.PRE; } }
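applySkipLimit above rewrites a query by appending LIMIT/OFFSET placeholders and registering their values as parameters. The following standalone sketch mirrors that logic purely for illustration; it is not the Speedment API itself, which is normally invoked by the runtime rather than by user code.

import java.util.ArrayList;
import java.util.List;

public class SkipLimitSketch {

    // Mirrors the rewriting rules of SqliteDbmsType.applySkipLimit shown above.
    static String applySkipLimit(String sql, List<Object> params, long skip, long limit) {
        if (skip == 0 && limit == Long.MAX_VALUE) {
            return sql; // default window: leave the statement untouched
        }
        StringBuilder sb = new StringBuilder(sql);
        if (limit == Long.MAX_VALUE) {
            sb.append(" LIMIT 223372036854775807"); // large sentinel, as in the class above
        } else {
            sb.append(" LIMIT ?");
            params.add(limit);
        }
        if (skip > 0) {
            sb.append(" OFFSET ?");
            params.add(skip);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        List<Object> params = new ArrayList<>();
        String sql = applySkipLimit("SELECT * FROM user", params, 10, 20);
        // Prints: SELECT * FROM user LIMIT ? OFFSET ?  with params [20, 10]
        System.out.println(sql + "  with params " + params);
    }
}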
runtime-parent/runtime-connector-parent/sqlite/src/main/java/com/speedment/runtime/connector/sqlite/internal/SqliteDbmsType.java
/* * * Copyright (c) 2006-2019, Speedment, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not * use this file except in compliance with the License. You may obtain a copy of * the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.speedment.runtime.connector.sqlite.internal; import com.speedment.common.injector.annotation.ExecuteBefore; import com.speedment.common.injector.annotation.Inject; import com.speedment.common.injector.annotation.OnlyIfMissing; import com.speedment.common.injector.annotation.WithState; import com.speedment.runtime.core.component.DbmsHandlerComponent; import com.speedment.runtime.core.db.*; import com.speedment.runtime.core.db.metadata.TypeInfoMetaData; import java.util.List; import java.util.Optional; import java.util.Set; import static com.speedment.common.injector.State.CREATED; import static com.speedment.common.injector.State.INITIALIZED; import static java.util.Collections.emptySet; import static java.util.Objects.requireNonNull; /** * Implementation of {@link DbmsType} for the SQLite database type. * * @author Emil Forslund * @since 3.1.10 */ public final class SqliteDbmsType implements DbmsType { public final static String SQLITE = "SQLite"; private final DriverComponent drivers; // Nullable private final SqliteMetadataHandler metadataHandler; private final SqliteOperationHandler operationHandler; @Inject @OnlyIfMissing(DriverComponent.class) SqliteDbmsType(SqliteMetadataHandler metadataHandler, SqliteOperationHandler operationHandler) { this.drivers = null; // Nullable this.metadataHandler = requireNonNull(metadataHandler); this.operationHandler = requireNonNull(operationHandler); } @Inject SqliteDbmsType(DriverComponent drivers, SqliteMetadataHandler metadataHandler, SqliteOperationHandler operationHandler) { this.drivers = requireNonNull(drivers); this.metadataHandler = requireNonNull(metadataHandler); this.operationHandler = requireNonNull(operationHandler); } @ExecuteBefore(INITIALIZED) void install(@WithState(CREATED) DbmsHandlerComponent component) { component.install(this); } @Override public boolean hasSchemaNames() { return false; } @Override public boolean hasDatabaseNames() { return false; } @Override public boolean hasDatabaseUsers() { return false; } @Override public String getName() { return SQLITE; } @Override public String getDriverManagerName() { return "SQLite JDBC Driver"; } @Override public ConnectionType getConnectionType() { return ConnectionType.DBMS_AS_FILE; } @Override public int getDefaultPort() { return 0; // In SQLite, you connect to a file directly. 
} @Override public String getSchemaTableDelimiter() { return ""; } @Override public String getDbmsNameMeaning() { return "The name of the file where data is persisted."; } @Override public Optional<String> getDefaultDbmsName() { return Optional.empty(); } @Override public boolean isSupported() { return drivers != null && drivers.driver(getDriverName()).isPresent(); } @Override public String getDriverName() { return "org.sqlite.JDBC"; } @Override public DatabaseNamingConvention getDatabaseNamingConvention() { return new SqliteNamingConvention(); } @Override public DbmsMetadataHandler getMetadataHandler() { return metadataHandler; } @Override public DbmsOperationHandler getOperationHandler() { return operationHandler; } @Override public DbmsColumnHandler getColumnHandler() { return new SqliteColumnHandler(); } @Override public String getResultSetTableSchema() { throw new UnsupportedOperationException( "SQLite does not have concept of 'schemas', so this method " + "should not be invoked." ); } @Override public ConnectionUrlGenerator getConnectionUrlGenerator() { return new SqliteConnectionUrlGenerator(); } @Override public FieldPredicateView getFieldPredicateView() { return new SqliteFieldPredicateView(); } @Override public Set<TypeInfoMetaData> getDataTypes() { return emptySet(); } @Override public String getInitialQuery() { return "SELECT 1"; } @Override public SkipLimitSupport getSkipLimitSupport() { return SkipLimitSupport.STANDARD; } @Override public String applySkipLimit(String originalSql, List<Object> params, long skip, long limit) { if (skip == 0 && limit == Long.MAX_VALUE) { return originalSql; } final StringBuilder sb = new StringBuilder(originalSql); if (limit == Long.MAX_VALUE) { sb.append(" LIMIT 223372036854775807"); // Some big number that does not overflow } else { sb.append(" LIMIT ?"); params.add(limit); } if (skip > 0) { sb.append(" OFFSET ?"); params.add(skip); } return sb.toString(); } @Override public SubSelectAlias getSubSelectAlias() { return SubSelectAlias.PROHIBITED; } @Override public SortByNullOrderInsertion getSortByNullOrderInsertion() { return SortByNullOrderInsertion.PRE; } }
sqlite: Make constant private
runtime-parent/runtime-connector-parent/sqlite/src/main/java/com/speedment/runtime/connector/sqlite/internal/SqliteDbmsType.java
sqlite: Make constant private
Java
apache-2.0
08d47b5efcbc702eecb6c83b0e7c75cb860573b4
0
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
package io.quarkus.hibernate.reactive.runtime; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.persistence.EntityManagerFactory; import javax.persistence.PersistenceException; import javax.persistence.spi.PersistenceProvider; import javax.persistence.spi.PersistenceUnitInfo; import javax.persistence.spi.ProviderUtil; import org.hibernate.boot.registry.StandardServiceRegistry; import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl; import org.hibernate.cfg.AvailableSettings; import org.hibernate.jpa.boot.spi.EntityManagerFactoryBuilder; import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor; import org.hibernate.reactive.provider.service.ReactiveGenerationTarget; import org.hibernate.service.internal.ProvidedService; import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.jboss.logging.Logger; import io.quarkus.arc.Arc; import io.quarkus.arc.InstanceHandle; import io.quarkus.hibernate.orm.runtime.BuildTimeSettings; import io.quarkus.hibernate.orm.runtime.FastBootHibernatePersistenceProvider; import io.quarkus.hibernate.orm.runtime.IntegrationSettings; import io.quarkus.hibernate.orm.runtime.PersistenceUnitsHolder; import io.quarkus.hibernate.orm.runtime.RuntimeSettings; import io.quarkus.hibernate.orm.runtime.integration.HibernateOrmIntegrations; import io.quarkus.hibernate.orm.runtime.recording.PrevalidatedQuarkusMetadata; import io.quarkus.hibernate.orm.runtime.recording.RecordedState; import io.quarkus.hibernate.reactive.runtime.boot.FastBootReactiveEntityManagerFactoryBuilder; import io.quarkus.hibernate.reactive.runtime.boot.registry.PreconfiguredReactiveServiceRegistryBuilder; import io.quarkus.hibernate.reactive.runtime.customized.QuarkusReactiveConnectionPoolInitiator; import io.vertx.sqlclient.Pool; /** * This can not inherit from ReactivePersistenceProvider because it references HibernatePersistenceProvider * and that would trigger the native-image tool to include all code which could be triggered from using * that: we need to be able to fully exclude HibernatePersistenceProvider from the native image. 
*/ final class FastBootHibernateReactivePersistenceProvider implements PersistenceProvider { private static final Logger log = Logger.getLogger(FastBootHibernateReactivePersistenceProvider.class); private static final String IMPLEMENTATION_NAME = "org.hibernate.reactive.provider.ReactivePersistenceProvider"; private final FastBootHibernatePersistenceProvider delegate = new FastBootHibernatePersistenceProvider(); @Override public EntityManagerFactory createEntityManagerFactory(String emName, Map properties) { if (properties == null) properties = new HashMap<Object, Object>(); try { // These are pre-parsed during image generation: final List<PersistenceUnitDescriptor> units = PersistenceUnitsHolder.getPersistenceUnitDescriptors(); for (PersistenceUnitDescriptor unit : units) { //if the provider is not set, don't use it as people might want to use Hibernate ORM if (IMPLEMENTATION_NAME.equalsIgnoreCase(unit.getProviderClassName()) || unit.getProviderClassName() == null) { EntityManagerFactoryBuilder emfBuilder = getEntityManagerFactoryBuilderOrNull(emName, properties); EntityManagerFactory emf = emfBuilder.build(); return emf; } } //not the right provider return null; } catch (PersistenceException pe) { throw pe; } catch (Exception e) { throw new PersistenceException("Unable to build EntityManagerFactory", e); } } private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String persistenceUnitName, Map properties) { log.tracef("Attempting to obtain correct EntityManagerFactoryBuilder for persistenceUnitName : %s", persistenceUnitName); verifyProperties(properties); // These are pre-parsed during image generation: final List<PersistenceUnitDescriptor> units = PersistenceUnitsHolder.getPersistenceUnitDescriptors(); log.debugf("Located %s persistence units; checking each", units.size()); if (persistenceUnitName == null && units.size() > 1) { // no persistence-unit name to look for was given and we found multiple // persistence-units throw new PersistenceException("No name provided and multiple persistence units found"); } for (PersistenceUnitDescriptor persistenceUnit : units) { log.debugf( "Checking persistence-unit [name=%s, explicit-provider=%s] against incoming persistence unit name [%s]", persistenceUnit.getName(), persistenceUnit.getProviderClassName(), persistenceUnitName); final boolean matches = persistenceUnitName == null || persistenceUnit.getName().equals(persistenceUnitName); if (!matches) { log.debugf("Excluding from consideration '%s' due to name mis-match", persistenceUnit.getName()); continue; } // See if we (Hibernate) are the persistence provider if (!isProvider(persistenceUnit)) { log.debug("Excluding from consideration due to provider mis-match"); continue; } RecordedState recordedState = PersistenceUnitsHolder.getRecordedState(persistenceUnitName); final PrevalidatedQuarkusMetadata metadata = recordedState.getMetadata(); final BuildTimeSettings buildTimeSettings = recordedState.getBuildTimeSettings(); final IntegrationSettings integrationSettings = recordedState.getIntegrationSettings(); RuntimeSettings.Builder runtimeSettingsBuilder = new RuntimeSettings.Builder(buildTimeSettings, integrationSettings); HibernateOrmIntegrations.contributeRuntimeProperties((k, v) -> runtimeSettingsBuilder.put(k, v)); RuntimeSettings runtimeSettings = runtimeSettingsBuilder.build(); StandardServiceRegistry standardServiceRegistry = rewireMetadataAndExtractServiceRegistry( runtimeSettings, recordedState, persistenceUnitName); final Object cdiBeanManager = 
Arc.container().beanManager(); final Object validatorFactory = Arc.container().instance("quarkus-hibernate-validator-factory").get(); return new FastBootReactiveEntityManagerFactoryBuilder( metadata /* Uses the StandardServiceRegistry references by this! */, persistenceUnitName, standardServiceRegistry /* Mostly ignored! (yet needs to match) */, runtimeSettings, validatorFactory, cdiBeanManager, recordedState.getMultiTenancyStrategy()); } log.debug("Found no matching persistence units"); return null; } private StandardServiceRegistry rewireMetadataAndExtractServiceRegistry(RuntimeSettings runtimeSettings, RecordedState rs, String persistenceUnitName) { PreconfiguredReactiveServiceRegistryBuilder serviceRegistryBuilder = new PreconfiguredReactiveServiceRegistryBuilder( rs); registerVertxPool(persistenceUnitName, runtimeSettings, serviceRegistryBuilder); runtimeSettings.getSettings().forEach((key, value) -> { serviceRegistryBuilder.applySetting(key, value); }); for (ProvidedService<?> providedService : rs.getProvidedServices()) { serviceRegistryBuilder.addService(providedService); } StandardServiceRegistryImpl standardServiceRegistry = serviceRegistryBuilder.buildNewServiceRegistry(); standardServiceRegistry.getService(SchemaManagementTool.class) .setCustomDatabaseGenerationTarget(new ReactiveGenerationTarget(standardServiceRegistry)); return standardServiceRegistry; } @SuppressWarnings("rawtypes") private void verifyProperties(Map properties) { if (properties != null && properties.size() != 0) { throw new PersistenceException( "The FastbootHibernateProvider PersistenceProvider can not support runtime provided properties. " + "Make sure you set all properties you need in the configuration resources before building the application."); } } private boolean isProvider(PersistenceUnitDescriptor persistenceUnit) { Map<Object, Object> props = Collections.emptyMap(); String requestedProviderName = FastBootHibernatePersistenceProvider.extractRequestedProviderName(persistenceUnit, props); if (requestedProviderName == null) { // We'll always assume we are the best possible provider match unless the user // explicitly asks for a different one. return true; } return FastBootHibernateReactivePersistenceProvider.class.getName().equals(requestedProviderName) || IMPLEMENTATION_NAME.equals(requestedProviderName) || FastBootHibernatePersistenceProvider.class.getName().equals(requestedProviderName) || "org.hibernate.jpa.HibernatePersistenceProvider".equals(requestedProviderName); } private void registerVertxPool(String persistenceUnitName, RuntimeSettings runtimeSettings, PreconfiguredReactiveServiceRegistryBuilder serviceRegistry) { if (runtimeSettings.isConfigured(AvailableSettings.URL)) { // the pool has been defined in the persistence unit, we can bail out return; } // for now we only support one pool but this will change InstanceHandle<Pool> poolHandle = Arc.container().instance(Pool.class); if (!poolHandle.isAvailable()) { throw new IllegalStateException("No pool has been defined for persistence unit " + persistenceUnitName); } serviceRegistry.addInitiator(new QuarkusReactiveConnectionPoolInitiator(poolHandle.get())); } @Override public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map map) { final String persistenceProviderClassName = info.getPersistenceProviderClassName(); if (persistenceProviderClassName == null || IMPLEMENTATION_NAME.equals(persistenceProviderClassName)) { Map<Object, Object> protectiveCopy = map != null ? 
new HashMap<Object, Object>(map) : new HashMap<Object, Object>(); return delegate.createContainerEntityManagerFactory(info, protectiveCopy); } //not the right provider return null; } @Override public ProviderUtil getProviderUtil() { return delegate.getProviderUtil(); } @Override public void generateSchema(PersistenceUnitInfo info, Map map) { delegate.generateSchema(info, map); } @Override public boolean generateSchema(String persistenceUnitName, Map map) { return delegate.generateSchema(persistenceUnitName, map); } }
extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/FastBootHibernateReactivePersistenceProvider.java
package io.quarkus.hibernate.reactive.runtime; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.persistence.EntityManagerFactory; import javax.persistence.PersistenceException; import javax.persistence.spi.PersistenceProvider; import javax.persistence.spi.PersistenceUnitInfo; import javax.persistence.spi.ProviderUtil; import org.hibernate.boot.registry.StandardServiceRegistry; import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl; import org.hibernate.cfg.AvailableSettings; import org.hibernate.jpa.boot.spi.EntityManagerFactoryBuilder; import org.hibernate.jpa.boot.spi.PersistenceUnitDescriptor; import org.hibernate.reactive.provider.service.ReactiveGenerationTarget; import org.hibernate.service.internal.ProvidedService; import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.jboss.logging.Logger; import io.quarkus.arc.Arc; import io.quarkus.arc.InstanceHandle; import io.quarkus.hibernate.orm.runtime.BuildTimeSettings; import io.quarkus.hibernate.orm.runtime.FastBootHibernatePersistenceProvider; import io.quarkus.hibernate.orm.runtime.IntegrationSettings; import io.quarkus.hibernate.orm.runtime.PersistenceUnitsHolder; import io.quarkus.hibernate.orm.runtime.RuntimeSettings; import io.quarkus.hibernate.orm.runtime.integration.HibernateOrmIntegrations; import io.quarkus.hibernate.orm.runtime.recording.PrevalidatedQuarkusMetadata; import io.quarkus.hibernate.orm.runtime.recording.RecordedState; import io.quarkus.hibernate.reactive.runtime.boot.FastBootReactiveEntityManagerFactoryBuilder; import io.quarkus.hibernate.reactive.runtime.boot.registry.PreconfiguredReactiveServiceRegistryBuilder; import io.quarkus.hibernate.reactive.runtime.customized.QuarkusReactiveConnectionPoolInitiator; import io.vertx.sqlclient.Pool; /** * This can not inherit from ReactivePersistenceProvider because it references HibernatePersistenceProvider * and that would trigger the native-image tool to include all code which could be triggered from using * that: we need to be able to fully exclude HibernatePersistenceProvider from the native image. 
*/ final class FastBootHibernateReactivePersistenceProvider implements PersistenceProvider { private static final Logger log = Logger.getLogger(FastBootHibernateReactivePersistenceProvider.class); private static final String IMPLEMENTATION_NAME = "org.hibernate.reactive.provider.ReactivePersistenceProvider"; private final FastBootHibernatePersistenceProvider delegate = new FastBootHibernatePersistenceProvider(); @Override public EntityManagerFactory createEntityManagerFactory(String emName, Map properties) { if (properties == null) properties = new HashMap<Object, Object>(); try { // These are pre-parsed during image generation: final List<PersistenceUnitDescriptor> units = PersistenceUnitsHolder.getPersistenceUnitDescriptors(); for (PersistenceUnitDescriptor unit : units) { //if the provider is not set, don't use it as people might want to use Hibernate ORM if (IMPLEMENTATION_NAME.equalsIgnoreCase(unit.getProviderClassName()) || unit.getProviderClassName() == null) { EntityManagerFactoryBuilder emfBuilder = getEntityManagerFactoryBuilderOrNull(emName, properties); EntityManagerFactory emf = emfBuilder.build(); return emf; } } //not the right provider return null; } catch (PersistenceException pe) { throw pe; } catch (Exception e) { throw new PersistenceException("Unable to build EntityManagerFactory", e); } } private EntityManagerFactoryBuilder getEntityManagerFactoryBuilderOrNull(String persistenceUnitName, Map properties) { log.tracef("Attempting to obtain correct EntityManagerFactoryBuilder for persistenceUnitName : %s", persistenceUnitName); verifyProperties(properties); // These are pre-parsed during image generation: final List<PersistenceUnitDescriptor> units = PersistenceUnitsHolder.getPersistenceUnitDescriptors(); log.debugf("Located %s persistence units; checking each", units.size()); if (persistenceUnitName == null && units.size() > 1) { // no persistence-unit name to look for was given and we found multiple // persistence-units throw new PersistenceException("No name provided and multiple persistence units found"); } for (PersistenceUnitDescriptor persistenceUnit : units) { log.debugf( "Checking persistence-unit [name=%s, explicit-provider=%s] against incoming persistence unit name [%s]", persistenceUnit.getName(), persistenceUnit.getProviderClassName(), persistenceUnitName); final boolean matches = persistenceUnitName == null || persistenceUnit.getName().equals(persistenceUnitName); if (!matches) { log.debugf("Excluding from consideration '%s' due to name mis-match", persistenceUnit.getName()); continue; } // See if we (Hibernate) are the persistence provider if (!isProvider(persistenceUnit)) { log.debug("Excluding from consideration due to provider mis-match"); continue; } RecordedState recordedState = PersistenceUnitsHolder.getRecordedState(persistenceUnitName); final PrevalidatedQuarkusMetadata metadata = recordedState.getMetadata(); final BuildTimeSettings buildTimeSettings = recordedState.getBuildTimeSettings(); final IntegrationSettings integrationSettings = recordedState.getIntegrationSettings(); RuntimeSettings.Builder runtimeSettingsBuilder = new RuntimeSettings.Builder(buildTimeSettings, integrationSettings); HibernateOrmIntegrations.contributeRuntimeProperties((k, v) -> runtimeSettingsBuilder.put(k, v)); RuntimeSettings runtimeSettings = runtimeSettingsBuilder.build(); StandardServiceRegistry standardServiceRegistry = rewireMetadataAndExtractServiceRegistry( runtimeSettings, recordedState, persistenceUnitName); final Object cdiBeanManager = 
Arc.container().beanManager(); final Object validatorFactory = Arc.container().instance("quarkus-hibernate-validator-factory").get(); return new FastBootReactiveEntityManagerFactoryBuilder( metadata /* Uses the StandardServiceRegistry references by this! */, persistenceUnitName, standardServiceRegistry /* Mostly ignored! (yet needs to match) */, runtimeSettings, validatorFactory, cdiBeanManager, recordedState.getMultiTenancyStrategy()); } log.debug("Found no matching persistence units"); return null; } private StandardServiceRegistry rewireMetadataAndExtractServiceRegistry(RuntimeSettings runtimeSettings, RecordedState rs, String persistenceUnitName) { PreconfiguredReactiveServiceRegistryBuilder serviceRegistryBuilder = new PreconfiguredReactiveServiceRegistryBuilder( rs); registerVertxPool(persistenceUnitName, runtimeSettings, serviceRegistryBuilder); runtimeSettings.getSettings().forEach((key, value) -> { serviceRegistryBuilder.applySetting(key, value); }); for (ProvidedService<?> providedService : rs.getProvidedServices()) { serviceRegistryBuilder.addService(providedService); } StandardServiceRegistryImpl standardServiceRegistry = serviceRegistryBuilder.buildNewServiceRegistry(); standardServiceRegistry.getService(SchemaManagementTool.class) .setCustomDatabaseGenerationTarget(new ReactiveGenerationTarget(standardServiceRegistry)); return standardServiceRegistry; } @SuppressWarnings("rawtypes") private void verifyProperties(Map properties) { if (properties != null && properties.size() != 0) { throw new PersistenceException( "The FastbootHibernateProvider PersistenceProvider can not support runtime provided properties. " + "Make sure you set all properties you need in the configuration resources before building the application."); } } private boolean isProvider(PersistenceUnitDescriptor persistenceUnit) { Map<Object, Object> props = Collections.emptyMap(); String requestedProviderName = FastBootHibernatePersistenceProvider.extractRequestedProviderName(persistenceUnit, props); if (requestedProviderName == null) { // We'll always assume we are the best possible provider match unless the user // explicitly asks for a different one. return true; } return FastBootHibernateReactivePersistenceProvider.class.getName().equals(requestedProviderName) || IMPLEMENTATION_NAME.equals(requestedProviderName) || FastBootHibernatePersistenceProvider.class.getName().equals(requestedProviderName) || "org.hibernate.jpa.HibernatePersistenceProvider".equals(requestedProviderName); } private void registerVertxPool(String persistenceUnitName, RuntimeSettings runtimeSettings, PreconfiguredReactiveServiceRegistryBuilder serviceRegistry) { if (runtimeSettings.isConfigured(AvailableSettings.URL)) { // the pool has been defined in the persistence unit, we can bail out return; } // for now we only support one pool but this will change InstanceHandle<Pool> poolHandle = Arc.container().instance(Pool.class); if (!poolHandle.isAvailable()) { throw new IllegalStateException("No pool has been defined for persistence unit " + persistenceUnitName); } serviceRegistry.addInitiator(new QuarkusReactiveConnectionPoolInitiator(poolHandle.get())); } @Override public EntityManagerFactory createContainerEntityManagerFactory(PersistenceUnitInfo info, Map map) { final String persistenceProviderClassName = info.getPersistenceProviderClassName(); if (persistenceProviderClassName == null || IMPLEMENTATION_NAME.equals(persistenceProviderClassName)) { Map<Object, Object> protectiveCopy = map != null ? 
new HashMap<Object, Object>(map) : new HashMap<Object, Object>(); return delegate.createContainerEntityManagerFactory(info, protectiveCopy); } //not the right provider return null; } @Override public ProviderUtil getProviderUtil() { return delegate.getProviderUtil(); } @Override public void generateSchema(PersistenceUnitInfo info, Map map) { throw new IllegalStateException("Hibernate Reactive does not support schema generation"); } @Override public boolean generateSchema(String persistenceUnitName, Map map) { throw new IllegalStateException("Hibernate Reactive does not support schema generation"); } }
Hibernate Reactive now supports schema generation as well
extensions/hibernate-reactive/runtime/src/main/java/io/quarkus/hibernate/reactive/runtime/FastBootHibernateReactivePersistenceProvider.java
Hibernate Reactive now supports schema generation as well
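The commit message above names the single behavioural difference between the old_contents and new_contents of FastBootHibernateReactivePersistenceProvider in this record: the two generateSchema overrides stop throwing IllegalStateException ("Hibernate Reactive does not support schema generation") and instead forward to the wrapped ORM provider. A minimal, self-contained sketch of that delegation pattern follows; the DelegateProvider interface and DelegatingSchemaGeneration class are hypothetical stand-ins introduced only for illustration, whereas the real code delegates to the FastBootHibernatePersistenceProvider field shown in the record above.

import java.util.Map;
import javax.persistence.spi.PersistenceUnitInfo;

// Hypothetical stand-in for FastBootHibernatePersistenceProvider: only the two
// schema-generation operations needed by the sketch are declared.
interface DelegateProvider {
    void generateSchema(PersistenceUnitInfo info, Map map);
    boolean generateSchema(String persistenceUnitName, Map map);
}

// Sketch of the new behaviour recorded in new_contents. The old_contents version
// of both methods threw IllegalStateException("Hibernate Reactive does not support
// schema generation"); after the change, both calls are forwarded to the ORM delegate.
final class DelegatingSchemaGeneration {
    private final DelegateProvider delegate;

    DelegatingSchemaGeneration(DelegateProvider delegate) {
        this.delegate = delegate;
    }

    public void generateSchema(PersistenceUnitInfo info, Map map) {
        delegate.generateSchema(info, map);
    }

    public boolean generateSchema(String persistenceUnitName, Map map) {
        return delegate.generateSchema(persistenceUnitName, map);
    }
}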
Java
apache-2.0
695c8ca4b843c7250e6d4ffdc425b97a85056789
0
b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl
/* * Copyright 2011-2018 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.snowowl.snomed.datastore.index.entry; import static com.b2international.index.query.Expressions.exactMatch; import static com.b2international.index.query.Expressions.matchAny; import static com.b2international.index.query.Expressions.matchAnyDecimal; import static com.b2international.index.query.Expressions.matchAnyInt; import static com.b2international.index.query.Expressions.matchRange; import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER; import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.DESCRIPTION_NUMBER; import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.RELATIONSHIP_NUMBER; import static com.google.common.base.Preconditions.checkArgument; import java.math.BigDecimal; import java.util.Collection; import java.util.Date; import java.util.Map; import java.util.Map.Entry; import com.b2international.commons.StringUtils; import com.b2international.index.Doc; import com.b2international.index.Keyword; import com.b2international.index.RevisionHash; import com.b2international.index.query.Expression; import com.b2international.snowowl.core.CoreTerminologyBroker; import com.b2international.snowowl.core.date.DateFormats; import com.b2international.snowowl.core.date.EffectiveTimes; import com.b2international.snowowl.datastore.cdo.CDOIDUtils; import com.b2international.snowowl.snomed.common.SnomedRf2Headers; import com.b2international.snowowl.snomed.core.domain.Acceptability; import com.b2international.snowowl.snomed.core.domain.InactivationIndicator; import com.b2international.snowowl.snomed.core.domain.RelationshipRefinability; import com.b2international.snowowl.snomed.core.domain.SnomedConcept; import com.b2international.snowowl.snomed.core.domain.SnomedCoreComponent; import com.b2international.snowowl.snomed.core.domain.SnomedDescription; import com.b2international.snowowl.snomed.core.domain.SnomedRelationship; import com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSetMember; import com.b2international.snowowl.snomed.datastore.SnomedRefSetUtil; import com.b2international.snowowl.snomed.snomedrefset.DataType; import com.b2international.snowowl.snomed.snomedrefset.SnomedAssociationRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedAttributeValueRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedComplexMapRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedConcreteDataTypeRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedDescriptionTypeRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedLanguageRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMAttributeDomainRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMAttributeRangeRefSetMember; import 
com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMDomainRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMModuleScopeRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedModuleDependencyRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedOWLExpressionRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedQueryRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetType; import com.b2international.snowowl.snomed.snomedrefset.SnomedSimpleMapRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.util.SnomedRefSetSwitch; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; import com.google.common.base.Function; import com.google.common.base.Objects.ToStringHelper; import com.google.common.base.Strings; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableMap; /** * Lightweight representation of a SNOMED CT reference set member. */ @Doc @JsonDeserialize(builder = SnomedRefSetMemberIndexEntry.Builder.class) @RevisionHash({ SnomedDocument.Fields.ACTIVE, SnomedDocument.Fields.EFFECTIVE_TIME, SnomedDocument.Fields.MODULE_ID, SnomedRefSetMemberIndexEntry.Fields.TARGET_COMPONENT, SnomedRefSetMemberIndexEntry.Fields.VALUE_ID, SnomedRefSetMemberIndexEntry.Fields.ATTRIBUTE_NAME, SnomedRefSetMemberIndexEntry.Fields.STRING_VALUE, SnomedRefSetMemberIndexEntry.Fields.BOOLEAN_VALUE, SnomedRefSetMemberIndexEntry.Fields.INTEGER_VALUE, SnomedRefSetMemberIndexEntry.Fields.DECIMAL_VALUE, SnomedRefSetMemberIndexEntry.Fields.OPERATOR_ID, SnomedRefSetMemberIndexEntry.Fields.CHARACTERISTIC_TYPE_ID, SnomedRefSetMemberIndexEntry.Fields.UNIT_ID, SnomedRefSetMemberIndexEntry.Fields.DESCRIPTION_LENGTH, SnomedRefSetMemberIndexEntry.Fields.DESCRIPTION_FORMAT, SnomedRefSetMemberIndexEntry.Fields.ACCEPTABILITY_ID, SnomedRefSetMemberIndexEntry.Fields.SOURCE_EFFECTIVE_TIME, SnomedRefSetMemberIndexEntry.Fields.TARGET_EFFECTIVE_TIME, SnomedRefSetMemberIndexEntry.Fields.MAP_TARGET, SnomedRefSetMemberIndexEntry.Fields.MAP_TARGET_DESCRIPTION, SnomedRefSetMemberIndexEntry.Fields.MAP_CATEGORY_ID, SnomedRefSetMemberIndexEntry.Fields.CORRELATION_ID, SnomedRefSetMemberIndexEntry.Fields.MAP_ADVICE, SnomedRefSetMemberIndexEntry.Fields.MAP_RULE, SnomedRefSetMemberIndexEntry.Fields.MAP_GROUP, SnomedRefSetMemberIndexEntry.Fields.MAP_PRIORITY, SnomedRefSetMemberIndexEntry.Fields.QUERY }) public final class SnomedRefSetMemberIndexEntry extends SnomedDocument { private static final long serialVersionUID = 5198766293865046258L; public static class Fields extends SnomedDocument.Fields { // known RF2 fields public static final String REFERENCE_SET_ID = "referenceSetId"; // XXX different than the RF2 header field name public static final String REFERENCED_COMPONENT_ID = SnomedRf2Headers.FIELD_REFERENCED_COMPONENT_ID; public static final String ACCEPTABILITY_ID = SnomedRf2Headers.FIELD_ACCEPTABILITY_ID; public static final String VALUE_ID = SnomedRf2Headers.FIELD_VALUE_ID; public static final String TARGET_COMPONENT = SnomedRf2Headers.FIELD_TARGET_COMPONENT; public static final String MAP_TARGET = SnomedRf2Headers.FIELD_MAP_TARGET; public static final String 
MAP_TARGET_DESCRIPTION = SnomedRf2Headers.FIELD_MAP_TARGET_DESCRIPTION; public static final String MAP_GROUP = SnomedRf2Headers.FIELD_MAP_GROUP; public static final String MAP_PRIORITY = SnomedRf2Headers.FIELD_MAP_PRIORITY; public static final String MAP_RULE = SnomedRf2Headers.FIELD_MAP_RULE; public static final String MAP_ADVICE = SnomedRf2Headers.FIELD_MAP_ADVICE; public static final String MAP_CATEGORY_ID = SnomedRf2Headers.FIELD_MAP_CATEGORY_ID; public static final String CORRELATION_ID = SnomedRf2Headers.FIELD_CORRELATION_ID; public static final String DESCRIPTION_FORMAT = SnomedRf2Headers.FIELD_DESCRIPTION_FORMAT; public static final String DESCRIPTION_LENGTH = SnomedRf2Headers.FIELD_DESCRIPTION_LENGTH; public static final String OPERATOR_ID = SnomedRf2Headers.FIELD_OPERATOR_ID; public static final String UNIT_ID = SnomedRf2Headers.FIELD_UNIT_ID; public static final String QUERY = SnomedRf2Headers.FIELD_QUERY; public static final String CHARACTERISTIC_TYPE_ID = SnomedRf2Headers.FIELD_CHARACTERISTIC_TYPE_ID; public static final String SOURCE_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME; public static final String TARGET_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME; private static final String DATA_VALUE = SnomedRf2Headers.FIELD_VALUE; public static final String ATTRIBUTE_NAME = SnomedRf2Headers.FIELD_ATTRIBUTE_NAME; public static final String OWL_EXPRESSION = SnomedRf2Headers.FIELD_OWL_EXPRESSION; public static final String MRCM_DOMAIN_CONSTRAINT = SnomedRf2Headers.FIELD_MRCM_DOMAIN_CONSTRAINT; public static final String MRCM_PARENT_DOMAIN = SnomedRf2Headers.FIELD_MRCM_PARENT_DOMAIN; public static final String MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT = SnomedRf2Headers.FIELD_MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT; public static final String MRCM_PROXIMAL_PRIMITIVE_REFINEMENT = SnomedRf2Headers.FIELD_MRCM_PROXIMAL_PRIMITIVE_REFINEMENT; public static final String MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION = SnomedRf2Headers.FIELD_MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION; public static final String MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION = SnomedRf2Headers.FIELD_MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION; public static final String MRCM_EDITORIAL_GUIDE_REFERENCE = SnomedRf2Headers.FIELD_MRCM_EDITORIAL_GUIDE_REFERENCE; public static final String MRCM_DOMAIN_ID = SnomedRf2Headers.FIELD_MRCM_DOMAIN_ID; public static final String MRCM_GROUPED = SnomedRf2Headers.FIELD_MRCM_GROUPED; public static final String MRCM_ATTRIBUTE_CARDINALITY = SnomedRf2Headers.FIELD_MRCM_ATTRIBUTE_CARDINALITY; public static final String MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY = SnomedRf2Headers.FIELD_MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY; public static final String MRCM_RULE_STRENGTH_ID = SnomedRf2Headers.FIELD_MRCM_RULE_STRENGTH_ID; public static final String MRCM_CONTENT_TYPE_ID = SnomedRf2Headers.FIELD_MRCM_CONTENT_TYPE_ID; public static final String MRCM_RANGE_CONSTRAINT = SnomedRf2Headers.FIELD_MRCM_RANGE_CONSTRAINT; public static final String MRCM_ATTRIBUTE_RULE = SnomedRf2Headers.FIELD_MRCM_ATTRIBUTE_RULE; public static final String MRCM_RULE_REFSET_ID = SnomedRf2Headers.FIELD_MRCM_RULE_REFSET_ID; // extra index fields to store datatype and map target type public static final String DATA_TYPE = "dataType"; public static final String REFSET_TYPE = "referenceSetType"; public static final String REFERENCED_COMPONENT_TYPE = "referencedComponentType"; // CD value fields per type public static final String BOOLEAN_VALUE = "booleanValue"; public static final String STRING_VALUE = "stringValue"; 
public static final String INTEGER_VALUE = "integerValue"; public static final String DECIMAL_VALUE = "decimalValue"; } public static Builder builder() { return new Builder(); } public static Builder builder(final SnomedRefSetMemberIndexEntry source) { return builder() .storageKey(source.getStorageKey()) .active(source.isActive()) .effectiveTime(source.getEffectiveTime()) .id(source.getId()) .moduleId(source.getModuleId()) .referencedComponentId(source.getReferencedComponentId()) .referencedComponentType(source.getReferencedComponentType()) .referenceSetId(source.getReferenceSetId()) .referenceSetType(source.getReferenceSetType()) .released(source.isReleased()) .fields(source.getAdditionalFields()); } public static final Builder builder(final SnomedReferenceSetMember input) { final Builder builder = builder() .storageKey(input.getStorageKey()) .active(input.isActive()) .effectiveTime(EffectiveTimes.getEffectiveTime(input.getEffectiveTime())) .id(input.getId()) .moduleId(input.getModuleId()) .referencedComponentId(input.getReferencedComponent().getId()) .referenceSetId(input.getReferenceSetId()) .referenceSetType(input.type()) .released(input.isReleased()); if (input.getReferencedComponent() instanceof SnomedConcept) { builder.referencedComponentType(CONCEPT_NUMBER); } else if (input.getReferencedComponent() instanceof SnomedDescription) { builder.referencedComponentType(DESCRIPTION_NUMBER); } else if (input.getReferencedComponent() instanceof SnomedRelationship) { builder.referencedComponentType(RELATIONSHIP_NUMBER); } else { builder.referencedComponentType(CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT); } for (Entry<String, Object> entry : input.getProperties().entrySet()) { final Object value = entry.getValue(); final String fieldName = entry.getKey(); // certain RF2 fields can be expanded into full blown representation class, get the ID in this case if (value instanceof SnomedCoreComponent) { builder.field(fieldName, ((SnomedCoreComponent) value).getId()); } else { builder.field(fieldName, convertValue(entry.getKey(), value)); } } return builder; } public static Builder builder(SnomedRefSetMember refSetMember) { final Builder builder = SnomedRefSetMemberIndexEntry.builder() .storageKey(CDOIDUtils.asLong(refSetMember.cdoID())) .id(refSetMember.getUuid()) .moduleId(refSetMember.getModuleId()) .active(refSetMember.isActive()) .released(refSetMember.isReleased()) .effectiveTime(refSetMember.isSetEffectiveTime() ? 
refSetMember.getEffectiveTime().getTime() : EffectiveTimes.UNSET_EFFECTIVE_TIME) .referenceSetId(refSetMember.getRefSetIdentifierId()) .referenceSetType(refSetMember.getRefSet().getType()) .referencedComponentType(refSetMember.getReferencedComponentType()) .referencedComponentId(refSetMember.getReferencedComponentId()); return new SnomedRefSetSwitch<Builder>() { @Override public Builder caseSnomedAssociationRefSetMember(final SnomedAssociationRefSetMember associationMember) { return builder.targetComponent(associationMember.getTargetComponentId()); } @Override public Builder caseSnomedAttributeValueRefSetMember(final SnomedAttributeValueRefSetMember attributeValueMember) { return builder.field(Fields.VALUE_ID, attributeValueMember.getValueId()); } @Override public Builder caseSnomedConcreteDataTypeRefSetMember(final SnomedConcreteDataTypeRefSetMember concreteDataTypeMember) { return builder.field(Fields.ATTRIBUTE_NAME, concreteDataTypeMember.getLabel()) .field(Fields.DATA_TYPE, concreteDataTypeMember.getDataType()) .field(Fields.DATA_VALUE, concreteDataTypeMember.getSerializedValue()) .field(Fields.CHARACTERISTIC_TYPE_ID, concreteDataTypeMember.getCharacteristicTypeId()) .field(Fields.OPERATOR_ID, concreteDataTypeMember.getOperatorComponentId()) .field(Fields.UNIT_ID, concreteDataTypeMember.getUomComponentId()); } @Override public Builder caseSnomedDescriptionTypeRefSetMember(final SnomedDescriptionTypeRefSetMember descriptionTypeMember) { return builder .field(Fields.DESCRIPTION_FORMAT, descriptionTypeMember.getDescriptionFormat()) .field(Fields.DESCRIPTION_LENGTH, descriptionTypeMember.getDescriptionLength()); } @Override public Builder caseSnomedLanguageRefSetMember(final SnomedLanguageRefSetMember languageMember) { return builder.field(Fields.ACCEPTABILITY_ID, languageMember.getAcceptabilityId()); } @Override public Builder caseSnomedQueryRefSetMember(final SnomedQueryRefSetMember queryMember) { return builder.field(Fields.QUERY, queryMember.getQuery()); } @Override public Builder caseSnomedSimpleMapRefSetMember(final SnomedSimpleMapRefSetMember mapRefSetMember) { return builder .field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId()) .field(Fields.MAP_TARGET_DESCRIPTION, mapRefSetMember.getMapTargetComponentDescription()); } @Override public Builder caseSnomedComplexMapRefSetMember(final SnomedComplexMapRefSetMember mapRefSetMember) { return builder .field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId()) .field(Fields.CORRELATION_ID, mapRefSetMember.getCorrelationId()) .field(Fields.MAP_GROUP, Integer.valueOf(mapRefSetMember.getMapGroup())) .field(Fields.MAP_ADVICE, Strings.nullToEmpty(mapRefSetMember.getMapAdvice())) .field(Fields.MAP_PRIORITY, Integer.valueOf(mapRefSetMember.getMapPriority())) .field(Fields.MAP_RULE, Strings.nullToEmpty(mapRefSetMember.getMapRule())) // extended refset .field(Fields.MAP_CATEGORY_ID, Strings.nullToEmpty(mapRefSetMember.getMapCategoryId())); } @Override public Builder caseSnomedModuleDependencyRefSetMember(SnomedModuleDependencyRefSetMember member) { return builder .field(Fields.SOURCE_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getSourceEffectiveTime())) .field(Fields.TARGET_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getTargetEffectiveTime())); } @Override public Builder caseSnomedOWLExpressionRefSetMember(SnomedOWLExpressionRefSetMember member) { return builder .field(Fields.OWL_EXPRESSION, member.getOwlExpression()); }; @Override public Builder 
caseSnomedMRCMDomainRefSetMember(SnomedMRCMDomainRefSetMember member) { return builder .field(Fields.MRCM_DOMAIN_CONSTRAINT, member.getDomainConstraint()) .field(Fields.MRCM_PARENT_DOMAIN, member.getParentDomain()) .field(Fields.MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT, member.getProximalPrimitiveConstraint()) .field(Fields.MRCM_PROXIMAL_PRIMITIVE_REFINEMENT, member.getProximalPrimitiveRefinement()) .field(Fields.MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION, member.getDomainTemplateForPrecoordination()) .field(Fields.MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION, member.getDomainTemplateForPostcoordination()) .field(Fields.MRCM_EDITORIAL_GUIDE_REFERENCE, member.getEditorialGuideReference()); }; @Override public Builder caseSnomedMRCMAttributeDomainRefSetMember(SnomedMRCMAttributeDomainRefSetMember member) { return builder .field(Fields.MRCM_DOMAIN_ID, member.getDomainId()) .field(Fields.MRCM_GROUPED, member.isGrouped()) .field(Fields.MRCM_ATTRIBUTE_CARDINALITY, member.getAttributeCardinality()) .field(Fields.MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY, member.getAttributeInGroupCardinality()) .field(Fields.MRCM_RULE_STRENGTH_ID, member.getRuleStrengthId()) .field(Fields.MRCM_CONTENT_TYPE_ID, member.getContentTypeId()); }; @Override public Builder caseSnomedMRCMAttributeRangeRefSetMember(SnomedMRCMAttributeRangeRefSetMember member) { return builder .field(Fields.MRCM_RANGE_CONSTRAINT, member.getRangeConstraint()) .field(Fields.MRCM_ATTRIBUTE_RULE, member.getAttributeRule()) .field(Fields.MRCM_RULE_STRENGTH_ID, member.getRuleStrengthId()) .field(Fields.MRCM_CONTENT_TYPE_ID, member.getContentTypeId()); }; @Override public Builder caseSnomedMRCMModuleScopeRefSetMember(SnomedMRCMModuleScopeRefSetMember member) { return builder .field(Fields.MRCM_RULE_REFSET_ID, member.getMrcmRuleRefsetId()); }; @Override public Builder caseSnomedRefSetMember(SnomedRefSetMember object) { return builder; }; }.doSwitch(refSetMember); } private static Object convertValue(String rf2Field, Object value) { switch (rf2Field) { case SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME: case SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME: if (value instanceof String && !StringUtils.isEmpty((String) value)) { Date parsedDate = EffectiveTimes.parse((String) value, DateFormats.SHORT); return EffectiveTimes.getEffectiveTime(parsedDate); } else { return EffectiveTimes.UNSET_EFFECTIVE_TIME; } default: return value; } } public static Collection<SnomedRefSetMemberIndexEntry> from(final Iterable<SnomedReferenceSetMember> refSetMembers) { return FluentIterable.from(refSetMembers).transform(new Function<SnomedReferenceSetMember, SnomedRefSetMemberIndexEntry>() { @Override public SnomedRefSetMemberIndexEntry apply(final SnomedReferenceSetMember refSetMember) { return builder(refSetMember).build(); } }).toList(); } public static final class Expressions extends SnomedDocument.Expressions { public static Expression referenceSetId(String referenceSetId) { return exactMatch(Fields.REFERENCE_SET_ID, referenceSetId); } public static Expression referenceSetId(Collection<String> referenceSetIds) { return matchAny(Fields.REFERENCE_SET_ID, referenceSetIds); } public static Expression referencedComponentId(String referencedComponentId) { return exactMatch(Fields.REFERENCED_COMPONENT_ID, referencedComponentId); } public static Expression mapTargets(Collection<String> mapTargets) { return matchAny(Fields.MAP_TARGET, mapTargets); } public static Expression mapTargetDescriptions(Collection<String> mapTargetDescriptions) { return matchAny(Fields.MAP_TARGET_DESCRIPTION, 
mapTargetDescriptions); } public static Expression referencedComponentIds(Collection<String> referencedComponentIds) { return matchAny(Fields.REFERENCED_COMPONENT_ID, referencedComponentIds); } public static Expression targetComponents(Collection<String> targetComponentIds) { return matchAny(Fields.TARGET_COMPONENT, targetComponentIds); } public static Expression acceptabilityIds(Collection<String> acceptabilityIds) { return matchAny(Fields.ACCEPTABILITY_ID, acceptabilityIds); } public static Expression characteristicTypeIds(Collection<String> characteristicTypeIds) { return matchAny(Fields.CHARACTERISTIC_TYPE_ID, characteristicTypeIds); } public static Expression correlationIds(Collection<String> correlationIds) { return matchAny(Fields.CORRELATION_ID, correlationIds); } public static Expression descriptionFormats(Collection<String> descriptionFormats) { return matchAny(Fields.DESCRIPTION_FORMAT, descriptionFormats); } public static Expression mapCategoryIds(Collection<String> mapCategoryIds) { return matchAny(Fields.MAP_CATEGORY_ID, mapCategoryIds); } public static Expression operatorIds(Collection<String> operatorIds) { return matchAny(Fields.OPERATOR_ID, operatorIds); } public static Expression unitIds(Collection<String> unitIds) { return matchAny(Fields.UNIT_ID, unitIds); } public static Expression valueIds(Collection<String> valueIds) { return matchAny(Fields.VALUE_ID, valueIds); } public static Expression values(DataType type, Collection<? extends Object> values) { switch (type) { case STRING: return matchAny(Fields.STRING_VALUE, FluentIterable.from(values).filter(String.class).toSet()); case INTEGER: return matchAnyInt(Fields.INTEGER_VALUE, FluentIterable.from(values).filter(Integer.class).toSet()); case DECIMAL: return matchAnyDecimal(Fields.DECIMAL_VALUE, FluentIterable.from(values).filter(BigDecimal.class).toSet()); default: throw new UnsupportedOperationException("Unsupported data type when filtering by values, " + type); } } public static Expression valueRange(DataType type, final Object lower, final Object upper, boolean includeLower, boolean includeUpper) { switch (type) { case STRING: return matchRange(Fields.STRING_VALUE, (String) lower, (String) upper, includeLower, includeUpper); case INTEGER: return matchRange(Fields.INTEGER_VALUE, (Integer) lower, (Integer) upper, includeLower, includeUpper); case DECIMAL: return matchRange(Fields.DECIMAL_VALUE, (BigDecimal) lower, (BigDecimal) upper, includeLower, includeUpper); default: throw new UnsupportedOperationException("Unsupported data type when filtering by values, " + type); } } public static Expression dataTypes(Collection<DataType> dataTypes) { return matchAny(Fields.DATA_TYPE, FluentIterable.from(dataTypes).transform(new Function<DataType, String>() { @Override public String apply(DataType input) { return input.name(); } }).toSet()); } public static Expression attributeNames(Collection<String> attributeNames) { return matchAny(Fields.ATTRIBUTE_NAME, attributeNames); } public static Expression sourceEffectiveTime(long effectiveTime) { return exactMatch(Fields.SOURCE_EFFECTIVE_TIME, effectiveTime); } public static Expression targetEffectiveTime(long effectiveTime) { return exactMatch(Fields.TARGET_EFFECTIVE_TIME, effectiveTime); } public static Expression refSetTypes(Collection<SnomedRefSetType> refSetTypes) { return matchAny(Fields.REFSET_TYPE, FluentIterable.from(refSetTypes).transform(type -> type.name()).toSet()); } } @JsonPOJOBuilder(withPrefix="") public static final class Builder extends 
SnomedDocumentBuilder<Builder> { private String referencedComponentId; private String referenceSetId; private SnomedRefSetType referenceSetType; private short referencedComponentType; // Member specific fields, they can be null or emptyish values // ASSOCIATION reference set members private String targetComponent; // ATTRIBUTE VALUE private String valueId; // CONCRETE DOMAIN reference set members private DataType dataType; private String attributeName; private Object value; private String operatorId; private String characteristicTypeId; private String unitId; // DESCRIPTION private Integer descriptionLength; private String descriptionFormat; // LANGUAGE private String acceptabilityId; // MODULE private Long sourceEffectiveTime; private Long targetEffectiveTime; // SIMPLE MAP reference set members private String mapTarget; private String mapTargetDescription; // COMPLEX MAP private String mapCategoryId; private String correlationId; private String mapAdvice; private String mapRule; private Integer mapGroup; private Integer mapPriority; // QUERY private String query; // OWL Axiom private String owlExpression; // MRCM Domain private String domainConstraint; private String parentDomain; private String proximalPrimitiveConstraint; private String proximalPrimitiveRefinement; private String domainTemplateForPrecoordination; private String domainTemplateForPostcoordination; private String editorialGuideReference; // MRCM Attribute Domain private String domainId; private Boolean grouped; private String attributeCardinality; private String attributeInGroupCardinality; private String ruleStrengthId; private String contentTypeId; // MRCM Attribute Range private String rangeConstraint; private String attributeRule; // MRCM Module Scope private String mrcmRuleRefsetId; @JsonCreator private Builder() { // Disallow instantiation outside static method } public Builder fields(Map<String, Object> fields) { for (Entry<String, Object> entry : fields.entrySet()) { field(entry.getKey(), entry.getValue()); } return this; } public Builder field(String fieldName, Object value) { switch (fieldName) { case Fields.ACCEPTABILITY_ID: this.acceptabilityId = (String) value; break; case Fields.ATTRIBUTE_NAME: this.attributeName = (String) value; break; case Fields.CHARACTERISTIC_TYPE_ID: this.characteristicTypeId = (String) value; break; case Fields.CORRELATION_ID: this.correlationId = (String) value; break; case Fields.DATA_TYPE: this.dataType = (DataType) value; break; case Fields.DATA_VALUE: this.value = value; break; case Fields.DESCRIPTION_FORMAT: this.descriptionFormat = (String) value; break; case Fields.DESCRIPTION_LENGTH: this.descriptionLength = (Integer) value; break; case Fields.MAP_ADVICE: this.mapAdvice = (String) value; break; case Fields.MAP_CATEGORY_ID: this.mapCategoryId = (String) value; break; case Fields.MAP_GROUP: this.mapGroup = (Integer) value; break; case Fields.MAP_PRIORITY: this.mapPriority = (Integer) value; break; case Fields.MAP_RULE: this.mapRule = (String) value; break; case Fields.MAP_TARGET: this.mapTarget = (String) value; break; case Fields.MAP_TARGET_DESCRIPTION: this.mapTargetDescription = (String) value; break; case Fields.OPERATOR_ID: this.operatorId = (String) value; break; case Fields.QUERY: this.query = (String) value; break; case Fields.SOURCE_EFFECTIVE_TIME: this.sourceEffectiveTime = (Long) value; break; case Fields.TARGET_COMPONENT: this.targetComponent = (String) value; break; case Fields.TARGET_EFFECTIVE_TIME: this.targetEffectiveTime = (Long) value; break; case 
Fields.UNIT_ID: this.unitId = (String) value; break; case Fields.VALUE_ID: this.valueId = (String) value; break; case Fields.OWL_EXPRESSION: this.owlExpression = (String) value; break; case Fields.MRCM_DOMAIN_CONSTRAINT: this.domainConstraint = (String) value; break; case Fields.MRCM_PARENT_DOMAIN: this.parentDomain = (String) value; break; case Fields.MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT: this.proximalPrimitiveConstraint = (String) value; break; case Fields.MRCM_PROXIMAL_PRIMITIVE_REFINEMENT: this.proximalPrimitiveRefinement = (String) value; break; case Fields.MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION: this.domainTemplateForPrecoordination = (String) value; break; case Fields.MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION: this.domainTemplateForPostcoordination = (String) value; break; case Fields.MRCM_EDITORIAL_GUIDE_REFERENCE: this.editorialGuideReference = (String) value; break; case Fields.MRCM_DOMAIN_ID: this.domainId = (String) value; break; case Fields.MRCM_GROUPED: this.grouped = (Boolean) value; break; case Fields.MRCM_ATTRIBUTE_CARDINALITY: this.attributeCardinality = (String) value; break; case Fields.MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY: this.attributeInGroupCardinality = (String) value; break; case Fields.MRCM_RULE_STRENGTH_ID: this.ruleStrengthId = (String) value; break; case Fields.MRCM_CONTENT_TYPE_ID: this.contentTypeId = (String) value; break; case Fields.MRCM_RANGE_CONSTRAINT: this.rangeConstraint = (String) value; break; case Fields.MRCM_ATTRIBUTE_RULE: this.attributeRule = (String) value; break; case Fields.MRCM_RULE_REFSET_ID: this.mrcmRuleRefsetId = (String) value; break; default: throw new UnsupportedOperationException("Unknown RF2 member field: " + fieldName); } return this; } @Override protected Builder getSelf() { return this; } public Builder referencedComponentId(final String referencedComponentId) { this.referencedComponentId = referencedComponentId; return this; } public Builder referenceSetId(final String referenceSetId) { this.referenceSetId = referenceSetId; return this; } public Builder referenceSetType(final SnomedRefSetType referenceSetType) { this.referenceSetType = referenceSetType; return this; } public Builder referencedComponentType(final short referencedComponentType) { this.referencedComponentType = referencedComponentType; return this; } public Builder targetComponent(String targetComponent) { this.targetComponent = targetComponent; return this; } Builder acceptabilityId(String acceptabilityId) { this.acceptabilityId = acceptabilityId; return getSelf(); } Builder attributeName(String attributeName) { this.attributeName = attributeName; return getSelf(); } Builder characteristicTypeId(final String characteristicTypeId) { this.characteristicTypeId = characteristicTypeId; return getSelf(); } Builder correlationId(final String correlationId) { this.correlationId = correlationId; return getSelf(); } Builder dataType(final DataType dataType) { this.dataType = dataType; return getSelf(); } Builder descriptionFormat(final String descriptionFormat) { this.descriptionFormat = descriptionFormat; return getSelf(); } Builder descriptionLength(final Integer descriptionLength) { this.descriptionLength = descriptionLength; return getSelf(); } Builder mapAdvice(final String mapAdvice) { this.mapAdvice = mapAdvice; return getSelf(); } Builder mapCategoryId(final String mapCategoryId) { this.mapCategoryId = mapCategoryId; return getSelf(); } Builder mapGroup(final Integer mapGroup) { this.mapGroup = mapGroup; return getSelf(); } Builder mapPriority(final Integer 
mapPriority) { this.mapPriority = mapPriority; return getSelf(); } Builder mapRule(final String mapRule) { this.mapRule = mapRule; return getSelf(); } Builder mapTarget(final String mapTarget) { this.mapTarget = mapTarget; return getSelf(); } Builder mapTargetDescription(final String mapTargetDescription) { this.mapTargetDescription = mapTargetDescription; return getSelf(); } Builder operatorId(final String operatorId) { this.operatorId = operatorId; return getSelf(); } Builder query(final String query) { this.query = query; return getSelf(); } Builder sourceEffectiveTime(final Long sourceEffectiveTime) { this.sourceEffectiveTime = sourceEffectiveTime; return getSelf(); } Builder targetEffectiveTime(final Long targetEffectiveTime) { this.targetEffectiveTime = targetEffectiveTime; return getSelf(); } Builder unitId(final String unitId) { this.unitId = unitId; return getSelf(); } /** * @deprecated - this is no longer a valid refset member index field, but required to make pre-5.4 dataset work with 5.4 without migration */ Builder value(final Object value) { this.value = value; return getSelf(); } Builder decimalValue(final BigDecimal value) { this.value = value; return getSelf(); } Builder booleanValue(final Boolean value) { this.value = value; return getSelf(); } Builder integerValue(final Integer value) { this.value = value; return getSelf(); } Builder stringValue(final String value) { this.value = value; return getSelf(); } Builder valueId(String valueId) { this.valueId = valueId; return getSelf(); } Builder owlExpression(String owlExpression) { this.owlExpression = owlExpression; return getSelf(); } Builder domainConstraint(String domainConstraint) { this.domainConstraint = domainConstraint; return getSelf(); } Builder parentDomain(String parentDomain) { this.parentDomain = parentDomain; return getSelf(); } Builder proximalPrimitiveConstraint(String proximalPrimitiveConstraint) { this.proximalPrimitiveConstraint = proximalPrimitiveConstraint; return getSelf(); } Builder proximalPrimitiveRefinement(String proximalPrimitiveRefinement) { this.proximalPrimitiveRefinement = proximalPrimitiveRefinement; return getSelf(); } Builder domainTemplateForPrecoordination(String domainTemplateForPrecoordination) { this.domainTemplateForPrecoordination = domainTemplateForPrecoordination; return getSelf(); } Builder domainTemplateForPostcoordination(String domainTemplateForPostcoordination) { this.domainTemplateForPostcoordination = domainTemplateForPostcoordination; return getSelf(); } Builder editorialGuideReference(String editorialGuideReference) { this.editorialGuideReference = editorialGuideReference; return getSelf(); } Builder domainId(String domainId) { this.domainId = domainId; return getSelf(); } Builder grouped(Boolean grouped) { this.grouped = grouped; return getSelf(); } Builder attributeCardinality(String attributeCardinality) { this.attributeCardinality = attributeCardinality; return getSelf(); } Builder attributeInGroupCardinality(String attributeInGroupCardinality) { this.attributeInGroupCardinality = attributeInGroupCardinality; return getSelf(); } Builder ruleStrengthId(String ruleStrengthId) { this.ruleStrengthId = ruleStrengthId; return getSelf(); } Builder contentTypeId(String contentTypeId) { this.contentTypeId = contentTypeId; return getSelf(); } Builder rangeConstraint(String rangeConstraint) { this.rangeConstraint = rangeConstraint; return getSelf(); } Builder attributeRule(String attributeRule) { this.attributeRule = attributeRule; return getSelf(); } Builder 
mrcmRuleRefsetId(String mrcmRuleRefsetId) { this.mrcmRuleRefsetId = mrcmRuleRefsetId; return getSelf(); } public SnomedRefSetMemberIndexEntry build() { final SnomedRefSetMemberIndexEntry doc = new SnomedRefSetMemberIndexEntry(id, label, storageKey, moduleId, released, active, effectiveTime, referencedComponentId, referenceSetId, referenceSetType, referencedComponentType); // association members doc.targetComponent = targetComponent; // attribute value doc.valueId = valueId; // concrete domain members doc.dataType = dataType; doc.attributeName = attributeName; if (dataType != null) { switch (dataType) { case BOOLEAN: if (value instanceof Boolean) { doc.booleanValue = (Boolean) value; } else if (value instanceof String) { doc.booleanValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value); } break; case DECIMAL: if (value instanceof BigDecimal) { doc.decimalValue = (BigDecimal) value; } else if (value instanceof String) { doc.decimalValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value); } break; case INTEGER: if (value instanceof Integer) { doc.integerValue = (Integer) value; } else if (value instanceof String) { doc.integerValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value); } break; case STRING: doc.stringValue = (String) value; break; default: throw new UnsupportedOperationException("Unsupported concrete domain data type: " + dataType); } } doc.characteristicTypeId = characteristicTypeId; doc.operatorId = operatorId; doc.unitId = unitId; // description doc.descriptionFormat = descriptionFormat; doc.descriptionLength = descriptionLength; // language reference set doc.acceptabilityId = acceptabilityId; // module doc.sourceEffectiveTime = sourceEffectiveTime; doc.targetEffectiveTime = targetEffectiveTime; // simple map doc.mapTarget = mapTarget; doc.mapTargetDescription = mapTargetDescription; // complex map doc.mapCategoryId = mapCategoryId; doc.mapAdvice = mapAdvice; doc.correlationId = correlationId; doc.mapGroup = mapGroup; doc.mapPriority = mapPriority; doc.mapRule = mapRule; // query doc.query = query; // OWL Axiom doc.owlExpression = owlExpression; // MRCM Domain doc.domainConstraint = domainConstraint; doc.parentDomain = parentDomain; doc.proximalPrimitiveConstraint = proximalPrimitiveConstraint; doc.proximalPrimitiveRefinement = proximalPrimitiveRefinement; doc.domainTemplateForPrecoordination = domainTemplateForPrecoordination; doc.domainTemplateForPostcoordination = domainTemplateForPostcoordination; doc.editorialGuideReference = editorialGuideReference; // MRCM Attribute Domain doc.domainId = domainId; doc.grouped = grouped; doc.attributeCardinality = attributeCardinality; doc.attributeInGroupCardinality = attributeInGroupCardinality; doc.ruleStrengthId = ruleStrengthId; doc.contentTypeId = contentTypeId; // MRCM Attribute Range doc.rangeConstraint = rangeConstraint; doc.attributeRule = attributeRule; // MRCM Module Scope doc.mrcmRuleRefsetId = mrcmRuleRefsetId; doc.setScore(score); // metadata doc.setCreated(created); doc.setRevised(revised); return doc; } } private final String referencedComponentId; private final String referenceSetId; private final SnomedRefSetType referenceSetType; private final short referencedComponentType; // Member specific fields, they can be null or emptyish values // ASSOCIATION reference set members private String targetComponent; // ATTRIBUTE VALUE private String valueId; // CONCRETE DOMAIN reference set members private DataType dataType; private String attributeName; // only one of these value fields should 
be set when this represents a concrete domain member private String stringValue; private Boolean booleanValue; private Integer integerValue; private BigDecimal decimalValue; private String operatorId; private String characteristicTypeId; private String unitId; // DESCRIPTION private Integer descriptionLength; private String descriptionFormat; // LANGUAGE private String acceptabilityId; // MODULE private Long sourceEffectiveTime; private Long targetEffectiveTime; // SIMPLE MAP reference set members private String mapTarget; private String mapTargetDescription; // COMPLEX MAP private String mapCategoryId; private String correlationId; private String mapAdvice; private String mapRule; private Integer mapGroup; private Integer mapPriority; // QUERY @Keyword(index = false) private String query; // OWL Axiom private String owlExpression; // MRCM Domain private String domainConstraint; private String parentDomain; private String proximalPrimitiveConstraint; private String proximalPrimitiveRefinement; private String domainTemplateForPrecoordination; private String domainTemplateForPostcoordination; private String editorialGuideReference; // MRCM Attribute Domain private String domainId; private Boolean grouped; private String attributeCardinality; private String attributeInGroupCardinality; private String ruleStrengthId; private String contentTypeId; // MRCM Attribute Range private String rangeConstraint; private String attributeRule; // MRCM Module Scope private String mrcmRuleRefsetId; private SnomedRefSetMemberIndexEntry(final String id, final String label, final long storageKey, final String moduleId, final boolean released, final boolean active, final long effectiveTimeLong, final String referencedComponentId, final String referenceSetId, final SnomedRefSetType referenceSetType, final short referencedComponentType) { super(id, label, referencedComponentId, // XXX: iconId is the referenced component identifier storageKey, moduleId, released, active, effectiveTimeLong); checkArgument(referencedComponentType >= CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT, "Referenced component type '%s' is invalid.", referencedComponentType); this.referencedComponentId = referencedComponentId; this.referenceSetId = referenceSetId; this.referenceSetType = referenceSetType; this.referencedComponentType = referencedComponentType; } @Override public String getContainerId() { return getReferencedComponentId(); } /** * @return the referenced component identifier */ public String getReferencedComponentId() { return referencedComponentId; } /** * @return the identifier of the member's reference set */ public String getReferenceSetId() { return referenceSetId; } /** * @return the type of the member's reference set */ public SnomedRefSetType getReferenceSetType() { return referenceSetType; } @JsonIgnore @SuppressWarnings("unchecked") public <T> T getValueAs() { return (T) getValue(); } @JsonIgnore public Object getValue() { if (dataType == null) { return null; } else { switch (dataType) { case BOOLEAN: return booleanValue; case DECIMAL: return decimalValue; case INTEGER: return integerValue; case STRING: return stringValue; default: throw new UnsupportedOperationException("Unsupported concrete domain data type: " + dataType); } } } @JsonProperty BigDecimal getDecimalValue() { return decimalValue; } @JsonProperty Boolean getBooleanValue() { return booleanValue; } @JsonProperty Integer getIntegerValue() { return integerValue; } @JsonProperty String getStringValue() { return stringValue; } public DataType getDataType() { 
return dataType; } public String getUnitId() { return unitId; } public String getAttributeName() { return attributeName; } public String getOperatorId() { return operatorId; } public String getCharacteristicTypeId() { return characteristicTypeId; } public String getAcceptabilityId() { return acceptabilityId; } public Integer getDescriptionLength() { return descriptionLength; } public String getDescriptionFormat() { return descriptionFormat; } public String getMapTarget() { return mapTarget; } public Integer getMapGroup() { return mapGroup; } public Integer getMapPriority() { return mapPriority; } public String getMapRule() { return mapRule; } public String getMapAdvice() { return mapAdvice; } public String getMapCategoryId() { return mapCategoryId; } public String getCorrelationId() { return correlationId; } public String getMapTargetDescription() { return mapTargetDescription; } public String getQuery() { return query; } public String getTargetComponent() { return targetComponent; } public String getValueId() { return valueId; } public Long getSourceEffectiveTime() { return sourceEffectiveTime; } public Long getTargetEffectiveTime() { return targetEffectiveTime; } public short getReferencedComponentType() { return referencedComponentType; } public String getOwlExpression() { return owlExpression; } public String getDomainConstraint() { return domainConstraint; } public String getParentDomain() { return parentDomain; } public String getProximalPrimitiveConstraint() { return proximalPrimitiveConstraint; } public String getProximalPrimitiveRefinement() { return proximalPrimitiveRefinement; } public String getDomainTemplateForPrecoordination() { return domainTemplateForPrecoordination; } public String getDomainTemplateForPostcoordination() { return domainTemplateForPostcoordination; } public String getEditorialGuideReference() { return editorialGuideReference; } public String getDomainId() { return domainId; } public Boolean isGrouped() { return grouped; } public String getAttributeCardinality() { return attributeCardinality; } public String getAttributeInGroupCardinality() { return attributeInGroupCardinality; } public String getRuleStrengthId() { return ruleStrengthId; } public String getContentTypeId() { return contentTypeId; } public String getRangeConstraint() { return rangeConstraint; } public String getAttributeRule() { return attributeRule; } public String getMrcmRuleRefsetId() { return mrcmRuleRefsetId; } // model helper methods @JsonIgnore public Acceptability getAcceptability() { return Acceptability.getByConceptId(getAcceptabilityId()); } @JsonIgnore public RelationshipRefinability getRefinability() { return RelationshipRefinability.getByConceptId(getValueId()); } @JsonIgnore public InactivationIndicator getInactivationIndicator() { return InactivationIndicator.getByConceptId(getValueId()); } @JsonIgnore public String getSourceEffectiveTimeAsString() { return EffectiveTimes.format(getSourceEffectiveTime(), DateFormats.SHORT); } @JsonIgnore public String getTargetEffectiveTimeAsString() { return EffectiveTimes.format(getTargetEffectiveTime(), DateFormats.SHORT); } /** * @return the {@code String} terminology component identifier of the component referenced in this member */ @JsonIgnore public String getReferencedComponentTypeAsString() { return CoreTerminologyBroker.getInstance().getTerminologyComponentId(referencedComponentType); } /** * Helper which converts all non-null/empty additional fields to a values {@link Map} keyed by their field name; * @return */ @JsonIgnore public 
Map<String, Object> getAdditionalFields() { final ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder(); // ASSOCIATION refset members putIfPresent(builder, Fields.TARGET_COMPONENT, getTargetComponent()); // ATTRIBUTE_VALUE refset members putIfPresent(builder, Fields.VALUE_ID, getValueId()); // CONCRETE DOMAIN reference set members putIfPresent(builder, Fields.DATA_TYPE, getDataType()); putIfPresent(builder, Fields.ATTRIBUTE_NAME, getAttributeName()); putIfPresent(builder, Fields.DATA_VALUE, getValue()); putIfPresent(builder, Fields.OPERATOR_ID, getOperatorId()); putIfPresent(builder, Fields.CHARACTERISTIC_TYPE_ID, getCharacteristicTypeId()); putIfPresent(builder, Fields.UNIT_ID, getUnitId()); // DESCRIPTION putIfPresent(builder, Fields.DESCRIPTION_LENGTH, getDescriptionLength()); putIfPresent(builder, Fields.DESCRIPTION_FORMAT, getDescriptionFormat()); // LANGUAGE putIfPresent(builder, Fields.ACCEPTABILITY_ID, getAcceptabilityId()); // MODULE putIfPresent(builder, Fields.SOURCE_EFFECTIVE_TIME, getSourceEffectiveTime()); putIfPresent(builder, Fields.TARGET_EFFECTIVE_TIME, getTargetEffectiveTime()); // SIMPLE MAP reference set members putIfPresent(builder, Fields.MAP_TARGET, getMapTarget()); putIfPresent(builder, Fields.MAP_TARGET_DESCRIPTION, getMapTargetDescription()); // COMPLEX MAP putIfPresent(builder, Fields.MAP_CATEGORY_ID, getMapCategoryId()); putIfPresent(builder, Fields.CORRELATION_ID, getCorrelationId()); putIfPresent(builder, Fields.MAP_ADVICE, getMapAdvice()); putIfPresent(builder, Fields.MAP_RULE, getMapRule()); putIfPresent(builder, Fields.MAP_GROUP, getMapGroup()); putIfPresent(builder, Fields.MAP_PRIORITY, getMapPriority()); // QUERY putIfPresent(builder, Fields.QUERY, getQuery()); // OWL Axiom putIfPresent(builder, Fields.OWL_EXPRESSION, getOwlExpression()); // MRCM Domain putIfPresent(builder, Fields.MRCM_DOMAIN_CONSTRAINT, getDomainConstraint()); putIfPresent(builder, Fields.MRCM_PARENT_DOMAIN, getParentDomain()); putIfPresent(builder, Fields.MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT, getProximalPrimitiveConstraint()); putIfPresent(builder, Fields.MRCM_PROXIMAL_PRIMITIVE_REFINEMENT, getProximalPrimitiveRefinement()); putIfPresent(builder, Fields.MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION, getDomainTemplateForPrecoordination()); putIfPresent(builder, Fields.MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION, getDomainTemplateForPostcoordination()); putIfPresent(builder, Fields.MRCM_EDITORIAL_GUIDE_REFERENCE, getEditorialGuideReference()); // MRCM Attribute Domain putIfPresent(builder, Fields.MRCM_DOMAIN_ID, getDomainId()); putIfPresent(builder, Fields.MRCM_GROUPED, isGrouped()); putIfPresent(builder, Fields.MRCM_ATTRIBUTE_CARDINALITY, getAttributeCardinality()); putIfPresent(builder, Fields.MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY, getAttributeInGroupCardinality()); putIfPresent(builder, Fields.MRCM_RULE_STRENGTH_ID, getRuleStrengthId()); putIfPresent(builder, Fields.MRCM_CONTENT_TYPE_ID, getContentTypeId()); // MRCM Attribute Range putIfPresent(builder, Fields.MRCM_RANGE_CONSTRAINT, getRangeConstraint()); putIfPresent(builder, Fields.MRCM_ATTRIBUTE_RULE, getAttributeRule()); // MRCM Module Scope putIfPresent(builder, Fields.MRCM_RULE_REFSET_ID, getMrcmRuleRefsetId()); return builder.build(); } private static void putIfPresent(ImmutableMap.Builder<String, Object> builder, String key, Object value) { if (key != null && value != null) { builder.put(key, value); } } @Override protected ToStringHelper doToString() { return super.doToString() .add("referencedComponentId", 
referencedComponentId) .add("referenceSetId", referenceSetId) .add("referenceSetType", referenceSetType) .add("referencedComponentType", referencedComponentType) .add("targetComponent", targetComponent) .add("valueId", valueId) .add("dataType", dataType) .add("attributeName", attributeName) .add("value", getValue()) .add("operatorId", operatorId) .add("characteristicTypeId", characteristicTypeId) .add("unitId", unitId) .add("descriptionLength", descriptionLength) .add("descriptionFormat", descriptionFormat) .add("acceptabilityId", acceptabilityId) .add("sourceEffectiveTime", sourceEffectiveTime) .add("targetEffectiveTime", targetEffectiveTime) .add("mapTarget", mapTarget) .add("mapTargetDescription", mapTargetDescription) .add("mapCategoryId", mapCategoryId) .add("correlationId", correlationId) .add("mapAdvice", mapAdvice) .add("mapRule", mapRule) .add("mapGroup", mapGroup) .add("mapPriority", mapPriority) .add("query", query) .add("owlExpression", owlExpression) .add("domainConstraint", domainConstraint) .add("parentDomain", parentDomain) .add("proximalPrimitiveConstraint", proximalPrimitiveConstraint) .add("proximalPrimitiveRefinement", proximalPrimitiveRefinement) .add("domainTemplateForPrecoordination", domainTemplateForPrecoordination) .add("domainTemplateForPostcoordination", domainTemplateForPostcoordination) .add("editorialGuideReference", editorialGuideReference) .add("domainId", domainId) .add("grouped", grouped) .add("attributeCardinality", attributeCardinality) .add("attributeInGroupCardinality", attributeInGroupCardinality) .add("ruleStrengthId", ruleStrengthId) .add("contentTypeId", contentTypeId) .add("rangeConstraint", rangeConstraint) .add("attributeRule", attributeRule) .add("mrcmRuleRefsetId", mrcmRuleRefsetId); } }
snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/index/entry/SnomedRefSetMemberIndexEntry.java
/* * Copyright 2011-2018 B2i Healthcare Pte Ltd, http://b2i.sg * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.b2international.snowowl.snomed.datastore.index.entry; import static com.b2international.index.query.Expressions.exactMatch; import static com.b2international.index.query.Expressions.matchAny; import static com.b2international.index.query.Expressions.matchAnyDecimal; import static com.b2international.index.query.Expressions.matchAnyInt; import static com.b2international.index.query.Expressions.matchRange; import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER; import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.DESCRIPTION_NUMBER; import static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.RELATIONSHIP_NUMBER; import static com.google.common.base.Preconditions.checkArgument; import java.math.BigDecimal; import java.util.Collection; import java.util.Date; import java.util.Map; import java.util.Map.Entry; import com.b2international.commons.StringUtils; import com.b2international.index.Doc; import com.b2international.index.Keyword; import com.b2international.index.RevisionHash; import com.b2international.index.query.Expression; import com.b2international.snowowl.core.CoreTerminologyBroker; import com.b2international.snowowl.core.date.DateFormats; import com.b2international.snowowl.core.date.EffectiveTimes; import com.b2international.snowowl.datastore.cdo.CDOIDUtils; import com.b2international.snowowl.snomed.common.SnomedRf2Headers; import com.b2international.snowowl.snomed.core.domain.Acceptability; import com.b2international.snowowl.snomed.core.domain.InactivationIndicator; import com.b2international.snowowl.snomed.core.domain.RelationshipRefinability; import com.b2international.snowowl.snomed.core.domain.SnomedConcept; import com.b2international.snowowl.snomed.core.domain.SnomedCoreComponent; import com.b2international.snowowl.snomed.core.domain.SnomedDescription; import com.b2international.snowowl.snomed.core.domain.SnomedRelationship; import com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSetMember; import com.b2international.snowowl.snomed.datastore.SnomedRefSetUtil; import com.b2international.snowowl.snomed.snomedrefset.DataType; import com.b2international.snowowl.snomed.snomedrefset.SnomedAssociationRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedAttributeValueRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedComplexMapRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedConcreteDataTypeRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedDescriptionTypeRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedLanguageRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMAttributeDomainRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMAttributeRangeRefSetMember; import 
com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMDomainRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedMRCMModuleScopeRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedModuleDependencyRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedOWLExpressionRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedQueryRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetType; import com.b2international.snowowl.snomed.snomedrefset.SnomedSimpleMapRefSetMember; import com.b2international.snowowl.snomed.snomedrefset.util.SnomedRefSetSwitch; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; import com.google.common.base.Function; import com.google.common.base.Objects.ToStringHelper; import com.google.common.base.Strings; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableMap; /** * Lightweight representation of a SNOMED CT reference set member. */ @Doc @JsonDeserialize(builder = SnomedRefSetMemberIndexEntry.Builder.class) @RevisionHash({ SnomedDocument.Fields.ACTIVE, SnomedDocument.Fields.EFFECTIVE_TIME, SnomedDocument.Fields.MODULE_ID, SnomedRefSetMemberIndexEntry.Fields.TARGET_COMPONENT, SnomedRefSetMemberIndexEntry.Fields.VALUE_ID, SnomedRefSetMemberIndexEntry.Fields.ATTRIBUTE_NAME, SnomedRefSetMemberIndexEntry.Fields.STRING_VALUE, SnomedRefSetMemberIndexEntry.Fields.BOOLEAN_VALUE, SnomedRefSetMemberIndexEntry.Fields.INTEGER_VALUE, SnomedRefSetMemberIndexEntry.Fields.DECIMAL_VALUE, SnomedRefSetMemberIndexEntry.Fields.OPERATOR_ID, SnomedRefSetMemberIndexEntry.Fields.CHARACTERISTIC_TYPE_ID, SnomedRefSetMemberIndexEntry.Fields.UNIT_ID, SnomedRefSetMemberIndexEntry.Fields.DESCRIPTION_LENGTH, SnomedRefSetMemberIndexEntry.Fields.DESCRIPTION_FORMAT, SnomedRefSetMemberIndexEntry.Fields.ACCEPTABILITY_ID, SnomedRefSetMemberIndexEntry.Fields.SOURCE_EFFECTIVE_TIME, SnomedRefSetMemberIndexEntry.Fields.TARGET_EFFECTIVE_TIME, SnomedRefSetMemberIndexEntry.Fields.MAP_TARGET, SnomedRefSetMemberIndexEntry.Fields.MAP_TARGET_DESCRIPTION, SnomedRefSetMemberIndexEntry.Fields.MAP_CATEGORY_ID, SnomedRefSetMemberIndexEntry.Fields.CORRELATION_ID, SnomedRefSetMemberIndexEntry.Fields.MAP_ADVICE, SnomedRefSetMemberIndexEntry.Fields.MAP_RULE, SnomedRefSetMemberIndexEntry.Fields.MAP_GROUP, SnomedRefSetMemberIndexEntry.Fields.MAP_PRIORITY, SnomedRefSetMemberIndexEntry.Fields.QUERY }) public final class SnomedRefSetMemberIndexEntry extends SnomedDocument { private static final long serialVersionUID = 5198766293865046258L; public static class Fields extends SnomedDocument.Fields { // known RF2 fields public static final String REFERENCE_SET_ID = "referenceSetId"; // XXX different than the RF2 header field name public static final String REFERENCED_COMPONENT_ID = SnomedRf2Headers.FIELD_REFERENCED_COMPONENT_ID; public static final String ACCEPTABILITY_ID = SnomedRf2Headers.FIELD_ACCEPTABILITY_ID; public static final String VALUE_ID = SnomedRf2Headers.FIELD_VALUE_ID; public static final String TARGET_COMPONENT = SnomedRf2Headers.FIELD_TARGET_COMPONENT; public static final String MAP_TARGET = SnomedRf2Headers.FIELD_MAP_TARGET; public static final String 
MAP_TARGET_DESCRIPTION = SnomedRf2Headers.FIELD_MAP_TARGET_DESCRIPTION; public static final String MAP_GROUP = SnomedRf2Headers.FIELD_MAP_GROUP; public static final String MAP_PRIORITY = SnomedRf2Headers.FIELD_MAP_PRIORITY; public static final String MAP_RULE = SnomedRf2Headers.FIELD_MAP_RULE; public static final String MAP_ADVICE = SnomedRf2Headers.FIELD_MAP_ADVICE; public static final String MAP_CATEGORY_ID = SnomedRf2Headers.FIELD_MAP_CATEGORY_ID; public static final String CORRELATION_ID = SnomedRf2Headers.FIELD_CORRELATION_ID; public static final String DESCRIPTION_FORMAT = SnomedRf2Headers.FIELD_DESCRIPTION_FORMAT; public static final String DESCRIPTION_LENGTH = SnomedRf2Headers.FIELD_DESCRIPTION_LENGTH; public static final String OPERATOR_ID = SnomedRf2Headers.FIELD_OPERATOR_ID; public static final String UNIT_ID = SnomedRf2Headers.FIELD_UNIT_ID; public static final String QUERY = SnomedRf2Headers.FIELD_QUERY; public static final String CHARACTERISTIC_TYPE_ID = SnomedRf2Headers.FIELD_CHARACTERISTIC_TYPE_ID; public static final String SOURCE_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME; public static final String TARGET_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME; private static final String DATA_VALUE = SnomedRf2Headers.FIELD_VALUE; public static final String ATTRIBUTE_NAME = SnomedRf2Headers.FIELD_ATTRIBUTE_NAME; public static final String OWL_EXPRESSION = SnomedRf2Headers.FIELD_OWL_EXPRESSION; public static final String MRCM_DOMAIN_CONSTRAINT = SnomedRf2Headers.FIELD_MRCM_DOMAIN_CONSTRAINT; public static final String MRCM_PARENT_DOMAIN = SnomedRf2Headers.FIELD_MRCM_PARENT_DOMAIN; public static final String MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT = SnomedRf2Headers.FIELD_MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT; public static final String MRCM_PROXIMAL_PRIMITIVE_REFINEMENT = SnomedRf2Headers.FIELD_MRCM_PROXIMAL_PRIMITIVE_REFINEMENT; public static final String MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION = SnomedRf2Headers.FIELD_MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION; public static final String MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION = SnomedRf2Headers.FIELD_MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION; public static final String MRCM_EDITORIAL_GUIDE_REFERENCE = SnomedRf2Headers.FIELD_MRCM_EDITORIAL_GUIDE_REFERENCE; public static final String MRCM_DOMAIN_ID = SnomedRf2Headers.FIELD_MRCM_DOMAIN_ID; public static final String MRCM_GROUPED = SnomedRf2Headers.FIELD_MRCM_GROUPED; public static final String MRCM_ATTRIBUTE_CARDINALITY = SnomedRf2Headers.FIELD_MRCM_ATTRIBUTE_CARDINALITY; public static final String MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY = SnomedRf2Headers.FIELD_MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY; public static final String MRCM_RULE_STRENGTH_ID = SnomedRf2Headers.FIELD_MRCM_RULE_STRENGTH_ID; public static final String MRCM_CONTENT_TYPE_ID = SnomedRf2Headers.FIELD_MRCM_CONTENT_TYPE_ID; public static final String MRCM_RANGE_CONSTRAINT = SnomedRf2Headers.FIELD_MRCM_RANGE_CONSTRAINT; public static final String MRCM_ATTRIBUTE_RULE = SnomedRf2Headers.FIELD_MRCM_ATTRIBUTE_RULE; public static final String MRCM_RULE_REFSET_ID = SnomedRf2Headers.FIELD_MRCM_RULE_REFSET_ID; // extra index fields to store datatype and map target type public static final String DATA_TYPE = "dataType"; public static final String REFSET_TYPE = "referenceSetType"; public static final String REFERENCED_COMPONENT_TYPE = "referencedComponentType"; // CD value fields per type public static final String BOOLEAN_VALUE = "booleanValue"; public static final String STRING_VALUE = "stringValue"; 
public static final String INTEGER_VALUE = "integerValue"; public static final String DECIMAL_VALUE = "decimalValue"; } public static Builder builder() { return new Builder(); } public static Builder builder(final SnomedRefSetMemberIndexEntry source) { return builder() .storageKey(source.getStorageKey()) .active(source.isActive()) .effectiveTime(source.getEffectiveTime()) .id(source.getId()) .moduleId(source.getModuleId()) .referencedComponentId(source.getReferencedComponentId()) .referencedComponentType(source.getReferencedComponentType()) .referenceSetId(source.getReferenceSetId()) .referenceSetType(source.getReferenceSetType()) .released(source.isReleased()) .fields(source.getAdditionalFields()); } public static final Builder builder(final SnomedReferenceSetMember input) { final Builder builder = builder() .storageKey(input.getStorageKey()) .active(input.isActive()) .effectiveTime(EffectiveTimes.getEffectiveTime(input.getEffectiveTime())) .id(input.getId()) .moduleId(input.getModuleId()) .referencedComponentId(input.getReferencedComponent().getId()) .referenceSetId(input.getReferenceSetId()) .referenceSetType(input.type()) .released(input.isReleased()); if (input.getReferencedComponent() instanceof SnomedConcept) { builder.referencedComponentType(CONCEPT_NUMBER); } else if (input.getReferencedComponent() instanceof SnomedDescription) { builder.referencedComponentType(DESCRIPTION_NUMBER); } else if (input.getReferencedComponent() instanceof SnomedRelationship) { builder.referencedComponentType(RELATIONSHIP_NUMBER); } else { builder.referencedComponentType(CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT); } for (Entry<String, Object> entry : input.getProperties().entrySet()) { final Object value = entry.getValue(); final String fieldName = entry.getKey(); // certain RF2 fields can be expanded into full blown representation class, get the ID in this case if (value instanceof SnomedCoreComponent) { builder.field(fieldName, ((SnomedCoreComponent) value).getId()); } else { builder.field(fieldName, convertValue(entry.getKey(), value)); } } return builder; } public static Builder builder(SnomedRefSetMember refSetMember) { final Builder builder = SnomedRefSetMemberIndexEntry.builder() .storageKey(CDOIDUtils.asLong(refSetMember.cdoID())) .id(refSetMember.getUuid()) .moduleId(refSetMember.getModuleId()) .active(refSetMember.isActive()) .released(refSetMember.isReleased()) .effectiveTime(refSetMember.isSetEffectiveTime() ? 
refSetMember.getEffectiveTime().getTime() : EffectiveTimes.UNSET_EFFECTIVE_TIME) .referenceSetId(refSetMember.getRefSetIdentifierId()) .referenceSetType(refSetMember.getRefSet().getType()) .referencedComponentType(refSetMember.getReferencedComponentType()) .referencedComponentId(refSetMember.getReferencedComponentId()); return new SnomedRefSetSwitch<Builder>() { @Override public Builder caseSnomedAssociationRefSetMember(final SnomedAssociationRefSetMember associationMember) { return builder.targetComponent(associationMember.getTargetComponentId()); } @Override public Builder caseSnomedAttributeValueRefSetMember(final SnomedAttributeValueRefSetMember attributeValueMember) { return builder.field(Fields.VALUE_ID, attributeValueMember.getValueId()); } @Override public Builder caseSnomedConcreteDataTypeRefSetMember(final SnomedConcreteDataTypeRefSetMember concreteDataTypeMember) { return builder.field(Fields.ATTRIBUTE_NAME, concreteDataTypeMember.getLabel()) .field(Fields.DATA_TYPE, concreteDataTypeMember.getDataType()) .field(Fields.DATA_VALUE, concreteDataTypeMember.getSerializedValue()) .field(Fields.CHARACTERISTIC_TYPE_ID, concreteDataTypeMember.getCharacteristicTypeId()) .field(Fields.OPERATOR_ID, concreteDataTypeMember.getOperatorComponentId()) .field(Fields.UNIT_ID, concreteDataTypeMember.getUomComponentId()); } @Override public Builder caseSnomedDescriptionTypeRefSetMember(final SnomedDescriptionTypeRefSetMember descriptionTypeMember) { return builder .field(Fields.DESCRIPTION_FORMAT, descriptionTypeMember.getDescriptionFormat()) .field(Fields.DESCRIPTION_LENGTH, descriptionTypeMember.getDescriptionLength()); } @Override public Builder caseSnomedLanguageRefSetMember(final SnomedLanguageRefSetMember languageMember) { return builder.field(Fields.ACCEPTABILITY_ID, languageMember.getAcceptabilityId()); } @Override public Builder caseSnomedQueryRefSetMember(final SnomedQueryRefSetMember queryMember) { return builder.field(Fields.QUERY, queryMember.getQuery()); } @Override public Builder caseSnomedSimpleMapRefSetMember(final SnomedSimpleMapRefSetMember mapRefSetMember) { return builder .field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId()) .field(Fields.MAP_TARGET_DESCRIPTION, mapRefSetMember.getMapTargetComponentDescription()); } @Override public Builder caseSnomedComplexMapRefSetMember(final SnomedComplexMapRefSetMember mapRefSetMember) { return builder .field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId()) .field(Fields.CORRELATION_ID, mapRefSetMember.getCorrelationId()) .field(Fields.MAP_GROUP, Integer.valueOf(mapRefSetMember.getMapGroup())) .field(Fields.MAP_ADVICE, Strings.nullToEmpty(mapRefSetMember.getMapAdvice())) .field(Fields.MAP_PRIORITY, Integer.valueOf(mapRefSetMember.getMapPriority())) .field(Fields.MAP_RULE, Strings.nullToEmpty(mapRefSetMember.getMapRule())) // extended refset .field(Fields.MAP_CATEGORY_ID, Strings.nullToEmpty(mapRefSetMember.getMapCategoryId())); } @Override public Builder caseSnomedModuleDependencyRefSetMember(SnomedModuleDependencyRefSetMember member) { return builder .field(Fields.SOURCE_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getSourceEffectiveTime())) .field(Fields.TARGET_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getTargetEffectiveTime())); } @Override public Builder caseSnomedOWLExpressionRefSetMember(SnomedOWLExpressionRefSetMember member) { return builder .field(Fields.OWL_EXPRESSION, member.getOwlExpression()); }; @Override public Builder 
caseSnomedMRCMDomainRefSetMember(SnomedMRCMDomainRefSetMember member) { return builder .field(Fields.MRCM_DOMAIN_CONSTRAINT, member.getDomainConstraint()) .field(Fields.MRCM_PARENT_DOMAIN, member.getParentDomain()) .field(Fields.MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT, member.getProximalPrimitiveConstraint()) .field(Fields.MRCM_PROXIMAL_PRIMITIVE_REFINEMENT, member.getProximalPrimitiveRefinement()) .field(Fields.MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION, member.getDomainTemplateForPrecoordination()) .field(Fields.MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION, member.getDomainTemplateForPostcoordination()) .field(Fields.MRCM_EDITORIAL_GUIDE_REFERENCE, member.getEditorialGuideReference()); }; @Override public Builder caseSnomedMRCMAttributeDomainRefSetMember(SnomedMRCMAttributeDomainRefSetMember member) { return builder .field(Fields.MRCM_DOMAIN_ID, member.getDomainId()) .field(Fields.MRCM_GROUPED, member.isGrouped()) .field(Fields.MRCM_ATTRIBUTE_CARDINALITY, member.getAttributeCardinality()) .field(Fields.MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY, member.getAttributeInGroupCardinality()) .field(Fields.MRCM_RULE_STRENGTH_ID, member.getRuleStrengthId()) .field(Fields.MRCM_CONTENT_TYPE_ID, member.getContentTypeId()); }; @Override public Builder caseSnomedMRCMAttributeRangeRefSetMember(SnomedMRCMAttributeRangeRefSetMember member) { return builder .field(Fields.MRCM_RANGE_CONSTRAINT, member.getRangeConstraint()) .field(Fields.MRCM_ATTRIBUTE_RULE, member.getAttributeRule()) .field(Fields.MRCM_RULE_STRENGTH_ID, member.getRuleStrengthId()) .field(Fields.MRCM_CONTENT_TYPE_ID, member.getContentTypeId()); }; @Override public Builder caseSnomedMRCMModuleScopeRefSetMember(SnomedMRCMModuleScopeRefSetMember member) { return builder .field(Fields.MRCM_RULE_REFSET_ID, member.getMrcmRuleRefsetId()); }; @Override public Builder caseSnomedRefSetMember(SnomedRefSetMember object) { return builder; }; }.doSwitch(refSetMember); } private static Object convertValue(String rf2Field, Object value) { switch (rf2Field) { case SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME: case SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME: if (value instanceof String && !StringUtils.isEmpty((String) value)) { Date parsedDate = EffectiveTimes.parse((String) value, DateFormats.SHORT); return EffectiveTimes.getEffectiveTime(parsedDate); } else { return EffectiveTimes.UNSET_EFFECTIVE_TIME; } default: return value; } } public static Collection<SnomedRefSetMemberIndexEntry> from(final Iterable<SnomedReferenceSetMember> refSetMembers) { return FluentIterable.from(refSetMembers).transform(new Function<SnomedReferenceSetMember, SnomedRefSetMemberIndexEntry>() { @Override public SnomedRefSetMemberIndexEntry apply(final SnomedReferenceSetMember refSetMember) { return builder(refSetMember).build(); } }).toList(); } public static final class Expressions extends SnomedDocument.Expressions { public static Expression referenceSetId(String referenceSetId) { return exactMatch(Fields.REFERENCE_SET_ID, referenceSetId); } public static Expression referenceSetId(Collection<String> referenceSetIds) { return matchAny(Fields.REFERENCE_SET_ID, referenceSetIds); } public static Expression referencedComponentId(String referencedComponentId) { return exactMatch(Fields.REFERENCED_COMPONENT_ID, referencedComponentId); } public static Expression mapTargets(Collection<String> mapTargets) { return matchAny(Fields.MAP_TARGET, mapTargets); } public static Expression mapTargetDescriptions(Collection<String> mapTargetDescriptions) { return matchAny(Fields.MAP_TARGET_DESCRIPTION, 
mapTargetDescriptions); } public static Expression referencedComponentIds(Collection<String> referencedComponentIds) { return matchAny(Fields.REFERENCED_COMPONENT_ID, referencedComponentIds); } public static Expression targetComponents(Collection<String> targetComponentIds) { return matchAny(Fields.TARGET_COMPONENT, targetComponentIds); } public static Expression acceptabilityIds(Collection<String> acceptabilityIds) { return matchAny(Fields.ACCEPTABILITY_ID, acceptabilityIds); } public static Expression characteristicTypeIds(Collection<String> characteristicTypeIds) { return matchAny(Fields.CHARACTERISTIC_TYPE_ID, characteristicTypeIds); } public static Expression correlationIds(Collection<String> correlationIds) { return matchAny(Fields.CORRELATION_ID, correlationIds); } public static Expression descriptionFormats(Collection<String> descriptionFormats) { return matchAny(Fields.DESCRIPTION_FORMAT, descriptionFormats); } public static Expression mapCategoryIds(Collection<String> mapCategoryIds) { return matchAny(Fields.MAP_CATEGORY_ID, mapCategoryIds); } public static Expression operatorIds(Collection<String> operatorIds) { return matchAny(Fields.OPERATOR_ID, operatorIds); } public static Expression unitIds(Collection<String> unitIds) { return matchAny(Fields.UNIT_ID, unitIds); } public static Expression valueIds(Collection<String> valueIds) { return matchAny(Fields.VALUE_ID, valueIds); } public static Expression values(DataType type, Collection<? extends Object> values) { switch (type) { case STRING: return matchAny(Fields.STRING_VALUE, FluentIterable.from(values).filter(String.class).toSet()); case INTEGER: return matchAnyInt(Fields.INTEGER_VALUE, FluentIterable.from(values).filter(Integer.class).toSet()); case DECIMAL: return matchAnyDecimal(Fields.DECIMAL_VALUE, FluentIterable.from(values).filter(BigDecimal.class).toSet()); default: throw new UnsupportedOperationException("Unsupported data type when filtering by values, " + type); } } public static Expression valueRange(DataType type, final Object lower, final Object upper, boolean includeLower, boolean includeUpper) { switch (type) { case STRING: return matchRange(Fields.STRING_VALUE, (String) lower, (String) upper, includeLower, includeUpper); case INTEGER: return matchRange(Fields.INTEGER_VALUE, (Integer) lower, (Integer) upper, includeLower, includeUpper); case DECIMAL: return matchRange(Fields.DECIMAL_VALUE, (BigDecimal) lower, (BigDecimal) upper, includeLower, includeUpper); default: throw new UnsupportedOperationException("Unsupported data type when filtering by values, " + type); } } public static Expression dataTypes(Collection<DataType> dataTypes) { return matchAny(Fields.DATA_TYPE, FluentIterable.from(dataTypes).transform(new Function<DataType, String>() { @Override public String apply(DataType input) { return input.name(); } }).toSet()); } public static Expression attributeNames(Collection<String> attributeNames) { return matchAny(Fields.ATTRIBUTE_NAME, attributeNames); } public static Expression sourceEffectiveTime(long effectiveTime) { return exactMatch(Fields.SOURCE_EFFECTIVE_TIME, effectiveTime); } public static Expression targetEffectiveTime(long effectiveTime) { return exactMatch(Fields.TARGET_EFFECTIVE_TIME, effectiveTime); } public static Expression refSetTypes(Collection<SnomedRefSetType> refSetTypes) { return matchAny(Fields.REFSET_TYPE, FluentIterable.from(refSetTypes).transform(type -> type.name()).toSet()); } } @JsonPOJOBuilder(withPrefix="") public static final class Builder extends 
SnomedDocumentBuilder<Builder> { private String referencedComponentId; private String referenceSetId; private SnomedRefSetType referenceSetType; private short referencedComponentType; // Member specific fields, they can be null or emptyish values // ASSOCIATION reference set members private String targetComponent; // ATTRIBUTE VALUE private String valueId; // CONCRETE DOMAIN reference set members private DataType dataType; private String attributeName; private Object value; private String operatorId; private String characteristicTypeId; private String unitId; // DESCRIPTION private Integer descriptionLength; private String descriptionFormat; // LANGUAGE private String acceptabilityId; // MODULE private Long sourceEffectiveTime; private Long targetEffectiveTime; // SIMPLE MAP reference set members private String mapTarget; private String mapTargetDescription; // COMPLEX MAP private String mapCategoryId; private String correlationId; private String mapAdvice; private String mapRule; private Integer mapGroup; private Integer mapPriority; // QUERY private String query; // OWL Axiom private String owlExpression; // MRCM Domain private String domainConstraint; private String parentDomain; private String proximalPrimitiveConstraint; private String proximalPrimitiveRefinement; private String domainTemplateForPrecoordination; private String domainTemplateForPostcoordination; private String editorialGuideReference; // MRCM Attribute Domain private String domainId; private Boolean grouped; private String attributeCardinality; private String attributeInGroupCardinality; private String ruleStrengthId; private String contentTypeId; // MRCM Attribute Range private String rangeConstraint; private String attributeRule; // MRCM Module Scope private String mrcmRuleRefsetId; @JsonCreator private Builder() { // Disallow instantiation outside static method } public Builder fields(Map<String, Object> fields) { for (Entry<String, Object> entry : fields.entrySet()) { field(entry.getKey(), entry.getValue()); } return this; } public Builder field(String fieldName, Object value) { switch (fieldName) { case Fields.ACCEPTABILITY_ID: this.acceptabilityId = (String) value; break; case Fields.ATTRIBUTE_NAME: this.attributeName = (String) value; break; case Fields.CHARACTERISTIC_TYPE_ID: this.characteristicTypeId = (String) value; break; case Fields.CORRELATION_ID: this.correlationId = (String) value; break; case Fields.DATA_TYPE: this.dataType = (DataType) value; break; case Fields.DATA_VALUE: this.value = value; break; case Fields.DESCRIPTION_FORMAT: this.descriptionFormat = (String) value; break; case Fields.DESCRIPTION_LENGTH: this.descriptionLength = (Integer) value; break; case Fields.MAP_ADVICE: this.mapAdvice = (String) value; break; case Fields.MAP_CATEGORY_ID: this.mapCategoryId = (String) value; break; case Fields.MAP_GROUP: this.mapGroup = (Integer) value; break; case Fields.MAP_PRIORITY: this.mapPriority = (Integer) value; break; case Fields.MAP_RULE: this.mapRule = (String) value; break; case Fields.MAP_TARGET: this.mapTarget = (String) value; break; case Fields.MAP_TARGET_DESCRIPTION: this.mapTargetDescription = (String) value; break; case Fields.OPERATOR_ID: this.operatorId = (String) value; break; case Fields.QUERY: this.query = (String) value; break; case Fields.SOURCE_EFFECTIVE_TIME: this.sourceEffectiveTime = (Long) value; break; case Fields.TARGET_COMPONENT: this.targetComponent = (String) value; break; case Fields.TARGET_EFFECTIVE_TIME: this.targetEffectiveTime = (Long) value; break; case 
Fields.UNIT_ID: this.unitId = (String) value; break; case Fields.VALUE_ID: this.valueId = (String) value; break; case Fields.OWL_EXPRESSION: this.owlExpression = (String) value; break; case Fields.MRCM_DOMAIN_CONSTRAINT: this.domainConstraint = (String) value; break; case Fields.MRCM_PARENT_DOMAIN: this.parentDomain = (String) value; break; case Fields.MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT: this.proximalPrimitiveConstraint = (String) value; break; case Fields.MRCM_PROXIMAL_PRIMITIVE_REFINEMENT: this.proximalPrimitiveRefinement = (String) value; break; case Fields.MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION: this.domainTemplateForPrecoordination = (String) value; break; case Fields.MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION: this.domainTemplateForPostcoordination = (String) value; break; case Fields.MRCM_EDITORIAL_GUIDE_REFERENCE: this.editorialGuideReference = (String) value; break; case Fields.MRCM_DOMAIN_ID: this.domainId = (String) value; break; case Fields.MRCM_GROUPED: this.grouped = (Boolean) value; break; case Fields.MRCM_ATTRIBUTE_CARDINALITY: this.attributeCardinality = (String) value; break; case Fields.MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY: this.attributeInGroupCardinality = (String) value; break; case Fields.MRCM_RULE_STRENGTH_ID: this.ruleStrengthId = (String) value; break; case Fields.MRCM_CONTENT_TYPE_ID: this.contentTypeId = (String) value; break; case Fields.MRCM_RANGE_CONSTRAINT: this.rangeConstraint = (String) value; break; case Fields.MRCM_ATTRIBUTE_RULE: this.attributeRule = (String) value; break; case Fields.MRCM_RULE_REFSET_ID: this.mrcmRuleRefsetId = (String) value; break; default: throw new UnsupportedOperationException("Unknown RF2 member field: " + fieldName); } return this; } @Override protected Builder getSelf() { return this; } public Builder referencedComponentId(final String referencedComponentId) { this.referencedComponentId = referencedComponentId; return this; } public Builder referenceSetId(final String referenceSetId) { this.referenceSetId = referenceSetId; return this; } public Builder referenceSetType(final SnomedRefSetType referenceSetType) { this.referenceSetType = referenceSetType; return this; } public Builder referencedComponentType(final short referencedComponentType) { this.referencedComponentType = referencedComponentType; return this; } public Builder targetComponent(String targetComponent) { this.targetComponent = targetComponent; return this; } Builder acceptabilityId(String acceptabilityId) { this.acceptabilityId = acceptabilityId; return getSelf(); } Builder attributeName(String attributeName) { this.attributeName = attributeName; return getSelf(); } Builder characteristicTypeId(final String characteristicTypeId) { this.characteristicTypeId = characteristicTypeId; return getSelf(); } Builder correlationId(final String correlationId) { this.correlationId = correlationId; return getSelf(); } Builder dataType(final DataType dataType) { this.dataType = dataType; return getSelf(); } Builder descriptionFormat(final String descriptionFormat) { this.descriptionFormat = descriptionFormat; return getSelf(); } Builder descriptionLength(final Integer descriptionLength) { this.descriptionLength = descriptionLength; return getSelf(); } Builder mapAdvice(final String mapAdvice) { this.mapAdvice = mapAdvice; return getSelf(); } Builder mapCategoryId(final String mapCategoryId) { this.mapCategoryId = mapCategoryId; return getSelf(); } Builder mapGroup(final Integer mapGroup) { this.mapGroup = mapGroup; return getSelf(); } Builder mapPriority(final Integer 
mapPriority) { this.mapPriority = mapPriority; return getSelf(); } Builder mapRule(final String mapRule) { this.mapRule = mapRule; return getSelf(); } Builder mapTarget(final String mapTarget) { this.mapTarget = mapTarget; return getSelf(); } Builder mapTargetDescription(final String mapTargetDescription) { this.mapTargetDescription = mapTargetDescription; return getSelf(); } Builder operatorId(final String operatorId) { this.operatorId = operatorId; return getSelf(); } Builder query(final String query) { this.query = query; return getSelf(); } Builder sourceEffectiveTime(final Long sourceEffectiveTime) { this.sourceEffectiveTime = sourceEffectiveTime; return getSelf(); } Builder targetEffectiveTime(final Long targetEffectiveTime) { this.targetEffectiveTime = targetEffectiveTime; return getSelf(); } Builder unitId(final String unitId) { this.unitId = unitId; return getSelf(); } /** * @deprecated - this is no longer a valid refset member index field, but required to make pre-5.4 dataset work with 5.4 without migration */ Builder value(final Object value) { this.value = value; return getSelf(); } Builder decimalValue(final BigDecimal value) { this.value = value; return getSelf(); } Builder booleanValue(final Boolean value) { this.value = value; return getSelf(); } Builder integerValue(final Integer value) { this.value = value; return getSelf(); } Builder stringValue(final String value) { this.value = value; return getSelf(); } Builder valueId(String valueId) { this.valueId = valueId; return getSelf(); } Builder owlExpression(String owlExpression) { this.owlExpression = owlExpression; return getSelf(); } Builder domainConstraint(String domainConstraint) { this.domainConstraint = domainConstraint; return getSelf(); } Builder parentDomain(String parentDomain) { this.parentDomain = parentDomain; return getSelf(); } Builder proximalPrimitiveConstraint(String proximalPrimitiveConstraint) { this.proximalPrimitiveConstraint = proximalPrimitiveConstraint; return getSelf(); } Builder proximalPrimitiveRefinement(String proximalPrimitiveRefinement) { this.proximalPrimitiveRefinement = proximalPrimitiveRefinement; return getSelf(); } Builder domainTemplateForPrecoordination(String domainTemplateForPrecoordination) { this.domainTemplateForPrecoordination = domainTemplateForPrecoordination; return getSelf(); } Builder domainTemplateForPostcoordination(String domainTemplateForPostcoordination) { this.domainTemplateForPostcoordination = domainTemplateForPostcoordination; return getSelf(); } Builder editorialGuideReference(String editorialGuideReference) { this.editorialGuideReference = editorialGuideReference; return getSelf(); } Builder domainId(String domainId) { this.domainId = domainId; return getSelf(); } Builder grouped(Boolean grouped) { this.grouped = grouped; return getSelf(); } Builder attributeCardinality(String attributeCardinality) { this.attributeCardinality = attributeCardinality; return getSelf(); } Builder attributeInGroupCardinality(String attributeInGroupCardinality) { this.attributeInGroupCardinality = attributeInGroupCardinality; return getSelf(); } Builder ruleStrengthId(String ruleStrengthId) { this.ruleStrengthId = ruleStrengthId; return getSelf(); } Builder contentTypeId(String contentTypeId) { this.contentTypeId = contentTypeId; return getSelf(); } Builder rangeConstraint(String rangeConstraint) { this.rangeConstraint = rangeConstraint; return getSelf(); } Builder attributeRule(String attributeRule) { this.attributeRule = attributeRule; return getSelf(); } Builder 
mrcmRuleRefsetId(String mrcmRuleRefsetId) { this.mrcmRuleRefsetId = mrcmRuleRefsetId; return getSelf(); } public SnomedRefSetMemberIndexEntry build() { final SnomedRefSetMemberIndexEntry doc = new SnomedRefSetMemberIndexEntry(id, label, storageKey, moduleId, released, active, effectiveTime, referencedComponentId, referenceSetId, referenceSetType, referencedComponentType); // association members doc.targetComponent = targetComponent; // attribute value doc.valueId = valueId; // concrete domain members doc.dataType = dataType; doc.attributeName = attributeName; if (dataType != null) { switch (dataType) { case BOOLEAN: if (value instanceof Boolean) { doc.booleanValue = (Boolean) value; } else if (value instanceof String) { doc.booleanValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value); } break; case DECIMAL: if (value instanceof BigDecimal) { doc.decimalValue = (BigDecimal) value; } else if (value instanceof String) { doc.decimalValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value); } break; case INTEGER: if (value instanceof Integer) { doc.integerValue = (Integer) value; } else if (value instanceof String) { doc.integerValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value); } break; case STRING: doc.stringValue = (String) value; break; default: throw new UnsupportedOperationException("Unsupported concrete domain data type: " + dataType); } } doc.characteristicTypeId = characteristicTypeId; doc.operatorId = operatorId; doc.unitId = unitId; // description doc.descriptionFormat = descriptionFormat; doc.descriptionLength = descriptionLength; // language reference set doc.acceptabilityId = acceptabilityId; // module doc.sourceEffectiveTime = sourceEffectiveTime; doc.targetEffectiveTime = targetEffectiveTime; // simple map doc.mapTarget = mapTarget; doc.mapTargetDescription = mapTargetDescription; // complex map doc.mapCategoryId = mapCategoryId; doc.mapAdvice = mapAdvice; doc.correlationId = correlationId; doc.mapGroup = mapGroup; doc.mapPriority = mapPriority; doc.mapRule = mapRule; // query doc.query = query; // OWL Axiom doc.owlExpression = owlExpression; // MRCM Domain doc.domainConstraint = domainConstraint; doc.parentDomain = parentDomain; doc.proximalPrimitiveConstraint = proximalPrimitiveConstraint; doc.proximalPrimitiveRefinement = proximalPrimitiveRefinement; doc.domainTemplateForPrecoordination = domainTemplateForPrecoordination; doc.domainTemplateForPostcoordination = domainTemplateForPostcoordination; doc.editorialGuideReference = editorialGuideReference; // MRCM Attribute Domain doc.domainId = domainId; doc.grouped = grouped; doc.attributeCardinality = attributeCardinality; doc.attributeInGroupCardinality = attributeInGroupCardinality; doc.ruleStrengthId = ruleStrengthId; doc.contentTypeId = contentTypeId; // MRCM Attribute Range doc.rangeConstraint = rangeConstraint; doc.attributeRule = attributeRule; // MRCM Module Scope doc.mrcmRuleRefsetId = mrcmRuleRefsetId; doc.setScore(score); // metadata doc.setCreated(created); doc.setRevised(revised); return doc; } } private final String referencedComponentId; private final String referenceSetId; private final SnomedRefSetType referenceSetType; private final short referencedComponentType; // Member specific fields, they can be null or emptyish values // ASSOCIATION reference set members private String targetComponent; // ATTRIBUTE VALUE private String valueId; // CONCRETE DOMAIN reference set members private DataType dataType; private String attributeName; // only one of these value fields should 
be set when this represents a concrete domain member private String stringValue; private Boolean booleanValue; private Integer integerValue; private BigDecimal decimalValue; private String operatorId; private String characteristicTypeId; private String unitId; // DESCRIPTION private Integer descriptionLength; private String descriptionFormat; // LANGUAGE private String acceptabilityId; // MODULE private Long sourceEffectiveTime; private Long targetEffectiveTime; // SIMPLE MAP reference set members private String mapTarget; private String mapTargetDescription; // COMPLEX MAP private String mapCategoryId; private String correlationId; private String mapAdvice; private String mapRule; private Integer mapGroup; private Integer mapPriority; // QUERY @Keyword(index = false) private String query; // OWL Axiom private String owlExpression; // MRCM Domain private String domainConstraint; private String parentDomain; private String proximalPrimitiveConstraint; private String proximalPrimitiveRefinement; private String domainTemplateForPrecoordination; private String domainTemplateForPostcoordination; private String editorialGuideReference; // MRCM Attribute Domain private String domainId; private Boolean grouped; private String attributeCardinality; private String attributeInGroupCardinality; private String ruleStrengthId; private String contentTypeId; // MRCM Attribute Range private String rangeConstraint; private String attributeRule; // MRCM Module Scope private String mrcmRuleRefsetId; private SnomedRefSetMemberIndexEntry(final String id, final String label, final long storageKey, final String moduleId, final boolean released, final boolean active, final long effectiveTimeLong, final String referencedComponentId, final String referenceSetId, final SnomedRefSetType referenceSetType, final short referencedComponentType) { super(id, label, referencedComponentId, // XXX: iconId is the referenced component identifier storageKey, moduleId, released, active, effectiveTimeLong); checkArgument(referencedComponentType >= CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT, "Referenced component type '%s' is invalid.", referencedComponentType); this.referencedComponentId = referencedComponentId; this.referenceSetId = referenceSetId; this.referenceSetType = referenceSetType; this.referencedComponentType = referencedComponentType; } @Override public String getContainerId() { // XXX hack to make IHTSDO merge review API tests pass and work as before in 4.5 if (getReferenceSetType() == SnomedRefSetType.MODULE_DEPENDENCY) { return null; } else { return getReferencedComponentId(); } } /** * @return the referenced component identifier */ public String getReferencedComponentId() { return referencedComponentId; } /** * @return the identifier of the member's reference set */ public String getReferenceSetId() { return referenceSetId; } /** * @return the type of the member's reference set */ public SnomedRefSetType getReferenceSetType() { return referenceSetType; } @JsonIgnore @SuppressWarnings("unchecked") public <T> T getValueAs() { return (T) getValue(); } @JsonIgnore public Object getValue() { if (dataType == null) { return null; } else { switch (dataType) { case BOOLEAN: return booleanValue; case DECIMAL: return decimalValue; case INTEGER: return integerValue; case STRING: return stringValue; default: throw new UnsupportedOperationException("Unsupported concrete domain data type: " + dataType); } } } @JsonProperty BigDecimal getDecimalValue() { return decimalValue; } @JsonProperty Boolean getBooleanValue() { return 
booleanValue; } @JsonProperty Integer getIntegerValue() { return integerValue; } @JsonProperty String getStringValue() { return stringValue; } public DataType getDataType() { return dataType; } public String getUnitId() { return unitId; } public String getAttributeName() { return attributeName; } public String getOperatorId() { return operatorId; } public String getCharacteristicTypeId() { return characteristicTypeId; } public String getAcceptabilityId() { return acceptabilityId; } public Integer getDescriptionLength() { return descriptionLength; } public String getDescriptionFormat() { return descriptionFormat; } public String getMapTarget() { return mapTarget; } public Integer getMapGroup() { return mapGroup; } public Integer getMapPriority() { return mapPriority; } public String getMapRule() { return mapRule; } public String getMapAdvice() { return mapAdvice; } public String getMapCategoryId() { return mapCategoryId; } public String getCorrelationId() { return correlationId; } public String getMapTargetDescription() { return mapTargetDescription; } public String getQuery() { return query; } public String getTargetComponent() { return targetComponent; } public String getValueId() { return valueId; } public Long getSourceEffectiveTime() { return sourceEffectiveTime; } public Long getTargetEffectiveTime() { return targetEffectiveTime; } public short getReferencedComponentType() { return referencedComponentType; } public String getOwlExpression() { return owlExpression; } public String getDomainConstraint() { return domainConstraint; } public String getParentDomain() { return parentDomain; } public String getProximalPrimitiveConstraint() { return proximalPrimitiveConstraint; } public String getProximalPrimitiveRefinement() { return proximalPrimitiveRefinement; } public String getDomainTemplateForPrecoordination() { return domainTemplateForPrecoordination; } public String getDomainTemplateForPostcoordination() { return domainTemplateForPostcoordination; } public String getEditorialGuideReference() { return editorialGuideReference; } public String getDomainId() { return domainId; } public Boolean isGrouped() { return grouped; } public String getAttributeCardinality() { return attributeCardinality; } public String getAttributeInGroupCardinality() { return attributeInGroupCardinality; } public String getRuleStrengthId() { return ruleStrengthId; } public String getContentTypeId() { return contentTypeId; } public String getRangeConstraint() { return rangeConstraint; } public String getAttributeRule() { return attributeRule; } public String getMrcmRuleRefsetId() { return mrcmRuleRefsetId; } // model helper methods @JsonIgnore public Acceptability getAcceptability() { return Acceptability.getByConceptId(getAcceptabilityId()); } @JsonIgnore public RelationshipRefinability getRefinability() { return RelationshipRefinability.getByConceptId(getValueId()); } @JsonIgnore public InactivationIndicator getInactivationIndicator() { return InactivationIndicator.getByConceptId(getValueId()); } @JsonIgnore public String getSourceEffectiveTimeAsString() { return EffectiveTimes.format(getSourceEffectiveTime(), DateFormats.SHORT); } @JsonIgnore public String getTargetEffectiveTimeAsString() { return EffectiveTimes.format(getTargetEffectiveTime(), DateFormats.SHORT); } /** * @return the {@code String} terminology component identifier of the component referenced in this member */ @JsonIgnore public String getReferencedComponentTypeAsString() { return 
CoreTerminologyBroker.getInstance().getTerminologyComponentId(referencedComponentType); } /** * Helper which converts all non-null/empty additional fields to a values {@link Map} keyed by their field name; * @return */ @JsonIgnore public Map<String, Object> getAdditionalFields() { final ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder(); // ASSOCIATION refset members putIfPresent(builder, Fields.TARGET_COMPONENT, getTargetComponent()); // ATTRIBUTE_VALUE refset members putIfPresent(builder, Fields.VALUE_ID, getValueId()); // CONCRETE DOMAIN reference set members putIfPresent(builder, Fields.DATA_TYPE, getDataType()); putIfPresent(builder, Fields.ATTRIBUTE_NAME, getAttributeName()); putIfPresent(builder, Fields.DATA_VALUE, getValue()); putIfPresent(builder, Fields.OPERATOR_ID, getOperatorId()); putIfPresent(builder, Fields.CHARACTERISTIC_TYPE_ID, getCharacteristicTypeId()); putIfPresent(builder, Fields.UNIT_ID, getUnitId()); // DESCRIPTION putIfPresent(builder, Fields.DESCRIPTION_LENGTH, getDescriptionLength()); putIfPresent(builder, Fields.DESCRIPTION_FORMAT, getDescriptionFormat()); // LANGUAGE putIfPresent(builder, Fields.ACCEPTABILITY_ID, getAcceptabilityId()); // MODULE putIfPresent(builder, Fields.SOURCE_EFFECTIVE_TIME, getSourceEffectiveTime()); putIfPresent(builder, Fields.TARGET_EFFECTIVE_TIME, getTargetEffectiveTime()); // SIMPLE MAP reference set members putIfPresent(builder, Fields.MAP_TARGET, getMapTarget()); putIfPresent(builder, Fields.MAP_TARGET_DESCRIPTION, getMapTargetDescription()); // COMPLEX MAP putIfPresent(builder, Fields.MAP_CATEGORY_ID, getMapCategoryId()); putIfPresent(builder, Fields.CORRELATION_ID, getCorrelationId()); putIfPresent(builder, Fields.MAP_ADVICE, getMapAdvice()); putIfPresent(builder, Fields.MAP_RULE, getMapRule()); putIfPresent(builder, Fields.MAP_GROUP, getMapGroup()); putIfPresent(builder, Fields.MAP_PRIORITY, getMapPriority()); // QUERY putIfPresent(builder, Fields.QUERY, getQuery()); // OWL Axiom putIfPresent(builder, Fields.OWL_EXPRESSION, getOwlExpression()); // MRCM Domain putIfPresent(builder, Fields.MRCM_DOMAIN_CONSTRAINT, getDomainConstraint()); putIfPresent(builder, Fields.MRCM_PARENT_DOMAIN, getParentDomain()); putIfPresent(builder, Fields.MRCM_PROXIMAL_PRIMITIVE_CONSTRAINT, getProximalPrimitiveConstraint()); putIfPresent(builder, Fields.MRCM_PROXIMAL_PRIMITIVE_REFINEMENT, getProximalPrimitiveRefinement()); putIfPresent(builder, Fields.MRCM_DOMAIN_TEMPLATE_FOR_PRECOORDINATION, getDomainTemplateForPrecoordination()); putIfPresent(builder, Fields.MRCM_DOMAIN_TEMPLATE_FOR_POSTCOORDINATION, getDomainTemplateForPostcoordination()); putIfPresent(builder, Fields.MRCM_EDITORIAL_GUIDE_REFERENCE, getEditorialGuideReference()); // MRCM Attribute Domain putIfPresent(builder, Fields.MRCM_DOMAIN_ID, getDomainId()); putIfPresent(builder, Fields.MRCM_GROUPED, isGrouped()); putIfPresent(builder, Fields.MRCM_ATTRIBUTE_CARDINALITY, getAttributeCardinality()); putIfPresent(builder, Fields.MRCM_ATTRIBUTE_IN_GROUP_CARDINALITY, getAttributeInGroupCardinality()); putIfPresent(builder, Fields.MRCM_RULE_STRENGTH_ID, getRuleStrengthId()); putIfPresent(builder, Fields.MRCM_CONTENT_TYPE_ID, getContentTypeId()); // MRCM Attribute Range putIfPresent(builder, Fields.MRCM_RANGE_CONSTRAINT, getRangeConstraint()); putIfPresent(builder, Fields.MRCM_ATTRIBUTE_RULE, getAttributeRule()); // MRCM Module Scope putIfPresent(builder, Fields.MRCM_RULE_REFSET_ID, getMrcmRuleRefsetId()); return builder.build(); } private static void 
putIfPresent(ImmutableMap.Builder<String, Object> builder, String key, Object value) { if (key != null && value != null) { builder.put(key, value); } } @Override protected ToStringHelper doToString() { return super.doToString() .add("referencedComponentId", referencedComponentId) .add("referenceSetId", referenceSetId) .add("referenceSetType", referenceSetType) .add("referencedComponentType", referencedComponentType) .add("targetComponent", targetComponent) .add("valueId", valueId) .add("dataType", dataType) .add("attributeName", attributeName) .add("value", getValue()) .add("operatorId", operatorId) .add("characteristicTypeId", characteristicTypeId) .add("unitId", unitId) .add("descriptionLength", descriptionLength) .add("descriptionFormat", descriptionFormat) .add("acceptabilityId", acceptabilityId) .add("sourceEffectiveTime", sourceEffectiveTime) .add("targetEffectiveTime", targetEffectiveTime) .add("mapTarget", mapTarget) .add("mapTargetDescription", mapTargetDescription) .add("mapCategoryId", mapCategoryId) .add("correlationId", correlationId) .add("mapAdvice", mapAdvice) .add("mapRule", mapRule) .add("mapGroup", mapGroup) .add("mapPriority", mapPriority) .add("query", query) .add("owlExpression", owlExpression) .add("domainConstraint", domainConstraint) .add("parentDomain", parentDomain) .add("proximalPrimitiveConstraint", proximalPrimitiveConstraint) .add("proximalPrimitiveRefinement", proximalPrimitiveRefinement) .add("domainTemplateForPrecoordination", domainTemplateForPrecoordination) .add("domainTemplateForPostcoordination", domainTemplateForPostcoordination) .add("editorialGuideReference", editorialGuideReference) .add("domainId", domainId) .add("grouped", grouped) .add("attributeCardinality", attributeCardinality) .add("attributeInGroupCardinality", attributeInGroupCardinality) .add("ruleStrengthId", ruleStrengthId) .add("contentTypeId", contentTypeId) .add("rangeConstraint", rangeConstraint) .add("attributeRule", attributeRule) .add("mrcmRuleRefsetId", mrcmRuleRefsetId); } }
SO-3044: remove hack from SnomedRefSetMemberIndexEntry. The getContainerId method should always return the referencedComponentId for each reference set member (a sketch of the change follows this record).
snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/index/entry/SnomedRefSetMemberIndexEntry.java
SO-3044: remove hack from SnomedRefSetMemberIndexEntry
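As an editorial illustration of the change described by the commit message above, the MODULE_DEPENDENCY special case in getContainerId is dropped so the method always reports the referenced component as the container. The "before" body is taken from the old_contents in this record; the "after" body is a sketch inferred from the commit message, not the verbatim diff, and the comments are editorial.

	// Before (from old_contents): module dependency members reported no container.
	@Override
	public String getContainerId() {
		// XXX hack to make IHTSDO merge review API tests pass and work as before in 4.5
		if (getReferenceSetType() == SnomedRefSetType.MODULE_DEPENDENCY) {
			return null;
		} else {
			return getReferencedComponentId();
		}
	}

	// After (sketch, assumed from the commit message): always use the referenced component id.
	@Override
	public String getContainerId() {
		return getReferencedComponentId();
	}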
Java
apache-2.0
249c1835f60cf9239e812390a457d9885e15ab7e
0
iBuildApp/android_module_Catalog
/****************************************************************************
 *                                                                          *
 *  Copyright (C) 2014-2015 iBuildApp, Inc. ( http://ibuildapp.com )        *
 *                                                                          *
 *  This file is part of iBuildApp.                                         *
 *                                                                          *
 *  This Source Code Form is subject to the terms of the iBuildApp License. *
 *  You can obtain one at http://ibuildapp.com/license/                     *
 *                                                                          *
 ****************************************************************************/
package com.ibuildapp.romanblack.CataloguePlugin.view;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Path;
import android.graphics.RectF;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.ImageView;

public class RoundView extends ImageView {

    private final String TAG = RoundView.class.getCanonicalName();
    private int cornerTopLeft = 0;
    private int cornerTopRight = 0;
    private int cornerBottomLeft = 0;
    private int cornerBottomRight = 0;

    public RoundView(Context context) {
        super(context);
        if (Build.VERSION.SDK_INT >= 11 && Build.VERSION.SDK_INT <= 17)
            this.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
    }

    public RoundView(Context context, AttributeSet attrs) {
        super(context, attrs);
        if (Build.VERSION.SDK_INT >= 11 && Build.VERSION.SDK_INT <= 17)
            this.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
    }

    /**
     * Set bitmap for this imageView with alpha
     *
     * @param bm bitmap
     */
    public void setImageBitmapWithAlpha(Bitmap bm) {
        super.setImageBitmap(bm);
        Animation alpha = new AlphaAnimation(0.3f, 1.0f);
        alpha.setDuration(500);
        startAnimation(alpha);
    }

    /**
     * Set the corner radii for this imageView
     *
     * @param cornerTopLeft     top left corner
     * @param cornerTopRight    top right corner
     * @param cornerBottomLeft  bottom left corner
     * @param cornerBottomRight bottom right corner
     */
    public void setCorners(
            int cornerTopLeft,
            int cornerTopRight,
            int cornerBottomLeft,
            int cornerBottomRight) {
        this.cornerTopLeft = cornerTopLeft;
        this.cornerTopRight = cornerTopRight;
        this.cornerBottomLeft = cornerBottomLeft;
        this.cornerBottomRight = cornerBottomRight;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        double res = MeasureSpec.getSize(widthMeasureSpec);
        res = res * 6;
        res = res / 5;
        int parentWidth = MeasureSpec.getSize(widthMeasureSpec);
        this.setMeasuredDimension(parentWidth, (int) res);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Path clipPath = new Path();
        RectF rect = new RectF(0, 0, this.getWidth(), this.getHeight());
        float[] rad = new float[]{
                cornerTopLeft, cornerTopLeft,
                cornerTopRight, cornerTopRight,
                cornerBottomRight, cornerBottomRight,
                cornerBottomLeft, cornerBottomLeft};
        clipPath.addRoundRect(rect, rad, Path.Direction.CW);
        canvas.clipPath(clipPath);
        super.onDraw(canvas);
    }
}
src/main/java/com/ibuildapp/romanblack/CataloguePlugin/view/RoundView.java
/****************************************************************************
 *                                                                          *
 *  Copyright (C) 2014-2015 iBuildApp, Inc. ( http://ibuildapp.com )        *
 *                                                                          *
 *  This file is part of iBuildApp.                                         *
 *                                                                          *
 *  This Source Code Form is subject to the terms of the iBuildApp License. *
 *  You can obtain one at http://ibuildapp.com/license/                     *
 *                                                                          *
 ****************************************************************************/
package com.ibuildapp.romanblack.CataloguePlugin.view;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Path;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.widget.ImageView;

public class RoundView extends ImageView {

    private final String TAG = RoundView.class.getCanonicalName();
    private int cornerTopLeft = 0;
    private int cornerTopRight = 0;
    private int cornerBottomLeft = 0;
    private int cornerBottomRight = 0;

    public RoundView(Context context) {
        super(context);
    }

    public RoundView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    /**
     * Set bitmap for this imageView with alpha
     *
     * @param bm bitmap
     */
    public void setImageBitmapWithAlpha(Bitmap bm) {
        super.setImageBitmap(bm);
        Animation alpha = new AlphaAnimation(0.3f, 1.0f);
        alpha.setDuration(500);
        startAnimation(alpha);
    }

    /**
     * Set the corner radii for this imageView
     *
     * @param cornerTopLeft     top left corner
     * @param cornerTopRight    top right corner
     * @param cornerBottomLeft  bottom left corner
     * @param cornerBottomRight bottom right corner
     */
    public void setCorners(
            int cornerTopLeft,
            int cornerTopRight,
            int cornerBottomLeft,
            int cornerBottomRight) {
        this.cornerTopLeft = cornerTopLeft;
        this.cornerTopRight = cornerTopRight;
        this.cornerBottomLeft = cornerBottomLeft;
        this.cornerBottomRight = cornerBottomRight;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        double res = MeasureSpec.getSize(widthMeasureSpec);
        res = res * 6;
        res = res / 5;
        int parentWidth = MeasureSpec.getSize(widthMeasureSpec);
        this.setMeasuredDimension(parentWidth, (int) res);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Path clipPath = new Path();
        RectF rect = new RectF(0, 0, this.getWidth(), this.getHeight());
        float[] rad = new float[]{
                cornerTopLeft, cornerTopLeft,
                cornerTopRight, cornerTopRight,
                cornerBottomRight, cornerBottomRight,
                cornerBottomLeft, cornerBottomLeft};
        clipPath.addRoundRect(rect, rad, Path.Direction.CW);
        canvas.clipPath(clipPath);
        super.onDraw(canvas);
    }
}
Turn off hardware acceleration for SDK versions between 11 and 17 to fix the rounded-corner clipping problem
src/main/java/com/ibuildapp/romanblack/CataloguePlugin/view/RoundView.java
Turn off hardware acceleration for SDK versions between 11 and 17 to fix the rounded-corner clipping problem
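For context on why the new revision drops to a software layer on API 11-17: Canvas.clipPath() with a non-rectangular path is not supported on hardware-accelerated canvases before API 18, so the rounded-corner clip in onDraw() fails on those versions. Below is a minimal sketch of the same idea; the class name and corner radius are illustrative and not part of the plugin's RoundView.

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Path;
import android.graphics.RectF;
import android.os.Build;
import android.view.View;
import android.widget.ImageView;

public class RoundedCornerImageView extends ImageView {

    private final float radius = 16f; // corner radius in px, illustrative value

    public RoundedCornerImageView(Context context) {
        super(context);
        // Hardware canvases only gained clipPath() support in API 18,
        // so fall back to a software layer on API 11-17.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB
                && Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
            setLayerType(View.LAYER_TYPE_SOFTWARE, null);
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        Path clip = new Path();
        clip.addRoundRect(new RectF(0, 0, getWidth(), getHeight()),
                radius, radius, Path.Direction.CW);
        canvas.clipPath(clip); // safe on all versions thanks to the constructor check
        super.onDraw(canvas);
    }
}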
Java
apache-2.0
4008af8b56c5d62263f793ab5d2befe2fee83680
0
yarish/tinylog,yarish/tinylog,robymus/tinylog,robymus/tinylog
/* * Copyright 2012 Martin Winandy * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.pmw.benchmark; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.text.MessageFormat; import java.util.Arrays; import org.pmw.benchmark.dummy.DummyBenchmark; public abstract class AbstractRunner { private static final int BENCHMARK_ITERATIONS = 1; // Number of benchmark to run private static final int OUTLIERS_CUT = 0; // Number of best and worst results to exclude private static final String RESULT_MESSAGE = "{0}: {1} log entries in {2}ms = {3} log entries per second"; private static final String ERROR_MESSAGE = "{0} lines has been written, but {1} lines expected"; private final String name; private final IBenchmark benchmark; AbstractRunner(final String name, final IBenchmark benchmark) { this.name = name; this.benchmark = benchmark; } public final void start() throws Exception { File[] files = new File[BENCHMARK_ITERATIONS]; for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { File file = File.createTempFile("log", ".txt"); file.deleteOnExit(); files[i] = file; } long[] times = new long[BENCHMARK_ITERATIONS]; for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { benchmark.init(files[i]); long start = System.currentTimeMillis(); run(benchmark); benchmark.dispose(); long finished = System.currentTimeMillis(); times[i] = finished - start; } long time = calcTime(times); long iterations = (BENCHMARK_ITERATIONS - OUTLIERS_CUT * 2) * countTriggeredLogEntries(); long iterationsPerSecond = Math.round(iterations * 1000d / time); System.out.println(MessageFormat.format(RESULT_MESSAGE, name, iterations, time, iterationsPerSecond)); if (!(benchmark instanceof DummyBenchmark)) { long lines = 0; for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { BufferedReader reader = new BufferedReader(new FileReader(files[i])); while (reader.readLine() != null) { ++lines; } reader.close(); } long expected = BENCHMARK_ITERATIONS * countWrittenLogEntries(); if (lines != expected) { System.err.println(MessageFormat.format(ERROR_MESSAGE, lines, expected)); } } for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { files[i].delete(); } } protected abstract void run(final IBenchmark benchmark) throws Exception; protected static IBenchmark createBenchmark(final String[] arguments) { if (arguments.length == 0) { System.out.println("Require name of benchmark class as first argument"); return null; } try { return (IBenchmark) Class.forName(arguments[0]).newInstance(); } catch (ReflectiveOperationException ex) { ex.printStackTrace(); return null; } } protected abstract long countTriggeredLogEntries(); protected abstract long countWrittenLogEntries(); private long calcTime(final long[] times) { Arrays.sort(times); long time = 0L; for (int i = OUTLIERS_CUT; i < BENCHMARK_ITERATIONS - OUTLIERS_CUT; ++i) { time += times[i]; } return time; } }
benchmark/src/org/pmw/benchmark/AbstractRunner.java
/* * Copyright 2012 Martin Winandy * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.pmw.benchmark; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.text.MessageFormat; import java.util.Arrays; import org.pmw.benchmark.dummy.DummyBenchmark; public abstract class AbstractRunner { private static final int BENCHMARK_ITERATIONS = 120; // Number of benchmark to run private static final int OUTLIERS_CUT = 10; // Number of best and worst results to exclude private static final String RESULT_MESSAGE = "{0}: {1} log entries in {2}ms = {3} log entries per second"; private static final String ERROR_MESSAGE = "{0} lines has been written, but {1} lines expected"; private final String name; private final IBenchmark benchmark; AbstractRunner(final String name, final IBenchmark benchmark) { this.name = name; this.benchmark = benchmark; } public final void start() throws Exception { File[] files = new File[BENCHMARK_ITERATIONS]; for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { File file = File.createTempFile("log", ".txt"); file.deleteOnExit(); files[i] = file; } long[] times = new long[BENCHMARK_ITERATIONS]; for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { benchmark.init(files[i]); long start = System.currentTimeMillis(); run(benchmark); benchmark.dispose(); long finished = System.currentTimeMillis(); times[i] = finished - start; } long time = calcTime(times); long iterations = (BENCHMARK_ITERATIONS - OUTLIERS_CUT * 2) * countTriggeredLogEntries(); long iterationsPerSecond = Math.round(iterations * 1000d / time); System.out.println(MessageFormat.format(RESULT_MESSAGE, name, iterations, time, iterationsPerSecond)); if (!(benchmark instanceof DummyBenchmark)) { long lines = 0; for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { BufferedReader reader = new BufferedReader(new FileReader(files[i])); while (reader.readLine() != null) { ++lines; } reader.close(); } long expected = BENCHMARK_ITERATIONS * countWrittenLogEntries(); if (lines != expected) { System.err.println(MessageFormat.format(ERROR_MESSAGE, lines, expected)); } } for (int i = 0; i < BENCHMARK_ITERATIONS; ++i) { files[i].delete(); } } protected abstract void run(final IBenchmark benchmark) throws Exception; protected static IBenchmark createBenchmark(final String[] arguments) { if (arguments.length == 0) { System.out.println("Require name of benchmark class as first argument"); return null; } try { return (IBenchmark) Class.forName(arguments[0]).newInstance(); } catch (ReflectiveOperationException ex) { ex.printStackTrace(); return null; } } protected abstract long countTriggeredLogEntries(); protected abstract long countWrittenLogEntries(); private long calcTime(final long[] times) { Arrays.sort(times); long time = 0L; for (int i = OUTLIERS_CUT; i < BENCHMARK_ITERATIONS - OUTLIERS_CUT; ++i) { time += times[i]; } return time; } }
Increased default number of benchmark iterations
benchmark/src/org/pmw/benchmark/AbstractRunner.java
Increased default number of benchmark iterations
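The runner's calcTime() sorts the per-iteration timings and discards the OUTLIERS_CUT fastest and slowest runs before summing, so a larger iteration count only pays off when enough samples remain after trimming. A minimal, self-contained sketch of that trimming step follows; the class and method names are illustrative and not part of the benchmark code.

import java.util.Arrays;

public final class TrimmedTotal {

    /** Sums all values except the {@code cut} smallest and {@code cut} largest. */
    public static long sumWithoutOutliers(long[] times, int cut) {
        if (times.length <= cut * 2) {
            throw new IllegalArgumentException("Not enough samples to cut " + cut + " outliers from each end");
        }
        long[] sorted = times.clone();
        Arrays.sort(sorted);
        long total = 0L;
        for (int i = cut; i < sorted.length - cut; ++i) {
            total += sorted[i];
        }
        return total;
    }

    public static void main(String[] args) {
        long[] times = { 120, 95, 3000, 101, 99, 12, 110, 105 };
        // Excludes the 12 and 3000 outliers, leaving the six typical runs.
        System.out.println(sumWithoutOutliers(times, 1)); // prints 630
    }
}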
Java
apache-2.0
4bd5cd1c55cfe7e33bcf71a01b5a6457f111917a
0
rhusar/undertow,jstourac/undertow,darranl/undertow,jstourac/undertow,jstourac/undertow,undertow-io/undertow,Karm/undertow,darranl/undertow,soul2zimate/undertow,ctomc/undertow,baranowb/undertow,soul2zimate/undertow,aldaris/undertow,msfm/undertow,undertow-io/undertow,baranowb/undertow,stuartwdouglas/undertow,rhusar/undertow,pferraro/undertow,stuartwdouglas/undertow,rhusar/undertow,jamezp/undertow,ctomc/undertow,undertow-io/undertow,pferraro/undertow,Karm/undertow,aldaris/undertow,aldaris/undertow,golovnin/undertow,msfm/undertow,darranl/undertow,Karm/undertow,jamezp/undertow,baranowb/undertow,golovnin/undertow,soul2zimate/undertow,golovnin/undertow,stuartwdouglas/undertow,pferraro/undertow,ctomc/undertow,jamezp/undertow,msfm/undertow
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.protocol.http; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.net.ssl.SSLEngine; import org.xnio.ChannelListener; import org.xnio.IoUtils; import org.xnio.OptionMap; import org.xnio.Pool; import org.xnio.StreamConnection; import org.xnio.channels.StreamSourceChannel; import org.xnio.ssl.SslConnection; import io.undertow.UndertowLogger; import io.undertow.UndertowMessages; import io.undertow.UndertowOptions; import io.undertow.connector.ByteBufferPool; import io.undertow.connector.PooledByteBuffer; import io.undertow.protocols.alpn.ALPNManager; import io.undertow.protocols.alpn.ALPNProvider; import io.undertow.protocols.ssl.SslConduit; import io.undertow.protocols.ssl.UndertowXnioSsl; import io.undertow.server.AggregateConnectorStatistics; import io.undertow.server.ConnectorStatistics; import io.undertow.server.DelegateOpenListener; import io.undertow.server.HttpHandler; import io.undertow.server.OpenListener; import io.undertow.server.XnioByteBufferPool; /** * Open listener adaptor for ALPN connections * <p> * Not a proper open listener as such, but more a mechanism for selecting between them. * * @author Stuart Douglas */ public class AlpnOpenListener implements ChannelListener<StreamConnection>, OpenListener { /** * HTTP/2 required cipher. Not strictly part of ALPN but it can live here for now till we have a better solution. 
*/ public static final String REQUIRED_CIPHER = "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"; public static final String REQUIRED_PROTOCOL = "TLSv1.2"; private final ALPNManager alpnManager = ALPNManager.INSTANCE; //todo: configurable private final ByteBufferPool bufferPool; private final Map<String, ListenerEntry> listeners = new HashMap<>(); private String[] protocols; private final String fallbackProtocol; private volatile HttpHandler rootHandler; private volatile OptionMap undertowOptions; private volatile boolean statisticsEnabled; public AlpnOpenListener(Pool<ByteBuffer> bufferPool, OptionMap undertowOptions, DelegateOpenListener httpListener) { this(bufferPool, undertowOptions, "http/1.1", httpListener); } public AlpnOpenListener(Pool<ByteBuffer> bufferPool, OptionMap undertowOptions) { this(bufferPool, undertowOptions, null, null); } public AlpnOpenListener(Pool<ByteBuffer> bufferPool, OptionMap undertowOptions, String fallbackProtocol, DelegateOpenListener fallbackListener) { this(new XnioByteBufferPool(bufferPool), undertowOptions, fallbackProtocol, fallbackListener); } public AlpnOpenListener(ByteBufferPool bufferPool, OptionMap undertowOptions, DelegateOpenListener httpListener) { this(bufferPool, undertowOptions, "http/1.1", httpListener); } public AlpnOpenListener(ByteBufferPool bufferPool) { this(bufferPool, OptionMap.EMPTY, null, null); } public AlpnOpenListener(ByteBufferPool bufferPool, OptionMap undertowOptions) { this(bufferPool, undertowOptions, null, null); } public AlpnOpenListener(ByteBufferPool bufferPool, OptionMap undertowOptions, String fallbackProtocol, DelegateOpenListener fallbackListener) { this.bufferPool = bufferPool; this.undertowOptions = undertowOptions; this.fallbackProtocol = fallbackProtocol; statisticsEnabled = undertowOptions.get(UndertowOptions.ENABLE_CONNECTOR_STATISTICS, false); if (fallbackProtocol != null && fallbackListener != null) { addProtocol(fallbackProtocol, fallbackListener, 0); } } @Override public HttpHandler getRootHandler() { return rootHandler; } @Override public void setRootHandler(HttpHandler rootHandler) { this.rootHandler = rootHandler; for (Map.Entry<String, ListenerEntry> delegate : listeners.entrySet()) { delegate.getValue().listener.setRootHandler(rootHandler); } } @Override public OptionMap getUndertowOptions() { return undertowOptions; } @Override public void setUndertowOptions(OptionMap undertowOptions) { if (undertowOptions == null) { throw UndertowMessages.MESSAGES.argumentCannotBeNull("undertowOptions"); } this.undertowOptions = undertowOptions; for (Map.Entry<String, ListenerEntry> delegate : listeners.entrySet()) { delegate.getValue().listener.setRootHandler(rootHandler); } statisticsEnabled = undertowOptions.get(UndertowOptions.ENABLE_CONNECTOR_STATISTICS, false); } @Override public ByteBufferPool getBufferPool() { return bufferPool; } @Override public ConnectorStatistics getConnectorStatistics() { if (statisticsEnabled) { List<ConnectorStatistics> stats = new ArrayList<>(); for (Map.Entry<String, ListenerEntry> l : listeners.entrySet()) { ConnectorStatistics c = l.getValue().listener.getConnectorStatistics(); if (c != null) { stats.add(c); } } return new AggregateConnectorStatistics(stats.toArray(new ConnectorStatistics[stats.size()])); } return null; } private static class ListenerEntry implements Comparable<ListenerEntry> { final DelegateOpenListener listener; final int weight; final String protocol; ListenerEntry(DelegateOpenListener listener, int weight, String protocol) { this.listener = listener; this.weight = 
weight; this.protocol = protocol; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ListenerEntry)) return false; ListenerEntry that = (ListenerEntry) o; if (weight != that.weight) return false; if (!listener.equals(that.listener)) return false; return protocol.equals(that.protocol); } @Override public int hashCode() { int result = listener.hashCode(); result = 31 * result + weight; result = 31 * result + protocol.hashCode(); return result; } @Override public int compareTo(ListenerEntry o) { return -Integer.compare(this.weight, o.weight); } } public AlpnOpenListener addProtocol(String name, DelegateOpenListener listener, int weight) { listeners.put(name, new ListenerEntry(listener, weight, name)); List<ListenerEntry> list = new ArrayList<>(listeners.values()); Collections.sort(list); protocols = new String[list.size()]; for (int i = 0; i < list.size(); ++i) { protocols[i] = list.get(i).protocol; } return this; } public void handleEvent(final StreamConnection channel) { if (UndertowLogger.REQUEST_LOGGER.isTraceEnabled()) { UndertowLogger.REQUEST_LOGGER.tracef("Opened connection with %s", channel.getPeerAddress()); } final SslConduit sslConduit = UndertowXnioSsl.getSslConduit((SslConnection) channel); final SSLEngine sslEngine = sslConduit.getSSLEngine(); if (!engineSupportsHTTP2(sslEngine)) { UndertowLogger.REQUEST_LOGGER.debugf("ALPN has been configured however %s is not present or TLS1.2 is not enabled, falling back to default protocol", REQUIRED_CIPHER); if (fallbackProtocol != null) { ListenerEntry listener = listeners.get(fallbackProtocol); if (listener != null) { listener.listener.handleEvent(channel); return; } } } ALPNProvider provider = alpnManager.getProvider(sslEngine); if (provider == null) { if (fallbackProtocol != null) { ListenerEntry listener = listeners.get(fallbackProtocol); if (listener != null) { listener.listener.handleEvent(channel); return; } } UndertowLogger.REQUEST_LOGGER.debugf("No ALPN provider available and no fallback defined"); IoUtils.safeClose(channel); return; } SSLEngine newEngine = provider.setProtocols(sslEngine, protocols); if (newEngine != sslEngine) { sslConduit.setSslEngine(newEngine); } final AlpnConnectionListener potentialConnection = new AlpnConnectionListener(channel, newEngine, provider); channel.getSourceChannel().setReadListener(potentialConnection); potentialConnection.handleEvent(channel.getSourceChannel()); } public static boolean engineSupportsHTTP2(SSLEngine engine) { //check to make sure the engine meets the minimum requirements for HTTP/2 //if not then ALPN will not be attempted String[] protcols = engine.getEnabledProtocols(); boolean found = false; for(String proto : protcols) { if(proto.equals(REQUIRED_PROTOCOL)) { found = true; break; } } if(!found) { return false; } String[] ciphers = engine.getEnabledCipherSuites(); for (String i : ciphers) { if (i.equals(REQUIRED_CIPHER)) { return true; } } return false; } private class AlpnConnectionListener implements ChannelListener<StreamSourceChannel> { private final StreamConnection channel; private final SSLEngine engine; private final ALPNProvider provider; private AlpnConnectionListener(StreamConnection channel, SSLEngine engine, ALPNProvider provider) { this.channel = channel; this.engine = engine; this.provider = provider; } @Override public void handleEvent(StreamSourceChannel source) { PooledByteBuffer buffer = bufferPool.allocate(); boolean free = true; try { while (true) { int res = channel.getSourceChannel().read(buffer.getBuffer()); if 
(res == -1) { IoUtils.safeClose(channel); return; } buffer.getBuffer().flip(); final String selected = provider.getSelectedProtocol(engine); if (selected != null) { DelegateOpenListener listener; if (selected.isEmpty()) { //alpn not in use if (fallbackProtocol == null) { UndertowLogger.REQUEST_IO_LOGGER.noALPNFallback(channel.getPeerAddress()); IoUtils.safeClose(channel); return; } listener = listeners.get(fallbackProtocol).listener; } else { listener = listeners.get(selected).listener; } source.getReadSetter().set(null); listener.handleEvent(channel, buffer); free = false; return; } else if (res > 0) { if (fallbackProtocol == null) { UndertowLogger.REQUEST_IO_LOGGER.noALPNFallback(channel.getPeerAddress()); IoUtils.safeClose(channel); return; } DelegateOpenListener listener = listeners.get(fallbackProtocol).listener; source.getReadSetter().set(null); listener.handleEvent(channel, buffer); free = false; return; } else if (res == 0) { channel.getSourceChannel().resumeReads(); return; } } } catch (IOException e) { UndertowLogger.REQUEST_IO_LOGGER.ioException(e); IoUtils.safeClose(channel); } finally { if (free) { buffer.close(); } } } } }
core/src/main/java/io/undertow/server/protocol/http/AlpnOpenListener.java
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.undertow.server.protocol.http; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.net.ssl.SSLEngine; import org.xnio.ChannelListener; import org.xnio.IoUtils; import org.xnio.OptionMap; import org.xnio.Pool; import org.xnio.StreamConnection; import org.xnio.channels.StreamSourceChannel; import org.xnio.ssl.SslConnection; import io.undertow.UndertowLogger; import io.undertow.UndertowMessages; import io.undertow.UndertowOptions; import io.undertow.connector.ByteBufferPool; import io.undertow.connector.PooledByteBuffer; import io.undertow.protocols.alpn.ALPNManager; import io.undertow.protocols.alpn.ALPNProvider; import io.undertow.protocols.ssl.SslConduit; import io.undertow.protocols.ssl.UndertowXnioSsl; import io.undertow.server.AggregateConnectorStatistics; import io.undertow.server.ConnectorStatistics; import io.undertow.server.DelegateOpenListener; import io.undertow.server.HttpHandler; import io.undertow.server.OpenListener; import io.undertow.server.XnioByteBufferPool; /** * Open listener adaptor for ALPN connections * <p> * Not a proper open listener as such, but more a mechanism for selecting between them. * * @author Stuart Douglas */ public class AlpnOpenListener implements ChannelListener<StreamConnection>, OpenListener { /** * HTTP/2 required cipher. Not strictly part of ALPN but it can live here for now till we have a better solution. 
*/ public static final String REQUIRED_CIPHER = "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"; public static final String REQUIRED_PROTOCOL = "TLSv1.2"; private final ALPNManager alpnManager = ALPNManager.INSTANCE; //todo: configurable private final ByteBufferPool bufferPool; private final Map<String, ListenerEntry> listeners = new HashMap<>(); private String[] protocols; private final String fallbackProtocol; private volatile HttpHandler rootHandler; private volatile OptionMap undertowOptions; private volatile boolean statisticsEnabled; public AlpnOpenListener(Pool<ByteBuffer> bufferPool, OptionMap undertowOptions, DelegateOpenListener httpListener) { this(bufferPool, undertowOptions, "http/1.1", httpListener); } public AlpnOpenListener(Pool<ByteBuffer> bufferPool, OptionMap undertowOptions) { this(bufferPool, undertowOptions, null, null); } public AlpnOpenListener(Pool<ByteBuffer> bufferPool, OptionMap undertowOptions, String fallbackProtocol, DelegateOpenListener fallbackListener) { this(new XnioByteBufferPool(bufferPool), undertowOptions, fallbackProtocol, fallbackListener); } public AlpnOpenListener(ByteBufferPool bufferPool, OptionMap undertowOptions, DelegateOpenListener httpListener) { this(bufferPool, undertowOptions, "http/1.1", httpListener); } public AlpnOpenListener(ByteBufferPool bufferPool) { this(bufferPool, OptionMap.EMPTY, null, null); } public AlpnOpenListener(ByteBufferPool bufferPool, OptionMap undertowOptions) { this(bufferPool, undertowOptions, null, null); } public AlpnOpenListener(ByteBufferPool bufferPool, OptionMap undertowOptions, String fallbackProtocol, DelegateOpenListener fallbackListener) { this.bufferPool = bufferPool; this.undertowOptions = undertowOptions; this.fallbackProtocol = fallbackProtocol; statisticsEnabled = undertowOptions.get(UndertowOptions.ENABLE_CONNECTOR_STATISTICS, false); if (fallbackProtocol != null && fallbackListener != null) { addProtocol(fallbackProtocol, fallbackListener, 0); } } @Override public HttpHandler getRootHandler() { return rootHandler; } @Override public void setRootHandler(HttpHandler rootHandler) { this.rootHandler = rootHandler; for (Map.Entry<String, ListenerEntry> delegate : listeners.entrySet()) { delegate.getValue().listener.setRootHandler(rootHandler); } } @Override public OptionMap getUndertowOptions() { return undertowOptions; } @Override public void setUndertowOptions(OptionMap undertowOptions) { if (undertowOptions == null) { throw UndertowMessages.MESSAGES.argumentCannotBeNull("undertowOptions"); } this.undertowOptions = undertowOptions; for (Map.Entry<String, ListenerEntry> delegate : listeners.entrySet()) { delegate.getValue().listener.setRootHandler(rootHandler); } statisticsEnabled = undertowOptions.get(UndertowOptions.ENABLE_CONNECTOR_STATISTICS, false); } @Override public ByteBufferPool getBufferPool() { return bufferPool; } @Override public ConnectorStatistics getConnectorStatistics() { if (statisticsEnabled) { List<ConnectorStatistics> stats = new ArrayList<>(); for (Map.Entry<String, ListenerEntry> l : listeners.entrySet()) { ConnectorStatistics c = l.getValue().listener.getConnectorStatistics(); if (c != null) { stats.add(c); } } return new AggregateConnectorStatistics(stats.toArray(new ConnectorStatistics[stats.size()])); } return null; } private static class ListenerEntry implements Comparable<ListenerEntry> { final DelegateOpenListener listener; final int weight; final String protocol; ListenerEntry(DelegateOpenListener listener, int weight, String protocol) { this.listener = listener; this.weight = 
weight; this.protocol = protocol; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof ListenerEntry)) return false; ListenerEntry that = (ListenerEntry) o; if (weight != that.weight) return false; if (!listener.equals(that.listener)) return false; return protocol.equals(that.protocol); } @Override public int hashCode() { int result = listener.hashCode(); result = 31 * result + weight; result = 31 * result + protocol.hashCode(); return result; } @Override public int compareTo(ListenerEntry o) { return -Integer.compare(this.weight, o.weight); } } public AlpnOpenListener addProtocol(String name, DelegateOpenListener listener, int weight) { listeners.put(name, new ListenerEntry(listener, weight, name)); List<ListenerEntry> list = new ArrayList<>(listeners.values()); Collections.sort(list); protocols = new String[list.size()]; for (int i = 0; i < list.size(); ++i) { protocols[i] = list.get(i).protocol; } return this; } public void handleEvent(final StreamConnection channel) { if (UndertowLogger.REQUEST_LOGGER.isTraceEnabled()) { UndertowLogger.REQUEST_LOGGER.tracef("Opened connection with %s", channel.getPeerAddress()); } final SslConduit sslConduit = UndertowXnioSsl.getSslConduit((SslConnection) channel); final SSLEngine sslEngine = sslConduit.getSSLEngine(); if (!engineSupportsHTTP2(sslEngine)) { UndertowLogger.REQUEST_LOGGER.debugf("ALPN has been configured however %s is not present, falling back to default protocol", REQUIRED_CIPHER); if (fallbackProtocol != null) { ListenerEntry listener = listeners.get(fallbackProtocol); if (listener != null) { listener.listener.handleEvent(channel); return; } } } ALPNProvider provider = alpnManager.getProvider(sslEngine); if (provider == null) { if (fallbackProtocol != null) { ListenerEntry listener = listeners.get(fallbackProtocol); if (listener != null) { listener.listener.handleEvent(channel); return; } } UndertowLogger.REQUEST_LOGGER.debugf("No ALPN provider available and no fallback defined"); IoUtils.safeClose(channel); return; } SSLEngine newEngine = provider.setProtocols(sslEngine, protocols); if (newEngine != sslEngine) { sslConduit.setSslEngine(newEngine); } final AlpnConnectionListener potentialConnection = new AlpnConnectionListener(channel, newEngine, provider); channel.getSourceChannel().setReadListener(potentialConnection); potentialConnection.handleEvent(channel.getSourceChannel()); } public static boolean engineSupportsHTTP2(SSLEngine engine) { //check to make sure the engine meets the minimum requirements for HTTP/2 //if not then ALPN will not be attempted String[] protcols = engine.getEnabledProtocols(); boolean found = false; for(String proto : protcols) { if(proto.equals(REQUIRED_PROTOCOL)) { found = true; break; } } if(!found) { return false; } String[] ciphers = engine.getEnabledCipherSuites(); for (String i : ciphers) { if (i.equals(REQUIRED_CIPHER)) { return true; } } return false; } private class AlpnConnectionListener implements ChannelListener<StreamSourceChannel> { private final StreamConnection channel; private final SSLEngine engine; private final ALPNProvider provider; private AlpnConnectionListener(StreamConnection channel, SSLEngine engine, ALPNProvider provider) { this.channel = channel; this.engine = engine; this.provider = provider; } @Override public void handleEvent(StreamSourceChannel source) { PooledByteBuffer buffer = bufferPool.allocate(); boolean free = true; try { while (true) { int res = channel.getSourceChannel().read(buffer.getBuffer()); if (res == -1) { 
IoUtils.safeClose(channel); return; } buffer.getBuffer().flip(); final String selected = provider.getSelectedProtocol(engine); if (selected != null) { DelegateOpenListener listener; if (selected.isEmpty()) { //alpn not in use if (fallbackProtocol == null) { UndertowLogger.REQUEST_IO_LOGGER.noALPNFallback(channel.getPeerAddress()); IoUtils.safeClose(channel); return; } listener = listeners.get(fallbackProtocol).listener; } else { listener = listeners.get(selected).listener; } source.getReadSetter().set(null); listener.handleEvent(channel, buffer); free = false; return; } else if (res > 0) { if (fallbackProtocol == null) { UndertowLogger.REQUEST_IO_LOGGER.noALPNFallback(channel.getPeerAddress()); IoUtils.safeClose(channel); return; } DelegateOpenListener listener = listeners.get(fallbackProtocol).listener; source.getReadSetter().set(null); listener.handleEvent(channel, buffer); free = false; return; } else if (res == 0) { channel.getSourceChannel().resumeReads(); return; } } } catch (IOException e) { UndertowLogger.REQUEST_IO_LOGGER.ioException(e); IoUtils.safeClose(channel); } finally { if (free) { buffer.close(); } } } } }
Minor update to log message
core/src/main/java/io/undertow/server/protocol/http/AlpnOpenListener.java
Minor update to log message
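Independent of the log-message wording, the check that message describes is engineSupportsHTTP2(): before attempting ALPN, the listener verifies that the SSLEngine has TLSv1.2 enabled and offers the cipher suite the listener treats as the HTTP/2 baseline; otherwise it falls back to the default protocol. A minimal sketch of that prerequisite check, with an illustrative class name:

import java.util.Arrays;
import javax.net.ssl.SSLEngine;

public final class Http2Prerequisites {

    static final String REQUIRED_PROTOCOL = "TLSv1.2";
    static final String REQUIRED_CIPHER = "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256";

    /** Returns true if ALPN/HTTP-2 negotiation is worth attempting on this engine. */
    public static boolean supportsHttp2(SSLEngine engine) {
        boolean protocolOk = Arrays.asList(engine.getEnabledProtocols()).contains(REQUIRED_PROTOCOL);
        boolean cipherOk = Arrays.asList(engine.getEnabledCipherSuites()).contains(REQUIRED_CIPHER);
        return protocolOk && cipherOk;
    }
}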
Java
bsd-3-clause
c8e76d678b40680304a9486cc023f30e785a71fb
0
BaseXdb/basex,JensErat/basex,joansmith/basex,JensErat/basex,vincentml/basex,JensErat/basex,deshmnnit04/basex,deshmnnit04/basex,BaseXdb/basex,dimitarp/basex,dimitarp/basex,ksclarke/basex,deshmnnit04/basex,deshmnnit04/basex,BaseXdb/basex,deshmnnit04/basex,drmacro/basex,JensErat/basex,ksclarke/basex,ksclarke/basex,vincentml/basex,vincentml/basex,joansmith/basex,dimitarp/basex,dimitarp/basex,ksclarke/basex,drmacro/basex,drmacro/basex,vincentml/basex,joansmith/basex,ksclarke/basex,deshmnnit04/basex,joansmith/basex,BaseXdb/basex,ksclarke/basex,ksclarke/basex,BaseXdb/basex,deshmnnit04/basex,drmacro/basex,ksclarke/basex,vincentml/basex,drmacro/basex,dimitarp/basex,BaseXdb/basex,BaseXdb/basex,joansmith/basex,BaseXdb/basex,vincentml/basex,joansmith/basex,JensErat/basex,drmacro/basex,deshmnnit04/basex,drmacro/basex,dimitarp/basex,joansmith/basex,vincentml/basex,deshmnnit04/basex,ksclarke/basex,BaseXdb/basex,drmacro/basex,drmacro/basex,dimitarp/basex,JensErat/basex,ksclarke/basex,vincentml/basex,dimitarp/basex,vincentml/basex,vincentml/basex,JensErat/basex,dimitarp/basex,joansmith/basex,JensErat/basex,vincentml/basex,vincentml/basex,BaseXdb/basex,BaseXdb/basex,deshmnnit04/basex,drmacro/basex,joansmith/basex,JensErat/basex,JensErat/basex,drmacro/basex,deshmnnit04/basex,ksclarke/basex,dimitarp/basex,JensErat/basex,joansmith/basex,drmacro/basex,dimitarp/basex,joansmith/basex,dimitarp/basex,BaseXdb/basex,ksclarke/basex,JensErat/basex,joansmith/basex,deshmnnit04/basex
package org.basex.build.fs; import static org.basex.build.fs.FSText.*; import static org.basex.data.DataText.*; import static org.basex.util.Token.*; import java.io.File; import java.io.IOException; import org.basex.BaseX; import org.basex.build.Builder; import org.basex.build.Parser; import org.basex.build.fs.metadata.AbstractExtractor; import org.basex.build.fs.metadata.BMPExtractor; import org.basex.build.fs.metadata.EMLExtractor; import org.basex.build.fs.metadata.GIFExtractor; import org.basex.build.fs.metadata.JPGExtractor; import org.basex.build.fs.metadata.MP3Extractor; import org.basex.build.fs.metadata.MetaDataException; import org.basex.build.fs.metadata.PNGExtractor; import org.basex.build.fs.metadata.TIFExtractor; import org.basex.core.Prop; import org.basex.core.proc.CreateFS; import org.basex.io.BufferInput; import org.basex.io.IO; import org.basex.Text; import org.basex.util.Array; import org.basex.util.Atts; import org.basex.util.Map; /** Imports/shreds/parses a file hierarchy into a BaseX database. * * The overall process of importing a file hierarchy can be described * as follows: * <ol> * <li>The import is invoked by the {@link CreateFS} command. * To import on the command line type: * <tt>$ create fs [path] [dbname]</tt> * </li> * <li>This class {@link FSParser} instantiates the needed components * for the import process in its {@link FSParser#parse(Builder)} method. * The components are: * <ol> * <li>the file hierarchy traversal engine ({@link FSWalker} and</li> * <li>as many visitors ({@link FSVisitor}) as needed to receive * events during the traversal. In this case this class {@link FSParser} * also implements the interface {@link FSVisitor} and as such is * the essential one to shred/import the file hierarchy into an XML * hierarchy.</li> * </ol> * </ol> * * @see FSWalker * @see FSVisitor * @author Workgroup DBIS, University of Konstanz 2005-08, ISC License * @author Alexander Holupirek */ public final class FSParser extends Parser implements FSVisitor { /** The current File being processed. */ private String guimsg = ""; /** Meta data index. */ private final Map<AbstractExtractor> meta = new Map<AbstractExtractor>(); /** Cache for content indexing. */ private byte[] cache; /** Reference to the database builder. */ private Builder builder; /** * Constructor. * @param path the traversal starts from (enter "/" or leave empty to parse * all partitions (C:, D: ...) 
on Windows) */ public FSParser(final IO path) { super(path); // initialize cache for textual contents if(Prop.fscont) cache = new byte[Prop.fstextmax]; meta.add(TYPEGIF, new GIFExtractor()); meta.add(TYPEPNG, new PNGExtractor()); meta.add(TYPEJPG, new JPGExtractor()); meta.add(TYPEJPEG, new JPGExtractor()); meta.add(TYPEBMP, new BMPExtractor()); meta.add(TYPEGIF, new TIFExtractor()); meta.add(TYPEMP3, new MP3Extractor()); meta.add(TYPEEML, new EMLExtractor()); meta.add(TYPEMBS, new EMLExtractor()); meta.add(TYPEMBX, new EMLExtractor()); } /** * {@inheritDoc} */ public void preTraversal(final String path, final boolean docOpen) throws IOException { if(docOpen) builder.startElem(token(DEEPFS), atts.reset()); builder.startElem(DIR, atts.set(NAME, token(path))); } /** * {@inheritDoc} */ public void preEvent(final File dir) throws IOException { guimsg = dir.toString(); builder.startElem(DIR, atts(dir)); } /** * {@inheritDoc} */ public void postEvent() throws IOException { builder.endElem(DIR); } /** * {@inheritDoc} */ public void postEvent(final File dir) throws IOException { builder.endElem(DIR); } /** * {@inheritDoc} */ public void regfileEvent(final File f) throws IOException { guimsg = f.toString(); builder.startElem(FILE, atts(f)); if (f.canRead()) { if(Prop.fsmeta && f.getName().indexOf('.') != -1) { final String name = f.getName(); final int dot = name.lastIndexOf('.'); final byte[] suffix = lc(token(name.substring(dot + 1))); final AbstractExtractor index = meta.get(suffix); if(index != null && f.length() != 0) { try { index.extract(builder, f); } catch(final MetaDataException ex) { BaseX.debug(ex); } } } // import textual content if(Prop.fscont && f.isFile()) { final int size = BufferInput.read(f, cache); int s = -1; while(++s < size) { final byte b = cache[s]; if(b >= 0 && b < ' ' && !ws(b)) break; } if(s == size) { while(--s >= 0 && cache[s] <= 0x20 && cache[s] >= 0); if(++s != 0) { builder.nodeAndText(CONTENT, atts.reset(), Array.finish(cache, s)); } } } } builder.endElem(FILE); } /** * {@inheritDoc} */ public void symlinkEvent(final File link) { } /** * {@inheritDoc} */ public void postTraversal(final boolean docClose) throws IOException { builder.endElem(DIR); if(docClose) builder.endElem(token(DEEPFS)); } @Override public String head() { return Text.IMPORTPROG; } @Override public String det() { return guimsg; } @Override public double prog() { return 0; } /** * Main entry point for the import of a file hierarchy to BaseX. * Instantiates fht engine and visitors, and starts the traversal. * @param build instance passed by {@link CreateFS}. * @throws IOException I/O exception */ @Override public void parse(final Builder build) throws IOException { builder = build; builder.encoding(Prop.ENCODING); final FSWalker f = new FSWalker(); f.register(this); builder.startDoc(token(io.name())); f.fileHierarchyTraversal(io); builder.endDoc(); } /** Construct attributes for file and directory tags. * @param f file name * @return attributes as byte[][] */ private Atts atts(final File f) { final String name = f.getName(); final int s = name.lastIndexOf('.'); // current time storage: minutes from 1.1.1970 // (values will be smaller than 1GB and will thus be inlined in the storage) final long time = f.lastModified() / 60000; final byte[] suf = s != -1 ? lc(token(name.substring(s + 1))) : EMPTY; atts.reset(); atts.add(NAME, token(name)); atts.add(SUFFIX, suf); atts.add(SIZE, token(f.length())); if(time != 0) atts.add(MTIME, token(time)); return atts; } }
src/org/basex/build/fs/FSParser.java
package org.basex.build.fs; import static org.basex.build.fs.FSText.*; import static org.basex.data.DataText.*; import static org.basex.util.Token.*; import java.io.File; import java.io.IOException; import org.basex.BaseX; import org.basex.build.Builder; import org.basex.build.Parser; import org.basex.build.fs.metadata.AbstractExtractor; import org.basex.build.fs.metadata.BMPExtractor; import org.basex.build.fs.metadata.EMLExtractor; import org.basex.build.fs.metadata.GIFExtractor; import org.basex.build.fs.metadata.JPGExtractor; import org.basex.build.fs.metadata.MP3Extractor; import org.basex.build.fs.metadata.MetaDataException; import org.basex.build.fs.metadata.PNGExtractor; import org.basex.build.fs.metadata.TIFExtractor; import org.basex.core.Prop; import org.basex.core.proc.CreateFS; import org.basex.io.BufferInput; import org.basex.io.IO; import org.basex.Text; import org.basex.util.Array; import org.basex.util.Atts; import org.basex.util.Map; /** Imports/shreds/parses a file hierarchy into a BaseX database. * * The overall process of importing a file hierarchy can be described * as follows: * <ol> * <li>The import is invoked by the {@link CreateFS} command. * To import on the command line type: * <tt>$ create fs [path] [dbname]</tt> * </li> * <li>This class {@link FSParser} instantiates the needed components * for the import process in its {@link FSParser#parse(Builder)} method. * The components are: * <ol> * <li>the file hierarchy traversal engine ({@link FSWalker} and</li> * <li>as many visitors ({@link FSVisitor}) as needed to receive * events during the traversal. In this case this class {@link FSParser} * also implements the interface {@link FSVisitor} and as such is * the essential one to shred/import the file hierarchy into an XML * hierarchy.</li> * </ol> * </ol> * * @see FSWalker * @see FSVisitor * @author Workgroup DBIS, University of Konstanz 2005-08, ISC License * @author Alexander Holupirek */ public final class FSParser extends Parser implements FSVisitor { /** The current File being processed. */ private String guimsg = ""; /** Meta data index. */ private final Map<AbstractExtractor> meta = new Map<AbstractExtractor>(); /** Cache for content indexing. */ private byte[] cache; /** Reference to the database builder. */ private Builder builder; /** * Constructor. * @param path the traversal starts from (enter "/" or leave empty to parse * all partitions (C:, D: ...) 
on Windows) */ public FSParser(final IO path) { super(path); // initialize cache for textual contents if(Prop.fscont) cache = new byte[Prop.fstextmax]; meta.add(TYPEGIF, new GIFExtractor()); meta.add(TYPEPNG, new PNGExtractor()); meta.add(TYPEJPG, new JPGExtractor()); meta.add(TYPEJPEG, new JPGExtractor()); meta.add(TYPEBMP, new BMPExtractor()); meta.add(TYPEGIF, new TIFExtractor()); meta.add(TYPEMP3, new MP3Extractor()); meta.add(TYPEEML, new EMLExtractor()); meta.add(TYPEMBS, new EMLExtractor()); meta.add(TYPEMBX, new EMLExtractor()); } /** * {@inheritDoc} */ public void preTraversal(final String path, final boolean docOpen) throws IOException { if(docOpen) builder.startElem(token(DEEPFS), atts.reset()); builder.startElem(DIR, atts.set(NAME, token(path))); } /** * {@inheritDoc} */ public void preEvent(final File dir) throws IOException { guimsg = dir.toString(); builder.startElem(DIR, atts(dir)); } /** * {@inheritDoc} */ public void postEvent() throws IOException { builder.endElem(DIR); } /** * {@inheritDoc} */ public void regfileEvent(final File f) throws IOException { guimsg = f.toString(); builder.startElem(FILE, atts(f)); if (f.canRead()) { if(Prop.fsmeta && f.getName().indexOf('.') != -1) { final String name = f.getName(); final int dot = name.lastIndexOf('.'); final byte[] suffix = lc(token(name.substring(dot + 1))); final AbstractExtractor index = meta.get(suffix); if(index != null && f.length() != 0) { try { index.extract(builder, f); } catch(final MetaDataException ex) { BaseX.debug(ex); } } } // import textual content if(Prop.fscont && f.isFile()) { final int size = BufferInput.read(f, cache); int s = -1; while(++s < size) { final byte b = cache[s]; if(b >= 0 && b < ' ' && !ws(b)) break; } if(s == size) { while(--s >= 0 && cache[s] <= 0x20 && cache[s] >= 0); if(++s != 0) { builder.nodeAndText(CONTENT, atts.reset(), Array.finish(cache, s)); } } } } builder.endElem(FILE); } /** * {@inheritDoc} */ public void symlinkEvent(final File link) { } /** * {@inheritDoc} */ public void postTraversal(final boolean docClose) throws IOException { builder.endElem(DIR); if(docClose) builder.endElem(token(DEEPFS)); } @Override public String head() { return Text.IMPORTPROG; } @Override public String det() { return guimsg; } @Override public double prog() { return 0; } /** * Main entry point for the import of a file hierarchy to BaseX. * Instantiates fht engine and visitors, and starts the traversal. * @param build instance passed by {@link CreateFS}. * @throws IOException I/O exception */ @Override public void parse(final Builder build) throws IOException { builder = build; builder.encoding(Prop.ENCODING); final FSWalker f = new FSWalker(); f.register(this); builder.startDoc(token(io.name())); f.fileHierarchyTraversal(io); builder.endDoc(); } /** Construct attributes for file and directory tags. * @param f file name * @return attributes as byte[][] */ private Atts atts(final File f) { final String name = f.getName(); final int s = name.lastIndexOf('.'); // current time storage: minutes from 1.1.1970 // (values will be smaller than 1GB and will thus be inlined in the storage) final long time = f.lastModified() / 60000; final byte[] suf = s != -1 ? lc(token(name.substring(s + 1))) : EMPTY; atts.reset(); atts.add(NAME, token(name)); atts.add(SUFFIX, suf); atts.add(SIZE, token(f.length())); if(time != 0) atts.add(MTIME, token(time)); return atts; } }
FSVisitor inheritance bug fixed
src/org/basex/build/fs/FSParser.java
FSVisitor inheritance bug fixed
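FSParser is driven through FSVisitor callbacks: preEvent() when FSWalker enters a directory, regfileEvent() for every regular file, and postEvent() when the directory is left, bracketed by preTraversal()/postTraversal(). A minimal stand-alone sketch of that callback ordering; the recursive walk below is a stand-in for FSWalker, which is not shown in this record, and the class name is illustrative.

import java.io.File;
import java.io.IOException;

public final class PrintingVisitor {

    public void preEvent(File dir) { System.out.println("enter " + dir); }
    public void regfileEvent(File f) { System.out.println("file  " + f); }
    public void postEvent(File dir) { System.out.println("leave " + dir); }

    /** Stand-in traversal: depth-first, directory callbacks bracket the files inside. */
    public void walk(File dir) throws IOException {
        preEvent(dir);
        File[] children = dir.listFiles();
        if (children != null) {
            for (File child : children) {
                if (child.isDirectory()) {
                    walk(child);
                } else {
                    regfileEvent(child);
                }
            }
        }
        postEvent(dir);
    }

    public static void main(String[] args) throws IOException {
        new PrintingVisitor().walk(new File("."));
    }
}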
Java
mit
b19bbf4a5b8dea23c63b33e33ef7d271f25449ac
0
awong1900/WiFi_Iot_Node_App,awong1900/Wio_Link_Android_App,awong1900/Wio_Link_Android_App
package cc.seeed.iot.ui_setnode; import android.app.ProgressDialog; import android.content.ClipData; import android.content.ClipDescription; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Color; import android.graphics.PorterDuff; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.support.design.widget.Snackbar; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.util.Base64; import android.util.Log; import android.util.SparseBooleanArray; import android.util.SparseIntArray; import android.view.DragEvent; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.ImageView; import android.widget.TextView; import com.koushikdutta.urlimageviewhelper.UrlImageViewHelper; import java.util.ArrayList; import java.util.List; import cc.seeed.iot.MyApplication; import cc.seeed.iot.R; import cc.seeed.iot.datastruct.Constant; import cc.seeed.iot.datastruct.User; import cc.seeed.iot.ui_main.NodeApiActivity; import cc.seeed.iot.ui_setnode.View.GrovePinsView; import cc.seeed.iot.ui_setnode.model.NodeConfigHelper; import cc.seeed.iot.ui_setnode.model.PinConfig; import cc.seeed.iot.ui_setnode.model.PinConfigDBHelper; import cc.seeed.iot.util.DBHelper; import cc.seeed.iot.webapi.IotApi; import cc.seeed.iot.webapi.IotService; import cc.seeed.iot.webapi.model.GroverDriver; import cc.seeed.iot.webapi.model.Node; import cc.seeed.iot.webapi.model.OtaStatusResponse; import retrofit.Callback; import retrofit.RetrofitError; import retrofit.client.Response; public class SetupIotNodeActivity extends AppCompatActivity implements GroveFilterRecyclerAdapter.MainViewHolder.MyItemClickListener, View.OnClickListener, View.OnDragListener, View.OnLongClickListener { private static final String TAG = "SetupIotNodeActivity"; public static final String GROVE_REMOVE = "grove/remove"; public static final String GROVE_REMOVE_PIN6 = "grove/remove/6"; public static final String GROVE_ADD = "grove/add"; private static final int ADD_I2C_GROVE = 0x00; private static final int RMV_I2C_GROVE = 0x01; private static final int MESSAGE_UPDATE_DONE = 0x10; public Toolbar mToolbar; Node node; User user; List<PinConfig> pinConfigs = new ArrayList<>(); static View.OnClickListener mainOnClickListener; //Todo, no static static View.OnLongClickListener mainOnLongClickListener; //Todo, no static static View.OnLongClickListener pin6OnLongClickListener; //Todo, no static RecyclerView mGroveI2cListView; GroveI2cListRecyclerAdapter mGroveI2cListAdapter; RecyclerView mGroveListView; GroveListRecyclerAdapter mGroveListAdapter; RecyclerView mGroveTypeListView; GroveFilterRecyclerAdapter mGroveTypeListAdapter; private List<GroverDriver> mGroveDrivers; SparseBooleanArray nodePinSelector; NodeConfigHelper nodeConfigModel; View mSetNodeLayout; GrovePinsView mGrovePinsView; ProgressDialog mProgressDialog; private ImageView mDragRemoveView; private TextView i2cDeviceNumView; private Handler mHandler; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_setup_iot_node); View view = (View) findViewById(R.id.setup_iot_node); mProgressDialog = new ProgressDialog(this); mProgressDialog.setCanceledOnTouchOutside(false); 
mProgressDialog.setButton(ProgressDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); // mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.INVISIBLE); mainOnClickListener = new MainOnClickListener(this); mainOnLongClickListener = new MainOnClickListener(this); pin6OnLongClickListener = new Pin6OnClickListener(this); nodePinSelector = new SparseBooleanArray(); mGroveDrivers = DBHelper.getGrovesAll(); mToolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(mToolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); mDragRemoveView = (ImageView) findViewById(R.id.grove_remove); mDragRemoveView.setOnDragListener(this); mSetNodeLayout = (View) findViewById(R.id.set_node); mSetNodeLayout.setOnClickListener(this); String node_sn = getIntent().getStringExtra("node_sn"); node = DBHelper.getNodes(node_sn).get(0); nodeConfigModel = new NodeConfigHelper(node.node_sn); mGrovePinsView = new GrovePinsView(view, node); for (ImageView pinView : mGrovePinsView.pinViews) { pinView.setOnDragListener(this); pinView.setOnClickListener(this); pinView.setOnLongClickListener(this); } pinConfigs = PinConfigDBHelper.getPinConfigs(node.node_sn); Log.e(TAG, "ori_pinconfig" + pinConfigs.toString()); getSupportActionBar().setTitle(node.name); user = ((MyApplication) SetupIotNodeActivity.this.getApplication()).getUser(); mGroveListView = (RecyclerView) findViewById(R.id.grove_list); if (mGroveListView != null) { mGroveListView.setHasFixedSize(true); LinearLayoutManager layoutManager = new LinearLayoutManager(this); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mGroveListView.setLayoutManager(layoutManager); mGroveListAdapter = new GroveListRecyclerAdapter(mGroveDrivers); mGroveListView.setAdapter(mGroveListAdapter); } mGroveI2cListView = (RecyclerView) findViewById(R.id.grove_i2c_list); if (mGroveI2cListView != null) { mGroveI2cListView.setHasFixedSize(true); LinearLayoutManager layoutManager = new LinearLayoutManager(this); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mGroveI2cListView.setLayoutManager(layoutManager); mGroveI2cListAdapter = new GroveI2cListRecyclerAdapter(pinConfigs); mGroveI2cListView.setAdapter(mGroveI2cListAdapter); } mGroveTypeListView = (RecyclerView) findViewById(R.id.grove_selector); if (mGroveTypeListView != null) { mGroveTypeListView.setHasFixedSize(true); LinearLayoutManager layoutManager = new LinearLayoutManager(this); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mGroveTypeListView.setLayoutManager(layoutManager); setupGroveSelectorAdapter(); } i2cDeviceNumView = (TextView) view.findViewById(R.id.i2c_device_num); i2cDeviceNumViewDisplay(); initData(); } private void i2cDeviceNumViewDisplay() { if (pinDeviceCount(6) > 1) { i2cDeviceNumView.setVisibility(View.VISIBLE); i2cDeviceNumView.setText("+" + String.valueOf(pinDeviceCount(6) - 1)); } else { i2cDeviceNumView.setVisibility(View.GONE); } } private void initData() { mHandler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case ADD_I2C_GROVE: //if i2c list visible, dynamic add, move to end position updateI2cGroveList(); scrollI2cGroveListToEnd(); //refresh number display i2cDeviceNumViewDisplay(); break; case RMV_I2C_GROVE: //if i2c list visible, dynamic remove if (pinDeviceCount(6) < 2) mGroveI2cListView.setVisibility(View.INVISIBLE); else updateI2cGroveList(); //refresh number display i2cDeviceNumViewDisplay(); 
//refresh pin6 image mGrovePinsView.updatePin6(pinConfigs); break; case MESSAGE_UPDATE_DONE: { String message = (String) msg.obj; new AlertDialog.Builder(SetupIotNodeActivity.this) .setTitle(R.string.update) .setMessage(message) .setPositiveButton(R.string.ok, null) .show(); } break; } } }; } private void scrollI2cGroveListToEnd() { mGroveI2cListView.smoothScrollToPosition(mGroveI2cListAdapter.getItemCount() - 1); } private void updateI2cGroveList() { List<PinConfig> pin6Configs = new ArrayList<>(); for (PinConfig p : pinConfigs) { if (p.position == 6) pin6Configs.add(p); } mGroveI2cListAdapter.updateAll(pin6Configs); } @Override protected void onResume() { super.onResume(); getGrovesData(); } private void setupGroveSelectorAdapter() { mGroveTypeListAdapter = new GroveFilterRecyclerAdapter(Constant.groveTypes); mGroveTypeListAdapter.setOnItemClickListener(this); mGroveTypeListView.setAdapter(mGroveTypeListAdapter); mGroveTypeListAdapter.updateSelection(0); } private void updateGroveListAdapter(List<GroverDriver> groverDrivers) { mGroveListAdapter.updateAll(groverDrivers); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.ui_setup, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == android.R.id.home) { finish(); return true; } else if (id == R.id.api) { Intent intent = new Intent(this, NodeApiActivity.class); intent.putExtra("node_sn", node.node_sn); startActivity(intent); } else if (id == R.id.update) { //TODO update firmware if (node.name == null) return true; // String yaml = "" + // "GroveMultiChannelGas:\r\n" + // " sku: 101020088\r\n" + // " name: Grove-Multichannel Gas Sensor\r\n" + // " construct_arg_list:\r\n" + // " pinsda: 4\r\n" + // " pinscl: 5\r\n"; String yaml = NodeConfigHelper.getConfigYaml(pinConfigs); Log.i(TAG, "yaml:\n" + yaml); if (yaml.isEmpty()) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage("Forger add grove?"); builder.setTitle("Tip"); builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); builder.create().show(); return true; } String Base64Yaml = Base64.encodeToString(yaml.getBytes(), Base64.DEFAULT); updateNode(node.node_key, Base64Yaml); return true; } return super.onOptionsItemSelected(item); } @Override public boolean onContextItemSelected(MenuItem item) { switch (item.getItemId()) { case 1: // nodeConfigModel.removePinNode(1); // uiStateControl.removeSelectedPin(1); break; case 2: // nodeConfigModel.removePinNode(2); // uiStateControl.removeSelectedPin(2); break; case 3: // nodeConfigModel.removePinNode(3); // uiStateControl.removeSelectedPin(3); break; case 4: // nodeConfigModel.removePinNode(4); // uiStateControl.removeSelectedPin(4); break; case 5: // nodeConfigModel.removePinNode(5); // uiStateControl.removeSelectedPin(5); break; case 6: // nodeConfigModel.removePinNode(6); // uiStateControl.removeSelectedPin(6); break; } return super.onContextItemSelected(item); } private void updateNode(final String node_key, String base64Yaml) { mProgressDialog.show(); mProgressDialog.setMessage("Ready to ota..."); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.INVISIBLE); IotApi api = new IotApi(); final IotService iot = api.getService(); iot.userDownload(node_key, base64Yaml, new Callback<OtaStatusResponse>() { @Override public void success(OtaStatusResponse otaStatusResponse, Response 
response) { if (otaStatusResponse.status.equals("200")) { mProgressDialog.setMessage(otaStatusResponse.ota_msg); displayStatus(node_key); } else { mProgressDialog.setMessage("Error:" + otaStatusResponse.msg); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.VISIBLE); } } @Override public void failure(RetrofitError error) { mProgressDialog.dismiss(); Log.e(TAG, "error:" + error); } }); } private void displayStatus(final String node_key) { IotApi api = new IotApi(); final IotService iot = api.getService(); iot.otaStatus(node_key, new Callback<OtaStatusResponse>() { @Override public void success(OtaStatusResponse otaStatusResponse, Response response) { if (otaStatusResponse.status.equals("200")) { if (otaStatusResponse.ota_status.equals("going")) { displayStatus(node_key); mProgressDialog.setMessage(otaStatusResponse.ota_msg); } else if (otaStatusResponse.ota_status.equals("done")) { mProgressDialog.dismiss(); Message message = Message.obtain(); message.what = MESSAGE_UPDATE_DONE; message.obj = otaStatusResponse.ota_msg; mHandler.sendMessage(message); } else if (otaStatusResponse.ota_status.equals("error")) { mProgressDialog.setMessage(otaStatusResponse.ota_status + ":" + otaStatusResponse.ota_msg); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.VISIBLE); mProgressDialog.setButton(DialogInterface.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); } } else { mProgressDialog.setMessage(otaStatusResponse.status + ":" + otaStatusResponse.msg); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.VISIBLE); mProgressDialog.setButton(DialogInterface.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); } } @Override public void failure(RetrofitError error) { Log.e(TAG, "error:" + error); mProgressDialog.dismiss(); } } ); } @Override public void onItemClick(View view, int position) { String groveType = Constant.groveTypes[position]; List<GroverDriver> inputGroves = new ArrayList<GroverDriver>(); List<GroverDriver> outputGroves = new ArrayList<GroverDriver>(); if (mGroveDrivers == null) return; for (GroverDriver g : mGroveDrivers) { if (!g.Inputs.isEmpty()) { outputGroves.add(g); } if (!g.Outputs.isEmpty()) inputGroves.add(g); } mGroveTypeListAdapter.updateSelection(position); if (groveType.equals("All")) { updateGroveListAdapter(mGroveDrivers); } else if (groveType.equals("Input")) { updateGroveListAdapter(inputGroves); } else if (groveType.equals("Output")) { updateGroveListAdapter(outputGroves); } } @Override public void onClick(View v) { switch (v.getId()) { case R.id.grove_1: // Snackbar.make(v, "Grove name:" + pinDeviceCount(1), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_2: // Snackbar.make(v, "Grove name:" + pinDeviceCount(2), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_3: // Snackbar.make(v, "Grove name:" + pinDeviceCount(3), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_4: // Snackbar.make(v, "Grove name:" + pinDeviceCount(4), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_5: // Snackbar.make(v, "Grove name:" + pinDeviceCount(5), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_6: if (pinDeviceCount(6) == 0) { ; } else if (pinDeviceCount(6) == 1) { Snackbar.make(v, "Grove name:" + pinDeviceCount(5), Snackbar.LENGTH_LONG).show(); } else if (pinDeviceCount(6) > 1) { if (mGroveI2cListView.getVisibility() == 
View.VISIBLE) mGroveI2cListView.setVisibility(View.INVISIBLE); else { mGroveI2cListView.setVisibility(View.VISIBLE); updateI2cGroveList(); } } break; } } private int pinDeviceCount(int position) { SparseIntArray sparseIntArray = new SparseIntArray(); for (PinConfig pinConfig : pinConfigs) { int count = sparseIntArray.get(pinConfig.position, 0); count = count + 1; sparseIntArray.append(pinConfig.position, count); } return sparseIntArray.get(position, 0); } @Override public boolean onDrag(View v, DragEvent event) { // Log.e(TAG, v.toString()); // Log.e(TAG, event.toString()); int action = event.getAction(); switch (v.getId()) { case R.id.grove_1: case R.id.grove_2: case R.id.grove_3: case R.id.grove_4: case R.id.grove_5: case R.id.grove_6: switch (action) { case DragEvent.ACTION_DRAG_STARTED: { if (!event.getClipDescription().hasMimeType(GROVE_ADD)) return false; GrovePinsView.Tag tag = (GrovePinsView.Tag) v.getTag(); String interfaceType = tag.interfaceType; GroverDriver groverDriver = (GroverDriver) event.getLocalState(); if (!interfaceType.equals(groverDriver.InterfaceType)) { // Log.e(TAG, groverDriver.InterfaceType); return false; } v.setActivated(true); ((ImageView) v).setImageAlpha(64); } break; case DragEvent.ACTION_DRAG_ENTERED: Log.e(TAG, "entered"); v.setActivated(false); ((ImageView) v).setImageAlpha(64); break; case DragEvent.ACTION_DRAG_EXITED: v.setActivated(true); ((ImageView) v).setImageAlpha(64); break; case DragEvent.ACTION_DRAG_ENDED: v.setActivated(false); ((ImageView) v).setImageAlpha(255); break; case DragEvent.ACTION_DROP: { GroverDriver groverDriver = (GroverDriver) event.getLocalState(); Log.e(TAG, "Drop " + groverDriver.GroveName); UrlImageViewHelper.setUrlDrawable((ImageView) v, groverDriver.ImageURL, R.drawable.grove_no, UrlImageViewHelper.CACHE_DURATION_INFINITE); PinConfig pinConfig = new PinConfig(); pinConfig.position = ((GrovePinsView.Tag) v.getTag()).position; pinConfig.selected = true; pinConfig.grove_id = groverDriver.ID; pinConfig.node_sn = node.node_sn; if (pinConfig.position != 6) { //One pin connect one grove Boolean status = false; PinConfig dup_pinConfig = new PinConfig(); for (PinConfig p : pinConfigs) if (p.position == pinConfig.position) { status = true; dup_pinConfig = p; } if (status) pinConfigs.remove(dup_pinConfig); } else { //duplicate i2c grove is not allowed Boolean status = false; PinConfig dup_pinConfig = new PinConfig(); for (PinConfig p : pinConfigs) if (p.grove_id == pinConfig.grove_id) { status = true; dup_pinConfig = p; } if (status) pinConfigs.remove(dup_pinConfig); } String groveInstanceName; // List<String> groveInstanceNames = new ArrayList<>(); // for (PinConfig p : pinConfigs) { // groveInstanceNames.add(p.groveInstanceName); // } // groveInstanceName = groverDriver.ClassName; // int i = 1; // while (true) { // if (groveInstanceNames.contains(groveInstanceName)) { // groveInstanceName = groveInstanceName.split("_0")[0] + "_0" + Integer.toString(i); // } else { // groveInstanceNames.add(groveInstanceName); // break; // } // i++; // } if (pinConfig.position >= 1 && pinConfig.position <= 3) groveInstanceName = groverDriver.ClassName + "_Digital" + (pinConfig.position - 1); else if (pinConfig.position == 4) groveInstanceName = groverDriver.ClassName + "_Analog"; else if (pinConfig.position == 5) groveInstanceName = groverDriver.ClassName + "_UART"; else if (pinConfig.position == 6) groveInstanceName = groverDriver.ClassName + "_I2C"; else groveInstanceName = groverDriver.ClassName; pinConfig.groveInstanceName = groveInstanceName; 
pinConfigs.add(pinConfig); Log.e(TAG, "drag pinConfigs " + pinConfigs); if (v.getId() == R.id.grove_6) { Message message = Message.obtain(); message.what = ADD_I2C_GROVE; mHandler.sendMessage(message); } } break; } break; case R.id.grove_remove: switch (action) { case DragEvent.ACTION_DRAG_STARTED: { return event.getClipDescription().hasMimeType(GROVE_REMOVE) || event.getClipDescription().hasMimeType(GROVE_REMOVE_PIN6); } case DragEvent.ACTION_DRAG_ENTERED: ((ImageView) v).setColorFilter(Color.RED, PorterDuff.Mode.SRC_IN); break; case DragEvent.ACTION_DRAG_EXITED: ((ImageView) v).setColorFilter(Color.RED, PorterDuff.Mode.DST); case DragEvent.ACTION_DROP: { if (event.getClipDescription().hasMimeType(GROVE_REMOVE)) { ImageView view = (ImageView) event.getLocalState(); Log.e(TAG, ((GrovePinsView.Tag) view.getTag()).position + ""); view.setImageDrawable(null); int position = ((GrovePinsView.Tag) view.getTag()).position; removePinConfig(position); } else if (event.getClipDescription().hasMimeType(GROVE_REMOVE_PIN6)) { PinConfig pinConfig = (PinConfig) event.getLocalState(); Log.e(TAG, pinConfig.groveInstanceName); removePinConfig(pinConfig.groveInstanceName); Message message = Message.obtain(); message.what = RMV_I2C_GROVE; mHandler.sendMessage(message); } break; } case DragEvent.ACTION_DRAG_ENDED: ((ImageView) v).setColorFilter(Color.RED, PorterDuff.Mode.DST); mDragRemoveView.setVisibility(View.INVISIBLE); } break; default: Log.e(TAG, v.toString()); break; } return true; } private void removePinConfig(int position) { if (position < 1 || position > 6) return; PinConfig rp = new PinConfig(); for (PinConfig p : pinConfigs) { if (p.position == position) rp = p; } pinConfigs.remove(rp); } private void removePinConfig(String groveInstanceName) { PinConfig rp = new PinConfig(); for (PinConfig p : pinConfigs) { if (p.groveInstanceName.equals(groveInstanceName)) rp = p; } pinConfigs.remove(rp); } @Override public boolean onLongClick(View v) { switch (v.getId()) { case R.id.grove_1: if (pinDeviceCount(1) > 0) startDragRemove(v); break; case R.id.grove_2: if (pinDeviceCount(2) > 0) startDragRemove(v); break; case R.id.grove_3: if (pinDeviceCount(3) > 0) startDragRemove(v); break; case R.id.grove_4: if (pinDeviceCount(4) > 0) startDragRemove(v); case R.id.grove_5: if (pinDeviceCount(5) > 0) startDragRemove(v); break; case R.id.grove_6: Snackbar.make(v, "Grove name:" + pinDeviceCount(6), Snackbar.LENGTH_LONG).show(); if (pinDeviceCount(6) == 0) { ; } else if (pinDeviceCount(6) == 1) { startDragRemove(v); } else if (pinDeviceCount(6) > 1) { // openI2cDeviceFolder(); } break; } return true; } private void startDragRemove(View v) { mDragRemoveView.setVisibility(View.VISIBLE); String label = "grove_remove"; String[] mimeTypes = {GROVE_REMOVE}; ClipDescription clipDescription = new ClipDescription(label, mimeTypes); ClipData.Item item = new ClipData.Item("drag grove"); ClipData clipData = new ClipData(clipDescription, item); View.DragShadowBuilder shadowBuiler = new View.DragShadowBuilder(v); v.startDrag(clipData, shadowBuiler, v, 0); } private class MainOnClickListener implements View.OnClickListener, View.OnLongClickListener { private final Context context; private MainOnClickListener(Context c) { this.context = c; } @Override public void onClick(View v) { ; } @Override public boolean onLongClick(View v) { // Snackbar.make(v, "Todo:grove detail page", Snackbar.LENGTH_SHORT).show(); String label = "grove_add"; String[] mimeTypes = {GROVE_ADD}; ClipDescription clipDescription = new ClipDescription(label, 
mimeTypes); ClipData.Item item = new ClipData.Item("drag grove"); ClipData clipData = new ClipData(clipDescription, item); View.DragShadowBuilder shadowBuiler = new View.DragShadowBuilder(v); mGroveListAdapter.selectItem(mGroveListView.getChildAdapterPosition(v)); GroverDriver grove = mGroveListAdapter.getSelectedItem(); v.startDrag(clipData, shadowBuiler, grove, 0); return true; } } private void getGrovesData() { IotApi api = new IotApi(); String token = user.user_key; api.setAccessToken(token); IotService iot = api.getService(); iot.scanDrivers(new Callback<List<GroverDriver>>() { @Override public void success(List<GroverDriver> groverDrivers, retrofit.client.Response response) { for (GroverDriver groveDriver : groverDrivers) { groveDriver.save(); } updateGroveListAdapter(groverDrivers); } @Override public void failure(RetrofitError error) { Log.e(TAG, error.toString()); } }); } private class Pin6OnClickListener implements View.OnLongClickListener { public Pin6OnClickListener(SetupIotNodeActivity setupIotNodeActivity) { } @Override public boolean onLongClick(View v) { mDragRemoveView.setVisibility(View.VISIBLE); String label = "grove_remove_6"; String[] mimeTypes = {GROVE_REMOVE_PIN6}; ClipDescription clipDescription = new ClipDescription(label, mimeTypes); ClipData.Item item = new ClipData.Item("drag grove"); ClipData clipData = new ClipData(clipDescription, item); View.DragShadowBuilder shadowBuiler = new View.DragShadowBuilder(v); // mGroveListAdapter.selectItem(mGroveListView.getChildAdapterPosition(v)); // GroverDriver grove = mGroveListAdapter.getSelectedItem(); PinConfig pinConfig = mGroveI2cListAdapter.getItem(mGroveI2cListView.getChildAdapterPosition(v)); v.startDrag(clipData, shadowBuiler, pinConfig, 0); return true; } } }
android/app/src/main/java/cc/seeed/iot/ui_setnode/SetupIotNodeActivity.java
package cc.seeed.iot.ui_setnode; import android.app.ProgressDialog; import android.content.ClipData; import android.content.ClipDescription; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Color; import android.graphics.PorterDuff; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.support.design.widget.Snackbar; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.util.Base64; import android.util.Log; import android.util.SparseBooleanArray; import android.util.SparseIntArray; import android.view.DragEvent; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.ImageView; import android.widget.TextView; import com.koushikdutta.urlimageviewhelper.UrlImageViewHelper; import java.util.ArrayList; import java.util.List; import cc.seeed.iot.MyApplication; import cc.seeed.iot.R; import cc.seeed.iot.datastruct.Constant; import cc.seeed.iot.datastruct.User; import cc.seeed.iot.ui_main.NodeApiActivity; import cc.seeed.iot.ui_setnode.View.GrovePinsView; import cc.seeed.iot.ui_setnode.model.NodeConfigHelper; import cc.seeed.iot.ui_setnode.model.PinConfig; import cc.seeed.iot.ui_setnode.model.PinConfigDBHelper; import cc.seeed.iot.util.DBHelper; import cc.seeed.iot.webapi.IotApi; import cc.seeed.iot.webapi.IotService; import cc.seeed.iot.webapi.model.GroverDriver; import cc.seeed.iot.webapi.model.Node; import cc.seeed.iot.webapi.model.OtaStatusResponse; import retrofit.Callback; import retrofit.RetrofitError; import retrofit.client.Response; public class SetupIotNodeActivity extends AppCompatActivity implements GroveFilterRecyclerAdapter.MainViewHolder.MyItemClickListener, View.OnClickListener, View.OnDragListener, View.OnLongClickListener { private static final String TAG = "SetupIotNodeActivity"; public static final String GROVE_REMOVE = "grove/remove"; public static final String GROVE_REMOVE_PIN6 = "grove/remove/6"; public static final String GROVE_ADD = "grove/add"; private static final int ADD_I2C_GROVE = 0x00; private static final int RMV_I2C_GROVE = 0x01; private static final int MESSAGE_UPDATE_DONE = 0x10; public Toolbar mToolbar; Node node; User user; List<PinConfig> pinConfigs = new ArrayList<>(); static View.OnClickListener mainOnClickListener; //Todo, no static static View.OnLongClickListener mainOnLongClickListener; //Todo, no static static View.OnLongClickListener pin6OnLongClickListener; //Todo, no static RecyclerView mGroveI2cListView; GroveI2cListRecyclerAdapter mGroveI2cListAdapter; RecyclerView mGroveListView; GroveListRecyclerAdapter mGroveListAdapter; RecyclerView mGroveTypeListView; GroveFilterRecyclerAdapter mGroveTypeListAdapter; private List<GroverDriver> mGroveDrivers; SparseBooleanArray nodePinSelector; NodeConfigHelper nodeConfigModel; View mSetNodeLayout; GrovePinsView mGrovePinsView; ProgressDialog mProgressDialog; private ImageView mDragRemoveView; private TextView i2cDeviceNumView; private Handler mHandler; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_setup_iot_node); View view = (View) findViewById(R.id.setup_iot_node); mProgressDialog = new ProgressDialog(this); mProgressDialog.setCanceledOnTouchOutside(false); 
mProgressDialog.setButton(ProgressDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); // mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.INVISIBLE); mainOnClickListener = new MainOnClickListener(this); mainOnLongClickListener = new MainOnClickListener(this); pin6OnLongClickListener = new Pin6OnClickListener(this); nodePinSelector = new SparseBooleanArray(); mGroveDrivers = DBHelper.getGrovesAll(); mToolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(mToolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); mDragRemoveView = (ImageView) findViewById(R.id.grove_remove); mDragRemoveView.setOnDragListener(this); mSetNodeLayout = (View) findViewById(R.id.set_node); mSetNodeLayout.setOnClickListener(this); String node_sn = getIntent().getStringExtra("node_sn"); node = DBHelper.getNodes(node_sn).get(0); nodeConfigModel = new NodeConfigHelper(node.node_sn); mGrovePinsView = new GrovePinsView(view, node); for (ImageView pinView : mGrovePinsView.pinViews) { pinView.setOnDragListener(this); pinView.setOnClickListener(this); pinView.setOnLongClickListener(this); } pinConfigs = PinConfigDBHelper.getPinConfigs(node.node_sn); Log.e(TAG, "ori_pinconfig" + pinConfigs.toString()); getSupportActionBar().setTitle(node.name); user = ((MyApplication) SetupIotNodeActivity.this.getApplication()).getUser(); mGroveListView = (RecyclerView) findViewById(R.id.grove_list); if (mGroveListView != null) { mGroveListView.setHasFixedSize(true); LinearLayoutManager layoutManager = new LinearLayoutManager(this); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mGroveListView.setLayoutManager(layoutManager); mGroveListAdapter = new GroveListRecyclerAdapter(mGroveDrivers); mGroveListView.setAdapter(mGroveListAdapter); } mGroveI2cListView = (RecyclerView) findViewById(R.id.grove_i2c_list); if (mGroveI2cListView != null) { mGroveI2cListView.setHasFixedSize(true); LinearLayoutManager layoutManager = new LinearLayoutManager(this); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mGroveI2cListView.setLayoutManager(layoutManager); mGroveI2cListAdapter = new GroveI2cListRecyclerAdapter(pinConfigs); mGroveI2cListView.setAdapter(mGroveI2cListAdapter); } mGroveTypeListView = (RecyclerView) findViewById(R.id.grove_selector); if (mGroveTypeListView != null) { mGroveTypeListView.setHasFixedSize(true); LinearLayoutManager layoutManager = new LinearLayoutManager(this); layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL); mGroveTypeListView.setLayoutManager(layoutManager); setupGroveSelectorAdapter(); } i2cDeviceNumView = (TextView) view.findViewById(R.id.i2c_device_num); i2cDeviceNumViewDisplay(); initData(); } private void i2cDeviceNumViewDisplay() { if (pinDeviceCount(6) > 1) { i2cDeviceNumView.setVisibility(View.VISIBLE); i2cDeviceNumView.setText("+" + String.valueOf(pinDeviceCount(6) - 1)); } else { i2cDeviceNumView.setVisibility(View.GONE); } } private void initData() { mHandler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case ADD_I2C_GROVE: //if i2c list visible, dynamic add, move to end position updateI2cGroveList(); scrollI2cGroveListToEnd(); //refresh number display i2cDeviceNumViewDisplay(); break; case RMV_I2C_GROVE: //if i2c list visible, dynamic remove if (pinDeviceCount(6) < 2) mGroveI2cListView.setVisibility(View.INVISIBLE); else updateI2cGroveList(); //refresh number display i2cDeviceNumViewDisplay(); 
//refresh pin6 image mGrovePinsView.updatePin6(pinConfigs); break; case MESSAGE_UPDATE_DONE: { String message = (String) msg.obj; new AlertDialog.Builder(SetupIotNodeActivity.this) .setTitle(R.string.update) .setMessage(message) .setPositiveButton(R.string.ok, null) .show(); } break; } } }; } private void scrollI2cGroveListToEnd() { mGroveI2cListView.smoothScrollToPosition(mGroveI2cListAdapter.getItemCount() - 1); } private void updateI2cGroveList() { List<PinConfig> pin6Configs = new ArrayList<>(); for (PinConfig p : pinConfigs) { if (p.position == 6) pin6Configs.add(p); } mGroveI2cListAdapter.updateAll(pin6Configs); } @Override protected void onResume() { super.onResume(); getGrovesData(); } private void setupGroveSelectorAdapter() { mGroveTypeListAdapter = new GroveFilterRecyclerAdapter(Constant.groveTypes); mGroveTypeListAdapter.setOnItemClickListener(this); mGroveTypeListView.setAdapter(mGroveTypeListAdapter); mGroveTypeListAdapter.updateSelection(0); } private void updateGroveListAdapter(List<GroverDriver> groverDrivers) { mGroveListAdapter.updateAll(groverDrivers); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.ui_setup, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == android.R.id.home) { finish(); return true; } else if(id == R.id.api){ Intent intent = new Intent(this, NodeApiActivity.class); intent.putExtra("node_sn", node.node_sn); startActivity(intent); } else if (id == R.id.update) { //TODO update firmware if (node.name == null) return true; // String yaml = "" + // "GroveMultiChannelGas:\r\n" + // " sku: 101020088\r\n" + // " name: Grove-Multichannel Gas Sensor\r\n" + // " construct_arg_list:\r\n" + // " pinsda: 4\r\n" + // " pinscl: 5\r\n"; String yaml = NodeConfigHelper.getConfigYaml(pinConfigs); Log.i(TAG, "yaml:\n" + yaml); if (yaml.isEmpty()) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage("Forger add grove?"); builder.setTitle("Tip"); builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); builder.create().show(); return true; } String Base64Yaml = Base64.encodeToString(yaml.getBytes(), Base64.DEFAULT); updateNode(node.node_key, Base64Yaml); return true; } return super.onOptionsItemSelected(item); } @Override public boolean onContextItemSelected(MenuItem item) { switch (item.getItemId()) { case 1: // nodeConfigModel.removePinNode(1); // uiStateControl.removeSelectedPin(1); break; case 2: // nodeConfigModel.removePinNode(2); // uiStateControl.removeSelectedPin(2); break; case 3: // nodeConfigModel.removePinNode(3); // uiStateControl.removeSelectedPin(3); break; case 4: // nodeConfigModel.removePinNode(4); // uiStateControl.removeSelectedPin(4); break; case 5: // nodeConfigModel.removePinNode(5); // uiStateControl.removeSelectedPin(5); break; case 6: // nodeConfigModel.removePinNode(6); // uiStateControl.removeSelectedPin(6); break; } return super.onContextItemSelected(item); } private void updateNode(final String node_key, String base64Yaml) { mProgressDialog.show(); mProgressDialog.setMessage("Ready to ota..."); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.INVISIBLE); IotApi api = new IotApi(); final IotService iot = api.getService(); iot.userDownload(node_key, base64Yaml, new Callback<OtaStatusResponse>() { @Override public void success(OtaStatusResponse otaStatusResponse, Response response) 
{ if (otaStatusResponse.status.equals("200")) { mProgressDialog.setMessage(otaStatusResponse.ota_msg); displayStatus(node_key); } else { mProgressDialog.setMessage("Error:" + otaStatusResponse.msg); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.VISIBLE); } } @Override public void failure(RetrofitError error) { mProgressDialog.dismiss(); Log.e(TAG, "error:" + error); } }); } private void displayStatus(final String node_key) { IotApi api = new IotApi(); final IotService iot = api.getService(); iot.otaStatus(node_key, new Callback<OtaStatusResponse>() { @Override public void success(OtaStatusResponse otaStatusResponse, Response response) { if (otaStatusResponse.status.equals("200")) { if (otaStatusResponse.ota_status.equals("going")) { displayStatus(node_key); mProgressDialog.setMessage(otaStatusResponse.ota_msg); } else if (otaStatusResponse.ota_status.equals("done")) { mProgressDialog.dismiss(); Message message = Message.obtain(); message.what = MESSAGE_UPDATE_DONE; message.obj = otaStatusResponse.ota_msg; mHandler.sendMessage(message); } else if (otaStatusResponse.ota_status.equals("error")) { mProgressDialog.setMessage(otaStatusResponse.ota_status + ":" + otaStatusResponse.ota_msg); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.VISIBLE); mProgressDialog.setButton(DialogInterface.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); } } else { mProgressDialog.setMessage(otaStatusResponse.status + ":" + otaStatusResponse.msg); mProgressDialog.getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(View.VISIBLE); mProgressDialog.setButton(DialogInterface.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { } }); } } @Override public void failure(RetrofitError error) { Log.e(TAG, "error:" + error); mProgressDialog.dismiss(); } } ); } @Override public void onItemClick(View view, int position) { String groveType = Constant.groveTypes[position]; List<GroverDriver> inputGroves = new ArrayList<GroverDriver>(); List<GroverDriver> outputGroves = new ArrayList<GroverDriver>(); if (mGroveDrivers == null) return; for (GroverDriver g : mGroveDrivers) { if (!g.Inputs.isEmpty()) { outputGroves.add(g); } if (!g.Outputs.isEmpty()) inputGroves.add(g); } mGroveTypeListAdapter.updateSelection(position); if (groveType.equals("All")) { updateGroveListAdapter(mGroveDrivers); } else if (groveType.equals("Input")) { updateGroveListAdapter(inputGroves); } else if (groveType.equals("Output")) { updateGroveListAdapter(outputGroves); } } @Override public void onClick(View v) { switch (v.getId()) { case R.id.grove_1: // Snackbar.make(v, "Grove name:" + pinDeviceCount(1), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_2: // Snackbar.make(v, "Grove name:" + pinDeviceCount(2), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_3: // Snackbar.make(v, "Grove name:" + pinDeviceCount(3), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_4: // Snackbar.make(v, "Grove name:" + pinDeviceCount(4), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_5: // Snackbar.make(v, "Grove name:" + pinDeviceCount(5), Snackbar.LENGTH_LONG).show(); break; case R.id.grove_6: if (pinDeviceCount(6) == 0) { ; } else if (pinDeviceCount(6) == 1) { Snackbar.make(v, "Grove name:" + pinDeviceCount(5), Snackbar.LENGTH_LONG).show(); } else if (pinDeviceCount(6) > 1) { if (mGroveI2cListView.getVisibility() == View.VISIBLE) 
mGroveI2cListView.setVisibility(View.INVISIBLE); else { mGroveI2cListView.setVisibility(View.VISIBLE); updateI2cGroveList(); } } break; } } private int pinDeviceCount(int position) { SparseIntArray sparseIntArray = new SparseIntArray(); for (PinConfig pinConfig : pinConfigs) { int count = sparseIntArray.get(pinConfig.position, 0); count = count + 1; sparseIntArray.append(pinConfig.position, count); } return sparseIntArray.get(position, 0); } @Override public boolean onDrag(View v, DragEvent event) { // Log.e(TAG, v.toString()); // Log.e(TAG, event.toString()); int action = event.getAction(); switch (v.getId()) { case R.id.grove_1: case R.id.grove_2: case R.id.grove_3: case R.id.grove_4: case R.id.grove_5: case R.id.grove_6: switch (action) { case DragEvent.ACTION_DRAG_STARTED: { if (!event.getClipDescription().hasMimeType(GROVE_ADD)) return false; GrovePinsView.Tag tag = (GrovePinsView.Tag) v.getTag(); String interfaceType = tag.interfaceType; GroverDriver groverDriver = (GroverDriver) event.getLocalState(); if (!interfaceType.equals(groverDriver.InterfaceType)) { // Log.e(TAG, groverDriver.InterfaceType); return false; } v.setActivated(true); ((ImageView) v).setImageAlpha(64); } break; case DragEvent.ACTION_DRAG_ENTERED: Log.e(TAG, "entered"); v.setActivated(false); ((ImageView) v).setImageAlpha(64); break; case DragEvent.ACTION_DRAG_EXITED: v.setActivated(true); ((ImageView) v).setImageAlpha(64); break; case DragEvent.ACTION_DRAG_ENDED: v.setActivated(false); ((ImageView) v).setImageAlpha(255); break; case DragEvent.ACTION_DROP: { GroverDriver groverDriver = (GroverDriver) event.getLocalState(); Log.e(TAG, "Drop " + groverDriver.GroveName); UrlImageViewHelper.setUrlDrawable((ImageView) v, groverDriver.ImageURL, R.drawable.grove_no, UrlImageViewHelper.CACHE_DURATION_INFINITE); PinConfig pinConfig = new PinConfig(); pinConfig.position = ((GrovePinsView.Tag) v.getTag()).position; pinConfig.selected = true; pinConfig.grove_id = groverDriver.ID; pinConfig.node_sn = node.node_sn; if (pinConfig.position != 6) { //One pin connect one grove Boolean status = false; PinConfig dup_pinConfig = new PinConfig(); for (PinConfig p : pinConfigs) if (p.position == pinConfig.position) { status = true; dup_pinConfig = p; } if (status) pinConfigs.remove(dup_pinConfig); } else { //duplicate i2c grove is not allowed Boolean status = false; PinConfig dup_pinConfig = new PinConfig(); for (PinConfig p : pinConfigs) if (p.grove_id == pinConfig.grove_id) { status = true; dup_pinConfig = p; } if (status) pinConfigs.remove(dup_pinConfig); } String groveInstanceName; List<String> groveInstanceNames = new ArrayList<>(); for (PinConfig p : pinConfigs) { groveInstanceNames.add(p.groveInstanceName); } groveInstanceName = groverDriver.ClassName; int i = 1; while (true) { if (groveInstanceNames.contains(groveInstanceName)) { groveInstanceName = groveInstanceName.split("_0")[0] + "_0" + Integer.toString(i); } else { groveInstanceNames.add(groveInstanceName); break; } i++; } pinConfig.groveInstanceName = groveInstanceName; pinConfigs.add(pinConfig); Log.e(TAG, "drag pinConfigs " + pinConfigs); if (v.getId() == R.id.grove_6) { Message message = Message.obtain(); message.what = ADD_I2C_GROVE; mHandler.sendMessage(message); } } break; } break; case R.id.grove_remove: switch (action) { case DragEvent.ACTION_DRAG_STARTED: { return event.getClipDescription().hasMimeType(GROVE_REMOVE) || event.getClipDescription().hasMimeType(GROVE_REMOVE_PIN6); } case DragEvent.ACTION_DRAG_ENTERED: ((ImageView) v).setColorFilter(Color.RED, 
PorterDuff.Mode.SRC_IN); break; case DragEvent.ACTION_DRAG_EXITED: ((ImageView) v).setColorFilter(Color.RED, PorterDuff.Mode.DST); case DragEvent.ACTION_DROP: { if (event.getClipDescription().hasMimeType(GROVE_REMOVE)) { ImageView view = (ImageView) event.getLocalState(); Log.e(TAG, ((GrovePinsView.Tag) view.getTag()).position + ""); view.setImageDrawable(null); int position = ((GrovePinsView.Tag) view.getTag()).position; removePinConfig(position); } else if (event.getClipDescription().hasMimeType(GROVE_REMOVE_PIN6)) { PinConfig pinConfig = (PinConfig) event.getLocalState(); Log.e(TAG, pinConfig.groveInstanceName); removePinConfig(pinConfig.groveInstanceName); Message message = Message.obtain(); message.what = RMV_I2C_GROVE; mHandler.sendMessage(message); } break; } case DragEvent.ACTION_DRAG_ENDED: ((ImageView) v).setColorFilter(Color.RED, PorterDuff.Mode.DST); mDragRemoveView.setVisibility(View.INVISIBLE); } break; default: Log.e(TAG, v.toString()); break; } return true; } private void removePinConfig(int position) { if (position < 1 || position > 6) return; PinConfig rp = new PinConfig(); for (PinConfig p : pinConfigs) { if (p.position == position) rp = p; } pinConfigs.remove(rp); } private void removePinConfig(String groveInstanceName) { PinConfig rp = new PinConfig(); for (PinConfig p : pinConfigs) { if (p.groveInstanceName.equals(groveInstanceName)) rp = p; } pinConfigs.remove(rp); } @Override public boolean onLongClick(View v) { switch (v.getId()) { case R.id.grove_1: if (pinDeviceCount(1) > 0) startDragRemove(v); break; case R.id.grove_2: if (pinDeviceCount(2) > 0) startDragRemove(v); break; case R.id.grove_3: if (pinDeviceCount(3) > 0) startDragRemove(v); break; case R.id.grove_4: if (pinDeviceCount(4) > 0) startDragRemove(v); case R.id.grove_5: if (pinDeviceCount(5) > 0) startDragRemove(v); break; case R.id.grove_6: Snackbar.make(v, "Grove name:" + pinDeviceCount(6), Snackbar.LENGTH_LONG).show(); if (pinDeviceCount(6) == 0) { ; } else if (pinDeviceCount(6) == 1) { startDragRemove(v); } else if (pinDeviceCount(6) > 1) { // openI2cDeviceFolder(); } break; } return true; } private void startDragRemove(View v) { mDragRemoveView.setVisibility(View.VISIBLE); String label = "grove_remove"; String[] mimeTypes = {GROVE_REMOVE}; ClipDescription clipDescription = new ClipDescription(label, mimeTypes); ClipData.Item item = new ClipData.Item("drag grove"); ClipData clipData = new ClipData(clipDescription, item); View.DragShadowBuilder shadowBuiler = new View.DragShadowBuilder(v); v.startDrag(clipData, shadowBuiler, v, 0); } private class MainOnClickListener implements View.OnClickListener, View.OnLongClickListener { private final Context context; private MainOnClickListener(Context c) { this.context = c; } @Override public void onClick(View v) { ; } @Override public boolean onLongClick(View v) { // Snackbar.make(v, "Todo:grove detail page", Snackbar.LENGTH_SHORT).show(); String label = "grove_add"; String[] mimeTypes = {GROVE_ADD}; ClipDescription clipDescription = new ClipDescription(label, mimeTypes); ClipData.Item item = new ClipData.Item("drag grove"); ClipData clipData = new ClipData(clipDescription, item); View.DragShadowBuilder shadowBuiler = new View.DragShadowBuilder(v); mGroveListAdapter.selectItem(mGroveListView.getChildAdapterPosition(v)); GroverDriver grove = mGroveListAdapter.getSelectedItem(); v.startDrag(clipData, shadowBuiler, grove, 0); return true; } } private void getGrovesData() { IotApi api = new IotApi(); String token = user.user_key; api.setAccessToken(token); 
IotService iot = api.getService(); iot.scanDrivers(new Callback<List<GroverDriver>>() { @Override public void success(List<GroverDriver> groverDrivers, retrofit.client.Response response) { for (GroverDriver groveDriver : groverDrivers) { groveDriver.save(); } updateGroveListAdapter(groverDrivers); } @Override public void failure(RetrofitError error) { Log.e(TAG, error.toString()); } }); } private class Pin6OnClickListener implements View.OnLongClickListener { public Pin6OnClickListener(SetupIotNodeActivity setupIotNodeActivity) { } @Override public boolean onLongClick(View v) { mDragRemoveView.setVisibility(View.VISIBLE); String label = "grove_remove_6"; String[] mimeTypes = {GROVE_REMOVE_PIN6}; ClipDescription clipDescription = new ClipDescription(label, mimeTypes); ClipData.Item item = new ClipData.Item("drag grove"); ClipData clipData = new ClipData(clipDescription, item); View.DragShadowBuilder shadowBuiler = new View.DragShadowBuilder(v); // mGroveListAdapter.selectItem(mGroveListView.getChildAdapterPosition(v)); // GroverDriver grove = mGroveListAdapter.getSelectedItem(); PinConfig pinConfig = mGroveI2cListAdapter.getItem(mGroveI2cListView.getChildAdapterPosition(v)); v.startDrag(clipData, shadowBuiler, pinConfig, 0); return true; } } }
Change instance name: add _Digital0 suffix
android/app/src/main/java/cc/seeed/iot/ui_setnode/SetupIotNodeActivity.java
Change instance name: add _Digital0 suffix
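The record above replaces the old loop that made grove instance names unique with a fixed mapping from pin position to a suffix, which is what the "_Digital0" in the commit subject refers to. Below is a minimal sketch of that mapping as it appears in the new SetupIotNodeActivity; the helper name is illustrative and not part of the app:

    // Suffix rule introduced by this commit: pins 1-3 -> _Digital0.._Digital2,
    // pin 4 -> _Analog, pin 5 -> _UART, pin 6 -> _I2C; anything else keeps the bare class name.
    static String instanceName(String className, int position) {
        if (position >= 1 && position <= 3) {
            return className + "_Digital" + (position - 1);
        } else if (position == 4) {
            return className + "_Analog";
        } else if (position == 5) {
            return className + "_UART";
        } else if (position == 6) {
            return className + "_I2C";
        }
        return className;
    }

For example, a driver whose ClassName is "GroveTemp" dropped on pin 1 becomes "GroveTemp_Digital0", matching the commit subject.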
Java
mit
209117278060829609963e4573a20ee6ac6e3fed
0
ihongs/HongsCORE,ihongs/HongsCORE,ihongs/HongsCORE
package app.hongs.action; import app.hongs.Cnst; import app.hongs.Core; import app.hongs.CoreLocale; import app.hongs.CoreSerial; import app.hongs.HongsException; import app.hongs.util.Data; import app.hongs.util.Synt; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.regex.Pattern; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * 表单配置. * * <p> * 该工具会将配置数据自动缓存, 会在构建对象时核对配置的修改时间; * 但无法确保其对象在反复使用中会自动重载, * 最好在修改配置后删除临时文件并重启应用. * </p> * * <h3>数据结构:</h3> * <pre> forms = { "form_name" : { "field_name" : { __text__ : "Label", __type__ : "string|number|date|file|enum|form", __rule__ : "rule.class.Name", __required__ : "yes|no", __repeated__ : "yes|no", "param_name" : "Value" ... } ... } ... } enums = { "enum_name" : { "value_code" : "Label" ... } ... } * </pre> * * <h3>异常代码:</h3> * <pre> * 区间: 0x10e8~0x10ef * 0x10e8 配置文件不存在 * 0x10e9 解析文件失败 * </pre> * * @author Hongs */ public class FormSet extends CoreSerial { private final String name; /** * 表单集合 */ public Map<String, Map> forms; /** * 枚举集合 */ public Map<String, Map> enums; public FormSet(String name) throws HongsException { this.name = name; this.init(name + Cnst.FORM_EXT); } @Override protected boolean expired(long time) { File xmlFile = new File(Core.CONF_PATH + File.separator + name + Cnst.FORM_EXT + ".xml"); File serFile = new File(Core.DATA_PATH + File.separator + "serial" + File.separator + name + Cnst.FORM_EXT + ".ser"); return xmlFile.exists() && xmlFile.lastModified() > serFile.lastModified(); } @Override protected void imports() throws HongsException { InputStream is; String fn; try { fn = Core.CONF_PATH + File.separator + name + Cnst.FORM_EXT + ".xml"; is = new FileInputStream(fn); } catch (FileNotFoundException ex) { fn = name.contains(".") || name.contains("/") ? 
name + Cnst.FORM_EXT + ".xml" : "app/hongs/conf/" + name + Cnst.FORM_EXT + ".xml"; is = this.getClass().getClassLoader().getResourceAsStream(fn); if ( null == is ) { throw new app.hongs.HongsException(0x10e8, "Can not find the config file '" + name + Cnst.FORM_EXT + ".xml'."); } } Element root; try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder dbn = dbf.newDocumentBuilder(); Document doc = dbn.parse( is ); root = doc.getDocumentElement(); } catch ( IOException ex) { throw new HongsException(0x10e9, "Read '" +name+Cnst.FORM_EXT+".xml error'", ex); } catch (SAXException ex) { throw new HongsException(0x10e9, "Parse '"+name+Cnst.FORM_EXT+".xml error'", ex); } catch (ParserConfigurationException ex) { throw new HongsException(0x10e9, "Parse '"+name+Cnst.FORM_EXT+".xml error'", ex); } this.forms = new HashMap(); this.enums = new HashMap(); this.parse(root, this.forms, this.enums); } private void parse(Element element, Map forms, Map enums) throws HongsException { if (!element.hasChildNodes()) { return; } NodeList nodes = element.getChildNodes(); for (int i = 0; i < nodes.getLength(); i ++) { Node node = nodes.item(i); if (node.getNodeType() != Node.ELEMENT_NODE) { continue; } Element element2 = (Element)node; String tagName2 = element2.getTagName(); if ("form".equals(tagName2)) { String namz = element2.getAttribute("name"); if (namz == null) namz = ""; Map items = new LinkedHashMap(); this.parse(element2, items, null); forms.put(namz, items); } else if ("enum".equals(tagName2)) { String namz = element2.getAttribute("name"); if (namz == null) namz = ""; Map items = new LinkedHashMap(); this.parse(element2, null, items); enums.put(namz, items); } else if ("field".equals(tagName2)) { String namz = element2.getAttribute("name"); if (namz == null || "".equals(namz)) namz = "@"; // 表单参数总叫 @ Map items = new LinkedHashMap(); this.parse(element2, items, null); forms.put(namz, items); items.put("__name__", namz); String namx = namz; namz = element2.getAttribute("type"); items.put("__type__", namz); String typx = namz; namz = element2.getAttribute("rule"); items.put("__rule__", namz); namz = element2.getAttribute("text"); items.put("__text__", namz); namz = element2.getAttribute("hint"); items.put("__hint__", namz); if (element2.hasAttribute("required")) { namz = element2.getAttribute("required"); items.put("__required__", namz ); } else { items.put("__required__", "false"); } if (element2.hasAttribute("repeated")) { namz = element2.getAttribute("repeated"); items.put("__repeated__", namz ); } else { items.put("__repeated__", "false"); } /** * 预优化 * 枚举类型和关联类型缺失配置路径时可自动补上 * 注意规避解析默认表单配置时可能引起无限递归 */ if (!"default".equals(name)) { typx = (String) getInstance().getEnum("__types__").get(typx); if ("fork".equals(typx)) { if (! items.containsKey("data-at") && ! items.containsKey("data-al")) { if (! items.containsKey("form")) { items.put("form", namx.replace("_id", "")); } if (! items.containsKey("conf")) { items.put("conf", name); } } } else if ("form".equals(typx)) { if (! items.containsKey("form")) { items.put("form", namx.replace("_id", "")); } if (! items.containsKey("conf")) { items.put("conf", name); } } else if ("enum".equals(typx)) { if (! items.containsKey("enum")) { items.put("enum", namx); } if (! 
items.containsKey("conf")) { items.put("conf", name); } } } } else if ("param".equals(tagName2)) { String namz = element2.getAttribute("name"); String typz = element2.getAttribute("type"); String text = element2.getTextContent(); forms.put(namz, parse(typz, text)); } else if ("value".equals(tagName2)) { String namz = element2.getAttribute("code"); String typz = element2.getAttribute("type"); String text = element2.getTextContent(); enums.put(namz, parse(typz, text)); } } } private Object parse(String type, String text) throws HongsException { if (null == type || "".equals(type)) { return text ; } text = text.trim(); if ("bool".equals(type)) { return Synt.defoult(Synt.asBool(text), false); } if ("json".equals(type)) { if (text.startsWith("(") && text.endsWith(")")) { text = text.substring( 1, text.length() - 1 ); return Data.toObject(text); } else { return Data.toObject(text); } } if ("list".equals(type)) { if (text.startsWith("[") && text.endsWith("]")) { return ( List) Data.toObject(text); } else { return new ArrayList ( Arrays.asList(SEXP.split(text)) ); } } if ( "set".equals(type)) { if (text.startsWith("[") && text.endsWith("]")) { return new LinkedHashSet( ( List) Data.toObject(text) ); } else { return new LinkedHashSet( Arrays.asList(SEXP.split(text)) ); } } if ( "map".equals(type)) { if (text.startsWith("{") && text.endsWith("}")) { return ( Map ) Data.toObject(text); } else { Map m = new LinkedHashMap(); for(String s : SEXP.split (text)) { String[] a = MEXP.split (s, 2); if ( 2 > a.length ) { m.put( a[0], a[0] ); } else { m.put( a[0], a[1] ); } } return m; } } throw new HongsException.Common("Unrecognized type '"+type+"'"); } private static final Pattern SEXP = Pattern.compile ( "\\s*,\\s*" ); private static final Pattern MEXP = Pattern.compile ( "\\s*:\\s*" ); public String getName() { return this.name; } public Map getEnum(String name) throws HongsException { if (!enums.containsKey(name)) { throw new HongsException(0x10eb, "Enum "+name+" in "+this.name+" is not exists"); } return enums.get(name); } public Map getForm(String name) throws HongsException { if (!forms.containsKey(name)) { throw new HongsException(0x10ea, "Form "+name+" in "+this.name+" is not exists"); } return forms.get(name); } public CoreLocale getCurrTranslator() { try { return CoreLocale.getInstance(name); } catch (app.hongs.HongsError e) { if ( e.getErrno() != 0x2a) { throw e; } return CoreLocale.getInstance("default"); } } public Map getEnumTranslated(String namc) { Map items = enums.get(namc); Map itemz = new LinkedHashMap(); if (items == null) return itemz; CoreLocale lang = getCurrTranslator(); itemz.putAll(items); for(Object o : itemz.entrySet()) { Map.Entry e = (Map.Entry) o ; String k = (String) e.getKey( ); String n = (String) e.getValue(); if (n == null || "".equals(n)) { n = "fore.enum."+name+"."+namc+"."+k; } e.setValue( lang.translate(n)); } return itemz; } public Map getFormTranslated(String namc) throws HongsException { Map items = getForm(namc); Map itemz = new LinkedHashMap(); if (items == null) return itemz; CoreLocale lang = getCurrTranslator(); for(Object o : items.entrySet()) { Map.Entry e = (Map.Entry) o; Map m = (Map ) e.getValue(); String k = (String) e.getKey(); String n = (String) m.get("__text__"); String h = (String) m.get("__hint__"); Map u = new LinkedHashMap(); u.putAll( m ); if (n == null || "".equals(n)) { n = "fore.form."+name+"."+namc+"."+k; } u.put("__text__", lang.translate(n)); if (h != null &&!"".equals(n)) { u.put("__hint__", lang.translate(h)); } itemz.put(k, u); } return 
itemz; } //** 工厂方法 **/ public static boolean hasConfFile(String name) { String fn; fn = Core.DATA_PATH + File.separator + "serial" + File.separator + name + Cnst.FORM_EXT + ".ser"; if (new File(fn).exists()) { return true; } fn = Core.CONF_PATH + File.separator + name + Cnst.FORM_EXT + ".xml"; if (new File(fn).exists()) { return true; } fn = name.contains(".") || name.contains("/") ? name + Cnst.FORM_EXT + ".xml" : "app/hongs/conf/" + name + Cnst.FORM_EXT + ".xml"; return null != FormSet.class.getClassLoader().getResourceAsStream(fn); } public static FormSet getInstance(String name) throws HongsException { String key = FormSet.class.getName() + ":" + name; Core core = Core.getInstance(); FormSet inst; if (core.containsKey(key)) { inst = (FormSet)core.get(key); } else { inst = new FormSet(name); core.put( key, inst ); } return inst; } public static FormSet getInstance() throws HongsException { return getInstance("default"); } }
hongs-core/src/main/java/app/hongs/action/FormSet.java
package app.hongs.action; import app.hongs.Cnst; import app.hongs.Core; import app.hongs.CoreLocale; import app.hongs.CoreSerial; import app.hongs.HongsException; import app.hongs.util.Data; import app.hongs.util.Synt; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.regex.Pattern; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * 表单配置. * * <p> * 该工具会将配置数据自动缓存, 会在构建对象时核对配置的修改时间; * 但无法确保其对象在反复使用中会自动重载, * 最好在修改配置后删除临时文件并重启应用. * </p> * * <h3>数据结构:</h3> * <pre> forms = { "form_name" : { "field_name" : { __text__ : "Label", __type__ : "string|number|date|file|enum|form", __rule__ : "rule.class.Name", __required__ : "yes|no", __repeated__ : "yes|no", "param_name" : "Value" ... } ... } ... } enums = { "enum_name" : { "value_code" : "Label" ... } ... } * </pre> * * <h3>异常代码:</h3> * <pre> * 区间: 0x10e8~0x10ef * 0x10e8 配置文件不存在 * 0x10e9 解析文件失败 * </pre> * * @author Hongs */ public class FormSet extends CoreSerial { private final String name; /** * 表单集合 */ public Map<String, Map> forms; /** * 枚举集合 */ public Map<String, Map> enums; public FormSet(String name) throws HongsException { this.name = name; this.init(name + Cnst.FORM_EXT); } @Override protected boolean expired(long time) { File xmlFile = new File(Core.CONF_PATH + File.separator + name + Cnst.FORM_EXT + ".xml"); File serFile = new File(Core.DATA_PATH + File.separator + "serial" + File.separator + name + Cnst.FORM_EXT + ".ser"); return xmlFile.exists() && xmlFile.lastModified() > serFile.lastModified(); } @Override protected void imports() throws HongsException { InputStream is; String fn; try { fn = Core.CONF_PATH + File.separator + name + Cnst.FORM_EXT + ".xml"; is = new FileInputStream(fn); } catch (FileNotFoundException ex) { fn = name.contains(".") || name.contains("/") ? 
name + Cnst.FORM_EXT + ".xml" : "app/hongs/conf/" + name + Cnst.FORM_EXT + ".xml"; is = this.getClass().getClassLoader().getResourceAsStream(fn); if ( null == is ) { throw new app.hongs.HongsException(0x10e8, "Can not find the config file '" + name + Cnst.FORM_EXT + ".xml'."); } } Element root; try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder dbn = dbf.newDocumentBuilder(); Document doc = dbn.parse( is ); root = doc.getDocumentElement(); } catch ( IOException ex) { throw new HongsException(0x10e9, "Read '" +name+Cnst.FORM_EXT+".xml error'", ex); } catch (SAXException ex) { throw new HongsException(0x10e9, "Parse '"+name+Cnst.FORM_EXT+".xml error'", ex); } catch (ParserConfigurationException ex) { throw new HongsException(0x10e9, "Parse '"+name+Cnst.FORM_EXT+".xml error'", ex); } this.forms = new HashMap(); this.enums = new HashMap(); this.parse(root, this.forms, this.enums); } private void parse(Element element, Map forms, Map enums) throws HongsException { if (!element.hasChildNodes()) { return; } NodeList nodes = element.getChildNodes(); for (int i = 0; i < nodes.getLength(); i ++) { Node node = nodes.item(i); if (node.getNodeType() != Node.ELEMENT_NODE) { continue; } Element element2 = (Element)node; String tagName2 = element2.getTagName(); if ("form".equals(tagName2)) { String namz = element2.getAttribute("name"); if (namz == null) namz = ""; Map items = new LinkedHashMap(); this.parse(element2, items, null); forms.put(namz, items); } else if ("enum".equals(tagName2)) { String namz = element2.getAttribute("name"); if (namz == null) namz = ""; Map items = new LinkedHashMap(); this.parse(element2, null, items); enums.put(namz, items); } else if ("field".equals(tagName2)) { String namz = element2.getAttribute("name"); if (namz == null) namz = ""; Map items = new LinkedHashMap(); this.parse(element2, items, null); forms.put(namz, items); items.put("__name__", namz); String namx = namz; namz = element2.getAttribute("type"); items.put("__type__", namz); String typx = namz; namz = element2.getAttribute("rule"); items.put("__rule__", namz); namz = element2.getAttribute("text"); items.put("__text__", namz); namz = element2.getAttribute("hint"); items.put("__hint__", namz); if (element2.hasAttribute("required")) { namz = element2.getAttribute("required"); items.put("__required__", namz ); } else { items.put("__required__", "false"); } if (element2.hasAttribute("repeated")) { namz = element2.getAttribute("repeated"); items.put("__repeated__", namz ); } else { items.put("__repeated__", "false"); } /** * 预优化 * 枚举类型和关联类型缺失配置路径时可自动补上 * 注意规避解析默认表单配置时可能引起无限递归 */ if (!"default".equals(name)) { typx = (String) getInstance().getEnum("__types__").get(typx); if ("fork".equals(typx)) { if (! items.containsKey("data-at") && ! items.containsKey("data-al")) { if (! items.containsKey("form")) { items.put("form", namx.replace("_id", "")); } if (! items.containsKey("conf")) { items.put("conf", name); } } } else if ("form".equals(typx)) { if (! items.containsKey("form")) { items.put("form", namx.replace("_id", "")); } if (! items.containsKey("conf")) { items.put("conf", name); } } else if ("enum".equals(typx)) { if (! items.containsKey("enum")) { items.put("enum", namx); } if (! 
items.containsKey("conf")) { items.put("conf", name); } } } } else if ("param".equals(tagName2)) { String namz = element2.getAttribute("name"); String typz = element2.getAttribute("type"); String text = element2.getTextContent(); forms.put(namz, parse(typz, text)); } else if ("value".equals(tagName2)) { String namz = element2.getAttribute("code"); String typz = element2.getAttribute("type"); String text = element2.getTextContent(); enums.put(namz, parse(typz, text)); } } } private Object parse(String type, String text) throws HongsException { if (null == type || "".equals(type)) { return text ; } text = text.trim(); if ("bool".equals(type)) { return Synt.defoult(Synt.asBool(text), false); } if ("json".equals(type)) { if (text.startsWith("(") && text.endsWith(")")) { text = text.substring( 1, text.length() - 1 ); return Data.toObject(text); } else { return Data.toObject(text); } } if ("list".equals(type)) { if (text.startsWith("[") && text.endsWith("]")) { return ( List) Data.toObject(text); } else { return new ArrayList ( Arrays.asList(SEXP.split(text)) ); } } if ( "set".equals(type)) { if (text.startsWith("[") && text.endsWith("]")) { return new LinkedHashSet( ( List) Data.toObject(text) ); } else { return new LinkedHashSet( Arrays.asList(SEXP.split(text)) ); } } if ( "map".equals(type)) { if (text.startsWith("{") && text.endsWith("}")) { return ( Map ) Data.toObject(text); } else { Map m = new LinkedHashMap(); for(String s : SEXP.split (text)) { String[] a = MEXP.split (s, 2); if ( 2 > a.length ) { m.put( a[0], a[0] ); } else { m.put( a[0], a[1] ); } } return m; } } throw new HongsException.Common("Unrecognized type '"+type+"'"); } private static final Pattern SEXP = Pattern.compile ( "\\s*,\\s*" ); private static final Pattern MEXP = Pattern.compile ( "\\s*:\\s*" ); public String getName() { return this.name; } public Map getEnum(String name) throws HongsException { if (!enums.containsKey(name)) { throw new HongsException(0x10eb, "Enum "+name+" in "+this.name+" is not exists"); } return enums.get(name); } public Map getForm(String name) throws HongsException { if (!forms.containsKey(name)) { throw new HongsException(0x10ea, "Form "+name+" in "+this.name+" is not exists"); } return forms.get(name); } public CoreLocale getCurrTranslator() { try { return CoreLocale.getInstance(name); } catch (app.hongs.HongsError e) { if ( e.getErrno() != 0x2a) { throw e; } return CoreLocale.getInstance("default"); } } public Map getEnumTranslated(String namc) { Map items = enums.get(namc); Map itemz = new LinkedHashMap(); if (items == null) return itemz; CoreLocale lang = getCurrTranslator(); itemz.putAll(items); for(Object o : itemz.entrySet()) { Map.Entry e = (Map.Entry) o ; String k = (String) e.getKey( ); String n = (String) e.getValue(); if (n == null || "".equals(n)) { n = "fore.enum."+name+"."+namc+"."+k; } e.setValue( lang.translate(n)); } return itemz; } public Map getFormTranslated(String namc) throws HongsException { Map items = getForm(namc); Map itemz = new LinkedHashMap(); if (items == null) return itemz; CoreLocale lang = getCurrTranslator(); for(Object o : items.entrySet()) { Map.Entry e = (Map.Entry) o; Map m = (Map ) e.getValue(); String k = (String) e.getKey(); String n = (String) m.get("__text__"); String h = (String) m.get("__hint__"); Map u = new LinkedHashMap(); u.putAll( m ); if (n == null || "".equals(n)) { n = "fore.form."+name+"."+namc+"."+k; } u.put("__text__", lang.translate(n)); if (h != null &&!"".equals(n)) { u.put("__hint__", lang.translate(h)); } itemz.put(k, u); } return 
itemz; } //** 工厂方法 **/ public static boolean hasConfFile(String name) { String fn; fn = Core.DATA_PATH + File.separator + "serial" + File.separator + name + Cnst.FORM_EXT + ".ser"; if (new File(fn).exists()) { return true; } fn = Core.CONF_PATH + File.separator + name + Cnst.FORM_EXT + ".xml"; if (new File(fn).exists()) { return true; } fn = name.contains(".") || name.contains("/") ? name + Cnst.FORM_EXT + ".xml" : "app/hongs/conf/" + name + Cnst.FORM_EXT + ".xml"; return null != FormSet.class.getClassLoader().getResourceAsStream(fn); } public static FormSet getInstance(String name) throws HongsException { String key = FormSet.class.getName() + ":" + name; Core core = Core.getInstance(); FormSet inst; if (core.containsKey(key)) { inst = (FormSet)core.get(key); } else { inst = new FormSet(name); core.put( key, inst ); } return inst; } public static FormSet getInstance() throws HongsException { return getInstance("default"); } }
The virtual form-parameter field may omit the name attribute
hongs-core/src/main/java/app/hongs/action/FormSet.java
The virtual form-parameter field may omit the name attribute
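The commit above lets a field element omit its name attribute: FormSet now files such a field under the key "@" and treats it as the form's own parameter block. A minimal sketch of that defaulting rule, assuming a DOM Element as in the parser; the helper below is illustrative and not a FormSet method:

    // Returns the key a <field> element is stored under.
    // DOM getAttribute yields "" when the attribute is absent, so both checks are kept.
    static String fieldKey(org.w3c.dom.Element field) {
        String name = field.getAttribute("name");
        return (name == null || name.isEmpty()) ? "@" : name; // nameless field = virtual form parameter
    }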
Java
mit
43c01553bc0b64f4185857b7c93bea315fb117b9
0
DiddiZ/Utils
package de.diddiz.utils.config; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import de.diddiz.utils.Utils; import de.diddiz.utils.serialization.SerializedData; import de.diddiz.utils.serialization.SerializedDataException; public abstract class PropertiesConfig extends SerializedData<String, String> { private final Properties properties; private final File file; private boolean modified; public PropertiesConfig(File cfgFile) throws IOException { file = cfgFile; properties = new Properties(); load(); } /** * Copies all non-existent entries from the default properties to the actual file. */ public PropertiesConfig(File cfgFile, Properties defaultProperties) throws IOException { file = cfgFile; properties = new Properties(defaultProperties); load(); for (final String key : defaultProperties.stringPropertyNames()) if (!properties.containsKey(key)) set(key, defaultProperties.getProperty(key)); save(); } @Override public String get(String key) { return properties.getProperty(key); } public File getConfigFile() { return file; } @SuppressWarnings({"unchecked", "rawtypes"}) public Enum<?> getEnum(String key, Class<? extends Enum> enumType) throws SerializedDataException { try { return Enum.valueOf(enumType, getChecked(key).toUpperCase()); } catch (final IllegalArgumentException ex) { throw new SerializedDataException("Unknown constant '" + get(key) + "' for '" + key + "'. Allowed are: " + Arrays.asList(enumType.getEnumConstants())); } } @SuppressWarnings("unchecked") public <T extends Enum<?>> T getEnum(String key, T def) throws SerializedDataException { try { return (T)Enum.valueOf(def.getClass(), getChecked(key).toUpperCase()); } catch (final IllegalArgumentException ex) { return def; } } public List<File> getFileList(String key) throws SerializedDataException { final List<String> strings = getList(key); final List<File> files = new ArrayList<>(strings.size()); for (final String str : strings) files.add(new File(str)); return files; } @Override public List<String> getList(String key) throws SerializedDataException { final String str = getChecked(key); if (str != null && str.length() > 0) { final String[] arr = str.split(";"); for (int i = 0; i < arr.length; i++) arr[i] = arr[i].trim(); return Arrays.asList(arr); } return Collections.emptyList(); } public boolean isModified() { return modified; } public void remove(String key) { if (properties.containsKey(key)) { properties.remove(key); modified = true; } } public void save() throws IOException { if (modified) { try (final OutputStream stream = new FileOutputStream(file)) { properties.store(stream, null); } modified = false; } } public void set(String key, boolean value) { set(key, String.valueOf(value)); } public void set(String key, double value) { set(key, String.valueOf(value)); } public void set(String key, float value) { set(key, String.valueOf(value)); } public void set(String key, int value) { set(key, String.valueOf(value)); } public void set(String key, List<String> list) { set(key, String.valueOf(Utils.join(list, ';'))); } public void set(String key, long value) { set(key, String.valueOf(value)); } public void set(String key, String value) { if (!value.equals(properties.get(key))) { properties.put(key, value); modified = 
true; } } private void load() throws IOException { if (!file.exists()) file.createNewFile(); try (final InputStream stream = new FileInputStream(file)) { properties.load(stream); } modified = false; } /** * Convenient method to easily read Properties from a resource file (most likely stored inside the jar). * * @param resourceName Resource name */ protected static Properties loadPropertiesFromResource(String resourceName) throws IOException { return loadPropertiesFromResource(resourceName, null); } /** * Convenient method to easily read Properties from a resource file (most likely stored inside the jar). * * @param resourceName Resource name * @param parent May be null */ protected static Properties loadPropertiesFromResource(String resourceName, Properties parent) throws IOException { final URL propURL = System.class.getResource("/" + resourceName); if (propURL == null) throw new FileNotFoundException("Can't find resource " + resourceName); final Properties prop = parent != null ? new Properties(parent) : new Properties(); try (final InputStream stream = propURL.openStream()) { prop.load(stream); } return prop; } }
src/de/diddiz/utils/config/PropertiesConfig.java
package de.diddiz.utils.config;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

import de.diddiz.utils.serialization.SerializedDataException;
import de.diddiz.utils.serialization.SerializedData;

public abstract class PropertiesConfig extends SerializedData<String, String> {
    private final Properties properties;
    private final File file;
    private boolean modified;

    public PropertiesConfig(File cfgFile) throws IOException {
        file = cfgFile;
        properties = new Properties();
        load();
    }

    /**
     * Copies all non-existent entries from the default properties to the actual file.
     */
    public PropertiesConfig(File cfgFile, Properties defaultProperties) throws IOException {
        file = cfgFile;
        properties = new Properties(defaultProperties);
        load();
        for (final String key : defaultProperties.stringPropertyNames())
            if (!properties.containsKey(key))
                set(key, defaultProperties.getProperty(key));
        save();
    }

    @Override
    public String get(String key) {
        return properties.getProperty(key);
    }

    public File getConfigFile() {
        return file;
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    public Enum<?> getEnum(String key, Class<? extends Enum> enumType) throws SerializedDataException {
        try {
            return Enum.valueOf(enumType, getChecked(key).toUpperCase());
        } catch (final IllegalArgumentException ex) {
            throw new SerializedDataException("Unknown constant '" + get(key) + "' for '" + key + "'. Allowed are: " + Arrays.asList(enumType.getEnumConstants()));
        }
    }

    @SuppressWarnings("unchecked")
    public <T extends Enum<?>> T getEnum(String key, T def) throws SerializedDataException {
        try {
            return (T)Enum.valueOf(def.getClass(), getChecked(key).toUpperCase());
        } catch (final IllegalArgumentException ex) {
            return def;
        }
    }

    public List<File> getFileList(String key) throws SerializedDataException {
        final List<String> strings = getList(key);
        final List<File> files = new ArrayList<>(strings.size());
        for (final String str : strings)
            files.add(new File(str));
        return files;
    }

    @Override
    public List<String> getList(String key) throws SerializedDataException {
        final String str = getChecked(key);
        if (str != null && str.length() > 0) {
            final String[] arr = str.split(";");
            for (int i = 0; i < arr.length; i++)
                arr[i] = arr[i].trim();
            return Arrays.asList(arr);
        }
        return Collections.emptyList();
    }

    public boolean isModified() {
        return modified;
    }

    public void remove(String key) {
        if (properties.containsKey(key)) {
            properties.remove(key);
            modified = true;
        }
    }

    public void save() throws IOException {
        if (modified) {
            try (final OutputStream stream = new FileOutputStream(file)) {
                properties.store(stream, null);
            }
            modified = false;
        }
    }

    public void set(String key, boolean value) {
        set(key, String.valueOf(value));
    }

    public void set(String key, int value) {
        set(key, String.valueOf(value));
    }

    public void set(String key, String value) {
        if (!value.equals(properties.get(key))) {
            properties.put(key, value);
            modified = true;
        }
    }

    private void load() throws IOException {
        if (!file.exists())
            file.createNewFile();
        try (final InputStream stream = new FileInputStream(file)) {
            properties.load(stream);
        }
        modified = false;
    }

    /**
     * Convenient method to easily read Properties from a resource file (most likely stored inside the jar).
     *
     * @param resourceName Resource name
     */
    protected static Properties loadPropertiesFromResource(String resourceName) throws IOException {
        return loadPropertiesFromResource(resourceName, null);
    }

    /**
     * Convenient method to easily read Properties from a resource file (most likely stored inside the jar).
     *
     * @param resourceName Resource name
     * @param parent May be null
     */
    protected static Properties loadPropertiesFromResource(String resourceName, Properties parent) throws IOException {
        final URL propURL = System.class.getResource("/" + resourceName);
        if (propURL == null)
            throw new FileNotFoundException("Can't find resource " + resourceName);
        final Properties prop = parent != null ? new Properties(parent) : new Properties();
        try (final InputStream stream = propURL.openStream()) {
            prop.load(stream);
        }
        return prop;
    }
}
Added missing set methods to PropertiesConfig;
src/de/diddiz/utils/config/PropertiesConfig.java
Added missing set methods to PropertiesConfig;
Java
mpl-2.0
2c7af3f88aea9b7b031ba1156a8ec71185b78bf4
0
msteinhoff/hello-world
549ab0d7-cb8e-11e5-906e-00264a111016
src/main/java/HelloWorld.java
548cf9de-cb8e-11e5-b251-00264a111016
compiles now
src/main/java/HelloWorld.java
compiles now
Java
agpl-3.0
199731a5646b4405be2b74dbd751822595470ba9
0
ebonnet/Silverpeas-Components,ebonnet/Silverpeas-Components,ebonnet/Silverpeas-Components,NicolasEYSSERIC/Silverpeas-Components,Silverpeas/Silverpeas-Components,CecileBONIN/Silverpeas-Components,auroreallibe/Silverpeas-Components,ebonnet/Silverpeas-Components,stephaneperry/Silverpeas-Components,stephaneperry/Silverpeas-Components,stephaneperry/Silverpeas-Components,mmoqui/Silverpeas-Components,stephaneperry/Silverpeas-Components,NicolasEYSSERIC/Silverpeas-Components,SilverYoCha/Silverpeas-Components,NicolasEYSSERIC/Silverpeas-Components,NicolasEYSSERIC/Silverpeas-Components,NicolasEYSSERIC/Silverpeas-Components,mmoqui/Silverpeas-Components,stephaneperry/Silverpeas-Components,CecileBONIN/Silverpeas-Components,CecileBONIN/Silverpeas-Components,NicolasEYSSERIC/Silverpeas-Components,CecileBONIN/Silverpeas-Components,SilverYoCha/Silverpeas-Components,SilverTeamWork/Silverpeas-Components,SilverYoCha/Silverpeas-Components,Silverpeas/Silverpeas-Components,SilverTeamWork/Silverpeas-Components,CecileBONIN/Silverpeas-Components,SilverTeamWork/Silverpeas-Components,CecileBONIN/Silverpeas-Components,ebonnet/Silverpeas-Components,Silverpeas/Silverpeas-Components,ebonnet/Silverpeas-Components,mmoqui/Silverpeas-Components,auroreallibe/Silverpeas-Components,stephaneperry/Silverpeas-Components,ebonnet/Silverpeas-Components,auroreallibe/Silverpeas-Components
/** * Copyright (C) 2000 - 2009 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * "http://repository.silverpeas.com/legal/licensing" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.silverpeas.processManager.servlets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.apache.commons.fileupload.FileItem; import com.silverpeas.form.DataRecord; import com.silverpeas.form.PagesContext; import com.silverpeas.form.RecordTemplate; import com.silverpeas.processManager.HistoryStepContent; import com.silverpeas.processManager.ProcessFilter; import com.silverpeas.processManager.ProcessManagerException; import com.silverpeas.processManager.ProcessManagerSessionController; import com.silverpeas.util.StringUtil; import com.silverpeas.util.web.servlet.FileUploadUtil; import com.silverpeas.workflow.api.error.WorkflowError; import com.silverpeas.workflow.api.instance.HistoryStep; import com.silverpeas.workflow.api.instance.ProcessInstance; import com.silverpeas.workflow.api.instance.Question; import com.silverpeas.workflow.api.model.AllowedAction; import com.silverpeas.workflow.api.model.AllowedActions; import com.silverpeas.workflow.api.model.Item; import com.silverpeas.workflow.api.model.QualifiedUsers; import com.silverpeas.workflow.api.model.State; import com.silverpeas.workflow.api.task.Task; import com.silverpeas.workflow.api.user.User; import com.silverpeas.workflow.engine.model.ActionRefs; import com.silverpeas.workflow.engine.model.StateImpl; import com.stratelia.silverpeas.peasCore.ComponentContext; import com.stratelia.silverpeas.peasCore.ComponentSessionController; import com.stratelia.silverpeas.peasCore.MainSessionController; import com.stratelia.silverpeas.peasCore.servlets.ComponentRequestRouter; import com.stratelia.silverpeas.silvertrace.SilverTrace; import com.stratelia.silverpeas.versioning.model.DocumentPK; import com.stratelia.silverpeas.versioning.util.VersioningUtil; import com.stratelia.webactiv.util.FileRepositoryManager; import com.stratelia.webactiv.util.FileServerUtils; import com.stratelia.webactiv.util.attachment.control.AttachmentController; import com.stratelia.webactiv.util.attachment.ejb.AttachmentPK; import java.io.File; import java.util.Iterator; public class ProcessManagerRequestRouter extends ComponentRequestRouter { private static final long serialVersionUID = -4758787807784357891L; /** * Returns the name used by the ComponentRequestRequest to store the session controller 
in the * user session. */ public String getSessionControlBeanName() { return "processManager"; } /** * Return a new ProcessManagerSessionController wich will be used for each request made in the * given componentContext. Returns a ill session controler when the a fatal error occures. This * ill session controller can only display an error page. */ public ComponentSessionController createComponentSessionController( MainSessionController mainSessionCtrl, ComponentContext componentContext) { SilverTrace.info("kmelia", "ProcessManagerRequestRouter.createComponentSessionController()", "root.MSG_GEN_ENTER_METHOD"); try { return new ProcessManagerSessionController( mainSessionCtrl, componentContext); } catch (ProcessManagerException e) { return new ProcessManagerSessionController( mainSessionCtrl, componentContext, e); } } /** * Process the request and returns the response url. * @param function the user request name * @param request the user request params * @param session the user request context */ public String getDestination(String function, ComponentSessionController sessionController, HttpServletRequest request) { SilverTrace.info("processManager", "ProcessManagerRequestRouter.getDestination()", "root.MSG_GEN_ENTER_METHOD", "function = " + function); ProcessManagerSessionController session = (ProcessManagerSessionController) sessionController; FunctionHandler handler = (FunctionHandler) getHandlerMap().get(function); Exception error = session.getFatalException(); if (handler != null && error == null) { try { return handler.getDestination(function, session, request); } catch (ProcessManagerException e) { error = e; } } if (error != null) { request.setAttribute("javax.servlet.jsp.jspException", error); } if ("Main".equals(function) || "listProcess".equals(function)) { return "/admin/jsp/errorpageMain.jsp"; } else { // return "/admin/jsp/errorpage.jsp"; //xoxox pb boucle. return "/admin/jsp/errorpageMain.jsp"; } } /** * Init this servlet, before any request. 
*/ public void init(ServletConfig config) throws ServletException { super.init(config); if (handlerMap == null) initHandlers(); } private Map<String, FunctionHandler> getHandlerMap() { SilverTrace.info("processManager", "ProcessManagerRequestRouter.getHandlerMap()", "root.MSG_GEN_ENTER_METHOD"); if (handlerMap == null) { initHandlers(); } return handlerMap; } /** * Map the function name to the function handler */ static private Map<String, FunctionHandler> handlerMap = null; /** * Inits the function handler */ synchronized private void initHandlers() { if (handlerMap != null) { return; } handlerMap = new HashMap<String, FunctionHandler>(); handlerMap.put("Main", listProcessHandler); handlerMap.put("listProcess", listProcessHandler); handlerMap.put("listSomeProcess", listSomeProcessHandler); handlerMap.put("changeRole", changeRoleHandler); handlerMap.put("filterProcess", filterProcessHandler); handlerMap.put("viewProcess", viewProcessHandler); handlerMap.put("viewHistory", viewHistoryHandler); handlerMap.put("createProcess", createProcessHandler); handlerMap.put("saveCreation", saveCreationHandler); handlerMap.put("listTasks", listTasksHandler); handlerMap.put("editAction", editActionHandler); handlerMap.put("saveAction", saveActionHandler); handlerMap.put("cancelAction", cancelActionHandler); handlerMap.put("editQuestion", editQuestionHandler); handlerMap.put("saveQuestion", saveQuestionHandler); handlerMap.put("editResponse", editResponseHandler); handlerMap.put("cancelResponse", cancelResponseHandler); handlerMap.put("saveResponse", saveResponseHandler); handlerMap.put("listQuestions", listQuestionsHandler); handlerMap.put("printProcessFrameset", printProcessFramesetHandler); handlerMap.put("printProcess", printProcessHandler); handlerMap.put("printButtons", printButtonsHandler); handlerMap.put("editUserSettings", editUserSettingsHandler); handlerMap.put("saveUserSettings", saveUserSettingsHandler); handlerMap.put("searchResult.jsp", searchResultHandler); handlerMap.put("searchResult", searchResultHandler); handlerMap.put("attachmentManager", attachmentManagerHandler); handlerMap.put("exportCSV", exportCSVHandler); // handlerMap.put("adminListProcess", adminListProcessHandler); handlerMap.put("adminRemoveProcess", adminRemoveProcessHandler); // handlerMap.put("adminViewProcess", adminViewProcessHandler); handlerMap.put("adminViewErrors", adminViewErrorsHandler); handlerMap.put("adminReAssign", adminReAssignHandler); handlerMap.put("adminDoReAssign", adminDoReAssignHandler); } /** * The removeProcess handler for the supervisor. 
*/ static private FunctionHandler adminRemoveProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.removeProcess(processId); return listProcessHandler.getDestination(function, session, request); } }; /** * The viewErrors handler for the supervisor */ static private FunctionHandler adminViewErrorsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); WorkflowError[] errors = session.getProcessInstanceErrors(processId); request.setAttribute("errors", errors); setSharedAttributes(session, request); return "/processManager/jsp/admin/viewErrors.jsp"; } }; /** * The reAssign handler for the supervisor */ static private FunctionHandler adminReAssignHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); // Get the associated form com.silverpeas.form.Form form = session.getAssignForm(); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("assignForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getAssignRecord(); request.setAttribute("data", data); setSharedAttributes(session, request); return "/processManager/jsp/admin/reAssign.jsp"; } }; /** * The doReAssign handler for the supervisor Get the new users affected and creates tasks */ static private FunctionHandler adminDoReAssignHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Get the associated form com.silverpeas.form.Form form = session.getAssignForm(); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("assignForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getAssignRecord(); request.setAttribute("data", data); try { List<FileItem> items = FileUploadUtil.parseRequest(request); form.update(items, data, context); session.reAssign(data); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The listProcess handler. Used as the Main handler too. 
*/ static private FunctionHandler listProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { if (session.hasUserSettings() && !session.isUserSettingsOK()) { return editUserSettingsHandler.getDestination(function, session, request); } if (session.hasUserSettings()) { request.setAttribute("hasUserSettings", "1"); } else { request.setAttribute("hasUserSettings", "0"); } request.setAttribute("isCSVExportEnabled", new Boolean(session.isCSVExportEnabled())); Item[] items = session.getFolderItems(); request.setAttribute("FolderItems", items); RecordTemplate listHeaders = session.getProcessListHeaders(); request.setAttribute("listHeaders", listHeaders); DataRecord[] processList = null; if (request.getAttribute("dontreset") == null) { processList = session.resetCurrentProcessList(); } else { processList = session.getCurrentProcessList(); } request.setAttribute("processList", processList); setProcessFilterAttributes(session, request, session.getCurrentFilter()); setSharedAttributes(session, request); return "/processManager/jsp/listProcess.jsp"; } }; /** * The listProcess handler (modified in order to skip the list re-computation). */ static private FunctionHandler listSomeProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { request.setAttribute("dontreset", "no, dont"); return listProcessHandler.getDestination(function, session, request); } }; /** * The changeRole handler. */ static private FunctionHandler changeRoleHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String roleName = request.getParameter("role"); session.resetCurrentRole(roleName); return listProcessHandler.getDestination(function, session, request); } }; /** * The filterProcess handler. 
*/ static private FunctionHandler filterProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { ProcessFilter filter = session.getCurrentFilter(); updateProcessFilter(session, request, filter); return listProcessHandler.getDestination(function, session, request); } }; /** * The attachmentManager handler */ static private FunctionHandler attachmentManagerHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); if (processId != null) session.resetCurrentProcessInstance(processId); setSharedAttributes(session, request); return "/processManager/jsp/attachmentManager.jsp"; } }; /** * The viewProcess handler */ static private FunctionHandler viewProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); String force = request.getParameter("force"); session.resetCurrentProcessInstance(processId); if ((force == null || !force.equals("true")) && (session.hasPendingQuestions())) return listQuestionsHandler.getDestination(function, session, request); com.silverpeas.form.Form form = session.getPresentationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("presentation", "0", session, true); request.setAttribute("context", context); String[] activeStates = session.getActiveStates(); request.setAttribute("activeStates", activeStates); String[] roles = session.getActiveRoles(); request.setAttribute("activeRoles", roles); DataRecord data = session.getFolderRecord(); request.setAttribute("data", data); String[] deleteAction = session.getDeleteAction(); if (deleteAction != null) request.setAttribute("deleteAction", deleteAction); List<User> lockingUsers = session.getLockingUsers(); if (lockingUsers != null) { request.setAttribute("lockingUsers", lockingUsers); request.setAttribute("isCurrentUserIsLockingUser", session.isCurrentUserIsLockingUser()); } else { request.setAttribute("isCurrentUserIsLockingUser", false); } setSharedAttributes(session, request); return "/processManager/jsp/viewProcess.jsp"; } }; /** * The searchResult handler */ static private FunctionHandler searchResultHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String type = request.getParameter("Type"); String todoId = request.getParameter("Id"); // Accept only links coming from todo details if (type == null || (!type.equals("com.stratelia.webactiv.calendar.backbone.TodoDetail") && !type .equals("ProcessInstance"))) return listProcessHandler.getDestination(function, session, request); String processId = todoId; if (type.equals("com.stratelia.webactiv.calendar.backbone.TodoDetail")) { // from todo, todoId is in fact the externalId processId = session.getProcessInstanceIdFromExternalTodoId(todoId); String roleName = session.getRoleNameFromExternalTodoId(todoId); session.resetCurrentRole(roleName); } session.resetCurrentProcessInstance(processId); if (session.hasPendingQuestions()) return listQuestionsHandler.getDestination(function, session, request); com.silverpeas.form.Form form = 
session.getPresentationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("presentation", "0", session, true); request.setAttribute("context", context); String[] activeStates = session.getActiveStates(); request.setAttribute("activeStates", activeStates); String[] roles = session.getActiveRoles(); request.setAttribute("activeRoles", roles); DataRecord data = session.getFolderRecord(); request.setAttribute("data", data); String[] deleteAction = session.getDeleteAction(); if (deleteAction != null) request.setAttribute("deleteAction", deleteAction); List<User> lockingUsers = session.getLockingUsers(); if (lockingUsers != null) { request.setAttribute("lockingUsers", lockingUsers); request.setAttribute("isCurrentUserIsLockingUser", session.isCurrentUserIsLockingUser()); } else { request.setAttribute("isCurrentUserIsLockingUser", false); } setSharedAttributes(session, request); return "/processManager/jsp/viewProcess.jsp"; } }; /** * The viewHistory handler */ static private FunctionHandler viewHistoryHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); String[] stepActivities = session.getStepActivities(); request.setAttribute("stepActivities", stepActivities); String[] stepActors = session.getStepActors(); request.setAttribute("stepActors", stepActors); String[] stepActions = session.getStepActions(); request.setAttribute("stepActions", stepActions); String[] stepDates = session.getStepDates(); request.setAttribute("stepDates", stepDates); String[] stepVisibles = session.getStepVisibles(); request.setAttribute("stepVisibles", stepVisibles); String strEnlightedStep = request.getParameter("enlightedStep"); request.setAttribute("enlightedStep", strEnlightedStep); if ("all".equalsIgnoreCase(strEnlightedStep)) { List<HistoryStepContent> stepContents = new ArrayList<HistoryStepContent>(); for (int i = 0; i < stepVisibles.length; i++) { com.silverpeas.form.Form form = session.getStepForm(i); PagesContext context = getFormContext("dummy", "0", session); DataRecord data = session.getStepRecord(i); HistoryStepContent stepContent = new HistoryStepContent(form, context, data); stepContents.add(stepContent); } request.setAttribute("StepsContent", stepContents); } else { int enlightedStep = intValue(strEnlightedStep, -1); if (enlightedStep != -1) { com.silverpeas.form.Form form = session.getStepForm(enlightedStep); request.setAttribute("form", form); PagesContext context = getFormContext("dummy", "0", session); request.setAttribute("context", context); DataRecord data = session.getStepRecord(enlightedStep); request.setAttribute("data", data); } } setSharedAttributes(session, request); return "/processManager/jsp/viewHistory.jsp"; } }; /** * The createProcess handler */ static private FunctionHandler createProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { com.silverpeas.form.Form form = session.getCreationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("createForm", "0", session, true); request.setAttribute("context", context); DataRecord data = session.getEmptyCreationRecord(); request.setAttribute("data", data); request.setAttribute("isFirstTimeSaved", "yes"); 
setSharedAttributes(session, request); return "/processManager/jsp/createProcess.jsp"; } }; /** * The saveCreation handler */ static private FunctionHandler saveCreationHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { com.silverpeas.form.Form form = session.getCreationForm(); PagesContext context = getFormContext("createForm", "0", session); DataRecord data = session.getEmptyCreationRecord(); try { List<FileItem> items = FileUploadUtil.parseRequest(request); List<String> attachmentIds = form.update(items, data, context, false); boolean isDraft = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isDraft") ); boolean isFirstTimeSaved = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isFirstTimeSaved") ); String instanceId = session.createProcessInstance(data, isDraft, isFirstTimeSaved); // launch update again to have a correct object id in wysiwyg context.setObjectId(instanceId); form.updateWysiwyg(items, data, context); // Attachment's foreignkey must be set with the just created instanceId AttachmentPK attachmentPK = null; DocumentPK documentPK = null; VersioningUtil versioningUtil = null; for (String attachmentId : attachmentIds) { if (session.isVersionControlled()) { if (versioningUtil == null) { versioningUtil = new VersioningUtil(); } documentPK = new DocumentPK(Integer.parseInt(attachmentId), "useless", session.getComponentId()); versioningUtil.updateDocumentForeignKey(documentPK, instanceId); } else { attachmentPK = new AttachmentPK(attachmentId, "useless", session.getComponentId()); AttachmentController.updateAttachmentForeignKey(attachmentPK, instanceId); } } return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The listTasks handler */ static private FunctionHandler listTasksHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); ProcessInstance process = session.resetCurrentProcessInstance(processId); // checking locking users List<User> lockingUsers = session.getLockingUsers(); if ( (!lockingUsers.isEmpty()) && (!session.isCurrentUserIsLockingUser()) ) { return listProcessHandler.getDestination(function, session, request); } // check if an action must be resumed if (!lockingUsers.isEmpty()) { return resumeActionHandler.getDestination(function, session, request); } if (!process.getErrorStatus()) { Task[] tasks = session.getTasks(); for (int i = 0; tasks != null && i < tasks.length; i++) { State state = tasks[i].getState(); AllowedActions filteredActions = new ActionRefs(); if (state.getAllowedActionsEx() != null) { Iterator<AllowedAction> actions = state.getAllowedActionsEx().iterateAllowedAction(); while (actions.hasNext()) { AllowedAction action = actions.next(); QualifiedUsers qualifiedUsers = action.getAction().getAllowedUsers(); if (session.getUsers(qualifiedUsers, true).contains(session.getUserId())) { filteredActions.addAllowedAction(action); } } } state.setFilteredActions(filteredActions); } request.setAttribute("tasks", tasks); request.setAttribute("ViewReturn", new Boolean(session.isViewReturn())); request.setAttribute("Error", Boolean.FALSE); } else { 
request.setAttribute("Error", Boolean.TRUE); } setSharedAttributes(session, request); return "/processManager/jsp/listTasks.jsp"; } }; /** * The resumeAction handler */ static private FunctionHandler resumeActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // retrieve state name and action name HistoryStep savedStep = session.getSavedStep(); String stateName = savedStep.getResolvedState(); String actionName = savedStep.getAction(); State state = (stateName==null) ? new StateImpl("") : session.getState(stateName); request.setAttribute("state", state); request.setAttribute("action", session.getAction(actionName)); // Get the associated form com.silverpeas.form.Form form = session.getActionForm(stateName, actionName); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("actionForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getSavedStepRecord(savedStep); request.setAttribute("data", data); // Set flag to indicate action record has already been saved as draft request.setAttribute("isFirstTimeSaved", "no"); // Set flag to indicate instance is in resuming mode session.setResumingInstance(true); // Set global attributes setSharedAttributes(session, request); return "/processManager/jsp/editAction.jsp"; } }; /** * The editAction handler */ static private FunctionHandler editActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Set process instance String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); // retrieve state name and action name String stateName = request.getParameter("state"); String actionName = request.getParameter("action"); request.setAttribute("state", session.getState(stateName)); request.setAttribute("action", session.getAction(actionName)); // Get the associated form com.silverpeas.form.Form form = session.getActionForm(stateName, actionName); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("actionForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getActionRecord(stateName, actionName); request.setAttribute("data", data); // Set flag to indicate action record has never been saved as draft for this step request.setAttribute("isFirstTimeSaved", "yes"); // lock the process instance session.lock(stateName); // Set global attributes setSharedAttributes(session, request); return "/processManager/jsp/editAction.jsp"; } }; /** * The saveAction handler */ static private FunctionHandler saveActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String stateName = FileUploadUtil.getParameter(items, "state"); String actionName = FileUploadUtil.getParameter(items, "action"); boolean isDraft = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isDraft") ); boolean isFirstTimeSaved = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isFirstTimeSaved") ); com.silverpeas.form.Form form = session.getActionForm(stateName, 
actionName); PagesContext context = getFormContext("actionForm", "0", session); DataRecord data = session.getActionRecord(stateName, actionName); if (form != null) { form.update(items, data, context); } session.processAction(stateName, actionName, data, isDraft, isFirstTimeSaved); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The cancelAction handler */ static private FunctionHandler cancelActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String stateName = request.getParameter("state"); // unlock the process instance session.unlock(stateName); return listTasksHandler.getDestination(function, session, request); } }; /** * The cancelResponse handler */ static private FunctionHandler cancelResponseHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String stateName = request.getParameter("state"); // unlock the process instance session.unlock(stateName); return viewProcessHandler.getDestination(function, session, request); } }; /** * The editQuestion handler */ static private FunctionHandler editQuestionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String stepId = request.getParameter("stepId"); request.setAttribute("stepId", stepId); request.setAttribute("step", session.getStep(stepId)); String state = request.getParameter("state"); request.setAttribute("state", state); // Get the question form com.silverpeas.form.Form form = session.getQuestionForm(false); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("questionForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getEmptyQuestionRecord(); request.setAttribute("data", data); // lock the process instance session.lock(state); setSharedAttributes(session, request); return "/processManager/jsp/editQuestion.jsp"; } }; /** * The saveQuestion handler */ static private FunctionHandler saveQuestionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String stepId = FileUploadUtil.getParameter(items, "stepId"); String state = FileUploadUtil.getParameter(items, "state"); com.silverpeas.form.Form form = session.getQuestionForm(false); PagesContext context = getFormContext("questionForm", "0", session); DataRecord data = session.getEmptyQuestionRecord(); form.update(items, data, context); session.processQuestion(stepId, state, data); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The editResponse handler */ static private FunctionHandler editResponseHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { 
String questionId = request.getParameter("questionId"); request.setAttribute("question", session.getQuestion(questionId)); // Get the question form (readonly) com.silverpeas.form.Form questionForm = session.getQuestionForm(true); request.setAttribute("questionForm", questionForm); // Get the response form (same as the question) com.silverpeas.form.Form responseForm = session.getQuestionForm(false); request.setAttribute("responseForm", responseForm); // Set the form context PagesContext context = getFormContext("responseForm", "0", session, true); request.setAttribute("context", context); // Get the question form data DataRecord questionData = session.getQuestionRecord(questionId); request.setAttribute("questionData", questionData); // Get the response form data DataRecord responseData = session.getEmptyQuestionRecord(); request.setAttribute("responseData", responseData); // lock the process instance Question question = session.getQuestion(questionId); session.lock(question.getTargetState().getName()); setSharedAttributes(session, request); return "/processManager/jsp/editResponse.jsp"; } }; /** * The saveResponse handler */ static private FunctionHandler saveResponseHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String questionId = FileUploadUtil.getParameter(items, "questionId"); com.silverpeas.form.Form responseForm = session.getQuestionForm(false); PagesContext context = getFormContext("responseForm", "0", session); DataRecord responseData = session.getEmptyQuestionRecord(); responseForm.update(items, responseData, context); session.processResponse(questionId, responseData); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The editUserSetting handler */ static private FunctionHandler editUserSettingsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Get the user settings form com.silverpeas.form.Form form = session.getUserSettingsForm(); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("userSettingsForm", "0", session); request.setAttribute("context", context); // Get the form data DataRecord data = session.getUserSettingsRecord(); request.setAttribute("data", data); setSharedAttributes(session, request); return "/processManager/jsp/editUserSettings.jsp"; } }; /** * The saveUserSetting handler */ static private FunctionHandler saveUserSettingsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { com.silverpeas.form.Form form = session.getUserSettingsForm(); PagesContext context = getFormContext("userSettingsForm", "0", session); DataRecord data = session.getEmptyUserSettingsRecord(); try { List<FileItem> items = FileUploadUtil.parseRequest(request); form.update(items, data, context); session.saveUserSettings(data); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_USERSETTINGS_FORM", e); } } }; /** 
* The listQuestions handler */ static private FunctionHandler listQuestionsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); // Get the question form (readonly) com.silverpeas.form.Form questionForm = session.getQuestionForm(true); request.setAttribute("form", questionForm); // Set the form context PagesContext context = getFormContext("responseForm", "0", session); request.setAttribute("context", context); // Get tasks list Task[] tasks = session.getTasks(); request.setAttribute("tasks", tasks); setSharedAttributes(session, request); return "/processManager/jsp/listQuestions.jsp"; } }; /** * Builds the ProcessFilter from the http request parameters. */ static private void updateProcessFilter( ProcessManagerSessionController session, HttpServletRequest request, ProcessFilter filter) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String collapse = FileUploadUtil.getParameter(items, "collapse"); String oldC = filter.getCollapse(); filter.setCollapse(collapse); // unless the filterPanel was not open. if ("false".equals(oldC)) { com.silverpeas.form.Form form = filter.getPresentationForm(); PagesContext context = getFormContext("filter", "1", session); DataRecord data = filter.getCriteriaRecord(); form.update(items, data, context); filter.setCriteriaRecord(data); } } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ERR_ILL_FILTER_FORM", e); } } /** * Send the filter parameters */ static private void setProcessFilterAttributes( ProcessManagerSessionController session, HttpServletRequest request, ProcessFilter filter) throws ProcessManagerException { String collapse = filter.getCollapse(); request.setAttribute("collapse", collapse); com.silverpeas.form.Form form = filter.getPresentationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("filter", "1", session); request.setAttribute("context", context); DataRecord data = filter.getCriteriaRecord(); request.setAttribute("data", data); } /** * The printProcessFrameset handler */ static private FunctionHandler printProcessFramesetHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { setSharedAttributes(session, request); return "/processManager/jsp/printProcessFrameset.jsp"; } }; /** * The printProcess handler */ static private FunctionHandler printProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Get the print form com.silverpeas.form.Form form = session.getPrintForm(request); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("printForm", "0", session); request.setAttribute("context", context); // Get the form data DataRecord data = session.getPrintRecord(); request.setAttribute("data", data); setSharedAttributes(session, request); return "/processManager/jsp/printProcess.jsp"; } }; /** * The printButtons handler */ static private FunctionHandler printButtonsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController 
session, HttpServletRequest request) throws ProcessManagerException { setSharedAttributes(session, request); return "/processManager/jsp/printButtons.jsp"; } }; static private FunctionHandler exportCSVHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String csvFilename = session.exportListAsCSV(); request.setAttribute("CSVFilename", csvFilename); if (StringUtil.isDefined(csvFilename)) { File file = new File(FileRepositoryManager.getTemporaryPath() + csvFilename); request.setAttribute("CSVFileSize", Long.valueOf(file.length())); request.setAttribute("CSVFileURL", FileServerUtils.getUrlToTempDir(csvFilename, csvFilename, "text/csv")); file = null; } return "/processManager/jsp/downloadCSV.jsp"; } }; /** * Set attributes shared by all the processManager pages. */ static private void setSharedAttributes(ProcessManagerSessionController session, HttpServletRequest request) { String canCreate = (session.getCreationRights()) ? "1" : "0"; boolean isVersionControlled = session.isVersionControlled(); String s_isVersionControlled = (isVersionControlled ? "1" : "0"); request.setAttribute("isVersionControlled", s_isVersionControlled); request.setAttribute("language", session.getLanguage()); request.setAttribute("roles", session.getUserRoleLabels()); request.setAttribute("currentRole", session.getCurrentRole()); request.setAttribute("canCreate", canCreate); request.setAttribute("process", session.getCurrentProcessInstance()); request.setAttribute("isActiveUser", new Boolean(session.isActiveUser())); request.setAttribute("isAttachmentTabEnable", new Boolean(session.isAttachmentTabEnable())); request.setAttribute("isHistoryTabEnable", new Boolean(session.isHistoryTabVisible())); request.setAttribute("isProcessIdVisible", new Boolean(session.isProcessIdVisible())); request.setAttribute("isPrintButtonEnabled", new Boolean(session.isPrintButtonEnabled())); request.setAttribute("isSaveButtonEnabled", new Boolean(session.isSaveButtonEnabled())); } /** * Read an int parameter. */ static int intValue(String parameter, int defaultValue) { try { if (parameter != null) return (new Integer(parameter)).intValue(); else return defaultValue; } catch (NumberFormatException e) { return defaultValue; } } static private PagesContext getFormContext(String formName, String formIndex, ProcessManagerSessionController session) { return getFormContext(formName, formIndex, session, false); } static private PagesContext getFormContext(String formName, String formIndex, ProcessManagerSessionController session, boolean printTitle) { PagesContext pagesContext = new PagesContext(formName, formIndex, session.getLanguage(), printTitle, session .getComponentId(), session.getUserId()); if (session.getCurrentProcessInstance() != null) { String currentInstanceId = session.getCurrentProcessInstance().getInstanceId(); pagesContext.setObjectId(currentInstanceId); } // versioning used ? pagesContext.setVersioningUsed(session.isVersionControlled()); return pagesContext; } }
process-manager/process-manager-war/src/main/java/com/silverpeas/processManager/servlets/ProcessManagerRequestRouter.java
/** * Copyright (C) 2000 - 2009 Silverpeas * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * As a special exception to the terms and conditions of version 3.0 of * the GPL, you may redistribute this Program in connection with Free/Libre * Open Source Software ("FLOSS") applications as described in Silverpeas's * FLOSS exception. You should have received a copy of the text describing * the FLOSS exception, and it is also available here: * "http://repository.silverpeas.com/legal/licensing" * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.silverpeas.processManager.servlets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.apache.commons.fileupload.FileItem; import com.silverpeas.form.DataRecord; import com.silverpeas.form.PagesContext; import com.silverpeas.form.RecordTemplate; import com.silverpeas.processManager.HistoryStepContent; import com.silverpeas.processManager.ProcessFilter; import com.silverpeas.processManager.ProcessManagerException; import com.silverpeas.processManager.ProcessManagerSessionController; import com.silverpeas.util.StringUtil; import com.silverpeas.util.web.servlet.FileUploadUtil; import com.silverpeas.workflow.api.error.WorkflowError; import com.silverpeas.workflow.api.instance.HistoryStep; import com.silverpeas.workflow.api.instance.ProcessInstance; import com.silverpeas.workflow.api.instance.Question; import com.silverpeas.workflow.api.model.AllowedAction; import com.silverpeas.workflow.api.model.AllowedActions; import com.silverpeas.workflow.api.model.Item; import com.silverpeas.workflow.api.model.QualifiedUsers; import com.silverpeas.workflow.api.model.State; import com.silverpeas.workflow.api.task.Task; import com.silverpeas.workflow.api.user.User; import com.silverpeas.workflow.engine.model.ActionRefs; import com.silverpeas.workflow.engine.model.StateImpl; import com.stratelia.silverpeas.peasCore.ComponentContext; import com.stratelia.silverpeas.peasCore.ComponentSessionController; import com.stratelia.silverpeas.peasCore.MainSessionController; import com.stratelia.silverpeas.peasCore.servlets.ComponentRequestRouter; import com.stratelia.silverpeas.silvertrace.SilverTrace; import com.stratelia.silverpeas.versioning.model.DocumentPK; import com.stratelia.silverpeas.versioning.util.VersioningUtil; import com.stratelia.webactiv.util.FileRepositoryManager; import com.stratelia.webactiv.util.FileServerUtils; import com.stratelia.webactiv.util.attachment.control.AttachmentController; import com.stratelia.webactiv.util.attachment.ejb.AttachmentPK; import java.io.File; import java.util.Iterator; public class ProcessManagerRequestRouter extends ComponentRequestRouter { private static final long serialVersionUID = -4758787807784357891L; /** * Returns the name used by the ComponentRequestRequest to store the session controller 
in the * user session. */ public String getSessionControlBeanName() { return "processManager"; } /** * Return a new ProcessManagerSessionController wich will be used for each request made in the * given componentContext. Returns a ill session controler when the a fatal error occures. This * ill session controller can only display an error page. */ public ComponentSessionController createComponentSessionController( MainSessionController mainSessionCtrl, ComponentContext componentContext) { SilverTrace.info("kmelia", "ProcessManagerRequestRouter.createComponentSessionController()", "root.MSG_GEN_ENTER_METHOD"); try { return new ProcessManagerSessionController( mainSessionCtrl, componentContext); } catch (ProcessManagerException e) { return new ProcessManagerSessionController( mainSessionCtrl, componentContext, e); } } /** * Process the request and returns the response url. * @param function the user request name * @param request the user request params * @param session the user request context */ public String getDestination(String function, ComponentSessionController sessionController, HttpServletRequest request) { SilverTrace.info("processManager", "ProcessManagerRequestRouter.getDestination()", "root.MSG_GEN_ENTER_METHOD", "function = " + function); ProcessManagerSessionController session = (ProcessManagerSessionController) sessionController; FunctionHandler handler = (FunctionHandler) getHandlerMap().get(function); Exception error = session.getFatalException(); if (handler != null && error == null) { try { return handler.getDestination(function, session, request); } catch (ProcessManagerException e) { error = e; } } if (error != null) { request.setAttribute("javax.servlet.jsp.jspException", error); } if ("Main".equals(function) || "listProcess".equals(function)) { return "/admin/jsp/errorpageMain.jsp"; } else { // return "/admin/jsp/errorpage.jsp"; //xoxox pb boucle. return "/admin/jsp/errorpageMain.jsp"; } } /** * Init this servlet, before any request. 
*/ public void init(ServletConfig config) throws ServletException { super.init(config); if (handlerMap == null) initHandlers(); } private Map<String, FunctionHandler> getHandlerMap() { SilverTrace.info("processManager", "ProcessManagerRequestRouter.getHandlerMap()", "root.MSG_GEN_ENTER_METHOD"); if (handlerMap == null) { initHandlers(); } return handlerMap; } /** * Map the function name to the function handler */ static private Map<String, FunctionHandler> handlerMap = null; /** * Inits the function handler */ synchronized private void initHandlers() { if (handlerMap != null) { return; } handlerMap = new HashMap<String, FunctionHandler>(); handlerMap.put("Main", listProcessHandler); handlerMap.put("listProcess", listProcessHandler); handlerMap.put("listSomeProcess", listSomeProcessHandler); handlerMap.put("changeRole", changeRoleHandler); handlerMap.put("filterProcess", filterProcessHandler); handlerMap.put("viewProcess", viewProcessHandler); handlerMap.put("viewHistory", viewHistoryHandler); handlerMap.put("createProcess", createProcessHandler); handlerMap.put("saveCreation", saveCreationHandler); handlerMap.put("listTasks", listTasksHandler); handlerMap.put("editAction", editActionHandler); handlerMap.put("saveAction", saveActionHandler); handlerMap.put("cancelAction", cancelActionHandler); handlerMap.put("editQuestion", editQuestionHandler); handlerMap.put("saveQuestion", saveQuestionHandler); handlerMap.put("editResponse", editResponseHandler); handlerMap.put("cancelResponse", cancelResponseHandler); handlerMap.put("saveResponse", saveResponseHandler); handlerMap.put("listQuestions", listQuestionsHandler); handlerMap.put("printProcessFrameset", printProcessFramesetHandler); handlerMap.put("printProcess", printProcessHandler); handlerMap.put("printButtons", printButtonsHandler); handlerMap.put("editUserSettings", editUserSettingsHandler); handlerMap.put("saveUserSettings", saveUserSettingsHandler); handlerMap.put("searchResult.jsp", searchResultHandler); handlerMap.put("searchResult", searchResultHandler); handlerMap.put("attachmentManager", attachmentManagerHandler); handlerMap.put("exportCSV", exportCSVHandler); // handlerMap.put("adminListProcess", adminListProcessHandler); handlerMap.put("adminRemoveProcess", adminRemoveProcessHandler); // handlerMap.put("adminViewProcess", adminViewProcessHandler); handlerMap.put("adminViewErrors", adminViewErrorsHandler); handlerMap.put("adminReAssign", adminReAssignHandler); handlerMap.put("adminDoReAssign", adminDoReAssignHandler); } /** * The removeProcess handler for the supervisor. 
*/ static private FunctionHandler adminRemoveProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.removeProcess(processId); return listProcessHandler.getDestination(function, session, request); } }; /** * The viewErrors handler for the supervisor */ static private FunctionHandler adminViewErrorsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); WorkflowError[] errors = session.getProcessInstanceErrors(processId); request.setAttribute("errors", errors); setSharedAttributes(session, request); return "/processManager/jsp/admin/viewErrors.jsp"; } }; /** * The reAssign handler for the supervisor */ static private FunctionHandler adminReAssignHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); // Get the associated form com.silverpeas.form.Form form = session.getAssignForm(); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("assignForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getAssignRecord(); request.setAttribute("data", data); setSharedAttributes(session, request); return "/processManager/jsp/admin/reAssign.jsp"; } }; /** * The doReAssign handler for the supervisor Get the new users affected and creates tasks */ static private FunctionHandler adminDoReAssignHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Get the associated form com.silverpeas.form.Form form = session.getAssignForm(); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("assignForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getAssignRecord(); request.setAttribute("data", data); try { List<FileItem> items = FileUploadUtil.parseRequest(request); form.update(items, data, context); session.reAssign(data); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The listProcess handler. Used as the Main handler too. 
*/ static private FunctionHandler listProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { if (session.hasUserSettings() && !session.isUserSettingsOK()) { return editUserSettingsHandler.getDestination(function, session, request); } if (session.hasUserSettings()) { request.setAttribute("hasUserSettings", "1"); } else { request.setAttribute("hasUserSettings", "0"); } request.setAttribute("isCSVExportEnabled", new Boolean(session.isCSVExportEnabled())); Item[] items = session.getFolderItems(); request.setAttribute("FolderItems", items); RecordTemplate listHeaders = session.getProcessListHeaders(); request.setAttribute("listHeaders", listHeaders); DataRecord[] processList = null; if (request.getAttribute("dontreset") == null) { processList = session.resetCurrentProcessList(); } else { processList = session.getCurrentProcessList(); } request.setAttribute("processList", processList); setProcessFilterAttributes(session, request, session.getCurrentFilter()); setSharedAttributes(session, request); return "/processManager/jsp/listProcess.jsp"; } }; /** * The listProcess handler (modified in order to skip the list re-computation). */ static private FunctionHandler listSomeProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { request.setAttribute("dontreset", "no, dont"); return listProcessHandler.getDestination(function, session, request); } }; /** * The changeRole handler. */ static private FunctionHandler changeRoleHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String roleName = request.getParameter("role"); session.resetCurrentRole(roleName); return listProcessHandler.getDestination(function, session, request); } }; /** * The filterProcess handler. 
*/ static private FunctionHandler filterProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { ProcessFilter filter = session.getCurrentFilter(); updateProcessFilter(session, request, filter); return listProcessHandler.getDestination(function, session, request); } }; /** * The attachmentManager handler */ static private FunctionHandler attachmentManagerHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); if (processId != null) session.resetCurrentProcessInstance(processId); setSharedAttributes(session, request); return "/processManager/jsp/attachmentManager.jsp"; } }; /** * The viewProcess handler */ static private FunctionHandler viewProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); String force = request.getParameter("force"); session.resetCurrentProcessInstance(processId); if ((force == null || !force.equals("true")) && (session.hasPendingQuestions())) return listQuestionsHandler.getDestination(function, session, request); com.silverpeas.form.Form form = session.getPresentationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("presentation", "0", session, true); request.setAttribute("context", context); String[] activeStates = session.getActiveStates(); request.setAttribute("activeStates", activeStates); String[] roles = session.getActiveRoles(); request.setAttribute("activeRoles", roles); DataRecord data = session.getFolderRecord(); request.setAttribute("data", data); String[] deleteAction = session.getDeleteAction(); if (deleteAction != null) request.setAttribute("deleteAction", deleteAction); List<User> lockingUsers = session.getLockingUsers(); if (lockingUsers != null) { request.setAttribute("lockingUsers", lockingUsers); request.setAttribute("isCurrentUserIsLockingUser", session.isCurrentUserIsLockingUser()); } else { request.setAttribute("isCurrentUserIsLockingUser", false); } setSharedAttributes(session, request); return "/processManager/jsp/viewProcess.jsp"; } }; /** * The searchResult handler */ static private FunctionHandler searchResultHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String type = request.getParameter("Type"); String todoId = request.getParameter("Id"); // Accept only links coming from todo details if (type == null || (!type.equals("com.stratelia.webactiv.calendar.backbone.TodoDetail") && !type .equals("ProcessInstance"))) return listProcessHandler.getDestination(function, session, request); String processId = todoId; if (type.equals("com.stratelia.webactiv.calendar.backbone.TodoDetail")) { // from todo, todoId is in fact the externalId processId = session.getProcessInstanceIdFromExternalTodoId(todoId); String roleName = session.getRoleNameFromExternalTodoId(todoId); session.resetCurrentRole(roleName); } session.resetCurrentProcessInstance(processId); if (session.hasPendingQuestions()) return listQuestionsHandler.getDestination(function, session, request); com.silverpeas.form.Form form = 
session.getPresentationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("presentation", "0", session, true); request.setAttribute("context", context); String[] activeStates = session.getActiveStates(); request.setAttribute("activeStates", activeStates); String[] roles = session.getActiveRoles(); request.setAttribute("activeRoles", roles); DataRecord data = session.getFolderRecord(); request.setAttribute("data", data); String[] deleteAction = session.getDeleteAction(); if (deleteAction != null) request.setAttribute("deleteAction", deleteAction); List<User> lockingUsers = session.getLockingUsers(); if (lockingUsers != null) { request.setAttribute("lockingUsers", lockingUsers); request.setAttribute("isCurrentUserIsLockingUser", session.isCurrentUserIsLockingUser()); } else { request.setAttribute("isCurrentUserIsLockingUser", false); } setSharedAttributes(session, request); return "/processManager/jsp/viewProcess.jsp"; } }; /** * The viewHistory handler */ static private FunctionHandler viewHistoryHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); String[] stepActivities = session.getStepActivities(); request.setAttribute("stepActivities", stepActivities); String[] stepActors = session.getStepActors(); request.setAttribute("stepActors", stepActors); String[] stepActions = session.getStepActions(); request.setAttribute("stepActions", stepActions); String[] stepDates = session.getStepDates(); request.setAttribute("stepDates", stepDates); String[] stepVisibles = session.getStepVisibles(); request.setAttribute("stepVisibles", stepVisibles); String strEnlightedStep = request.getParameter("enlightedStep"); request.setAttribute("enlightedStep", strEnlightedStep); if ("all".equalsIgnoreCase(strEnlightedStep)) { List<HistoryStepContent> stepContents = new ArrayList<HistoryStepContent>(); for (int i = 0; i < stepVisibles.length; i++) { com.silverpeas.form.Form form = session.getStepForm(i); PagesContext context = getFormContext("dummy", "0", session); DataRecord data = session.getStepRecord(i); HistoryStepContent stepContent = new HistoryStepContent(form, context, data); stepContents.add(stepContent); } request.setAttribute("StepsContent", stepContents); } else { int enlightedStep = intValue(strEnlightedStep, -1); if (enlightedStep != -1) { com.silverpeas.form.Form form = session.getStepForm(enlightedStep); request.setAttribute("form", form); PagesContext context = getFormContext("dummy", "0", session); request.setAttribute("context", context); DataRecord data = session.getStepRecord(enlightedStep); request.setAttribute("data", data); } } setSharedAttributes(session, request); return "/processManager/jsp/viewHistory.jsp"; } }; /** * The createProcess handler */ static private FunctionHandler createProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { com.silverpeas.form.Form form = session.getCreationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("createForm", "0", session, true); request.setAttribute("context", context); DataRecord data = session.getEmptyCreationRecord(); request.setAttribute("data", data); request.setAttribute("isFirstTimeSaved", "yes"); 
setSharedAttributes(session, request); return "/processManager/jsp/createProcess.jsp"; } }; /** * The saveCreation handler */ static private FunctionHandler saveCreationHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { com.silverpeas.form.Form form = session.getCreationForm(); PagesContext context = getFormContext("createForm", "0", session); DataRecord data = session.getEmptyCreationRecord(); try { List<FileItem> items = FileUploadUtil.parseRequest(request); List<String> attachmentIds = form.update(items, data, context); boolean isDraft = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isDraft") ); boolean isFirstTimeSaved = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isFirstTimeSaved") ); String instanceId = session.createProcessInstance(data, isDraft, isFirstTimeSaved); // launch update again to have a correct object id in wysiwyg context.setObjectId(instanceId); form.update(items, data, context); // Attachment's foreignkey must be set with the just created instanceId AttachmentPK attachmentPK = null; DocumentPK documentPK = null; VersioningUtil versioningUtil = null; for (String attachmentId : attachmentIds) { if (session.isVersionControlled()) { if (versioningUtil == null) { versioningUtil = new VersioningUtil(); } documentPK = new DocumentPK(Integer.parseInt(attachmentId), "useless", session.getComponentId()); versioningUtil.updateDocumentForeignKey(documentPK, instanceId); } else { attachmentPK = new AttachmentPK(attachmentId, "useless", session.getComponentId()); AttachmentController.updateAttachmentForeignKey(attachmentPK, instanceId); } } return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The listTasks handler */ static private FunctionHandler listTasksHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); ProcessInstance process = session.resetCurrentProcessInstance(processId); // checking locking users List<User> lockingUsers = session.getLockingUsers(); if ( (!lockingUsers.isEmpty()) && (!session.isCurrentUserIsLockingUser()) ) { return listProcessHandler.getDestination(function, session, request); } // check if an action must be resumed if (!lockingUsers.isEmpty()) { return resumeActionHandler.getDestination(function, session, request); } if (!process.getErrorStatus()) { Task[] tasks = session.getTasks(); for (int i = 0; tasks != null && i < tasks.length; i++) { State state = tasks[i].getState(); AllowedActions filteredActions = new ActionRefs(); if (state.getAllowedActionsEx() != null) { Iterator<AllowedAction> actions = state.getAllowedActionsEx().iterateAllowedAction(); while (actions.hasNext()) { AllowedAction action = actions.next(); QualifiedUsers qualifiedUsers = action.getAction().getAllowedUsers(); if (session.getUsers(qualifiedUsers, true).contains(session.getUserId())) { filteredActions.addAllowedAction(action); } } } state.setFilteredActions(filteredActions); } request.setAttribute("tasks", tasks); request.setAttribute("ViewReturn", new Boolean(session.isViewReturn())); request.setAttribute("Error", Boolean.FALSE); } else { request.setAttribute("Error", 
Boolean.TRUE); } setSharedAttributes(session, request); return "/processManager/jsp/listTasks.jsp"; } }; /** * The resumeAction handler */ static private FunctionHandler resumeActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // retrieve state name and action name HistoryStep savedStep = session.getSavedStep(); String stateName = savedStep.getResolvedState(); String actionName = savedStep.getAction(); State state = (stateName==null) ? new StateImpl("") : session.getState(stateName); request.setAttribute("state", state); request.setAttribute("action", session.getAction(actionName)); // Get the associated form com.silverpeas.form.Form form = session.getActionForm(stateName, actionName); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("actionForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getSavedStepRecord(savedStep); request.setAttribute("data", data); // Set flag to indicate action record has already been saved as draft request.setAttribute("isFirstTimeSaved", "no"); // Set flag to indicate instance is in resuming mode session.setResumingInstance(true); // Set global attributes setSharedAttributes(session, request); return "/processManager/jsp/editAction.jsp"; } }; /** * The editAction handler */ static private FunctionHandler editActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Set process instance String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); // retrieve state name and action name String stateName = request.getParameter("state"); String actionName = request.getParameter("action"); request.setAttribute("state", session.getState(stateName)); request.setAttribute("action", session.getAction(actionName)); // Get the associated form com.silverpeas.form.Form form = session.getActionForm(stateName, actionName); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("actionForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getActionRecord(stateName, actionName); request.setAttribute("data", data); // Set flag to indicate action record has never been saved as draft for this step request.setAttribute("isFirstTimeSaved", "yes"); // lock the process instance session.lock(stateName); // Set global attributes setSharedAttributes(session, request); return "/processManager/jsp/editAction.jsp"; } }; /** * The saveAction handler */ static private FunctionHandler saveActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String stateName = FileUploadUtil.getParameter(items, "state"); String actionName = FileUploadUtil.getParameter(items, "action"); boolean isDraft = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isDraft") ); boolean isFirstTimeSaved = StringUtil.getBooleanValue( FileUploadUtil.getParameter(items, "isFirstTimeSaved") ); com.silverpeas.form.Form form = session.getActionForm(stateName, actionName); PagesContext context = 
getFormContext("actionForm", "0", session); DataRecord data = session.getActionRecord(stateName, actionName); if (form != null) { form.update(items, data, context); } session.processAction(stateName, actionName, data, isDraft, isFirstTimeSaved); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The cancelAction handler */ static private FunctionHandler cancelActionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String stateName = request.getParameter("state"); // unlock the process instance session.unlock(stateName); return listTasksHandler.getDestination(function, session, request); } }; /** * The cancelResponse handler */ static private FunctionHandler cancelResponseHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String stateName = request.getParameter("state"); // unlock the process instance session.unlock(stateName); return viewProcessHandler.getDestination(function, session, request); } }; /** * The editQuestion handler */ static private FunctionHandler editQuestionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String stepId = request.getParameter("stepId"); request.setAttribute("stepId", stepId); request.setAttribute("step", session.getStep(stepId)); String state = request.getParameter("state"); request.setAttribute("state", state); // Get the question form com.silverpeas.form.Form form = session.getQuestionForm(false); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("questionForm", "0", session, true); request.setAttribute("context", context); // Get the form data DataRecord data = session.getEmptyQuestionRecord(); request.setAttribute("data", data); // lock the process instance session.lock(state); setSharedAttributes(session, request); return "/processManager/jsp/editQuestion.jsp"; } }; /** * The saveQuestion handler */ static private FunctionHandler saveQuestionHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String stepId = FileUploadUtil.getParameter(items, "stepId"); String state = FileUploadUtil.getParameter(items, "state"); com.silverpeas.form.Form form = session.getQuestionForm(false); PagesContext context = getFormContext("questionForm", "0", session); DataRecord data = session.getEmptyQuestionRecord(); form.update(items, data, context); session.processQuestion(stepId, state, data); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The editResponse handler */ static private FunctionHandler editResponseHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String questionId = 
request.getParameter("questionId"); request.setAttribute("question", session.getQuestion(questionId)); // Get the question form (readonly) com.silverpeas.form.Form questionForm = session.getQuestionForm(true); request.setAttribute("questionForm", questionForm); // Get the response form (same as the question) com.silverpeas.form.Form responseForm = session.getQuestionForm(false); request.setAttribute("responseForm", responseForm); // Set the form context PagesContext context = getFormContext("responseForm", "0", session, true); request.setAttribute("context", context); // Get the question form data DataRecord questionData = session.getQuestionRecord(questionId); request.setAttribute("questionData", questionData); // Get the response form data DataRecord responseData = session.getEmptyQuestionRecord(); request.setAttribute("responseData", responseData); // lock the process instance Question question = session.getQuestion(questionId); session.lock(question.getTargetState().getName()); setSharedAttributes(session, request); return "/processManager/jsp/editResponse.jsp"; } }; /** * The saveResponse handler */ static private FunctionHandler saveResponseHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String questionId = FileUploadUtil.getParameter(items, "questionId"); com.silverpeas.form.Form responseForm = session.getQuestionForm(false); PagesContext context = getFormContext("responseForm", "0", session); DataRecord responseData = session.getEmptyQuestionRecord(); responseForm.update(items, responseData, context); session.processResponse(questionId, responseData); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_CREATE_FORM", e); } } }; /** * The editUserSetting handler */ static private FunctionHandler editUserSettingsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Get the user settings form com.silverpeas.form.Form form = session.getUserSettingsForm(); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("userSettingsForm", "0", session); request.setAttribute("context", context); // Get the form data DataRecord data = session.getUserSettingsRecord(); request.setAttribute("data", data); setSharedAttributes(session, request); return "/processManager/jsp/editUserSettings.jsp"; } }; /** * The saveUserSetting handler */ static private FunctionHandler saveUserSettingsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { com.silverpeas.form.Form form = session.getUserSettingsForm(); PagesContext context = getFormContext("userSettingsForm", "0", session); DataRecord data = session.getEmptyUserSettingsRecord(); try { List<FileItem> items = FileUploadUtil.parseRequest(request); form.update(items, data, context); session.saveUserSettings(data); return listProcessHandler.getDestination(function, session, request); } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ILL_USERSETTINGS_FORM", e); } } }; /** * The listQuestions 
handler */ static private FunctionHandler listQuestionsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String processId = request.getParameter("processId"); session.resetCurrentProcessInstance(processId); // Get the question form (readonly) com.silverpeas.form.Form questionForm = session.getQuestionForm(true); request.setAttribute("form", questionForm); // Set the form context PagesContext context = getFormContext("responseForm", "0", session); request.setAttribute("context", context); // Get tasks list Task[] tasks = session.getTasks(); request.setAttribute("tasks", tasks); setSharedAttributes(session, request); return "/processManager/jsp/listQuestions.jsp"; } }; /** * Builds the ProcessFilter from the http request parameters. */ static private void updateProcessFilter( ProcessManagerSessionController session, HttpServletRequest request, ProcessFilter filter) throws ProcessManagerException { try { List<FileItem> items = FileUploadUtil.parseRequest(request); String collapse = FileUploadUtil.getParameter(items, "collapse"); String oldC = filter.getCollapse(); filter.setCollapse(collapse); // unless the filterPanel was not open. if ("false".equals(oldC)) { com.silverpeas.form.Form form = filter.getPresentationForm(); PagesContext context = getFormContext("filter", "1", session); DataRecord data = filter.getCriteriaRecord(); form.update(items, data, context); filter.setCriteriaRecord(data); } } catch (Exception e) { throw new ProcessManagerException("ProcessManagerRequestRouter", "processManager.ERR_ILL_FILTER_FORM", e); } } /** * Send the filter parameters */ static private void setProcessFilterAttributes( ProcessManagerSessionController session, HttpServletRequest request, ProcessFilter filter) throws ProcessManagerException { String collapse = filter.getCollapse(); request.setAttribute("collapse", collapse); com.silverpeas.form.Form form = filter.getPresentationForm(); request.setAttribute("form", form); PagesContext context = getFormContext("filter", "1", session); request.setAttribute("context", context); DataRecord data = filter.getCriteriaRecord(); request.setAttribute("data", data); } /** * The printProcessFrameset handler */ static private FunctionHandler printProcessFramesetHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { setSharedAttributes(session, request); return "/processManager/jsp/printProcessFrameset.jsp"; } }; /** * The printProcess handler */ static private FunctionHandler printProcessHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { // Get the print form com.silverpeas.form.Form form = session.getPrintForm(request); request.setAttribute("form", form); // Set the form context PagesContext context = getFormContext("printForm", "0", session); request.setAttribute("context", context); // Get the form data DataRecord data = session.getPrintRecord(); request.setAttribute("data", data); setSharedAttributes(session, request); return "/processManager/jsp/printProcess.jsp"; } }; /** * The printButtons handler */ static private FunctionHandler printButtonsHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, 
HttpServletRequest request) throws ProcessManagerException { setSharedAttributes(session, request); return "/processManager/jsp/printButtons.jsp"; } }; static private FunctionHandler exportCSVHandler = new FunctionHandler() { public String getDestination(String function, ProcessManagerSessionController session, HttpServletRequest request) throws ProcessManagerException { String csvFilename = session.exportListAsCSV(); request.setAttribute("CSVFilename", csvFilename); if (StringUtil.isDefined(csvFilename)) { File file = new File(FileRepositoryManager.getTemporaryPath() + csvFilename); request.setAttribute("CSVFileSize", Long.valueOf(file.length())); request.setAttribute("CSVFileURL", FileServerUtils.getUrlToTempDir(csvFilename, csvFilename, "text/csv")); file = null; } return "/processManager/jsp/downloadCSV.jsp"; } }; /** * Set attributes shared by all the processManager pages. */ static private void setSharedAttributes(ProcessManagerSessionController session, HttpServletRequest request) { String canCreate = (session.getCreationRights()) ? "1" : "0"; boolean isVersionControlled = session.isVersionControlled(); String s_isVersionControlled = (isVersionControlled ? "1" : "0"); request.setAttribute("isVersionControlled", s_isVersionControlled); request.setAttribute("language", session.getLanguage()); request.setAttribute("roles", session.getUserRoleLabels()); request.setAttribute("currentRole", session.getCurrentRole()); request.setAttribute("canCreate", canCreate); request.setAttribute("process", session.getCurrentProcessInstance()); request.setAttribute("isActiveUser", new Boolean(session.isActiveUser())); request.setAttribute("isAttachmentTabEnable", new Boolean(session.isAttachmentTabEnable())); request.setAttribute("isHistoryTabEnable", new Boolean(session.isHistoryTabVisible())); request.setAttribute("isProcessIdVisible", new Boolean(session.isProcessIdVisible())); request.setAttribute("isPrintButtonEnabled", new Boolean(session.isPrintButtonEnabled())); request.setAttribute("isSaveButtonEnabled", new Boolean(session.isSaveButtonEnabled())); } /** * Read an int parameter. */ static int intValue(String parameter, int defaultValue) { try { if (parameter != null) return (new Integer(parameter)).intValue(); else return defaultValue; } catch (NumberFormatException e) { return defaultValue; } } static private PagesContext getFormContext(String formName, String formIndex, ProcessManagerSessionController session) { return getFormContext(formName, formIndex, session, false); } static private PagesContext getFormContext(String formName, String formIndex, ProcessManagerSessionController session, boolean printTitle) { PagesContext pagesContext = new PagesContext(formName, formIndex, session.getLanguage(), printTitle, session .getComponentId(), session.getUserId()); if (session.getCurrentProcessInstance() != null) { String currentInstanceId = session.getCurrentProcessInstance().getInstanceId(); pagesContext.setObjectId(currentInstanceId); } // versioning used ? pagesContext.setVersioningUsed(session.isVersionControlled()); return pagesContext; } }
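The router above wires every URL function to a dedicated handler object and parses numeric request parameters with a small intValue helper built on new Integer(...). Below is a minimal, self-contained sketch of that dispatch style in plain Java; the Handler interface, map, function names and JSP paths are illustrative stand-ins rather than the actual Silverpeas FunctionHandler API, and Integer.parseInt replaces the boxed-Integer parsing.

import java.util.HashMap;
import java.util.Map;

public class HandlerDispatchSketch {

    // Simplified stand-in for the FunctionHandler strategy used by the request router.
    interface Handler {
        String destination(String function, Map<String, String> params);
    }

    private final Map<String, Handler> handlers = new HashMap<>();

    public HandlerDispatchSketch() {
        // One handler per function name, mirroring the router's static handler fields.
        handlers.put("Main", (function, params) -> "/processManager/jsp/listProcess.jsp");
        handlers.put("viewHistory", (function, params) -> {
            int enlightedStep = intValue(params.get("enlightedStep"), -1);
            return enlightedStep >= 0
                ? "/processManager/jsp/viewHistory.jsp"
                : "/processManager/jsp/listProcess.jsp";
        });
    }

    public String route(String function, Map<String, String> params) {
        // Unknown functions fall back to a default destination (illustrative path).
        Handler handler = handlers.getOrDefault(function,
            (f, p) -> "/processManager/jsp/listProcess.jsp");
        return handler.destination(function, params);
    }

    // Idiomatic replacement for the router's intValue helper: Integer.parseInt instead of
    // allocating a boxed Integer only to unbox it.
    static int intValue(String parameter, int defaultValue) {
        if (parameter == null) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(parameter.trim());
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }

    public static void main(String[] args) {
        HandlerDispatchSketch router = new HandlerDispatchSketch();
        Map<String, String> params = new HashMap<>();
        params.put("enlightedStep", "2");
        // Prints /processManager/jsp/viewHistory.jsp
        System.out.println(router.route("viewHistory", params));
    }
}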
fixes bug #546 - WYSIWYG fields in forms git-svn-id: 7f0d99e15c3d719532f2b0716a1f95728c3076a0@1646 a8e77078-a1c7-4fa5-b8fc-53c5178a176c
process-manager/process-manager-war/src/main/java/com/silverpeas/processManager/servlets/ProcessManagerRequestRouter.java
fixes bug #546 - WYSIWYG fields in forms
Java
agpl-3.0
1488259a8542453698176c30ab1de3df0e0c65cc
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
252613a4-2e60-11e5-9284-b827eb9e62be
hello.java
25209302-2e60-11e5-9284-b827eb9e62be
252613a4-2e60-11e5-9284-b827eb9e62be
hello.java
252613a4-2e60-11e5-9284-b827eb9e62be
Java
lgpl-2.1
56fd673eabfd0f4480feb06685562fde960f707d
0
xwiki/xwiki-commons,xwiki/xwiki-commons
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.environment.internal; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import javax.inject.Singleton; import javax.servlet.ServletContext; import org.apache.commons.lang3.exception.ExceptionUtils; import org.xwiki.component.annotation.Component; /** * Defines what an Environment means in a Servlet environment. * * @version $Id$ * @since 3.5M1 */ @Component @Singleton public class ServletEnvironment extends AbstractEnvironment { /** * @see #getServletContext() */ private ServletContext servletContext; /** * @param servletContext see {@link #getServletContext()} */ public void setServletContext(ServletContext servletContext) { this.servletContext = servletContext; } /** * @return the Servlet Context */ public ServletContext getServletContext() { if (this.servletContext == null) { throw new RuntimeException("The Servlet Environment has not been properly initialized " + "(The Servlet Context is not set)"); } return this.servletContext; } @Override public InputStream getResourceAsStream(String resourceName) { return getServletContext().getResourceAsStream(resourceName); } @Override public URL getResource(String resourceName) { URL url; try { url = getServletContext().getResource(resourceName); } catch (MalformedURLException e) { url = null; this.logger.warn("Error getting resource [{}] because of invalid path format. Reason: [{}]", resourceName, e.getMessage()); } return url; } @Override protected String getTemporaryDirectoryName() { final String tmpDirectory = super.getTemporaryDirectoryName(); try { if (tmpDirectory == null) { File tempDir = (File) this.getServletContext().getAttribute(ServletContext.TEMPDIR); return tempDir == null ? null : tempDir.getCanonicalPath(); } } catch (IOException e) { this.logger.warn("Unable to get Servlet temporary directory due to error [{}], " + "falling back on the default System temporary directory.", ExceptionUtils.getMessage(e)); } return tmpDirectory; } }
xwiki-commons-core/xwiki-commons-environment/xwiki-commons-environment-servlet/src/main/java/org/xwiki/environment/internal/ServletEnvironment.java
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.environment.internal; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import javax.inject.Singleton; import javax.servlet.ServletContext; import org.apache.commons.lang3.exception.ExceptionUtils; import org.xwiki.component.annotation.Component; /** * Defines what an Environment means in a Servlet environment. * * @version $Id$ * @since 3.5M1 */ @Component @Singleton public class ServletEnvironment extends AbstractEnvironment { /** * @see #getServletContext() */ private ServletContext servletContext; /** * @param servletContext see {@link #getServletContext()} */ public void setServletContext(ServletContext servletContext) { this.servletContext = servletContext; } /** * @return the Servlet Context */ public ServletContext getServletContext() { if (this.servletContext == null) { throw new RuntimeException("The Servlet Environment has not been properly initialized " + "(The Servlet Context is not set)"); } return this.servletContext; } @Override public InputStream getResourceAsStream(String resourceName) { return getServletContext().getResourceAsStream(resourceName); } @Override public URL getResource(String resourceName) { URL url; try { url = getServletContext().getResource(resourceName); } catch (MalformedURLException e) { url = null; this.logger.warn("Error getting resource [{}] because of invalid path format. Reason: [{}]", resourceName, e.getMessage()); } return url; } @Override protected String getTemporaryDirectoryName() { final String tmpDirectory = super.getTemporaryDirectoryName(); try { if (tmpDirectory == null) { File tempDir = (File) this.getServletContext().getAttribute("javax.servlet.context.tempdir"); return tempDir == null ? null : tempDir.getCanonicalPath(); } } catch (IOException e) { this.logger.warn("Unable to get Servlet temporary directory due to error [{}], " + "falling back on the default System temporary directory.", ExceptionUtils.getMessage(e)); } return tmpDirectory; } }
[misc] codestyle
xwiki-commons-core/xwiki-commons-environment/xwiki-commons-environment-servlet/src/main/java/org/xwiki/environment/internal/ServletEnvironment.java
[misc] codestyle
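The only substantive difference between the new and old contents of ServletEnvironment above is in getTemporaryDirectoryName(): the servlet temp-directory attribute is now read through the ServletContext.TEMPDIR constant (Servlet API 3.0+, whose value is "javax.servlet.context.tempdir") instead of the hard-coded string literal. A minimal sketch of that lookup in isolation, with an illustrative helper name:

import java.io.File;
import java.io.IOException;
import javax.servlet.ServletContext;

final class TempDirLookup {

    private TempDirLookup() {
    }

    // Reads the container-provided temp directory; ServletContext.TEMPDIR resolves to
    // "javax.servlet.context.tempdir", so behaviour is identical to the old literal-based code.
    static String temporaryDirectory(ServletContext servletContext) throws IOException {
        File tempDir = (File) servletContext.getAttribute(ServletContext.TEMPDIR);
        return tempDir == null ? null : tempDir.getCanonicalPath();
    }
}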
Java
apache-2.0
451284d5d1ddc36135cfffc807b902fd2478ca00
0
smarthi/nd4j,huitseeker/nd4j,deeplearning4j/nd4j,huitseeker/nd4j,drlebedev/nd4j,ambraspace/nd4j,smarthi/nd4j,drlebedev/nd4j,deeplearning4j/nd4j,ambraspace/nd4j,gagatust/nd4j,gagatust/nd4j
package org.nd4j.jita.allocator.impl; import jcuda.runtime.JCuda; import jcuda.runtime.cudaEvent_t; import lombok.Getter; import lombok.NonNull; import lombok.Setter; import org.bytedeco.javacpp.Pointer; import org.nd4j.jita.allocator.concurrency.AtomicState; import org.nd4j.jita.allocator.enums.AllocationStatus; import org.nd4j.jita.allocator.enums.SyncState; import org.nd4j.jita.allocator.garbage.GarbageReference; import org.nd4j.jita.allocator.pointers.PointersPair; import org.nd4j.jita.allocator.time.RateTimer; import org.nd4j.jita.allocator.time.TimeProvider; import org.nd4j.jita.allocator.time.impl.SimpleTimer; import org.nd4j.jita.allocator.time.providers.MillisecondsProvider; import org.nd4j.jita.allocator.time.providers.OperativeProvider; import org.nd4j.linalg.api.buffer.BaseDataBuffer; import org.nd4j.linalg.api.buffer.DataBuffer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.ref.Reference; import java.lang.ref.WeakReference; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * This class describes top-level allocation unit. * Every buffer passed into CUDA wii have allocation point entry, describing allocation state. * * @author [email protected] */ public class AllocationPoint { private static Logger log = LoggerFactory.getLogger(AllocationPoint.class); // thread safety is guaranteed by cudaLock private volatile PointersPair pointerInfo; @Getter @Setter private Long objectId; @Getter @Setter private Long bucketId; // thread safety is guaranteed by allocLock private volatile AllocationStatus allocationStatus = AllocationStatus.UNDEFINED; private transient TimeProvider timeProvider = new OperativeProvider(); private transient TimeProvider realTimeProvider = new MillisecondsProvider(); // corresponding access times in TimeProvider quants private final AtomicLong accessHostRead = new AtomicLong(0); private final AtomicLong accessDeviceRead = new AtomicLong(0); private final AtomicLong accessHostWrite = new AtomicLong(0); private final AtomicLong accessDeviceWrite = new AtomicLong(0); // real time here private final AtomicLong deviceAccessTime = new AtomicLong(0); @Getter @Setter private boolean constant; // TODO: timer should be instantiated externally // @Getter private final RateTimer timerShort = new SimpleTimer(10, TimeUnit.SECONDS); //new BinaryTimer(5, TimeUnit.SECONDS); // @Getter private final RateTimer timerLong = new SimpleTimer(60, TimeUnit.SECONDS); /* device, where memory was/will be allocated. Valid integer >= 0 is deviceId, null for undefined */ @Getter @Setter private volatile Integer deviceId; /* We assume 1D memory chunk allocations. 
*/ @Getter @Setter private AllocationShape shape; private AtomicLong deviceTicks = new AtomicLong(0); // private Map<AllocationShape, NestedPoint> usedChunks = new ConcurrentHashMap<>(); // @Getter private AtomicState accessState = new AtomicState(); private volatile WeakReference<BaseDataBuffer> originalDataBufferReference; private volatile GarbageReference garbageReference; private cudaEvent_t lastEvent; public void setLastEvent(cudaEvent_t event) { if (event != null) { if (lastEvent != null) JCuda.cudaEventDestroy(lastEvent); } lastEvent = event; } public cudaEvent_t getLastEvent() { return lastEvent; } /** * This method stores WeakReference to original BaseCudaDataBuffer * * @param buffer */ public void attachBuffer(@NonNull BaseDataBuffer buffer) { originalDataBufferReference = new WeakReference<BaseDataBuffer>(buffer); } public void attachReference(GarbageReference reference) { garbageReference = reference; } /** * This method returns previously stored BaseCudaDataBuffer instance * * PLEASE NOTE: Return value CAN be null * * @return */ public DataBuffer getBuffer() { if (originalDataBufferReference != null) { return originalDataBufferReference.get(); } else return null; } /** * This method returns current AllocationStatus for this point * @return */ public AllocationStatus getAllocationStatus() { return allocationStatus; } /** * This method sets specified AllocationStatus for this point * @param status */ public void setAllocationStatus(@NonNull AllocationStatus status) { allocationStatus = status; } /** * This method returns CUDA pointer object for this allocation. * It can be either device pointer or pinned memory pointer, or null. * * PLEASE NOTE: Thread safety is guaranteed by reentrant read/write lock * @return */ public Pointer getDevicePointer() { if (pointerInfo == null) { log.info("pointerInfo is null"); return null; } return pointerInfo.getDevicePointer(); } /** * This method returns CUDA pointer object for this allocation. * It can be either device pointer or pinned memory pointer, or null. * * PLEASE NOTE: Thread safety is guaranteed by reentrant read/write lock * @return */ public Pointer getHostPointer() { if (pointerInfo == null) return null; return pointerInfo.getHostPointer(); } /** * This method sets CUDA pointer for this allocation. * It can be either device pointer, or pinned memory pointer, or null. 
* * PLEASE NOTE: Thread safety is guaranteed by reentrant read/write lock * @param pointerInfo CUDA pointers wrapped into DevicePointerInfo */ public void setPointers(@NonNull PointersPair pointerInfo) { this.pointerInfo = pointerInfo; } public PointersPair getPointers() { return this.pointerInfo; } public long getDeviceTicks() { return deviceTicks.get(); } public void tickDeviceRead() { // this.deviceTicks.incrementAndGet(); // this.timerShort.triggerEvent(); // this.timerLong.triggerEvent(); //this.deviceAccessTime.set(realTimeProvider.getCurrentTime()); this.accessDeviceRead.set(timeProvider.getCurrentTime()); } public void tackDevice() { //this.deviceTicks.incrementAndGet(); this.accessDeviceRead.set(timeProvider.getCurrentTime()); this.deviceAccessTime.set(realTimeProvider.getCurrentTime()); } /** * Returns time, in milliseconds, when this point was accessed on host side * * @return */ public long getHostReadTime() { return accessHostRead.get(); } public long getHostWriteTime() { return accessHostWrite.get(); } public long getRealDeviceAccessTime() { return deviceAccessTime.get(); } /** * Returns time, in milliseconds, when this point was accessed on device side * * @return */ public long getDeviceAccessTime() { return accessDeviceRead.get(); } /** * Returns time when point was written on device last time * * @return */ public long getDeviceWriteTime() { return accessDeviceWrite.get(); } public void tickHostRead() { accessHostRead.set(timeProvider.getCurrentTime()); } /** * This method sets time when this point was changed on device * */ public void tickDeviceWrite() { // deviceAccessTime.set(realTimeProvider.getCurrentTime()); tickDeviceRead(); accessDeviceWrite.set(timeProvider.getCurrentTime()); } /** * This method sets time when this point was changed on host */ public void tickHostWrite() { tickHostRead(); accessHostWrite.set(timeProvider.getCurrentTime()); } /** * This method returns, if host side has actual copy of data * * @return true, if data is actual, false otherwise */ public boolean isActualOnHostSide() { //log.info("isActuialOnHostSide() -> Host side: [{}], Device side: [{}]", accessHostRead.get(), accessDeviceRead.get()); boolean result = accessHostWrite.get() >= accessDeviceWrite.get() || accessHostRead.get() >= accessDeviceWrite.get(); //log.info("isActuialOnHostSide() -> {}, shape: {}", result, shape); return result; } /** * This method returns, if device side has actual copy of data * * @return */ public boolean isActualOnDeviceSide() { //log.info("isActuialOnDeviceSide() -> Host side: [{}], Device side: [{}]", accessHostWrite.get(), accessDeviceWrite.get()); boolean result = accessDeviceWrite.get() >= accessHostWrite.get() || accessDeviceRead.get() >= accessHostWrite.get(); //accessHostWrite.get() <= getDeviceAccessTime(); // log.info("isActuialOnDeviceSide() -> {} ({}), Shape: {}", result, objectId, shape); return result; } /** * This method sets device access time equal to host write time */ public void tickDeviceToHost() { accessDeviceRead.set(accessHostRead.get()); this.deviceAccessTime.set(realTimeProvider.getCurrentTime()); } @Override public String toString() { return "AllocationPoint{" + "deviceId=" + deviceId + ", objectId=" + objectId + ", shape=" + shape + '}'; } }
nd4j-backends/nd4j-backend-impls/nd4j-cuda-7.5/src/main/java/org/nd4j/jita/allocator/impl/AllocationPoint.java
package org.nd4j.jita.allocator.impl; import jcuda.runtime.cudaEvent_t; import lombok.Getter; import lombok.NonNull; import lombok.Setter; import org.bytedeco.javacpp.Pointer; import org.nd4j.jita.allocator.concurrency.AtomicState; import org.nd4j.jita.allocator.enums.AllocationStatus; import org.nd4j.jita.allocator.enums.SyncState; import org.nd4j.jita.allocator.garbage.GarbageReference; import org.nd4j.jita.allocator.pointers.PointersPair; import org.nd4j.jita.allocator.time.RateTimer; import org.nd4j.jita.allocator.time.TimeProvider; import org.nd4j.jita.allocator.time.impl.SimpleTimer; import org.nd4j.jita.allocator.time.providers.MillisecondsProvider; import org.nd4j.jita.allocator.time.providers.OperativeProvider; import org.nd4j.linalg.api.buffer.BaseDataBuffer; import org.nd4j.linalg.api.buffer.DataBuffer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.ref.Reference; import java.lang.ref.WeakReference; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantReadWriteLock; /** * This class describes top-level allocation unit. * Every buffer passed into CUDA wii have allocation point entry, describing allocation state. * * @author [email protected] */ public class AllocationPoint { private static Logger log = LoggerFactory.getLogger(AllocationPoint.class); // thread safety is guaranteed by cudaLock private volatile PointersPair pointerInfo; @Getter @Setter private Long objectId; @Getter @Setter private Long bucketId; // thread safety is guaranteed by allocLock private volatile AllocationStatus allocationStatus = AllocationStatus.UNDEFINED; private transient TimeProvider timeProvider = new OperativeProvider(); private transient TimeProvider realTimeProvider = new MillisecondsProvider(); // corresponding access times in TimeProvider quants private final AtomicLong accessHostRead = new AtomicLong(0); private final AtomicLong accessDeviceRead = new AtomicLong(0); private final AtomicLong accessHostWrite = new AtomicLong(0); private final AtomicLong accessDeviceWrite = new AtomicLong(0); // real time here private final AtomicLong deviceAccessTime = new AtomicLong(0); @Getter @Setter private boolean constant; // TODO: timer should be instantiated externally // @Getter private final RateTimer timerShort = new SimpleTimer(10, TimeUnit.SECONDS); //new BinaryTimer(5, TimeUnit.SECONDS); // @Getter private final RateTimer timerLong = new SimpleTimer(60, TimeUnit.SECONDS); /* device, where memory was/will be allocated. Valid integer >= 0 is deviceId, null for undefined */ @Getter @Setter private volatile Integer deviceId; /* We assume 1D memory chunk allocations. 
*/ @Getter @Setter private AllocationShape shape; private AtomicLong deviceTicks = new AtomicLong(0); // private Map<AllocationShape, NestedPoint> usedChunks = new ConcurrentHashMap<>(); // @Getter private AtomicState accessState = new AtomicState(); private volatile WeakReference<BaseDataBuffer> originalDataBufferReference; private volatile GarbageReference garbageReference; private cudaEvent_t lastEvent; public void setLastEvent(cudaEvent_t event) { lastEvent = event; } public cudaEvent_t getLastEvent() { return lastEvent; } /** * This method stores WeakReference to original BaseCudaDataBuffer * * @param buffer */ public void attachBuffer(@NonNull BaseDataBuffer buffer) { originalDataBufferReference = new WeakReference<BaseDataBuffer>(buffer); } public void attachReference(GarbageReference reference) { garbageReference = reference; } /** * This method returns previously stored BaseCudaDataBuffer instance * * PLEASE NOTE: Return value CAN be null * * @return */ public DataBuffer getBuffer() { if (originalDataBufferReference != null) { return originalDataBufferReference.get(); } else return null; } /** * This method returns current AllocationStatus for this point * @return */ public AllocationStatus getAllocationStatus() { return allocationStatus; } /** * This method sets specified AllocationStatus for this point * @param status */ public void setAllocationStatus(@NonNull AllocationStatus status) { allocationStatus = status; } /** * This method returns CUDA pointer object for this allocation. * It can be either device pointer or pinned memory pointer, or null. * * PLEASE NOTE: Thread safety is guaranteed by reentrant read/write lock * @return */ public Pointer getDevicePointer() { if (pointerInfo == null) { log.info("pointerInfo is null"); return null; } return pointerInfo.getDevicePointer(); } /** * This method returns CUDA pointer object for this allocation. * It can be either device pointer or pinned memory pointer, or null. * * PLEASE NOTE: Thread safety is guaranteed by reentrant read/write lock * @return */ public Pointer getHostPointer() { if (pointerInfo == null) return null; return pointerInfo.getHostPointer(); } /** * This method sets CUDA pointer for this allocation. * It can be either device pointer, or pinned memory pointer, or null. 
* * PLEASE NOTE: Thread safety is guaranteed by reentrant read/write lock * @param pointerInfo CUDA pointers wrapped into DevicePointerInfo */ public void setPointers(@NonNull PointersPair pointerInfo) { this.pointerInfo = pointerInfo; } public PointersPair getPointers() { return this.pointerInfo; } public long getDeviceTicks() { return deviceTicks.get(); } public void tickDeviceRead() { // this.deviceTicks.incrementAndGet(); // this.timerShort.triggerEvent(); // this.timerLong.triggerEvent(); //this.deviceAccessTime.set(realTimeProvider.getCurrentTime()); this.accessDeviceRead.set(timeProvider.getCurrentTime()); } public void tackDevice() { //this.deviceTicks.incrementAndGet(); this.accessDeviceRead.set(timeProvider.getCurrentTime()); this.deviceAccessTime.set(realTimeProvider.getCurrentTime()); } /** * Returns time, in milliseconds, when this point was accessed on host side * * @return */ public long getHostReadTime() { return accessHostRead.get(); } public long getHostWriteTime() { return accessHostWrite.get(); } public long getRealDeviceAccessTime() { return deviceAccessTime.get(); } /** * Returns time, in milliseconds, when this point was accessed on device side * * @return */ public long getDeviceAccessTime() { return accessDeviceRead.get(); } /** * Returns time when point was written on device last time * * @return */ public long getDeviceWriteTime() { return accessDeviceWrite.get(); } public void tickHostRead() { accessHostRead.set(timeProvider.getCurrentTime()); } /** * This method sets time when this point was changed on device * */ public void tickDeviceWrite() { // deviceAccessTime.set(realTimeProvider.getCurrentTime()); tickDeviceRead(); accessDeviceWrite.set(timeProvider.getCurrentTime()); } /** * This method sets time when this point was changed on host */ public void tickHostWrite() { tickHostRead(); accessHostWrite.set(timeProvider.getCurrentTime()); } /** * This method returns, if host side has actual copy of data * * @return true, if data is actual, false otherwise */ public boolean isActualOnHostSide() { //log.info("isActuialOnHostSide() -> Host side: [{}], Device side: [{}]", accessHostRead.get(), accessDeviceRead.get()); boolean result = accessHostWrite.get() >= accessDeviceWrite.get() || accessHostRead.get() >= accessDeviceWrite.get(); //log.info("isActuialOnHostSide() -> {}, shape: {}", result, shape); return result; } /** * This method returns, if device side has actual copy of data * * @return */ public boolean isActualOnDeviceSide() { //log.info("isActuialOnDeviceSide() -> Host side: [{}], Device side: [{}]", accessHostWrite.get(), accessDeviceWrite.get()); boolean result = accessDeviceWrite.get() >= accessHostWrite.get() || accessDeviceRead.get() >= accessHostWrite.get(); //accessHostWrite.get() <= getDeviceAccessTime(); // log.info("isActuialOnDeviceSide() -> {} ({}), Shape: {}", result, objectId, shape); return result; } /** * This method sets device access time equal to host write time */ public void tickDeviceToHost() { accessDeviceRead.set(accessHostRead.get()); this.deviceAccessTime.set(realTimeProvider.getCurrentTime()); } @Override public String toString() { return "AllocationPoint{" + "deviceId=" + deviceId + ", objectId=" + objectId + ", shape=" + shape + '}'; } }
async enabled, not leaking
nd4j-backends/nd4j-backend-impls/nd4j-cuda-7.5/src/main/java/org/nd4j/jita/allocator/impl/AllocationPoint.java
async enabled, not leaking
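The behavioural change in this AllocationPoint commit is confined to setLastEvent(cudaEvent_t): when a new event is stored, any previously held event is first released with JCuda.cudaEventDestroy, so per-operation events no longer leak under async execution. A sketch of that pattern on its own (class name illustrative, JCuda runtime API assumed on the classpath):

import jcuda.runtime.JCuda;
import jcuda.runtime.cudaEvent_t;

final class EventTracker {

    private cudaEvent_t lastEvent;

    // Mirrors the fixed setLastEvent: before storing a new event, destroy the one still
    // held so the native event handle is not leaked. (As in the original, passing null
    // simply clears the reference without destroying the previous event.)
    void setLastEvent(cudaEvent_t event) {
        if (event != null && lastEvent != null) {
            JCuda.cudaEventDestroy(lastEvent);
        }
        lastEvent = event;
    }

    cudaEvent_t getLastEvent() {
        return lastEvent;
    }
}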
Java
apache-2.0
b80c3bd8159a54fe3a86985e3f0ee249238be428
0
apucher/pinot,linkedin/pinot,linkedin/pinot,linkedin/pinot,apucher/pinot,apucher/pinot,linkedin/pinot,linkedin/pinot,fx19880617/pinot-1,apucher/pinot,apucher/pinot,fx19880617/pinot-1,fx19880617/pinot-1,fx19880617/pinot-1,fx19880617/pinot-1
/** * Copyright (C) 2014-2018 LinkedIn Corp. ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.controller.helix.core.realtime; import com.linkedin.pinot.common.config.TableNameBuilder; import com.linkedin.pinot.common.metadata.segment.LLCRealtimeSegmentZKMetadata; import com.linkedin.pinot.common.metrics.ControllerMeter; import com.linkedin.pinot.common.metrics.ControllerMetrics; import com.linkedin.pinot.common.protocols.SegmentCompletionProtocol; import com.linkedin.pinot.common.utils.CommonConstants; import com.linkedin.pinot.common.utils.LLCSegmentName; import com.linkedin.pinot.controller.ControllerConf; import com.linkedin.pinot.controller.helix.core.realtime.segment.CommittingSegmentDescriptor; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import org.apache.helix.HelixManager; import org.apache.helix.ZNRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This is a singleton class in the controller that drives the state machines for segments that are in the * committing stage. * * SegmentCompletionManager has a sub-class that represents the FSM that the segment goes through while * executing the segment completion protocol between pinot servers and pinot controller. The protocol is * described in SegmentCompletionProtocol. */ public class SegmentCompletionManager { // TODO Can we log using the segment name in the log message? public static Logger LOGGER = LoggerFactory.getLogger(SegmentCompletionManager.class); private enum State { PARTIAL_CONSUMING, // Indicates that at least one replica has reported that it has stopped consuming. HOLDING, // the segment has started finalizing. COMMITTER_DECIDED, // We know who the committer will be, we will let them know next time they call segmentConsumed() COMMITTER_NOTIFIED, // we notified the committer to commit. COMMITTER_UPLOADING, // committer is uploading. COMMITTING, // we are in the process of committing to zk COMMITTED, // We already committed a segment. ABORTED, // state machine is aborted. we will start a fresh one when the next segmentConsumed comes in. } private static SegmentCompletionManager _instance = null; private final HelixManager _helixManager; // A map that holds the FSM for each segment. private final Map<String, SegmentCompletionFSM> _fsmMap = new ConcurrentHashMap<>(); private final Map<String, Long> _commitTimeMap = new ConcurrentHashMap<>(); private final PinotLLCRealtimeSegmentManager _segmentManager; private final ControllerMetrics _controllerMetrics; private static final int MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS = 1800; // Half hour max commit time for all segments public static int getMaxCommitTimeForAllSegmentsSeconds() { return MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS; } // TODO keep some history of past committed segments so that we can avoid looking up PROPERTYSTORE if some server comes in late. 
protected SegmentCompletionManager(HelixManager helixManager, PinotLLCRealtimeSegmentManager segmentManager, ControllerMetrics controllerMetrics) { _helixManager = helixManager; _segmentManager = segmentManager; _controllerMetrics = controllerMetrics; } public boolean isSplitCommitEnabled() { return _segmentManager.getIsSplitCommitEnabled(); } public String getControllerVipUrl() { return _segmentManager.getControllerVipUrl(); } public static SegmentCompletionManager create(HelixManager helixManager, PinotLLCRealtimeSegmentManager segmentManager, ControllerConf controllerConf, ControllerMetrics controllerMetrics) { if (_instance != null) { throw new RuntimeException("Cannot create multiple instances"); } _instance = new SegmentCompletionManager(helixManager, segmentManager, controllerMetrics); SegmentCompletionProtocol.setMaxSegmentCommitTimeMs( TimeUnit.MILLISECONDS.convert(controllerConf.getSegmentCommitTimeoutSeconds(), TimeUnit.SECONDS)); return _instance; } public static SegmentCompletionManager getInstance() { if (_instance == null) { throw new RuntimeException("Not yet created"); } return _instance; } protected long getCurrentTimeMs() { return System.currentTimeMillis(); } // We need to make sure that we never create multiple FSMs for the same segment, so this method must be synchronized. private synchronized SegmentCompletionFSM lookupOrCreateFsm(final LLCSegmentName segmentName, String msgType) { final String segmentNameStr = segmentName.getSegmentName(); SegmentCompletionFSM fsm = _fsmMap.get(segmentNameStr); if (fsm == null) { // Look up propertystore to see if this is a completed segment ZNRecord segment; try { // TODO if we keep a list of last few committed segments, we don't need to go to zk for this. final String realtimeTableName = TableNameBuilder.REALTIME.tableNameWithType(segmentName.getTableName()); LLCRealtimeSegmentZKMetadata segmentMetadata = _segmentManager.getRealtimeSegmentZKMetadata(realtimeTableName, segmentName.getSegmentName(), null); if (segmentMetadata.getStatus().equals(CommonConstants.Segment.Realtime.Status.DONE)) { // Best to go through the state machine for this case as well, so that all code regarding state handling is in one place // Also good for synchronization, because it is possible that multiple threads take this path, and we don't want // multiple instances of the FSM to be created for the same commit sequence at the same time. final long endOffset = segmentMetadata.getEndOffset(); fsm = SegmentCompletionFSM.fsmInCommit(_segmentManager, this, segmentName, segmentMetadata.getNumReplicas(), endOffset); } else if (msgType.equals(SegmentCompletionProtocol.MSG_TYPE_STOPPED_CONSUMING)) { fsm = SegmentCompletionFSM.fsmStoppedConsuming(_segmentManager, this, segmentName, segmentMetadata.getNumReplicas()); } else { // Segment is in the process of completing, and this is the first one to respond. Create fsm fsm = SegmentCompletionFSM.fsmInHolding(_segmentManager, this, segmentName, segmentMetadata.getNumReplicas()); } LOGGER.info("Created FSM {}", fsm); _fsmMap.put(segmentNameStr, fsm); } catch (Exception e) { // Server gone wonky. Segment does not exist in propstore LOGGER.error("Exception creating FSM for segment {}", segmentNameStr, e); throw new RuntimeException("Exception creating FSM for segment " + segmentNameStr, e); } } return fsm; } /** * This method is to be called when a server calls in with the segmentConsumed() API, reporting an offset in kafka * that it currently has (i.e. next offset that it will consume, if it continues to consume). 
*/ public SegmentCompletionProtocol.Response segmentConsumed(SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final String stopReason = reqParams.getReason(); final long offset = reqParams.getOffset(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; SegmentCompletionFSM fsm = null; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_CONSUMED); response = fsm.segmentConsumed(instanceId, offset, stopReason); } catch (Exception e) { // Return failed response } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This method is to be called when a server calls in with the segmentCommit() API. The server sends in the segment * along with the API, but it is the caller's responsibility to save the segment after this call (and before the * segmentCommitEnd() call). * * If successful, this method will return Response.COMMIT_CONTINUE, in which case, the caller should save the incoming * segment and then call segmentCommitEnd(). * * Otherwise, this method will return a protocol response to be returned to the client right away (without saving the * incoming segment). */ public SegmentCompletionProtocol.Response segmentCommitStart(final SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final long offset = reqParams.getOffset(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_COMMIT); response = fsm.segmentCommitStart(instanceId, offset); } catch (Exception e) { LOGGER.error("Caught exception in segmentCommitStart for segment {}", segmentNameStr, e); } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } public SegmentCompletionProtocol.Response extendBuildTime(final SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final long offset = reqParams.getOffset(); final int extTimeSec = reqParams.getExtraTimeSec(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_COMMIT); response = fsm.extendBuildTime(instanceId, offset, extTimeSec); } 
catch (Exception e) { LOGGER.error("Caught exception in extendBuildTime for segment {}", segmentNameStr, e); } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This method is to be called when a server reports that it has stopped consuming a real-time segment. * * @return */ public SegmentCompletionProtocol.Response segmentStoppedConsuming(SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final long offset = reqParams.getOffset(); final String reason = reqParams.getReason(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_STOPPED_CONSUMING); response = fsm.stoppedConsuming(instanceId, offset, reason); } catch (Exception e) { LOGGER.error("Caught exception in segmentStoppedConsuming for segment {}", segmentNameStr, e); } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This method is to be called when the segment sent in by the server has been saved locally in the correct path that * is downloadable by the servers. * * It returns a response code to be sent back to the client. * * If the response code is not COMMIT_SUCCESS, then the caller may remove the segment that has been saved. * * @return */ public SegmentCompletionProtocol.Response segmentCommitEnd(SegmentCompletionProtocol.Request.Params reqParams, boolean success, boolean isSplitCommit) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_COMMIT); response = fsm.segmentCommitEnd(reqParams, success, isSplitCommit); } catch (Exception e) { LOGGER.error("Caught exception in segmentCommitEnd for segment {}", segmentNameStr, e); } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This class implements the FSM on the controller side for each completing segment. * * An FSM is created when we first hear about a segment (typically through the segmentConsumed message). * When an FSM is created, it may have one of two start states (HOLDING, or COMMITTED), depending on the * constructor used. * * We kick off an FSM in the COMMITTED state (rare) when we find that PROPERTYSTORE already has the segment * with the Status set to DONE. * * We kick off an FSM in the HOLDING state (typical) when a segmentConsumed() message arrives from the * first server we hear from. * * The FSM does not have a timer.
It is clocked by the servers, which, typically, are retransmitting their * segmentConsumed() message every so often (SegmentCompletionProtocol.MAX_HOLD_TIME_MS). * * See https://github.com/linkedin/pinot/wiki/Low-level-kafka-consumers */ private static class SegmentCompletionFSM { // We will have some variation between hosts, so we add 10% to the max hold time to pick a winner. // If there is more than 10% variation, then it is handled as an error case (i.e. the first few to // come in will have a winner, and the later ones will just download the segment) private static final long MAX_TIME_TO_PICK_WINNER_MS = SegmentCompletionProtocol.MAX_HOLD_TIME_MS + (SegmentCompletionProtocol.MAX_HOLD_TIME_MS / 10); // Once we pick a winner, the winner may get notified in the next call, so add one hold time plus some. // It may be that the winner is not the server that we are currently processing a segmentConsumed() // message from. In that case, we will wait for the next segmentConsumed() message from the picked winner. // If the winner does not come back to us within that time, we abort the state machine and start over. private static final long MAX_TIME_TO_NOTIFY_WINNER_MS = MAX_TIME_TO_PICK_WINNER_MS + SegmentCompletionProtocol.MAX_HOLD_TIME_MS + (SegmentCompletionProtocol.MAX_HOLD_TIME_MS / 10); public final Logger LOGGER; State _state = State.HOLDING; // Typically start off in HOLDING state. final long _startTimeMs; private final LLCSegmentName _segmentName; private final int _numReplicas; private final Set<String> _excludedServerStateMap; private final Map<String, Long> _commitStateMap; private long _winningOffset = -1L; private String _winner; private final PinotLLCRealtimeSegmentManager _segmentManager; private final SegmentCompletionManager _segmentCompletionManager; private final long _maxTimeToPickWinnerMs; private final long _maxTimeToNotifyWinnerMs; private final long _initialCommitTimeMs; // Once the winner is notified, they are expected to commit right away. At this point, it is the segment build // time that we need to consider. // We may need to add some time here to allow for getting the lock? For now 0 // We may need to add some time for the committer to come back to us (after the build)? For now 0.
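// For illustration only (the real value comes from SegmentCompletionProtocol): if MAX_HOLD_TIME_MS were 3000 ms, the FSM would wait up to 3300 ms (one hold time plus 10%) after it starts to pick a winner, and up to 6600 ms for the picked winner to come back and be told to COMMIT. Beyond that, the (possibly lease-extended) commit deadline below decides when the FSM gives up and aborts.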
private long _maxTimeAllowedToCommitMs; private final boolean _isSplitCommitEnabled; private final String _controllerVipUrl; public static SegmentCompletionFSM fsmInHolding(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas) { return new SegmentCompletionFSM(segmentManager, segmentCompletionManager, segmentName, numReplicas); } public static SegmentCompletionFSM fsmInCommit(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas, long winningOffset) { return new SegmentCompletionFSM(segmentManager, segmentCompletionManager, segmentName, numReplicas, winningOffset); } public static SegmentCompletionFSM fsmStoppedConsuming(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas) { SegmentCompletionFSM fsm = new SegmentCompletionFSM(segmentManager, segmentCompletionManager, segmentName, numReplicas); fsm._state = State.PARTIAL_CONSUMING; return fsm; } // Ctor that starts the FSM in HOLDING state private SegmentCompletionFSM(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas) { _segmentName = segmentName; _numReplicas = numReplicas; _segmentManager = segmentManager; _commitStateMap = new HashMap<>(_numReplicas); _excludedServerStateMap = new HashSet<>(_numReplicas); _segmentCompletionManager = segmentCompletionManager; _startTimeMs = _segmentCompletionManager.getCurrentTimeMs(); _maxTimeToPickWinnerMs = _startTimeMs + MAX_TIME_TO_PICK_WINNER_MS; _maxTimeToNotifyWinnerMs = _startTimeMs + MAX_TIME_TO_NOTIFY_WINNER_MS; long initialCommitTimeMs = MAX_TIME_TO_NOTIFY_WINNER_MS + _segmentManager.getCommitTimeoutMS(_segmentName.getTableName()); Long savedCommitTime = _segmentCompletionManager._commitTimeMap.get(segmentName.getTableName()); if (savedCommitTime != null && savedCommitTime > initialCommitTimeMs) { initialCommitTimeMs = savedCommitTime; } LOGGER = LoggerFactory.getLogger("SegmentCompletionFSM_" + segmentName.getSegmentName()); if (initialCommitTimeMs > MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS * 1000) { // The table has a really high value configured for max commit time. Set it to a higher value than default // and go from there. LOGGER.info("Configured max commit time {}s too high for table {}, changing to {}s", initialCommitTimeMs/1000, segmentName.getTableName(), MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS); initialCommitTimeMs = MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS * 1000; } _initialCommitTimeMs = initialCommitTimeMs; _maxTimeAllowedToCommitMs = _startTimeMs + _initialCommitTimeMs; _isSplitCommitEnabled = segmentCompletionManager.isSplitCommitEnabled(); _controllerVipUrl = segmentCompletionManager.getControllerVipUrl(); } // Ctor that starts the FSM in COMMITTED state private SegmentCompletionFSM(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas, long winningOffset) { // Constructor used when we get an event after a segment is committed. 
this(segmentManager, segmentCompletionManager, segmentName, numReplicas); _state = State.COMMITTED; _winningOffset = winningOffset; _winner = "UNKNOWN"; } @Override public String toString() { return "{" + _segmentName.getSegmentName() + "," + _state + "," + _startTimeMs + "," + _winner + "," + _winningOffset + "," + _isSplitCommitEnabled + "," + _controllerVipUrl + "}"; } // SegmentCompletionManager releases the FSM from the hashtable when it is done. public boolean isDone() { return _state.equals(State.COMMITTED) || _state.equals(State.ABORTED); } /* * We just heard from a server that it has reached completion stage, and is reporting the offset * that the server is at. Since multiple servers can come in at the same time for this segment, * we need to synchronize on the FSM to handle the messages. The processing time itself is small, * so we should be OK with this synchronization. */ public SegmentCompletionProtocol.Response segmentConsumed(String instanceId, long offset, final String stopReason) { final long now = _segmentCompletionManager.getCurrentTimeMs(); // We can synchronize the entire block for the SegmentConsumed message. synchronized (this) { LOGGER.info("Processing segmentConsumed({}, {})", instanceId, offset); if (_excludedServerStateMap.contains(instanceId)) { // Could be that the server was restarted, and it started consuming again, and somehow got to complete // consumption up to this point. We will accept it. LOGGER.info("Marking instance {} alive again", instanceId); _excludedServerStateMap.remove(instanceId); } _commitStateMap.put(instanceId, offset); switch (_state) { case PARTIAL_CONSUMING: return PARTIAL_CONSUMING__consumed(instanceId, offset, now, stopReason); case HOLDING: return HOLDING__consumed(instanceId, offset, now, stopReason); case COMMITTER_DECIDED: // This must be a retransmit return COMMITTER_DECIDED__consumed(instanceId, offset, now); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__consumed(instanceId, offset, now); case COMMITTER_UPLOADING: return COMMITTER_UPLOADING__consumed(instanceId, offset, now); case COMMITTING: return COMMITTING__consumed(instanceId, offset, now); case COMMITTED: return COMMITTED__consumed(instanceId, offset); case ABORTED: // FSM has been aborted, just return HOLD return hold(instanceId, offset); default: return fail(instanceId, offset); } } } /* * A server has sent a segmentCommit() message. The caller will save the segment if we return * COMMIT_CONTINUE. We need to verify that it is the same server that we notified as the winner * and the offset is the same as what is coming in with the commit. We can then move to * COMMITTER_UPLOADING and wait for the segmentCommitEnd() call. * * In case of discrepancy we move the state machine to ABORTED state so that this FSM is removed * from the map, and things start over. In this case, we respond to the server with a 'hold' so * that they re-transmit their segmentConsumed() message and start over.
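 * (Illustration with a hypothetical offset: if the winner was notified to commit at offset 1500 and later sends a * commit at any other offset, the FSM aborts and the server is asked to HOLD and start over.)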
*/ public SegmentCompletionProtocol.Response segmentCommitStart(String instanceId, long offset) { long now = _segmentCompletionManager.getCurrentTimeMs(); if (_excludedServerStateMap.contains(instanceId)) { LOGGER.warn("Not accepting commit from {} since it had stopped consuming", instanceId); return SegmentCompletionProtocol.RESP_FAILED; } synchronized (this) { LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); switch (_state) { case PARTIAL_CONSUMING: return PARTIAL_CONSUMING__commit(instanceId, offset, now); case HOLDING: return HOLDING__commit(instanceId, offset, now); case COMMITTER_DECIDED: return COMMITTER_DECIDED__commit(instanceId, offset, now); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__commit(instanceId, offset, now); case COMMITTER_UPLOADING: return COMMITTER_UPLOADING__commit(instanceId, offset, now); case COMMITTING: return COMMITTING__commit(instanceId, offset, now); case COMMITTED: return COMMITTED__commit(instanceId, offset); case ABORTED: return hold(instanceId, offset); default: return fail(instanceId, offset); } } } public SegmentCompletionProtocol.Response stoppedConsuming(String instanceId, long offset, String reason) { synchronized (this) { LOGGER.info("Processing stoppedConsuming({}, {})", instanceId, offset); _excludedServerStateMap.add(instanceId); switch (_state) { case PARTIAL_CONSUMING: return PARTIAL_CONSUMING__stoppedConsuming(instanceId, offset, reason); case HOLDING: return HOLDING_stoppedConsuming(instanceId, offset, reason); case COMMITTER_DECIDED: return COMMITTER_DECIDED__stoppedConsuming(instanceId, offset, reason); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__stoppedConsuming(instanceId, offset, reason); case COMMITTER_UPLOADING: return COMMITTER_UPLOADING__stoppedConsuming(instanceId, offset, reason); case COMMITTING: return COMMITTING__stoppedConsuming(instanceId, offset, reason); case COMMITTED: return COMMITTED__stoppedConsuming(instanceId, offset, reason); case ABORTED: LOGGER.info("Ignoring StoppedConsuming message from {} in state {}", instanceId, _state); return SegmentCompletionProtocol.RESP_PROCESSED; default: return fail(instanceId, offset); } } } public SegmentCompletionProtocol.Response extendBuildTime(final String instanceId, final long offset, final int extTimeSec) { final long now = _segmentCompletionManager.getCurrentTimeMs(); synchronized (this) { LOGGER.info("Processing extendBuildTime({}, {}, {})", instanceId, offset, extTimeSec); switch (_state) { case PARTIAL_CONSUMING: case HOLDING: case COMMITTER_DECIDED: return fail(instanceId, offset); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__extendBuildlTime(instanceId, offset, extTimeSec, now); case COMMITTER_UPLOADING: case COMMITTING: case COMMITTED: case ABORTED: default: return fail(instanceId, offset); } } } /* * We can get this call only when the state is COMMITTER_UPLOADING. Also, the instanceId should be equal to * the _winner.
*/ public SegmentCompletionProtocol.Response segmentCommitEnd(SegmentCompletionProtocol.Request.Params reqParams, boolean success, boolean isSplitCommit) { String instanceId = reqParams.getInstanceId(); long offset = reqParams.getOffset(); synchronized (this) { if (_excludedServerStateMap.contains(instanceId)) { LOGGER.warn("Not accepting commitEnd from {} since it had stopped consuming", instanceId); return abortAndReturnFailed(); } LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); if (!_state.equals(State.COMMITTER_UPLOADING) || !instanceId.equals(_winner) || offset != _winningOffset) { // State changed while we were out of sync. Return a failed commit. LOGGER.warn("State change during upload: state={} segment={} winner={} winningOffset={}", _state, _segmentName.getSegmentName(), _winner, _winningOffset); return abortAndReturnFailed(); } if (!success) { LOGGER.error("Segment upload failed"); return abortAndReturnFailed(); } SegmentCompletionProtocol.Response response = commitSegment(reqParams, isSplitCommit); if (!response.equals(SegmentCompletionProtocol.RESP_COMMIT_SUCCESS)) { return abortAndReturnFailed(); } else { return response; } } } // Helper methods that log the current state and the response sent private SegmentCompletionProtocol.Response fail(String instanceId, long offset) { LOGGER.info("{}:FAIL for instance={} offset={}", _state, instanceId, offset); return SegmentCompletionProtocol.RESP_FAILED; } private SegmentCompletionProtocol.Response commit(String instanceId, long offset) { long allowedBuildTimeSec = (_maxTimeAllowedToCommitMs - _startTimeMs)/1000; LOGGER.info("{}:COMMIT for instance={} offset={} buildTimeSec={}", _state, instanceId, offset, allowedBuildTimeSec); SegmentCompletionProtocol.Response.Params params = new SegmentCompletionProtocol.Response.Params().withOffset(offset).withBuildTimeSeconds(allowedBuildTimeSec) .withStatus(SegmentCompletionProtocol.ControllerResponseStatus.COMMIT) .withSplitCommit(_isSplitCommitEnabled); if (_isSplitCommitEnabled) { params.withControllerVipUrl(_controllerVipUrl); } return new SegmentCompletionProtocol.Response(params); } private SegmentCompletionProtocol.Response discard(String instanceId, long offset) { LOGGER.warn("{}:DISCARD for instance={} offset={}", _state, instanceId, offset); return SegmentCompletionProtocol.RESP_DISCARD; } private SegmentCompletionProtocol.Response keep(String instanceId, long offset) { LOGGER.info("{}:KEEP for instance={} offset={}", _state, instanceId, offset); return new SegmentCompletionProtocol.Response(new SegmentCompletionProtocol.Response.Params().withOffset(offset).withStatus( SegmentCompletionProtocol.ControllerResponseStatus.KEEP)); } private SegmentCompletionProtocol.Response catchup(String instanceId, long offset) { LOGGER.info("{}:CATCHUP for instance={} offset={}", _state, instanceId, offset); return new SegmentCompletionProtocol.Response(new SegmentCompletionProtocol.Response.Params().withOffset( _winningOffset).withStatus(SegmentCompletionProtocol.ControllerResponseStatus.CATCH_UP)); } private SegmentCompletionProtocol.Response hold(String instanceId, long offset) { LOGGER.info("{}:HOLD for instance={} offset={}", _state, instanceId, offset); return new SegmentCompletionProtocol.Response( new SegmentCompletionProtocol.Response.Params().withStatus(SegmentCompletionProtocol.ControllerResponseStatus.HOLD).withOffset(offset)); } private SegmentCompletionProtocol.Response abortAndReturnHold(long now, String instanceId, long offset) { _state = State.ABORTED;
_segmentCompletionManager._controllerMetrics.addMeteredTableValue(_segmentName.getTableName(), ControllerMeter.LLC_STATE_MACHINE_ABORTS, 1); return hold(instanceId, offset); } private SegmentCompletionProtocol.Response abortAndReturnFailed() { _state = State.ABORTED; _segmentCompletionManager._controllerMetrics.addMeteredTableValue(_segmentName.getTableName(), ControllerMeter.LLC_STATE_MACHINE_ABORTS, 1); return SegmentCompletionProtocol.RESP_FAILED; } private SegmentCompletionProtocol.Response abortIfTooLateAndReturnHold(long now, String instanceId, long offset) { if (now > _maxTimeAllowedToCommitMs) { LOGGER.warn("{}:Aborting FSM (too late) instance={} offset={} now={} start={}", _state, instanceId, offset, now, _startTimeMs); return abortAndReturnHold(now, instanceId, offset); } return null; } private int numReplicasToLookFor() { return _numReplicas - _excludedServerStateMap.size(); } private SegmentCompletionProtocol.Response PARTIAL_CONSUMING__consumed(String instanceId, long offset, long now, final String stopReason) { // This is the first time we are getting segmentConsumed() for this segment. // Some instance thinks we can close this segment, so go to HOLDING state, and process as normal. // We will just be looking for fewer replicas. _state = State.HOLDING; return HOLDING__consumed(instanceId, offset, now, stopReason); } /* * This is not a good state to get a commit message, but it is possible that the controller failed while in * COMMITTER_NOTIFIED state, and the first message we got in the new controller was a stoppedConsuming * message. As long as the committer is not the one who stopped consuming (which we have already checked before * coming here), we will trust the server that this is a valid commit. */ private SegmentCompletionProtocol.Response PARTIAL_CONSUMING__commit(String instanceId, long offset, long now) { // Do the same as HOLDING__commit return processCommitWhileHoldingOrPartialConsuming(instanceId, offset, now); } private SegmentCompletionProtocol.Response PARTIAL_CONSUMING__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, true); } /* * If we have waited "enough", or all replicas have reported, then we can pick a winner. * * Otherwise, we ask the server that is reporting to come back again later until one of these conditions holds. * * If we can pick a winner then we go to COMMITTER_DECIDED or COMMITTER_NOTIFIED (if the instance * in this call is the same as winner). * * If we can go to COMMITTER_NOTIFIED then we respond with a COMMIT message, otherwise with a HOLD message. */ private SegmentCompletionProtocol.Response HOLDING__consumed(String instanceId, long offset, long now, final String stopReason) { SegmentCompletionProtocol.Response response; // If we are past the max time to pick a winner, or we have heard from all replicas, // we are ready to pick a winner.
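// Example with hypothetical numbers: if all three replicas of a segment report within the hold window, the largest reported offset typically becomes the winning offset; the instance being processed right now is told to COMMIT if it is the winner, while the others get CATCHUP (to the winning offset) or HOLD on their next segmentConsumed().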
if (isWinnerPicked(instanceId, now, stopReason)) { if (_winner.equals(instanceId)) { LOGGER.info("{}:Committer notified winner instance={} offset={}", _state, instanceId, offset); response = commit(instanceId, offset); _state = State.COMMITTER_NOTIFIED; } else { LOGGER.info("{}:Committer decided winner={} offset={}", _state, _winner, _winningOffset); response = catchup(instanceId, offset); _state = State.COMMITTER_DECIDED; } } else { response = hold(instanceId, offset); } return response; } /* * This not a good state to receive a commit message, but then it may be that the controller * failed over while in the COMMITTER_NOTIFIED state... */ private SegmentCompletionProtocol.Response HOLDING__commit(String instanceId, long offset, long now) { return processCommitWhileHoldingOrPartialConsuming(instanceId, offset, now); } private SegmentCompletionProtocol.Response HOLDING_stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, true); } /* * We have already decided who the committer is, but have not let them know yet. If this is the committer that * we decided, then respond back with COMMIT. Otherwise, if the offset is smaller, respond back with a CATCHUP. * Otherwise, just have the server HOLD. Since the segment is not committed yet, we cannot ask them to KEEP or * DISCARD etc. If the committer fails for any reason, we will need a new committer. */ private SegmentCompletionProtocol.Response COMMITTER_DECIDED__consumed(String instanceId, long offset, long now) { if (offset > _winningOffset) { LOGGER.warn("{}:Aborting FSM (offset larger than winning) instance={} offset={} now={} winning={}", _state, instanceId, offset, now, _winningOffset); return abortAndReturnHold(now, instanceId, offset); } SegmentCompletionProtocol.Response response; if (_winner.equals(instanceId)) { if (_winningOffset == offset) { LOGGER.info("{}:Notifying winner instance={} offset={}", _state, instanceId, offset); response = commit(instanceId, offset); _state = State.COMMITTER_NOTIFIED; } else { // Winner coming back with a different offset. LOGGER.warn("{}:Winner coming back with different offset for instance={} offset={} prevWinnOffset={}", _state, instanceId, offset, _winningOffset); response = abortAndReturnHold(now, instanceId, offset); } } else if (offset == _winningOffset) { // Wait until winner has posted the segment. response = hold(instanceId, offset); } else { response = catchup(instanceId, offset); } if (now > _maxTimeToNotifyWinnerMs) { // Winner never got back to us. Abort the completion protocol and start afresh. // We can potentially optimize here to see if this instance has the highest so far, and re-elect them to // be winner, but for now, we will abort it and restart response = abortAndReturnHold(now, instanceId, offset); } return response; } /* * We have already decided who the committer is, but have not let them know yet. So, we don't expect * a commit() call here. */ private SegmentCompletionProtocol.Response COMMITTER_DECIDED__commit(String instanceId, long offset, long now) { return processCommitWhileHoldingOrPartialConsuming(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTER_DECIDED__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } /* * We have notified the committer. If we get a consumed message from another server, we can ask them to * catchup (if the offset is lower). 
If anything else, then we pretty much ask them to hold. */ private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__consumed(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response; // We have already picked a winner and notified them but we have not heard from them yet. // Common case here is that another server is coming back to us with its offset. We either respond back with HOLD or CATCHUP. // If the winner is coming back again, then we have some more conditions to look at. response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } if (instanceId.equals(_winner)) { // Winner is coming back to us after holding. Somehow they never heard us return COMMIT. // Allow them to be winner again, since we are still within time to pick a winner. if (offset == _winningOffset) { response = commit(instanceId, offset); } else { // Something is seriously wrong. Abort the FSM response = discard(instanceId, offset); LOGGER.warn("{}:Aborting for instance={} offset={}", _state, instanceId, offset); _state = State.ABORTED; } } else { // Common case: A different instance is reporting. if (offset == _winningOffset) { // Wait until winner has posted the segment before asking this server to KEEP the segment. response = hold(instanceId, offset); } else if (offset < _winningOffset) { response = catchup(instanceId, offset); } else { // We have not yet committed, so ask the new responder to hold. They may be the new leader in case the // committer fails. response = hold(instanceId, offset); } } return response; } /* * We have notified the committer, and this is (presumably) the committer sending its commit message. Verify that * it is the expected instance and offset before moving to COMMITTER_UPLOADING; a bad or late commit request aborts * the FSM. */ private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__commit(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response = null; response = checkBadCommitRequest(instanceId, offset, now); if (response != null) { return response; } LOGGER.info("{}:Uploading for instance={} offset={}", _state, instanceId, offset); _state = State.COMMITTER_UPLOADING; long commitTimeMs = now - _startTimeMs; if (commitTimeMs > _initialCommitTimeMs) { // We assume that the commit time holds for all partitions. It is possible, though, that one partition // commits at a lower time than another partition, and the two partitions are going simultaneously, // and we may not get the maximum value all the time.
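// Remember the longest commit time we have seen for this table so that FSMs created later start out with a more generous commit deadline instead of having to re-learn it.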
_segmentCompletionManager._commitTimeMap.put(_segmentName.getTableName(), commitTimeMs); } return SegmentCompletionProtocol.RESP_COMMIT_CONTINUE; } private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__extendBuildlTime(String instanceId, long offset, int extTimeSec, long now) { SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response == null) { long maxTimeAllowedToCommitMs = now + extTimeSec * 1000; if (maxTimeAllowedToCommitMs > _startTimeMs + MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS * 1000) { LOGGER.warn("Not accepting lease extension from {} startTime={} requestedTime={}", instanceId, _startTimeMs, maxTimeAllowedToCommitMs); return abortAndReturnFailed(); } _maxTimeAllowedToCommitMs = maxTimeAllowedToCommitMs; response = SegmentCompletionProtocol.RESP_PROCESSED; } return response; } private SegmentCompletionProtocol.Response COMMITTER_UPLOADING__consumed(String instanceId, long offset, long now) { return processConsumedAfterCommitStart(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTER_UPLOADING__commit(String instanceId, long offset, long now) { return processCommitWhileUploading(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTER_UPLOADING__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response COMMITTING__consumed(String instanceId, long offset, long now) { return processConsumedAfterCommitStart(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTING__commit(String instanceId, long offset, long now) { return processCommitWhileUploading(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTING__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response COMMITTED__consumed(String instanceId, long offset) { SegmentCompletionProtocol.Response response;// Server reporting an offset on an already completed segment. Depending on the offset, either KEEP or DISCARD. if (offset == _winningOffset) { response = keep(instanceId, offset); } else { // Return DISCARD. It is hard to say how long the server will take to complete things. response = discard(instanceId, offset); } return response; } private SegmentCompletionProtocol.Response COMMITTED__commit(String instanceId, long offset) { if (offset == _winningOffset) { return keep(instanceId, offset); } return discard(instanceId, offset); } private SegmentCompletionProtocol.Response COMMITTED__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response processStoppedConsuming(String instanceId, long offset, String reason, boolean createNew) { LOGGER.info("Instance {} stopped consuming segment {} at offset {}, state {}, createNew: {}, reason:{}", instanceId, _segmentName, offset, _state, createNew, reason); _segmentManager.segmentStoppedConsuming(_segmentName, instanceId); return SegmentCompletionProtocol.RESP_PROCESSED; } // A common method when the state is > COMMITTER_NOTIFIED. 
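// (That is, COMMITTER_UPLOADING or COMMITTING: the winner is already building/uploading, so other servers that report segmentConsumed() are parked with HOLD or CATCHUP until the commit either succeeds or the FSM aborts.)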
private SegmentCompletionProtocol.Response processConsumedAfterCommitStart(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response; // We have already picked a winner, and may or may not have heard from them. // Common case here is that another server is coming back to us with its offset. We either respond back with HOLD or CATCHUP. // It may be that we never heard from the committer, or the committer is taking too long to commit the segment. // In that case, we abort the FSM and start afresh (i.e., return HOLD). // If the winner is coming back again, then we have some more conditions to look at. response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } if (instanceId.equals(_winner)) { // The winner is coming back to report its offset. Take a decision based on the offset reported, and whether we // already notified them // Winner is supposedly already in the commit call. Something wrong. LOGGER.warn("{}:Aborting FSM because winner is reporting a segment while it is also committing instance={} offset={} now={}", _state, instanceId, offset, now); // Ask them to hold, just in case the committer fails for some reason. return abortAndReturnHold(now, instanceId, offset); } else { // Common case: A different instance is reporting. if (offset == _winningOffset) { // Wait until winner has posted the segment before asking this server to KEEP the segment. response = hold(instanceId, offset); } else if (offset < _winningOffset) { response = catchup(instanceId, offset); } else { // We have not yet committed, so ask the new responder to hold. They may be the new leader in case the // committer fails. response = hold(instanceId, offset); } } return response; } private SegmentCompletionProtocol.Response commitSegment(SegmentCompletionProtocol.Request.Params reqParams, boolean isSplitCommit) { boolean success; String instanceId = reqParams.getInstanceId(); long offset = reqParams.getOffset(); if (!_state.equals(State.COMMITTER_UPLOADING)) { // State changed while we were out of sync. Return a failed commit. LOGGER.warn("State change during upload: state={} segment={} winner={} winningOffset={}", _state, _segmentName.getSegmentName(), _winner, _winningOffset); return SegmentCompletionProtocol.RESP_FAILED; } LOGGER.info("Committing segment {} at offset {} winner {}", _segmentName.getSegmentName(), offset, instanceId); _state = State.COMMITTING; // In case of splitCommit, the segment is uploaded to a unique file name indicated by segmentLocation, // so we need to move the segment file to its permanent location first before committing the metadata.
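// (In the non-split case the segment was already saved at its final, downloadable location before segmentCommitEnd was called, so only the metadata commit below is needed.)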
CommittingSegmentDescriptor committingSegmentDescriptor = CommittingSegmentDescriptor.fromSegmentCompletionReqParams(reqParams); if (isSplitCommit) { if (!_segmentManager.commitSegmentFile(_segmentName.getTableName(), committingSegmentDescriptor)) { return SegmentCompletionProtocol.RESP_FAILED; } } success = _segmentManager.commitSegmentMetadata(_segmentName.getTableName(), committingSegmentDescriptor); if (success) { _state = State.COMMITTED; LOGGER.info("Committed segment {} at offset {} winner {}", _segmentName.getSegmentName(), offset, instanceId); return SegmentCompletionProtocol.RESP_COMMIT_SUCCESS; } return SegmentCompletionProtocol.RESP_FAILED; } private SegmentCompletionProtocol.Response processCommitWhileUploading(String instanceId, long offset, long now) { LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } // Another committer (or same) came in while one was uploading. Ask them to hold in case this one fails. return new SegmentCompletionProtocol.Response(new SegmentCompletionProtocol.Response.Params().withOffset(offset).withStatus( SegmentCompletionProtocol.ControllerResponseStatus.HOLD)); } private SegmentCompletionProtocol.Response checkBadCommitRequest(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } else if (instanceId.equals(_winner) && offset != _winningOffset) { // Hmm. Committer has been notified, but either a different one is committing, or offset is different LOGGER.warn("{}:Aborting FSM (bad commit req) instance={} offset={} now={} winning={}", _state, instanceId, offset, now, _winningOffset); return abortAndReturnHold(now, instanceId, offset); } return null; } private SegmentCompletionProtocol.Response processCommitWhileHoldingOrPartialConsuming(String instanceId, long offset, long now) { LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } // We cannot get a commit if we are in this state, so ask them to hold. Maybe we are starting after a failover. // The server will re-send the segmentConsumed message. return hold(instanceId, offset); } /** * Pick a winner if we can, preferring the instance that we are handling right now, * * We accept the first server to report an offset as long as the server stopped consumption * due to row limit. The premise is that other servers will also stop at row limit, and there * is no need to wait for them to report an offset in order to decide on a winner. The state machine takes care * of the cases where other servers may report different offsets (just in case). * * If the above condition is not satisfied (i.e. either this is not the first server, or it did not reach * row limit), then we can pick a winner only if it is too late to pick a winner, or we have heard from all * servers. * * Otherwise, we wait to hear from more servers. * * @param preferredInstance The instance that is reporting in this thread. * @param now current time * @param stopReason reason reported by instance for stopping consumption. * @return true if winner picked, false otherwise. 
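 * For example (hypothetical numbers): if the first replica to report stopped because of REASON_ROW_LIMIT, it wins * immediately with its own offset; otherwise, once the pick deadline passes or all non-excluded replicas have * reported, the largest reported offset wins, with ties going to the instance being processed in this call.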
*/ private boolean isWinnerPicked(String preferredInstance, long now, final String stopReason) { if (SegmentCompletionProtocol.REASON_ROW_LIMIT.equals(stopReason) && _commitStateMap.size() == 1) { _winner = preferredInstance; _winningOffset = _commitStateMap.get(preferredInstance); return true; } else if (now > _maxTimeToPickWinnerMs || _commitStateMap.size() == numReplicasToLookFor()) { LOGGER.info("{}:Picking winner time={} size={}", _state, now- _startTimeMs, _commitStateMap.size()); long maxOffsetSoFar = -1; String winnerSoFar = null; for (Map.Entry<String, Long> entry : _commitStateMap.entrySet()) { if (entry.getValue() > maxOffsetSoFar) { maxOffsetSoFar = entry.getValue(); winnerSoFar = entry.getKey(); } } _winningOffset = maxOffsetSoFar; if (_commitStateMap.get(preferredInstance) == maxOffsetSoFar) { winnerSoFar = preferredInstance; } _winner = winnerSoFar; return true; } return false; } } }
pinot-controller/src/main/java/com/linkedin/pinot/controller/helix/core/realtime/SegmentCompletionManager.java
/** * Copyright (C) 2014-2018 LinkedIn Corp. ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.controller.helix.core.realtime; import com.linkedin.pinot.common.config.TableNameBuilder; import com.linkedin.pinot.common.metadata.segment.LLCRealtimeSegmentZKMetadata; import com.linkedin.pinot.common.metrics.ControllerMeter; import com.linkedin.pinot.common.metrics.ControllerMetrics; import com.linkedin.pinot.common.protocols.SegmentCompletionProtocol; import com.linkedin.pinot.common.utils.CommonConstants; import com.linkedin.pinot.common.utils.LLCSegmentName; import com.linkedin.pinot.controller.ControllerConf; import com.linkedin.pinot.controller.helix.core.realtime.segment.CommittingSegmentDescriptor; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import org.apache.helix.HelixManager; import org.apache.helix.ZNRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This is a singleton class in the controller that drives the state machines for segments that are in the * committing stage. * * SegmentCompletionManager has a sub-class that represents the FSM that the segment goes through while * executing the segment completion protocol between pinot servers and pinot controller. The protocol is * described in SegmentCompletionProtocol. */ public class SegmentCompletionManager { // TODO Can we log using the segment name in the log message? public static Logger LOGGER = LoggerFactory.getLogger(SegmentCompletionManager.class); private enum State { PARTIAL_CONSUMING, // Indicates that at least one replica has reported that it has stopped consuming. HOLDING, // the segment has started finalizing. COMMITTER_DECIDED, // We know who the committer will be, we will let them know next time they call segmentConsumed() COMMITTER_NOTIFIED, // we notified the committer to commit. COMMITTER_UPLOADING, // committer is uploading. COMMITTING, // we are in the process of committing to zk COMMITTED, // We already committed a segment. ABORTED, // state machine is aborted. we will start a fresh one when the next segmentConsumed comes in. } private static SegmentCompletionManager _instance = null; private final HelixManager _helixManager; // A map that holds the FSM for each segment. private final Map<String, SegmentCompletionFSM> _fsmMap = new ConcurrentHashMap<>(); private final Map<String, Long> _commitTimeMap = new ConcurrentHashMap<>(); private final PinotLLCRealtimeSegmentManager _segmentManager; private final ControllerMetrics _controllerMetrics; private static final int MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS = 1800; // Half hour max commit time for all segments public static int getMaxCommitTimeForAllSegmentsSeconds() { return MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS; } // TODO keep some history of past committed segments so that we can avoid looking up PROPERTYSTORE if some server comes in late. 
protected SegmentCompletionManager(HelixManager helixManager, PinotLLCRealtimeSegmentManager segmentManager, ControllerMetrics controllerMetrics) { _helixManager = helixManager; _segmentManager = segmentManager; _controllerMetrics = controllerMetrics; } public boolean isSplitCommitEnabled() { return _segmentManager.getIsSplitCommitEnabled(); } public String getControllerVipUrl() { return _segmentManager.getControllerVipUrl(); } public static SegmentCompletionManager create(HelixManager helixManager, PinotLLCRealtimeSegmentManager segmentManager, ControllerConf controllerConf, ControllerMetrics controllerMetrics) { if (_instance != null) { throw new RuntimeException("Cannot create multiple instances"); } _instance = new SegmentCompletionManager(helixManager, segmentManager, controllerMetrics); SegmentCompletionProtocol.setMaxSegmentCommitTimeMs( TimeUnit.MILLISECONDS.convert(controllerConf.getSegmentCommitTimeoutSeconds(), TimeUnit.SECONDS)); return _instance; } public static SegmentCompletionManager getInstance() { if (_instance == null) { throw new RuntimeException("Not yet created"); } return _instance; } protected long getCurrentTimeMs() { return System.currentTimeMillis(); } // We need to make sure that we never create multiple FSMs for the same segment, so this method must be synchronized. private synchronized SegmentCompletionFSM lookupOrCreateFsm(final LLCSegmentName segmentName, String msgType) { final String segmentNameStr = segmentName.getSegmentName(); SegmentCompletionFSM fsm = _fsmMap.get(segmentNameStr); if (fsm == null) { // Look up propertystore to see if this is a completed segment ZNRecord segment; try { // TODO if we keep a list of last few committed segments, we don't need to go to zk for this. final String realtimeTableName = TableNameBuilder.REALTIME.tableNameWithType(segmentName.getTableName()); LLCRealtimeSegmentZKMetadata segmentMetadata = _segmentManager.getRealtimeSegmentZKMetadata(realtimeTableName, segmentName.getSegmentName(), null); if (segmentMetadata.getStatus().equals(CommonConstants.Segment.Realtime.Status.DONE)) { // Best to go through the state machine for this case as well, so that all code regarding state handling is in one place // Also good for synchronization, because it is possible that multiple threads take this path, and we don't want // multiple instances of the FSM to be created for the same commit sequence at the same time. final long endOffset = segmentMetadata.getEndOffset(); fsm = SegmentCompletionFSM.fsmInCommit(_segmentManager, this, segmentName, segmentMetadata.getNumReplicas(), endOffset); } else if (msgType.equals(SegmentCompletionProtocol.MSG_TYPE_STOPPED_CONSUMING)) { fsm = SegmentCompletionFSM.fsmStoppedConsuming(_segmentManager, this, segmentName, segmentMetadata.getNumReplicas()); } else { // Segment is in the process of completing, and this is the first one to respond. Create fsm fsm = SegmentCompletionFSM.fsmInHolding(_segmentManager, this, segmentName, segmentMetadata.getNumReplicas()); } LOGGER.info("Created FSM {}", fsm); _fsmMap.put(segmentNameStr, fsm); } catch (Exception e) { // Server gone wonky. Segment does not exist in propstore LOGGER.error("Exception creating FSM for segment {}", segmentNameStr, e); throw new RuntimeException("Exception creating FSM for segment " + segmentNameStr, e); } } return fsm; } /** * This method is to be called when a server calls in with the segmentConsumed() API, reporting an offset in kafka * that it currently has (i.e. next offset that it will consume, if it continues to consume). 
*/ public SegmentCompletionProtocol.Response segmentConsumed(SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final String stopReason = reqParams.getReason(); final long offset = reqParams.getOffset(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; SegmentCompletionFSM fsm = null; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_CONSUMED); response = fsm.segmentConsumed(instanceId, offset, stopReason); } catch (Exception e) { // Return failed response } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This method is to be called when a server calls in with the segmentCommit() API. The server sends in the segment * along with the API, but it is the caller's responsibility to save the segment after this call (and before the * segmentCommitEnd() call). * * If successful, this method will return Response.COMMIT_CONTINUE, in which case, the caller should save the incoming * segment and then call segmentCommitEnd(). * * Otherwise, this method will return a protocol response to be returned to the client right away (without saving the * incoming segment). */ public SegmentCompletionProtocol.Response segmentCommitStart(final SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final long offset = reqParams.getOffset(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_COMMIT); response = fsm.segmentCommitStart(instanceId, offset); } catch (Exception e) { // Return failed response } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } public SegmentCompletionProtocol.Response extendBuildTime(final SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final long offset = reqParams.getOffset(); final int extTimeSec = reqParams.getExtraTimeSec(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_COMMIT); response = fsm.extendBuildTime(instanceId, offset, extTimeSec); } catch (Exception e) { // Return failed response } if (fsm != null 
&& fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This method is to be called when a server reports that it has stopped consuming a real-time segment. * * @return */ public SegmentCompletionProtocol.Response segmentStoppedConsuming(SegmentCompletionProtocol.Request.Params reqParams) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); final String instanceId = reqParams.getInstanceId(); final long offset = reqParams.getOffset(); final String reason = reqParams.getReason(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_STOPPED_CONSUMING); response = fsm.stoppedConsuming(instanceId, offset, reason); } catch (Exception e) { // Return failed response } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This method is to be called when the segment sent in by the server has been saved locally in the correct path that * is downloadable by the servers. * * It returns a response code to be sent back to the client. * * If the repsonse code is not COMMIT_SUCCESS, then the caller may remove the segment that has been saved. * * @return */ public SegmentCompletionProtocol.Response segmentCommitEnd(SegmentCompletionProtocol.Request.Params reqParams, boolean success, boolean isSplitCommit) { if (!_helixManager.isLeader() || !_helixManager.isConnected()) { _controllerMetrics.addMeteredGlobalValue(ControllerMeter.CONTROLLER_NOT_LEADER, 1L); return SegmentCompletionProtocol.RESP_NOT_LEADER; } final String segmentNameStr = reqParams.getSegmentName(); LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr); SegmentCompletionFSM fsm = null; SegmentCompletionProtocol.Response response = SegmentCompletionProtocol.RESP_FAILED; try { fsm = lookupOrCreateFsm(segmentName, SegmentCompletionProtocol.MSG_TYPE_COMMIT); response = fsm.segmentCommitEnd(reqParams, success, isSplitCommit); } catch (Exception e) { // Return failed response } if (fsm != null && fsm.isDone()) { LOGGER.info("Removing FSM (if present):{}", fsm.toString()); _fsmMap.remove(segmentNameStr); } return response; } /** * This class implements the FSM on the controller side for each completing segment. * * An FSM is is created when we first hear about a segment (typically through the segmentConsumed message). * When an FSM is created, it may have one of two start states (HOLDING, or COMMITTED), depending on the * constructor used. * * We kick off an FSM in the COMMITTED state (rare) when we find that PROPERTYSTORE already has the segment * with the Status set to DONE. * * We kick off an FSM in the HOLDING state (typical) when a sementConsumed() message arrives from the * first server we hear from. * * The FSM does not have a timer. It is clocked by the servers, which, typically, are retransmitting their * segmentConsumed() message every so often (SegmentCompletionProtocol.MAX_HOLD_TIME_MS). 
* * See https://github.com/linkedin/pinot/wiki/Low-level-kafka-consumers */ private static class SegmentCompletionFSM { // We will have some variation between hosts, so we add 10% to the max hold time to pick a winner. // If there is more than 10% variation, then it is handled as an error case (i.e. the first few to // come in will have a winner, and the later ones will just download the segment) private static final long MAX_TIME_TO_PICK_WINNER_MS = SegmentCompletionProtocol.MAX_HOLD_TIME_MS + (SegmentCompletionProtocol.MAX_HOLD_TIME_MS / 10); // Once we pick a winner, the winner may get notified in the next call, so add one hold time plus some. // It may be that the winner is not the server that we are currently processing a segmentConsumed() // message from. In that case, we will wait for the next segmetnConsumed() message from the picked winner. // If the winner does not come back to us within that time, we abort the state machine and start over. private static final long MAX_TIME_TO_NOTIFY_WINNER_MS = MAX_TIME_TO_PICK_WINNER_MS + SegmentCompletionProtocol.MAX_HOLD_TIME_MS + (SegmentCompletionProtocol.MAX_HOLD_TIME_MS / 10); public final Logger LOGGER; State _state = State.HOLDING; // Typically start off in HOLDING state. final long _startTimeMs; private final LLCSegmentName _segmentName; private final int _numReplicas; private final Set<String> _excludedServerStateMap; private final Map<String, Long> _commitStateMap; private long _winningOffset = -1L; private String _winner; private final PinotLLCRealtimeSegmentManager _segmentManager; private final SegmentCompletionManager _segmentCompletionManager; private final long _maxTimeToPickWinnerMs; private final long _maxTimeToNotifyWinnerMs; private final long _initialCommitTimeMs; // Once the winner is notified, they are expected to commit right away. At this point, it is the segment build // time that we need to consider. // We may need to add some time here to allow for getting the lock? For now 0 // We may need to add some time for the committer come back to us (after the build)? For now 0. 
private long _maxTimeAllowedToCommitMs; private final boolean _isSplitCommitEnabled; private final String _controllerVipUrl; public static SegmentCompletionFSM fsmInHolding(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas) { return new SegmentCompletionFSM(segmentManager, segmentCompletionManager, segmentName, numReplicas); } public static SegmentCompletionFSM fsmInCommit(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas, long winningOffset) { return new SegmentCompletionFSM(segmentManager, segmentCompletionManager, segmentName, numReplicas, winningOffset); } public static SegmentCompletionFSM fsmStoppedConsuming(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas) { SegmentCompletionFSM fsm = new SegmentCompletionFSM(segmentManager, segmentCompletionManager, segmentName, numReplicas); fsm._state = State.PARTIAL_CONSUMING; return fsm; } // Ctor that starts the FSM in HOLDING state private SegmentCompletionFSM(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas) { _segmentName = segmentName; _numReplicas = numReplicas; _segmentManager = segmentManager; _commitStateMap = new HashMap<>(_numReplicas); _excludedServerStateMap = new HashSet<>(_numReplicas); _segmentCompletionManager = segmentCompletionManager; _startTimeMs = _segmentCompletionManager.getCurrentTimeMs(); _maxTimeToPickWinnerMs = _startTimeMs + MAX_TIME_TO_PICK_WINNER_MS; _maxTimeToNotifyWinnerMs = _startTimeMs + MAX_TIME_TO_NOTIFY_WINNER_MS; long initialCommitTimeMs = MAX_TIME_TO_NOTIFY_WINNER_MS + _segmentManager.getCommitTimeoutMS(_segmentName.getTableName()); Long savedCommitTime = _segmentCompletionManager._commitTimeMap.get(segmentName.getTableName()); if (savedCommitTime != null && savedCommitTime > initialCommitTimeMs) { initialCommitTimeMs = savedCommitTime; } LOGGER = LoggerFactory.getLogger("SegmentCompletionFSM_" + segmentName.getSegmentName()); if (initialCommitTimeMs > MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS * 1000) { // The table has a really high value configured for max commit time. Set it to a higher value than default // and go from there. LOGGER.info("Configured max commit time {}s too high for table {}, changing to {}s", initialCommitTimeMs/1000, segmentName.getTableName(), MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS); initialCommitTimeMs = MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS * 1000; } _initialCommitTimeMs = initialCommitTimeMs; _maxTimeAllowedToCommitMs = _startTimeMs + _initialCommitTimeMs; _isSplitCommitEnabled = segmentCompletionManager.isSplitCommitEnabled(); _controllerVipUrl = segmentCompletionManager.getControllerVipUrl(); } // Ctor that starts the FSM in COMMITTED state private SegmentCompletionFSM(PinotLLCRealtimeSegmentManager segmentManager, SegmentCompletionManager segmentCompletionManager, LLCSegmentName segmentName, int numReplicas, long winningOffset) { // Constructor used when we get an event after a segment is committed. 
this(segmentManager, segmentCompletionManager, segmentName, numReplicas); _state = State.COMMITTED; _winningOffset = winningOffset; _winner = "UNKNOWN"; } @Override public String toString() { return "{" + _segmentName.getSegmentName() + "," + _state + "," + _startTimeMs + "," + _winner + "," + _winningOffset + "," + _isSplitCommitEnabled + "," + _controllerVipUrl + "}"; } // SegmentCompletionManager releases the FSM from the hashtable when it is done. public boolean isDone() { return _state.equals(State.COMMITTED) || _state.equals(State.ABORTED); } /* * We just heard from a server that it has reached completion stage, and is reporting the offset * that the server is at. Since multiple servers can come in at the same time for this segment, * we need to synchronize on the FSM to handle the messages. The processing time itself is small, * so we should be OK with this synchronization. */ public SegmentCompletionProtocol.Response segmentConsumed(String instanceId, long offset, final String stopReason) { final long now = _segmentCompletionManager.getCurrentTimeMs(); // We can synchronize the entire block for the SegmentConsumed message. synchronized (this) { LOGGER.info("Processing segmentConsumed({}, {})", instanceId, offset); if (_excludedServerStateMap.contains(instanceId)) { // Could be that the server was restarted, and it started consuming again, and somehow got to complete // consumption up to this point. We will accept it. LOGGER.info("Marking instance {} alive again", instanceId); _excludedServerStateMap.remove(instanceId); } _commitStateMap.put(instanceId, offset); switch (_state) { case PARTIAL_CONSUMING: return PARTIAL_CONSUMING__consumed(instanceId, offset, now, stopReason); case HOLDING: return HOLDING__consumed(instanceId, offset, now, stopReason); case COMMITTER_DECIDED: // This must be a retransmit return COMMITTER_DECIDED__consumed(instanceId, offset, now); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__consumed(instanceId, offset, now); case COMMITTER_UPLOADING: return COMMITTER_UPLOADING__consumed(instanceId, offset, now); case COMMITTING: return COMMITTING__consumed(instanceId, offset, now); case COMMITTED: return COMMITTED__consumed(instanceId, offset); case ABORTED: // FSM has been aborted, just return HOLD return hold(instanceId, offset); default: return fail(instanceId, offset); } } } /* * A server has sent segmentConsumed() message. The caller will save the segment if we return * COMMIT_CONTINUE. We need to verify that it is the same server that we notified as the winner * and the offset is the same as what is coming in with the commit. We can then move to * COMMITTER_UPLOADING and wait for the segmentCommitEnd() call. * * In case of discrepancy we move the state machine to ABORTED state so that this FSM is removed * from the map, and things start over. In this case, we respond to the server with a 'hold' so * that they re-transmit their segmentConsumed() message and start over. 
*/ public SegmentCompletionProtocol.Response segmentCommitStart(String instanceId, long offset) { long now = _segmentCompletionManager.getCurrentTimeMs(); if (_excludedServerStateMap.contains(instanceId)) { LOGGER.warn("Not accepting commit from {} since it had stopped consuming", instanceId); return SegmentCompletionProtocol.RESP_FAILED; } synchronized (this) { LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); switch (_state) { case PARTIAL_CONSUMING: return PARTIAL_CONSUMING__commit(instanceId, offset, now); case HOLDING: return HOLDING__commit(instanceId, offset, now); case COMMITTER_DECIDED: return COMMITTER_DECIDED__commit(instanceId, offset, now); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__commit(instanceId, offset, now); case COMMITTER_UPLOADING: return COMMITTER_UPLOADING__commit(instanceId, offset, now); case COMMITTING: return COMMITTING__commit(instanceId, offset, now); case COMMITTED: return COMMITTED__commit(instanceId, offset); case ABORTED: return hold(instanceId, offset); default: return fail(instanceId, offset); } } } public SegmentCompletionProtocol.Response stoppedConsuming(String instanceId, long offset, String reason) { synchronized (this) { LOGGER.info("Processing stoppedConsuming({}, {})", instanceId, offset); _excludedServerStateMap.add(instanceId); switch (_state) { case PARTIAL_CONSUMING: return PARTIAL_CONSUMING__stoppedConsuming(instanceId, offset, reason); case HOLDING: return HOLDING_stoppedConsuming(instanceId, offset, reason); case COMMITTER_DECIDED: return COMMITTER_DECIDED__stoppedConsuming(instanceId, offset, reason); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__stoppedConsuming(instanceId, offset, reason); case COMMITTER_UPLOADING: return COMMITTER_UPLOADING__stoppedConsuming(instanceId, offset, reason); case COMMITTING: return COMMITTING__stoppedConsuming(instanceId, offset, reason); case COMMITTED: return COMMITTED__stoppedConsuming(instanceId, offset, reason); case ABORTED: LOGGER.info("Ignoring StoppedConsuming message from {} in state {}", instanceId, _state); return SegmentCompletionProtocol.RESP_PROCESSED; default: return fail(instanceId, offset); } } } public SegmentCompletionProtocol.Response extendBuildTime(final String instanceId, final long offset, final int extTimeSec) { final long now = _segmentCompletionManager.getCurrentTimeMs(); synchronized (this) { LOGGER.info("Processing extendBuildTime({}, {}, {})", instanceId, offset, extTimeSec); switch (_state) { case PARTIAL_CONSUMING: case HOLDING: case COMMITTER_DECIDED: return fail(instanceId, offset); case COMMITTER_NOTIFIED: return COMMITTER_NOTIFIED__extendBuildlTime(instanceId, offset, extTimeSec, now); case COMMITTER_UPLOADING: case COMMITTING: case COMMITTED: case ABORTED: default: return fail(instanceId, offset); } } } /* * We can get this call only when the state is COMMITTER_UPLOADING. Also, the instanceId should be equal to * the _winner. 
*/ public SegmentCompletionProtocol.Response segmentCommitEnd(SegmentCompletionProtocol.Request.Params reqParams, boolean success, boolean isSplitCommit) { String instanceId = reqParams.getInstanceId(); long offset = reqParams.getOffset(); synchronized (this) { if (_excludedServerStateMap.contains(instanceId)) { LOGGER.warn("Not accepting commitEnd from {} since it had stopped consuming", instanceId); return abortAndReturnFailed(); } LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); if (!_state.equals(State.COMMITTER_UPLOADING) || !instanceId.equals(_winner) || offset != _winningOffset) { // State changed while we were out of sync. Return a failed commit. LOGGER.warn("State change during upload: state={} segment={} winner={} winningOffset={}", _state, _segmentName.getSegmentName(), _winner, _winningOffset); return abortAndReturnFailed(); } if (!success) { LOGGER.error("Segment upload failed"); return abortAndReturnFailed(); } SegmentCompletionProtocol.Response response = commitSegment(reqParams, isSplitCommit); if (!response.equals(SegmentCompletionProtocol.RESP_COMMIT_SUCCESS)) { return abortAndReturnFailed(); } else { return response; } } } // Helper methods that log the current state and the response sent private SegmentCompletionProtocol.Response fail(String instanceId, long offset) { LOGGER.info("{}:FAIL for instance={} offset={}", _state, instanceId, offset); return SegmentCompletionProtocol.RESP_FAILED; } private SegmentCompletionProtocol.Response commit(String instanceId, long offset) { long allowedBuildTimeSec = (_maxTimeAllowedToCommitMs - _startTimeMs)/1000; LOGGER.info("{}:COMMIT for instance={} offset={} buildTimeSec={}", _state, instanceId, offset, allowedBuildTimeSec); SegmentCompletionProtocol.Response.Params params = new SegmentCompletionProtocol.Response.Params().withOffset(offset).withBuildTimeSeconds(allowedBuildTimeSec) .withStatus(SegmentCompletionProtocol.ControllerResponseStatus.COMMIT) .withSplitCommit(_isSplitCommitEnabled); if (_isSplitCommitEnabled) { params.withControllerVipUrl(_controllerVipUrl); } return new SegmentCompletionProtocol.Response(params); } private SegmentCompletionProtocol.Response discard(String instanceId, long offset) { LOGGER.warn("{}:DISCARD for instance={} offset={}", _state, instanceId, offset); return SegmentCompletionProtocol.RESP_DISCARD; } private SegmentCompletionProtocol.Response keep(String instanceId, long offset) { LOGGER.info("{}:KEEP for instance={} offset={}", _state, instanceId, offset); return new SegmentCompletionProtocol.Response(new SegmentCompletionProtocol.Response.Params().withOffset(offset).withStatus( SegmentCompletionProtocol.ControllerResponseStatus.KEEP)); } private SegmentCompletionProtocol.Response catchup(String instanceId, long offset) { LOGGER.info("{}:CATCHUP for instance={} offset={}", _state, instanceId, offset); return new SegmentCompletionProtocol.Response(new SegmentCompletionProtocol.Response.Params().withOffset( _winningOffset).withStatus(SegmentCompletionProtocol.ControllerResponseStatus.CATCH_UP)); } private SegmentCompletionProtocol.Response hold(String instanceId, long offset) { LOGGER.info("{}:HOLD for instance={} offset={}", _state, instanceId, offset); return new SegmentCompletionProtocol.Response( new SegmentCompletionProtocol.Response.Params().withStatus(SegmentCompletionProtocol.ControllerResponseStatus.HOLD).withOffset(offset)); } private SegmentCompletionProtocol.Response abortAndReturnHold(long now, String instanceId, long offset) { _state = State.ABORTED; 
_segmentCompletionManager._controllerMetrics.addMeteredTableValue(_segmentName.getTableName(), ControllerMeter.LLC_STATE_MACHINE_ABORTS, 1); return hold(instanceId, offset); } private SegmentCompletionProtocol.Response abortAndReturnFailed() { _state = State.ABORTED; _segmentCompletionManager._controllerMetrics.addMeteredTableValue(_segmentName.getTableName(), ControllerMeter.LLC_STATE_MACHINE_ABORTS, 1); return SegmentCompletionProtocol.RESP_FAILED; } private SegmentCompletionProtocol.Response abortIfTooLateAndReturnHold(long now, String instanceId, long offset) { if (now > _maxTimeAllowedToCommitMs) { LOGGER.warn("{}:Aborting FSM (too late) instance={} offset={} now={} start={}", _state, instanceId, offset, now, _startTimeMs); return abortAndReturnHold(now, instanceId, offset); } return null; } private int numReplicasToLookFor() { return _numReplicas - _excludedServerStateMap.size(); } private SegmentCompletionProtocol.Response PARTIAL_CONSUMING__consumed(String instanceId, long offset, long now, final String stopReason) { // This is the first time we are getting segmentConsumed() for this segment. // Some instance thinks we can close this segment, so go to HOLDING state, and process as normal. // We will just be looking for fewer replicas. _state = State.HOLDING; return HOLDING__consumed(instanceId, offset, now, stopReason); } /* * This is not a good state to get a commit message, but it is possible that the controller failed while in * COMMITTER_NOTIFIED state, and the first message we got in the new controller was a stoppedConsuming * message. As long as the committer is not the one who stopped consuming (which we have already checked before * coming here), we will trust the server that this is a valid commit. */ private SegmentCompletionProtocol.Response PARTIAL_CONSUMING__commit(String instanceId, long offset, long now) { // Do the same as HOLDING__commit return processCommitWhileHoldingOrPartialConsuming(instanceId, offset, now); } private SegmentCompletionProtocol.Response PARTIAL_CONSUMING__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, true); } /* * If we have waited "enough", or we have all replicas reported, then we can pick a winner. * * Otherwise, we ask the server that is reporting to come back again later until one of these conditions holds. * * If we can pick a winner then we go to COMMITTER_DECIDED or COMMITTER_NOTIFIED (if the instance * in this call is the same as winner). * * If we can go to COMMITTER_NOTIFIED then we respond with a COMMIT message, otherwise with a HOLD message. */ private SegmentCompletionProtocol.Response HOLDING__consumed(String instanceId, long offset, long now, final String stopReason) { SegmentCompletionProtocol.Response response; // If we are past the max time to pick a winner, or we have heard from all replicas, // we are ready to pick a winner. 
if (isWinnerPicked(instanceId, now, stopReason)) { if (_winner.equals(instanceId)) { LOGGER.info("{}:Committer notified winner instance={} offset={}", _state, instanceId, offset); response = commit(instanceId, offset); _state = State.COMMITTER_NOTIFIED; } else { LOGGER.info("{}:Committer decided winner={} offset={}", _state, _winner, _winningOffset); response = catchup(instanceId, offset); _state = State.COMMITTER_DECIDED; } } else { response = hold(instanceId, offset); } return response; } /* * This not a good state to receive a commit message, but then it may be that the controller * failed over while in the COMMITTER_NOTIFIED state... */ private SegmentCompletionProtocol.Response HOLDING__commit(String instanceId, long offset, long now) { return processCommitWhileHoldingOrPartialConsuming(instanceId, offset, now); } private SegmentCompletionProtocol.Response HOLDING_stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, true); } /* * We have already decided who the committer is, but have not let them know yet. If this is the committer that * we decided, then respond back with COMMIT. Otherwise, if the offset is smaller, respond back with a CATCHUP. * Otherwise, just have the server HOLD. Since the segment is not committed yet, we cannot ask them to KEEP or * DISCARD etc. If the committer fails for any reason, we will need a new committer. */ private SegmentCompletionProtocol.Response COMMITTER_DECIDED__consumed(String instanceId, long offset, long now) { if (offset > _winningOffset) { LOGGER.warn("{}:Aborting FSM (offset larger than winning) instance={} offset={} now={} winning={}", _state, instanceId, offset, now, _winningOffset); return abortAndReturnHold(now, instanceId, offset); } SegmentCompletionProtocol.Response response; if (_winner.equals(instanceId)) { if (_winningOffset == offset) { LOGGER.info("{}:Notifying winner instance={} offset={}", _state, instanceId, offset); response = commit(instanceId, offset); _state = State.COMMITTER_NOTIFIED; } else { // Winner coming back with a different offset. LOGGER.warn("{}:Winner coming back with different offset for instance={} offset={} prevWinnOffset={}", _state, instanceId, offset, _winningOffset); response = abortAndReturnHold(now, instanceId, offset); } } else if (offset == _winningOffset) { // Wait until winner has posted the segment. response = hold(instanceId, offset); } else { response = catchup(instanceId, offset); } if (now > _maxTimeToNotifyWinnerMs) { // Winner never got back to us. Abort the completion protocol and start afresh. // We can potentially optimize here to see if this instance has the highest so far, and re-elect them to // be winner, but for now, we will abort it and restart response = abortAndReturnHold(now, instanceId, offset); } return response; } /* * We have already decided who the committer is, but have not let them know yet. So, we don't expect * a commit() call here. */ private SegmentCompletionProtocol.Response COMMITTER_DECIDED__commit(String instanceId, long offset, long now) { return processCommitWhileHoldingOrPartialConsuming(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTER_DECIDED__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } /* * We have notified the committer. If we get a consumed message from another server, we can ask them to * catchup (if the offset is lower). 
If anything else, then we pretty much ask them to hold. */ private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__consumed(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response; // We have already picked a winner and notified them but we have not heard from them yet. // Common case here is that another server is coming back to us with its offset. We either respond back with HOLD or CATCHUP. // If the winner is coming back again, then we have some more conditions to look at. response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } if (instanceId.equals(_winner)) { // Winner is coming back to after holding. Somehow they never heard us return COMMIT. // Allow them to be winner again, since we are still within time to pick a winner. if (offset == _winningOffset) { response = commit(instanceId, offset); } else { // Something seriously wrong. Abort the FSM response = discard(instanceId, offset); LOGGER.warn("{}:Aborting for instance={} offset={}", _state, instanceId, offset); _state = State.ABORTED; } } else { // Common case: A different instance is reporting. if (offset == _winningOffset) { // Wait until winner has posted the segment before asking this server to KEEP the segment. response = hold(instanceId, offset); } else if (offset < _winningOffset) { response = catchup(instanceId, offset); } else { // We have not yet committed, so ask the new responder to hold. They may be the new leader in case the // committer fails. response = hold(instanceId, offset); } } return response; } /* * We have notified the committer. If we get a consumed message from another server, we can ask them to * catchup (if the offset is lower). If anything else, then we pretty much ask them to hold. */ private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__commit(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response = null; response = checkBadCommitRequest(instanceId, offset, now); if (response != null) { return response; } LOGGER.info("{}:Uploading for instance={} offset={}", _state, instanceId, offset); _state = State.COMMITTER_UPLOADING; long commitTimeMs = now - _startTimeMs; if (commitTimeMs > _initialCommitTimeMs) { // We assume that the commit time holds for all partitions. It is possible, though, that one partition // commits at a lower time than another partition, and the two partitions are going simultaneously, // and we may not get the maximum value all the time. 
_segmentCompletionManager._commitTimeMap.put(_segmentName.getTableName(), commitTimeMs); } return SegmentCompletionProtocol.RESP_COMMIT_CONTINUE; } private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response COMMITTER_NOTIFIED__extendBuildlTime(String instanceId, long offset, int extTimeSec, long now) { SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response == null) { long maxTimeAllowedToCommitMs = now + extTimeSec * 1000; if (maxTimeAllowedToCommitMs > _startTimeMs + MAX_COMMIT_TIME_FOR_ALL_SEGMENTS_SECONDS * 1000) { LOGGER.warn("Not accepting lease extension from {} startTime={} requestedTime={}", instanceId, _startTimeMs, maxTimeAllowedToCommitMs); return abortAndReturnFailed(); } _maxTimeAllowedToCommitMs = maxTimeAllowedToCommitMs; response = SegmentCompletionProtocol.RESP_PROCESSED; } return response; } private SegmentCompletionProtocol.Response COMMITTER_UPLOADING__consumed(String instanceId, long offset, long now) { return processConsumedAfterCommitStart(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTER_UPLOADING__commit(String instanceId, long offset, long now) { return processCommitWhileUploading(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTER_UPLOADING__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response COMMITTING__consumed(String instanceId, long offset, long now) { return processConsumedAfterCommitStart(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTING__commit(String instanceId, long offset, long now) { return processCommitWhileUploading(instanceId, offset, now); } private SegmentCompletionProtocol.Response COMMITTING__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response COMMITTED__consumed(String instanceId, long offset) { SegmentCompletionProtocol.Response response;// Server reporting an offset on an already completed segment. Depending on the offset, either KEEP or DISCARD. if (offset == _winningOffset) { response = keep(instanceId, offset); } else { // Return DISCARD. It is hard to say how long the server will take to complete things. response = discard(instanceId, offset); } return response; } private SegmentCompletionProtocol.Response COMMITTED__commit(String instanceId, long offset) { if (offset == _winningOffset) { return keep(instanceId, offset); } return discard(instanceId, offset); } private SegmentCompletionProtocol.Response COMMITTED__stoppedConsuming(String instanceId, long offset, String reason) { return processStoppedConsuming(instanceId, offset, reason, false); } private SegmentCompletionProtocol.Response processStoppedConsuming(String instanceId, long offset, String reason, boolean createNew) { LOGGER.info("Instance {} stopped consuming segment {} at offset {}, state {}, createNew: {}, reason:{}", instanceId, _segmentName, offset, _state, createNew, reason); _segmentManager.segmentStoppedConsuming(_segmentName, instanceId); return SegmentCompletionProtocol.RESP_PROCESSED; } // A common method when the state is > COMMITTER_NOTIFIED. 
private SegmentCompletionProtocol.Response processConsumedAfterCommitStart(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response; // We have already picked a winner, and may or may not have heard from them. // Common case here is that another server is coming back to us with its offset. We either respond back with HOLD or CATCHUP. // It may be that we never heard from the committer, or the committer is taking too long to commit the segment. // In that case, we abort the FSM and start afresh (i.e., return HOLD). // If the winner is coming back again, then we have some more conditions to look at. response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } if (instanceId.equals(_winner)) { // The winner is coming back to report its offset. Take a decision based on the offset reported, and whether we // already notified them // Winner is supposedly already in the commit call. Something wrong. LOGGER.warn("{}:Aborting FSM because winner is reporting a segment while it is also committing instance={} offset={} now={}", _state, instanceId, offset, now); // Ask them to hold, just in case the committer fails for some reason. return abortAndReturnHold(now, instanceId, offset); } else { // Common case: A different instance is reporting. if (offset == _winningOffset) { // Wait until winner has posted the segment before asking this server to KEEP the segment. response = hold(instanceId, offset); } else if (offset < _winningOffset) { response = catchup(instanceId, offset); } else { // We have not yet committed, so ask the new responder to hold. They may be the new leader in case the // committer fails. response = hold(instanceId, offset); } } return response; } private SegmentCompletionProtocol.Response commitSegment(SegmentCompletionProtocol.Request.Params reqParams, boolean isSplitCommit) { boolean success; String instanceId = reqParams.getInstanceId(); long offset = reqParams.getOffset(); if (!_state.equals(State.COMMITTER_UPLOADING)) { // State changed while we were out of sync. Return a failed commit. LOGGER.warn("State change during upload: state={} segment={} winner={} winningOffset={}", _state, _segmentName.getSegmentName(), _winner, _winningOffset); return SegmentCompletionProtocol.RESP_FAILED; } LOGGER.info("Committing segment {} at offset {} winner {}", _segmentName.getSegmentName(), offset, instanceId); _state = State.COMMITTING; // In case of splitCommit, the segment is uploaded to a unique file name indicated by segmentLocation, // so we need to move the segment file to its permanent location first before committing the metadata. 
CommittingSegmentDescriptor committingSegmentDescriptor = CommittingSegmentDescriptor.fromSegmentCompletionReqParams(reqParams); if (isSplitCommit) { if (!_segmentManager.commitSegmentFile(_segmentName.getTableName(), committingSegmentDescriptor)) { return SegmentCompletionProtocol.RESP_FAILED; } } success = _segmentManager.commitSegmentMetadata(_segmentName.getTableName(), committingSegmentDescriptor); if (success) { _state = State.COMMITTED; LOGGER.info("Committed segment {} at offset {} winner {}", _segmentName.getSegmentName(), offset, instanceId); return SegmentCompletionProtocol.RESP_COMMIT_SUCCESS; } return SegmentCompletionProtocol.RESP_FAILED; } private SegmentCompletionProtocol.Response processCommitWhileUploading(String instanceId, long offset, long now) { LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } // Another committer (or same) came in while one was uploading. Ask them to hold in case this one fails. return new SegmentCompletionProtocol.Response(new SegmentCompletionProtocol.Response.Params().withOffset(offset).withStatus( SegmentCompletionProtocol.ControllerResponseStatus.HOLD)); } private SegmentCompletionProtocol.Response checkBadCommitRequest(String instanceId, long offset, long now) { SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } else if (instanceId.equals(_winner) && offset != _winningOffset) { // Hmm. Committer has been notified, but either a different one is committing, or offset is different LOGGER.warn("{}:Aborting FSM (bad commit req) instance={} offset={} now={} winning={}", _state, instanceId, offset, now, _winningOffset); return abortAndReturnHold(now, instanceId, offset); } return null; } private SegmentCompletionProtocol.Response processCommitWhileHoldingOrPartialConsuming(String instanceId, long offset, long now) { LOGGER.info("Processing segmentCommit({}, {})", instanceId, offset); SegmentCompletionProtocol.Response response = abortIfTooLateAndReturnHold(now, instanceId, offset); if (response != null) { return response; } // We cannot get a commit if we are in this state, so ask them to hold. Maybe we are starting after a failover. // The server will re-send the segmentConsumed message. return hold(instanceId, offset); } /** * Pick a winner if we can, preferring the instance that we are handling right now, * * We accept the first server to report an offset as long as the server stopped consumption * due to row limit. The premise is that other servers will also stop at row limit, and there * is no need to wait for them to report an offset in order to decide on a winner. The state machine takes care * of the cases where other servers may report different offsets (just in case). * * If the above condition is not satisfied (i.e. either this is not the first server, or it did not reach * row limit), then we can pick a winner only if it is too late to pick a winner, or we have heard from all * servers. * * Otherwise, we wait to hear from more servers. * * @param preferredInstance The instance that is reporting in this thread. * @param now current time * @param stopReason reason reported by instance for stopping consumption. * @return true if winner picked, false otherwise. 
*/ private boolean isWinnerPicked(String preferredInstance, long now, final String stopReason) { if (SegmentCompletionProtocol.REASON_ROW_LIMIT.equals(stopReason) && _commitStateMap.size() == 1) { _winner = preferredInstance; _winningOffset = _commitStateMap.get(preferredInstance); return true; } else if (now > _maxTimeToPickWinnerMs || _commitStateMap.size() == numReplicasToLookFor()) { LOGGER.info("{}:Picking winner time={} size={}", _state, now- _startTimeMs, _commitStateMap.size()); long maxOffsetSoFar = -1; String winnerSoFar = null; for (Map.Entry<String, Long> entry : _commitStateMap.entrySet()) { if (entry.getValue() > maxOffsetSoFar) { maxOffsetSoFar = entry.getValue(); winnerSoFar = entry.getKey(); } } _winningOffset = maxOffsetSoFar; if (_commitStateMap.get(preferredInstance) == maxOffsetSoFar) { winnerSoFar = preferredInstance; } _winner = winnerSoFar; return true; } return false; } } }
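The winner-selection rule that isWinnerPicked() applies above (take the highest reported offset, and prefer the instance that is reporting right now when it ties with that maximum) can be read in isolation. What follows is a minimal, self-contained sketch of that rule only; the class and method names (WinnerPickSketch, pickWinner) are illustrative and are not part of the Pinot source in this record.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only; not part of the source above.
public class WinnerPickSketch {

  // Pick the instance with the highest reported offset. If the instance that is
  // reporting right now ties with that maximum, prefer it, so that its pending
  // request can be answered with COMMIT instead of a later CATCHUP/HOLD.
  static String pickWinner(Map<String, Long> reportedOffsets, String preferredInstance) {
    long maxOffset = -1L;
    String winner = null;
    for (Map.Entry<String, Long> entry : reportedOffsets.entrySet()) {
      if (entry.getValue() > maxOffset) {
        maxOffset = entry.getValue();
        winner = entry.getKey();
      }
    }
    Long preferredOffset = reportedOffsets.get(preferredInstance);
    if (preferredOffset != null && preferredOffset == maxOffset) {
      winner = preferredInstance;
    }
    return winner;
  }

  public static void main(String[] args) {
    Map<String, Long> offsets = new HashMap<>();
    offsets.put("server_1", 1500L);
    offsets.put("server_2", 1520L);
    offsets.put("server_3", 1520L);
    // server_3 ties with the maximum offset and is the instance reporting, so it wins.
    System.out.println(pickWinner(offsets, "server_3"));
  }
}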
Log exception on controller for realtime segment commit operations (#3226)
pinot-controller/src/main/java/com/linkedin/pinot/controller/helix/core/realtime/SegmentCompletionManager.java
Log exception on controller for realtime segment commit operations (#3226)
Java
apache-2.0
a40fb8d573d2caa779422d60acd2fec041ad6580
0
mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.usergrid.persistence.index.impl; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.usergrid.persistence.core.guice.MigrationManagerRule; import org.apache.usergrid.persistence.core.metrics.MetricsFactory; import org.apache.usergrid.persistence.core.scope.ApplicationScope; import org.apache.usergrid.persistence.core.scope.ApplicationScopeImpl; import org.apache.usergrid.persistence.core.test.UseModules; import org.apache.usergrid.persistence.index.ApplicationEntityIndex; import org.apache.usergrid.persistence.index.EntityIndex; import org.apache.usergrid.persistence.index.EntityIndexFactory; import org.apache.usergrid.persistence.index.IndexScope; import org.apache.usergrid.persistence.index.SearchTypes; import org.apache.usergrid.persistence.index.guice.IndexTestFig; import org.apache.usergrid.persistence.index.guice.TestIndexModule; import org.apache.usergrid.persistence.index.query.CandidateResults; import org.apache.usergrid.persistence.index.query.Query; import org.apache.usergrid.persistence.model.entity.Entity; import org.apache.usergrid.persistence.model.entity.Id; import org.apache.usergrid.persistence.model.entity.SimpleId; import org.apache.usergrid.persistence.model.field.ArrayField; import org.apache.usergrid.persistence.model.field.BooleanField; import org.apache.usergrid.persistence.model.field.DoubleField; import org.apache.usergrid.persistence.model.field.IntegerField; import org.apache.usergrid.persistence.model.field.StringField; import org.apache.usergrid.persistence.model.util.EntityUtils; import org.apache.usergrid.persistence.model.util.UUIDGenerator; import com.codahale.metrics.Meter; import com.codahale.metrics.Slf4jReporter; import com.codahale.metrics.Timer; import com.google.inject.Inject; import rx.Observable; import rx.schedulers.Schedulers; import static org.junit.Assert.assertEquals; /** * This is configuration via the properties in the IndexTestFig object. Most of these values you won't need to touch. * To run this against a live cluster. You execute this maven command. * * <command> mvn test -Dtest=IndexLoadTestsIT#testHeavyLoadValidate -Dstresstest.numWorkers=16 * -Dstresstest.numberOfRecords=10000 </command> * * This will insert 10000 records for each worker thread. There will be 16 worker threads. Validation will occur after * the wait timeout (stresstest.validate.wait) of 2 seconds. Up to 40 concurrent queries (stresstest.readThreads) will * be executed to validate each result. 
* * By default this test is excluded from surefire, and will need to be run manually */ @RunWith( EsRunner.class ) @UseModules( { TestIndexModule.class } ) public class IndexLoadTestsIT extends BaseIT { private static final Logger log = LoggerFactory.getLogger( IndexLoadTestsIT.class ); public static final String FIELD_WORKER_INDEX = "workerIndex"; private static final String FIELD_ORDINAL = "ordinal"; private static final String FIELD_UNIQUE_IDENTIFIER = "uniqueIdentifier"; @Inject @Rule public MigrationManagerRule migrationManagerRule; @ClassRule public static ElasticSearchResource es = new ElasticSearchResource(); @Inject public IndexTestFig indexTestFig; @Inject public EntityIndexFactory entityIndexFactory; @Inject public EntityIndex entityIndex; @Inject public MetricsFactory metricsFactory; private Meter batchWriteTPS; private Timer batchWriteTimer; private Meter queryTps; private Timer queryTimer; private Slf4jReporter reporter; @Before public void setupIndexAndMeters() { entityIndex.initializeIndex(); batchWriteTPS = metricsFactory.getMeter( IndexLoadTestsIT.class, "write.tps" ); batchWriteTimer = metricsFactory.getTimer( IndexLoadTestsIT.class, "write.timer" ); queryTps = metricsFactory.getMeter( IndexLoadTestsIT.class, "query.tps" ); queryTimer = metricsFactory.getTimer( IndexLoadTestsIT.class, "query.timer" ); reporter = Slf4jReporter.forRegistry( metricsFactory.getRegistry() ).outputTo( log ).convertRatesTo( TimeUnit.SECONDS ) .convertDurationsTo( TimeUnit.MILLISECONDS ).build(); reporter.start( 30, TimeUnit.SECONDS ); } @After public void printMetricsBeforeShutdown() { //stop the log reporter and print the last report reporter.stop(); reporter.report(); } /** * Perform the following 1, spin up the specified number of workers For each worker, insert the specified number of * elements * * Wait the wait time after buffer execution before beginning validate * * Validate every entity inserted is returned by a search. 
*/ @Test public void testHeavyLoadValidate() { final String userAppId = indexTestFig.getApplicationId(); //if it's unset, generate one final String uniqueIdentifier = UUIDGenerator.newTimeUUID().toString(); //use the appId supplied, or generate one final UUID applicationUUID = UUID.fromString( userAppId ); final Id applicationId = new SimpleId( applicationUUID, "application" ); final ApplicationScope scope = new ApplicationScopeImpl( applicationId ); final IndexScope indexScope = new IndexScopeImpl( applicationId, "test" ); final ApplicationEntityIndex appEntityIndex = entityIndexFactory.createApplicationEntityIndex( scope ); //create our index if it doesn't exist //delay our verification for indexing to happen final Observable<DataLoadResult> dataLoadResults = createStreamFromWorkers( appEntityIndex, indexScope, uniqueIdentifier ).buffer( indexTestFig.getBufferSize() ) //perform a delay to let ES index from our batches .delay( indexTestFig.getValidateWait(), TimeUnit.MILLISECONDS ) //do our search in parallel, otherwise this test will take far too long .flatMap( entitiesToValidate -> { return Observable.from( entitiesToValidate ).map( entityObservable -> { final int workerIndex = ( int ) entityObservable.getField( FIELD_WORKER_INDEX ).getValue(); final int ordinal = ( int ) entityObservable.getField( FIELD_ORDINAL ).getValue(); final Timer.Context queryTimerContext = queryTimer.time(); //execute our search final CandidateResults results = appEntityIndex .search( indexScope, SearchTypes.fromTypes( indexScope.getName() ), Query.fromQLNullSafe( "select * where " + FIELD_WORKER_INDEX + " = " + workerIndex + " AND " + FIELD_ORDINAL + " = " + ordinal + " AND " + FIELD_UNIQUE_IDENTIFIER + " = '" + uniqueIdentifier + "'" ) ); queryTps.mark(); queryTimerContext.stop(); boolean found; if ( !results.isEmpty() && results.get( 0 ).getId().equals( entityObservable.getId() ) ) { found = true; } else { found = false; } return new EntitySearchResult( entityObservable, found ); } ).subscribeOn( Schedulers.io() ); }, indexTestFig.getConcurrentReadThreads() ) //collect all the results into a single data load result .collect( () -> new DataLoadResult(), ( dataloadResult, entitySearchResult ) -> { if ( entitySearchResult.found ) { dataloadResult.success(); return; } final int ordinal = ( int ) entitySearchResult.searched.getField( FIELD_ORDINAL ).getValue(); final int worker = ( int ) entitySearchResult.searched.getField( FIELD_WORKER_INDEX ).getValue(); dataloadResult.failed(); log.error( "Could not find entity with worker {}, ordinal {}, and Id {} after waiting {} milliseconds", worker, ordinal, entitySearchResult.searched.getId(), indexTestFig.getValidateWait() ); } ); //wait for processing to finish final DataLoadResult result = dataLoadResults.toBlocking().last(); final long expectedCount = indexTestFig.getNumberOfRecords() * indexTestFig.getNumberOfWorkers(); assertEquals( "Excepted to have no failures", 0, result.getFailCount() ); assertEquals( "Excepted to find all records", expectedCount, result.getSuccessCount() ); } public Observable<Entity> createStreamFromWorkers( final ApplicationEntityIndex entityIndex, final IndexScope indexScope, final String uniqueIdentifier ) { //create a sequence of observables. 
Each index will be it's own worker thread using the Schedulers.newthread() return Observable.range( 0, indexTestFig.getNumberOfWorkers() ).flatMap( integer -> createWriteObservable( entityIndex, indexScope, uniqueIdentifier, integer ) .subscribeOn( Schedulers.newThread() ) ); } private Observable<Entity> createWriteObservable( final ApplicationEntityIndex entityIndex, final IndexScope indexScope, final String uniqueIdentifier, final int workerIndex ) { return Observable.range( 0, indexTestFig.getNumberOfRecords() ) //create our entity .map( integer -> { final Entity entity = new Entity( indexScope.getName() ); entity.setField( new IntegerField( FIELD_WORKER_INDEX, workerIndex ) ); entity.setField( new IntegerField( FIELD_ORDINAL, integer ) ); entity.setField( new StringField( FIELD_UNIQUE_IDENTIFIER, uniqueIdentifier ) ); EntityUtils.setVersion( entity, UUIDGenerator.newTimeUUID() ); //add some fields for indexing entity.setField( new StringField( "emtpyField", "" ) ); entity.setField( new StringField( "singleCharField1", "L" ) ); entity.setField( new StringField( "longStringField", "000000000000001051" ) ); entity.setField( new StringField( "singleCharField2", "0" ) ); entity.setField( new StringField( "singleCharField3", "0" ) ); entity.setField( new StringField( "singleCharField4", "0" ) ); entity.setField( new StringField( "dept", "VALUE" ) ); entity.setField( new StringField( "description", "I'm a longer description" ) ); ArrayField<Long> array = new ArrayField<>("longs"); array.add( 9315321008910l ); array.add( 9315321009016l ); array.add( 9315321009115l ); array.add( 9315321009313l ); array.add( 9315321009320l ); array.add( 9315321984955l ); entity.setField( array ); entity.setField( new StringField( "singleCharField5", "N" ) ); entity.setField( new BooleanField( "booleanField1", true ) ); entity.setField( new BooleanField( "booleanField2", false ) ); entity.setField( new StringField( "singleCharField5", "N" ) ); entity.setField( new StringField( "singleCharField6", "N" ) ); entity.setField( new StringField( "stringField", "ALL CAPS)); I MEAN IT" ) ); entity.setField( new DoubleField( "doubleField1", 750.0 ) ); entity.setField( new StringField( "charField", "AB" ) ); entity.setField( new StringField( "name", "000000000000001051-1004" ) ); return entity; } ) //buffer up a batch size .buffer( indexTestFig.getBufferSize() ).doOnNext( entities -> { //take our entities and roll them into a batch Observable.from( entities ).collect( () -> entityIndex.createBatch(), ( entityIndexBatch, entity ) -> { entityIndexBatch.index( indexScope, entity ); } ).doOnNext( entityIndexBatch -> { log.info( "Indexing next {} in batch", entityIndexBatch.size() ); //gather the metrics final Timer.Context time = batchWriteTimer.time(); batchWriteTPS.mark(); //execute entityIndexBatch.execute(); //stop time.close(); } ).toBlocking().last(); } ) //translate back into a stream of entities for the caller to use .flatMap( entities -> Observable.from( entities ) ); } /** * Class for entity search results */ private static class EntitySearchResult { public final Entity searched; public final boolean found; private EntitySearchResult( final Entity searched, final boolean found ) { this.searched = searched; this.found = found; } } /** * Class for collecting results */ private static final class DataLoadResult { private final AtomicLong successCount = new AtomicLong( 0 ); private final AtomicLong failCount = new AtomicLong( 0 ); public void success() { successCount.addAndGet( 1 ); } public long getSuccessCount() { 
return successCount.get(); } public void failed() { failCount.addAndGet( 1 ); } public long getFailCount() { return failCount.get(); } } }
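The validation pipeline in testHeavyLoadValidate() above follows a buffer-then-validate shape: emit records, buffer them into batches, delay so the index can catch up, validate each record, and fold the outcomes into counters. Below is a minimal, self-contained sketch of that shape using RxJava 1.x; the names (BufferThenValidateSketch, Result) are hypothetical, and the validation step is simulated rather than querying Elasticsearch.

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import rx.Observable;

// Hypothetical sketch only; not part of the test source above.
public class BufferThenValidateSketch {

  static final class Result {
    final AtomicLong success = new AtomicLong();
    final AtomicLong failure = new AtomicLong();
  }

  public static void main(String[] args) {
    final int records = 100;
    final int batchSize = 25;

    Result result = Observable.range(0, records)
        // batch the writes, as the test does with its configured buffer size
        .buffer(batchSize)
        // give the (simulated) index time to absorb the batch before validating
        .delay(100, TimeUnit.MILLISECONDS)
        // validate each buffered record; here every record is trivially "found"
        .flatMap(batch -> Observable.from(batch).map(ordinal -> ordinal >= 0))
        // fold the per-record outcomes into a single result object
        .collect(Result::new, (acc, found) -> {
          if (found) {
            acc.success.incrementAndGet();
          } else {
            acc.failure.incrementAndGet();
          }
        })
        .toBlocking().last();

    System.out.println("success=" + result.success.get() + " failure=" + result.failure.get());
  }
}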
stack/corepersistence/queryindex/src/test/java/org/apache/usergrid/persistence/index/impl/IndexLoadTestsIT.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.usergrid.persistence.index.impl; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.usergrid.persistence.core.guice.MigrationManagerRule; import org.apache.usergrid.persistence.core.metrics.MetricsFactory; import org.apache.usergrid.persistence.core.scope.ApplicationScope; import org.apache.usergrid.persistence.core.scope.ApplicationScopeImpl; import org.apache.usergrid.persistence.core.test.UseModules; import org.apache.usergrid.persistence.index.EntityIndex; import org.apache.usergrid.persistence.index.EntityIndexFactory; import org.apache.usergrid.persistence.index.IndexScope; import org.apache.usergrid.persistence.index.SearchTypes; import org.apache.usergrid.persistence.index.guice.IndexTestFig; import org.apache.usergrid.persistence.index.guice.TestIndexModule; import org.apache.usergrid.persistence.index.query.CandidateResults; import org.apache.usergrid.persistence.index.query.Query; import org.apache.usergrid.persistence.model.entity.Entity; import org.apache.usergrid.persistence.model.entity.Id; import org.apache.usergrid.persistence.model.entity.SimpleId; import org.apache.usergrid.persistence.model.field.ArrayField; import org.apache.usergrid.persistence.model.field.BooleanField; import org.apache.usergrid.persistence.model.field.DoubleField; import org.apache.usergrid.persistence.model.field.IntegerField; import org.apache.usergrid.persistence.model.field.StringField; import org.apache.usergrid.persistence.model.util.EntityUtils; import org.apache.usergrid.persistence.model.util.UUIDGenerator; import com.codahale.metrics.Meter; import com.codahale.metrics.Slf4jReporter; import com.codahale.metrics.Timer; import com.google.inject.Inject; import rx.Observable; import rx.schedulers.Schedulers; import static org.junit.Assert.assertEquals; /** * This is configuration via the properties in the IndexTestFig object. Most of these values you won't need to touch. * To run this against a live cluster. You execute this maven command. * * <command> mvn test -Dtest=IndexLoadTestsIT#testHeavyLoadValidate -Dstresstest.numWorkers=16 * -Dstresstest.numberOfRecords=10000 </command> * * This will insert 10000 records for each worker thread. There will be 16 worker threads. Validation will occur after * the wait timeout (stresstest.validate.wait) of 2 seconds. Up to 40 concurrent queries (stresstest.readThreads) will * be executed to validate each result. 
* * By default this test is excluded from surefire, and will need to be run manually */ @RunWith( EsRunner.class ) @UseModules( { TestIndexModule.class } ) public class IndexLoadTestsIT extends BaseIT { private static final Logger log = LoggerFactory.getLogger( IndexLoadTestsIT.class ); public static final String FIELD_WORKER_INDEX = "workerIndex"; private static final String FIELD_ORDINAL = "ordinal"; private static final String FIELD_UNIQUE_IDENTIFIER = "uniqueIdentifier"; @Inject @Rule public MigrationManagerRule migrationManagerRule; @ClassRule public static ElasticSearchResource es = new ElasticSearchResource(); @Inject public IndexTestFig indexTestFig; @Inject public EntityIndexFactory entityIndexFactory; @Inject public MetricsFactory metricsFactory; private Meter batchWriteTPS; private Timer batchWriteTimer; private Meter queryTps; private Timer queryTimer; private Slf4jReporter reporter; @Before public void setupMeters() { batchWriteTPS = metricsFactory.getMeter( IndexLoadTestsIT.class, "write.tps" ); batchWriteTimer = metricsFactory.getTimer( IndexLoadTestsIT.class, "write.timer" ); queryTps = metricsFactory.getMeter( IndexLoadTestsIT.class, "query.tps" ); queryTimer = metricsFactory.getTimer( IndexLoadTestsIT.class, "query.timer" ); reporter = Slf4jReporter.forRegistry( metricsFactory.getRegistry() ).outputTo( log ).convertRatesTo( TimeUnit.SECONDS ) .convertDurationsTo( TimeUnit.MILLISECONDS ).build(); reporter.start( 30, TimeUnit.SECONDS ); } @After public void printMetricsBeforeShutdown() { //stop the log reporter and print the last report reporter.stop(); reporter.report(); } /** * Perform the following 1, spin up the specified number of workers For each worker, insert the specified number of * elements * * Wait the wait time after buffer execution before beginning validate * * Validate every entity inserted is returned by a search. 
*/ @Test public void testHeavyLoadValidate() { final String userAppId = indexTestFig.getApplicationId(); //if it's unset, generate one final String uniqueIdentifier = UUIDGenerator.newTimeUUID().toString(); //use the appId supplied, or generate one final UUID applicationUUID = UUID.fromString( userAppId ); final Id applicationId = new SimpleId( applicationUUID, "application" ); final ApplicationScope scope = new ApplicationScopeImpl( applicationId ); final IndexScope indexScope = new IndexScopeImpl( applicationId, "test" ); //create our index if it doesn't exist index.initializeIndex(); final Observable<Entity> createEntities = createStreamFromWorkers( index, applicationId ); //delay our verification for indexing to happen final Observable<DataLoadResult> dataLoadResults = createStreamFromWorkers( index, indexScope, uniqueIdentifier ).buffer( indexTestFig.getBufferSize() ) //perform a delay to let ES index from our batches .delay( indexTestFig.getValidateWait(), TimeUnit.MILLISECONDS ) //do our search in parallel, otherwise this test will take far too long .flatMap( entitiesToValidate -> { return Observable.from( entitiesToValidate ).map( entityObservable -> { final int workerIndex = ( int ) entityObservable.getField( FIELD_WORKER_INDEX ).getValue(); final int ordinal = ( int ) entityObservable.getField( FIELD_ORDINAL ).getValue(); final Timer.Context queryTimerContext = queryTimer.time(); //execute our search final CandidateResults results = index .search( indexScope, SearchTypes.fromTypes( indexScope.getName() ), Query.fromQLNullSafe( "select * where " + FIELD_WORKER_INDEX + " = " + workerIndex + " AND " + FIELD_ORDINAL + " = " + ordinal + " AND " + FIELD_UNIQUE_IDENTIFIER + " = '" + uniqueIdentifier + "'" ) ); queryTps.mark(); queryTimerContext.stop(); boolean found; if ( !results.isEmpty() && results.get( 0 ).getId().equals( entityObservable.getId() ) ) { found = true; } else { found = false; } return new EntitySearchResult( entityObservable, found ); } ).subscribeOn( Schedulers.io() ); }, indexTestFig.getConcurrentReadThreads() ) //collect all the results into a single data load result .collect( () -> new DataLoadResult(), ( dataloadResult, entitySearchResult ) -> { if ( entitySearchResult.found ) { dataloadResult.success(); return; } final int ordinal = ( int ) entitySearchResult.searched.getField( FIELD_ORDINAL ).getValue(); final int worker = ( int ) entitySearchResult.searched.getField( FIELD_WORKER_INDEX ).getValue(); dataloadResult.failed(); log.error( "Could not find entity with worker {}, ordinal {}, and Id {} after waiting {} milliseconds", worker, ordinal, entitySearchResult.searched.getId(), indexTestFig.getValidateWait() ); } ); //wait for processing to finish final DataLoadResult result = dataLoadResults.toBlocking().last(); final long expectedCount = indexTestFig.getNumberOfRecords() * indexTestFig.getNumberOfWorkers(); assertEquals( "Excepted to have no failures", 0, result.getFailCount() ); assertEquals( "Excepted to find all records", expectedCount, result.getSuccessCount() ); } public Observable<Entity> createStreamFromWorkers( final EntityIndex entityIndex, final IndexScope indexScope, final String uniqueIdentifier ) { //create a sequence of observables. 
Each index will be it's own worker thread using the Schedulers.newthread() return Observable.range( 0, indexTestFig.getNumberOfWorkers() ).flatMap( integer -> createWriteObservable( entityIndex, indexScope, uniqueIdentifier, integer ) .subscribeOn( Schedulers.newThread() ) ); } private Observable<Entity> createWriteObservable( final EntityIndex entityIndex, final IndexScope indexScope, final String uniqueIdentifier, final int workerIndex ) { return Observable.range( 0, indexTestFig.getNumberOfRecords() ) //create our entity .map( integer -> { final Entity entity = new Entity( indexScope.getName() ); entity.setField( new IntegerField( FIELD_WORKER_INDEX, workerIndex ) ); entity.setField( new IntegerField( FIELD_ORDINAL, integer ) ); entity.setField( new StringField( FIELD_UNIQUE_IDENTIFIER, uniqueIdentifier ) ); EntityUtils.setVersion( entity, UUIDGenerator.newTimeUUID() ); //add some fields for indexing entity.setField( new StringField( "emtpyField", "" ) ); entity.setField( new StringField( "singleCharField1", "L" ) ); entity.setField( new StringField( "longStringField", "000000000000001051" ) ); entity.setField( new StringField( "singleCharField2", "0" ) ); entity.setField( new StringField( "singleCharField3", "0" ) ); entity.setField( new StringField( "singleCharField4", "0" ) ); entity.setField( new StringField( "dept", "VALUE" ) ); entity.setField( new StringField( "description", "I'm a longer description" ) ); ArrayField<Long> array = new ArrayField<>("longs"); array.add( 9315321008910l ); array.add( 9315321009016l ); array.add( 9315321009115l ); array.add( 9315321009313l ); array.add( 9315321009320l ); array.add( 9315321984955l ); entity.setField( array ); entity.setField( new StringField( "singleCharField5", "N" ) ); entity.setField( new BooleanField( "booleanField1", true ) ); entity.setField( new BooleanField( "booleanField2", false ) ); entity.setField( new StringField( "singleCharField5", "N" ) ); entity.setField( new StringField( "singleCharField6", "N" ) ); entity.setField( new StringField( "stringField", "ALL CAPS)); I MEAN IT" ) ); entity.setField( new DoubleField( "doubleField1", 750.0 ) ); entity.setField( new StringField( "charField", "AB" ) ); entity.setField( new StringField( "name", "000000000000001051-1004" ) ); return entity; } ) //buffer up a batch size .buffer( indexTestFig.getBufferSize() ).doOnNext( entities -> { //take our entities and roll them into a batch Observable.from( entities ).collect( () -> entityIndex.createBatch(), ( entityIndexBatch, entity ) -> { entityIndexBatch.index( indexScope, entity ); } ).doOnNext( entityIndexBatch -> { log.info( "Indexing next {} in batch", entityIndexBatch.size() ); //gather the metrics final Timer.Context time = batchWriteTimer.time(); batchWriteTPS.mark(); //execute entityIndexBatch.execute(); //stop time.close(); } ).toBlocking().last(); } ) //translate back into a stream of entities for the caller to use .flatMap( entities -> Observable.from( entities ) ); } /** * Class for entity search results */ private static class EntitySearchResult { public final Entity searched; public final boolean found; private EntitySearchResult( final Entity searched, final boolean found ) { this.searched = searched; this.found = found; } } /** * Class for collecting results */ private static final class DataLoadResult { private final AtomicLong successCount = new AtomicLong( 0 ); private final AtomicLong failCount = new AtomicLong( 0 ); public void success() { successCount.addAndGet( 1 ); } public long getSuccessCount() { return 
successCount.get(); } public void failed() { failCount.addAndGet( 1 ); } public long getFailCount() { return failCount.get(); } } }
Updated load tests to match new index structure
stack/corepersistence/queryindex/src/test/java/org/apache/usergrid/persistence/index/impl/IndexLoadTestsIT.java
Updated load tests to match new index structure
Java
apache-2.0
139b9834a3523f0e9686645b79edaca01a52550f
0
nicolargo/intellij-community,robovm/robovm-studio,salguarnieri/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,amith01994/intellij-community,signed/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,jagguli/intellij-community,dslomov/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,robovm/robovm-studio,slisson/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,da1z/intellij-community,hurricup/intellij-community,samthor/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,slisson/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,kool79/intellij-community,Lekanich/intellij-community,supersven/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,hurricup/intellij-community,izonder/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,fitermay/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,retomerz/intellij-community,ibinti/intellij-community,slisson/intellij-community,retomerz/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,dslomov/intellij-community,ryano144/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,semonte/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,FHannes/intellij-community,petteyg/intellij-community,allotria/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,fitermay/intellij-community,signed/intellij-community,Distrotech/intellij-com
munity,alphafoobar/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,izonder/intellij-community,fnouama/intellij-community,da1z/intellij-community,vvv1559/intellij-community,samthor/intellij-community,samthor/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,supersven/intellij-community,apixandru/intellij-community,signed/intellij-community,semonte/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,retomerz/intellij-community,semonte/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ryano144/intellij-community,robovm/robovm-studio,slisson/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,diorcety/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,samthor/intellij-community,supersven/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,izonder/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,wreckJ/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,supersven/intellij-community,ernestp/consulo,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,allotria/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,fnouama/intellij-community,fitermay/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,robovm/robovm-studio,slisson/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,robovm/robovm-studio,blademainer/intellij-community,adedayo/intellij-community,allotria/int
ellij-community,SerCeMan/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,clumsy/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,consulo/consulo,apixandru/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,consulo/consulo,kool79/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,diorcety/intellij-community,ibinti/intellij-community,allotria/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,supersven/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,suncycheng/intellij-community,caot/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,kdwink/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,blademainer/intellij-community,apixandru/intellij-community,robovm/robovm-studio,diorcety/intellij-community,adedayo/intellij-community,samthor/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,da1z/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,asedunov/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,caot/intellij-community,gnuhub/intellij-community,da1z/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,semonte/intellij-community,FHannes/intellij-community,caot/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,michaelga
llacher/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,petteyg/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,jagguli/intellij-community,caot/intellij-community,FHannes/intellij-community,xfournet/intellij-community,clumsy/intellij-community,kool79/intellij-community,fnouama/intellij-community,apixandru/intellij-community,ernestp/consulo,mglukhikh/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,signed/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,consulo/consulo,mglukhikh/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,diorcety/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,retomerz/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,kool79/intellij-community,gnuhub/intellij-community,kool79/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,fnouama/intellij-community,semonte/intellij-community,ernestp/consulo,da1z/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,holmes/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,hurricup/intellij-community,ryano144/intellij-community,ernestp/consulo,TangHao1987/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,blademainer/intellij-community,ibinti/intellij-community,jagguli/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/inte
llij-community,vvv1559/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,suncycheng/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,slisson/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,kdwink/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,ernestp/consulo,vvv1559/intellij-community,diorcety/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,signed/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,holmes/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,fnouama/intellij-community,holmes/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,apixandru/intellij-community,holmes/intellij-community,blademainer/intellij-community,amith01994/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,samthor/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,slisson/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,consulo/consulo,ftomassetti/intellij-community,suncycheng/intellij-community,caot/intellij-community,allotria/intellij-community,retomerz/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,asedunov/intellij-community,kool79/intellij-community,signed/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,da1z/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,samthor/intellij-community,da1z/intellij-community,Lekanich/intellij-community,izonder/intellij-communi
ty,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,da1z/intellij-community,samthor/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,signed/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,amith01994/intellij-community,caot/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,blademainer/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,caot/intellij-community,samthor/intellij-community,ibinti/intellij-community,amith01994/intellij-community,fitermay/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,slisson/intellij-community,adedayo/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,dslomov/intellij-community,kool79/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,ahb0327/intellij-community,supersven/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,retomerz/intellij-community,holmes/intellij-community,semonte/intellij-community,fitermay/intellij-community,xfournet/intellij-community,izonder/intellij-community,kdwink/intellij-community,samthor/intellij-community,slisson/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,semonte/intellij-community,ernestp/consulo,vvv1559/intellij-community,semonte/intellij-community,allotria/intellij-community,izonder/intellij-community,da1z/intellij-community,vladmm/intellij-community,semonte/intellij-community,asedunov/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,vladmm/intellij-community,robovm/robovm-studio,FHannes/intellij-community,FHannes/intellij-community,consulo/consulo,hurricup/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,akosyakov/intellij-community,signed/intellij-community,asedunov/intellij-community,signed/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,da1z/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,supersven/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,idea4bs
d/idea4bsd,suncycheng/intellij-community,tmpgit/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.io; import com.intellij.openapi.Forceable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.SystemInfo; import com.intellij.util.containers.hash.LinkedHashMap; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Map; import java.util.Set; /** * @author max */ public class PagedFileStorage implements Forceable { protected static final Logger LOG = Logger.getInstance("#com.intellij.util.io.PagedFileStorage"); private static final int MEGABYTE = 1024 * 1024; final static int DEFAULT_BUFFER_SIZE; private final static int UPPER_LIMIT; public static final int LOWER_LIMIT_IN_MEGABYTES = 100; private final static int LOWER_LIMIT = LOWER_LIMIT_IN_MEGABYTES * MEGABYTE; static { String maxPagedStorageCacheProperty = System.getProperty("idea.max.paged.storage.cache"); int defaultMaxPagedStorageCacheInMegabytes = SystemInfo.is64Bit ? 500:200; UPPER_LIMIT = (maxPagedStorageCacheProperty == null ? defaultMaxPagedStorageCacheInMegabytes: Math.max(Integer.valueOf(maxPagedStorageCacheProperty), LOWER_LIMIT_IN_MEGABYTES))*MEGABYTE; String pagedStoragePageSizeProperty = System.getProperty("idea.paged.storage.page.size"); int defaultPagedStoragePageSizeInMegabytes = 10; DEFAULT_BUFFER_SIZE = (pagedStoragePageSizeProperty == null ? 
defaultPagedStoragePageSizeInMegabytes:Math.max(1,Integer.valueOf(pagedStoragePageSizeProperty)))* MEGABYTE; } private final StorageLock myLock; public static class StorageLock { private final boolean checkThreadAccess; public StorageLock() { this(true); } public StorageLock(boolean checkThreadAccess) { this.checkThreadAccess = checkThreadAccess; } final BuffersCache myBuffersCache = new BuffersCache(); private class BuffersCache extends MyCache { public BuffersCache() { super(UPPER_LIMIT); } @NotNull public MappedBufferWrapper createValue(PageKey key) { if (checkThreadAccess && !Thread.holdsLock(StorageLock.this)) { throw new IllegalStateException("Must hold StorageLock lock to access PagedFileStorage"); } int off = key.page * key.owner.myPageSize; if (off > key.owner.length()) { throw new IndexOutOfBoundsException("off=" + off + " key.owner.length()=" + key.owner.length()); } ReadWriteMappedBufferWrapper wrapper = new ReadWriteMappedBufferWrapper(key.owner.myFile, off, Math.min((int)(key.owner.length() - off), key.owner.myPageSize)); IOException oome = null; while (true) { try { // ensure it's allocated wrapper.buf(); if (oome != null) { LOG.info("Successfully recovered OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MEGABYTE + "MB " + "new size limit: " + mySizeLimit / MEGABYTE + "MB " + "trying to allocate " + wrapper.myLength + " block"); } return wrapper; } catch (IOException e) { if (e.getCause() instanceof OutOfMemoryError) { oome = e; if (mySizeLimit > LOWER_LIMIT) { mySizeLimit -= key.owner.myPageSize; } long newSize = getSize() - key.owner.myPageSize; if (newSize >= 0) { ensureSize(newSize); continue; // next try } else { throw new MappingFailedException("Cannot recover from OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MEGABYTE + "MB " + "new size limit: " + mySizeLimit / MEGABYTE + "MB " + "trying to allocate " + wrapper.myLength + " block", e); } } throw new MappingFailedException("Cannot map buffer", e); } } } public void onDropFromCache(PageKey key, MappedBufferWrapper buf) { buf.dispose(); } } } private static class PageKey { private final PagedFileStorage owner; private final int page; public PageKey(PagedFileStorage owner, int page) { this.owner = owner; this.page = page; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof PageKey)) return false; PageKey pageKey = (PageKey)o; if (!owner.equals(pageKey.owner)) return false; if (page != pageKey.page) return false; return true; } @Override public int hashCode() { return 31 * owner.hashCode() + page; } } private final byte[] myTypedIOBuffer = new byte[8]; private boolean isDirty = false; private final File myFile; protected long mySize = -1; protected final int myPageSize; @NonNls private static final String RW = "rw"; public PagedFileStorage(File file, StorageLock lock, int pageSize) throws IOException { myFile = file; myLock = lock; myPageSize = Math.max(pageSize, Page.PAGE_SIZE); } public PagedFileStorage(File file, StorageLock lock) throws IOException { this(file, lock, DEFAULT_BUFFER_SIZE); } public File getFile() { return myFile; } public void putInt(int addr, int value) { Bits.putInt(myTypedIOBuffer, 0, value); put(addr, myTypedIOBuffer, 0, 4); } public int getInt(int addr) { get(addr, myTypedIOBuffer, 0, 4); return Bits.getInt(myTypedIOBuffer, 0); } public void putLong(int addr, long value) { Bits.putLong(myTypedIOBuffer, 0, value); put(addr, myTypedIOBuffer, 0, 8); } @SuppressWarnings({"UnusedDeclaration"}) public void putByte(final 
int addr, final byte b) { myTypedIOBuffer[0] = b; put(addr, myTypedIOBuffer, 0, 1); } public byte getByte(int addr) { get(addr, myTypedIOBuffer, 0, 1); return myTypedIOBuffer[0]; } public long getLong(int addr) { get(addr, myTypedIOBuffer, 0, 8); return Bits.getLong(myTypedIOBuffer, 0); } public byte get(int index) { int page = index / myPageSize; int offset = index % myPageSize; return getBuffer(page).get(offset); } public void put(int index, byte value) { isDirty = true; int page = index / myPageSize; int offset = index % myPageSize; getBuffer(page).put(offset, value); } public void get(int index, byte[] dst, int offset, int length) { int i = index; int o = offset; int l = length; while (l > 0) { int page = i / myPageSize; int page_offset = i % myPageSize; int page_len = Math.min(l, myPageSize - page_offset); final ByteBuffer buffer = getBuffer(page); try { buffer.position(page_offset); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("can't position buffer to offset " + page_offset + ", " + "buffer.limit=" + buffer.limit() + ", " + "page=" + page + ", " + "file=" + myFile.getName() + ", "+ "file.length=" + mySize); } buffer.get(dst, o, page_len); l -= page_len; o += page_len; i += page_len; } } public void put(int index, byte[] src, int offset, int length) { isDirty = true; int i = index; int o = offset; int l = length; while (l > 0) { int page = i / myPageSize; int page_offset = i % myPageSize; int page_len = Math.min(l, myPageSize - page_offset); final ByteBuffer buffer = getBuffer(page); try { buffer.position(page_offset); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("can't position buffer to offset " + page_offset); } buffer.put(src, o, page_len); l -= page_len; o += page_len; i += page_len; } } public void close() { try { force(); } finally { unmapAll(); } } private void unmapAll() { for (Map.Entry<PageKey, MappedBufferWrapper> entry : myLock.myBuffersCache.entrySet()) { if (entry.getKey().owner == this) { myLock.myBuffersCache.remove(entry.getKey()); } } } public void resize(int newSize) throws IOException { int oldSize = (int)myFile.length(); if (oldSize == newSize) return; final long started = IOStatistics.DEBUG ? System.currentTimeMillis():0; unmapAll(); final long unmapAllFinished = IOStatistics.DEBUG ? 
System.currentTimeMillis():0; resizeFile(newSize); // it is not guaranteed that new partition will consist of null // after resize, so we should fill it manually int delta = newSize - oldSize; if (delta > 0) fillWithZeros(oldSize, delta); if (IOStatistics.DEBUG) { long finished = System.currentTimeMillis(); if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) { IOStatistics.dump("Resized "+myFile + " from " + oldSize + " to " + newSize + " for " + (finished - started) + ", unmap all:" + (finished - unmapAllFinished)); } } } private void resizeFile(int newSize) throws IOException { RandomAccessFile raf = new RandomAccessFile(myFile, RW); try { raf.setLength(newSize); } finally { raf.close(); } mySize = newSize; } private final static int MAX_FILLER_SIZE = 8192; private void fillWithZeros(int from, int length) { byte[] buff = new byte[MAX_FILLER_SIZE]; Arrays.fill(buff, (byte)0); while (length > 0) { final int filled = Math.min(length, MAX_FILLER_SIZE); put(from, buff, 0, filled); length -= filled; from += filled; } } public final long length() { if (mySize == -1) { mySize = myFile.length(); } return mySize; } private ByteBuffer getBuffer(int page) { try { return myLock.myBuffersCache.get(new PageKey(this, page)).buf(); } catch (IOException e) { throw new MappingFailedException("Cannot map buffer", e); } } public void force() { long started = IOStatistics.DEBUG ? System.currentTimeMillis():0; for (Map.Entry<PageKey,MappedBufferWrapper> entry : myLock.myBuffersCache.entrySet()) { if (entry.getKey().owner == this) { entry.getValue().flush(); } } isDirty = false; if (IOStatistics.DEBUG) { long finished = System.currentTimeMillis(); if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) { IOStatistics.dump("Flushed "+myFile + " for " + (finished - started)); } } } public boolean isDirty() { return isDirty; } private static abstract class MyCache { private final LinkedHashMap<PageKey, MappedBufferWrapper> myMap; protected long mySizeLimit; private long mySize; protected MyCache(long sizeLimit) { mySizeLimit = sizeLimit; myMap = new LinkedHashMap<PageKey, MappedBufferWrapper>(10) { @Override protected boolean removeEldestEntry(Map.Entry<PageKey, MappedBufferWrapper> eldest) { return mySize > mySizeLimit; } @Nullable @Override public MappedBufferWrapper remove(Object key) { // this method can be called after removeEldestEntry MappedBufferWrapper wrapper = super.remove(key); if (wrapper != null) { mySize -= wrapper.myLength; onDropFromCache((PageKey)key, wrapper); } return wrapper; } }; } public MappedBufferWrapper get(PageKey key) { MappedBufferWrapper wrapper = myMap.get(key); if (wrapper != null) { return wrapper; } long started = IOStatistics.DEBUG ? 
System.currentTimeMillis() : 0; wrapper = createValue(key); mySize += wrapper.myLength; if (IOStatistics.DEBUG) { long finished = System.currentTimeMillis(); if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) { IOStatistics.dump("Mapping " + wrapper.myLength + " from " + wrapper.myPosition + " file:"+wrapper.myFile + " for "+(finished - started)); } } myMap.put(key, wrapper); ensureSize(mySizeLimit); return wrapper; } protected void ensureSize(long sizeLimit) { while (mySize > sizeLimit) { // we still have to drop something myMap.doRemoveEldestEntry(); } } public long getSize() { return mySize; } public Set<Map.Entry<PageKey, MappedBufferWrapper>> entrySet() { return myMap.entrySet(); } public void remove(PageKey key) { myMap.remove(key); } protected abstract MappedBufferWrapper createValue(PageKey key); protected abstract void onDropFromCache(PageKey key, MappedBufferWrapper wrapper); } }
platform/util/src/com/intellij/util/io/PagedFileStorage.java
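The PagedFileStorage file above splits every read and write into per-page chunks: the absolute address divided by the page size picks a page, the remainder gives the offset inside that page, and a request that crosses a page boundary is served in several steps. The following is a minimal sketch of that arithmetic only; it performs no I/O, and the tiny page size and addresses are invented for illustration (the real myPageSize defaults to several megabytes).

// Hedged sketch of the page/offset arithmetic used by
// PagedFileStorage.get(int, byte[], int, int) to split a read that crosses
// page boundaries. Pure arithmetic, no file access; numbers are made up.
public final class PageArithmeticDemo {
    public static void main(String[] args) {
        int pageSize = 10;   // the real myPageSize is megabytes, not 10 bytes
        int index = 27;      // absolute address in the storage
        int length = 15;     // bytes to read

        int remaining = length;
        int address = index;
        while (remaining > 0) {
            int page = address / pageSize;        // which mapped page holds this address
            int pageOffset = address % pageSize;  // where inside that page to start
            int chunk = Math.min(remaining, pageSize - pageOffset); // stop at the page end
            System.out.println("read " + chunk + " bytes from page " + page
                    + " at offset " + pageOffset);
            remaining -= chunk;
            address += chunk;
        }
        // Prints: 3 bytes from page 2 at offset 7, then 10 bytes from page 3 at
        // offset 0, then 2 bytes from page 4 at offset 0.
    }
}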
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.io; import com.intellij.openapi.Forceable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.SystemInfo; import com.intellij.util.containers.hash.LinkedHashMap; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Map; import java.util.Set; /** * @author max */ public class PagedFileStorage implements Forceable { protected static final Logger LOG = Logger.getInstance("#com.intellij.util.io.PagedFileStorage"); private static final int MEGABYTE = 1024 * 1024; final static int DEFAULT_BUFFER_SIZE; private final static int UPPER_LIMIT; public static final int LOWER_LIMIT_IN_MEGABYTES = 100; private final static int LOWER_LIMIT = LOWER_LIMIT_IN_MEGABYTES * MEGABYTE; static { String maxPagedStorageCacheProperty = System.getProperty("idea.max.paged.storage.cache"); int defaultMaxPagedStorageCacheInMegabytes = SystemInfo.is64Bit ? 500:200; UPPER_LIMIT = (maxPagedStorageCacheProperty == null ? defaultMaxPagedStorageCacheInMegabytes: Math.max(Integer.valueOf(maxPagedStorageCacheProperty), LOWER_LIMIT_IN_MEGABYTES))*MEGABYTE; String pagedStoragePageSizeProperty = System.getProperty("idea.paged.storage.page.size"); int defaultPagedStoragePageSizeInMegabytes = 10; DEFAULT_BUFFER_SIZE = (pagedStoragePageSizeProperty == null ? 
defaultPagedStoragePageSizeInMegabytes:Math.max(1,Integer.valueOf(pagedStoragePageSizeProperty)))* MEGABYTE; } private final StorageLock myLock; public static class StorageLock { private final boolean checkThreadAccess; public StorageLock() { this(true); } public StorageLock(boolean checkThreadAccess) { this.checkThreadAccess = checkThreadAccess; } final BuffersCache myBuffersCache = new BuffersCache(); private class BuffersCache extends MyCache { public BuffersCache() { super(UPPER_LIMIT); } @NotNull public MappedBufferWrapper createValue(PageKey key) { if (checkThreadAccess && !Thread.holdsLock(StorageLock.this)) { throw new IllegalStateException("Must hold StorageLock lock to access PagedFileStorage"); } int off = key.page * key.owner.myPageSize; if (off > key.owner.length()) { throw new IndexOutOfBoundsException("off=" + off + " key.owner.length()=" + key.owner.length()); } ReadWriteMappedBufferWrapper wrapper = new ReadWriteMappedBufferWrapper(key.owner.myFile, off, Math.min((int)(key.owner.length() - off), key.owner.myPageSize)); IOException oome = null; while (true) { try { // ensure it's allocated wrapper.buf(); if (oome != null) { LOG.info("Successfully recovered OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MEGABYTE + "MB " + "new size limit: " + mySizeLimit / MEGABYTE + "MB " + "trying to allocate " + wrapper.myLength + " block"); } return wrapper; } catch (IOException e) { if (e.getCause() instanceof OutOfMemoryError) { oome = e; if (mySizeLimit > LOWER_LIMIT) { mySizeLimit -= key.owner.myPageSize; } long newSize = getSize() - key.owner.myPageSize; if (newSize >= 0) { ensureSize(newSize); continue; // next try } else { throw new MappingFailedException("Cannot recover from OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MEGABYTE + "MB " + "new size limit: " + mySizeLimit / MEGABYTE + "MB " + "trying to allocate " + wrapper.myLength + " block", e); } } throw new MappingFailedException("Cannot map buffer", e); } } } public void onDropFromCache(PageKey key, MappedBufferWrapper buf) { buf.dispose(); } } } private static class PageKey { private final PagedFileStorage owner; private final int page; public PageKey(PagedFileStorage owner, int page) { this.owner = owner; this.page = page; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof PageKey)) return false; PageKey pageKey = (PageKey)o; if (!owner.equals(pageKey.owner)) return false; if (page != pageKey.page) return false; return true; } @Override public int hashCode() { return 31 * owner.hashCode() + page; } } private final byte[] myTypedIOBuffer = new byte[8]; private boolean isDirty = false; private final File myFile; protected long mySize = -1; protected final int myPageSize; @NonNls private static final String RW = "rw"; public PagedFileStorage(File file, StorageLock lock, int pageSize) throws IOException { myFile = file; myLock = lock; myPageSize = Math.max(pageSize, Page.PAGE_SIZE); } public PagedFileStorage(File file, StorageLock lock) throws IOException { this(file, lock, DEFAULT_BUFFER_SIZE); } public File getFile() { return myFile; } public void putInt(int addr, int value) { Bits.putInt(myTypedIOBuffer, 0, value); put(addr, myTypedIOBuffer, 0, 4); } public int getInt(int addr) { get(addr, myTypedIOBuffer, 0, 4); return Bits.getInt(myTypedIOBuffer, 0); } public void putLong(int addr, long value) { Bits.putLong(myTypedIOBuffer, 0, value); put(addr, myTypedIOBuffer, 0, 8); } @SuppressWarnings({"UnusedDeclaration"}) public void putByte(final 
int addr, final byte b) { myTypedIOBuffer[0] = b; put(addr, myTypedIOBuffer, 0, 1); } public byte getByte(int addr) { get(addr, myTypedIOBuffer, 0, 1); return myTypedIOBuffer[0]; } public long getLong(int addr) { get(addr, myTypedIOBuffer, 0, 8); return Bits.getLong(myTypedIOBuffer, 0); } public byte get(int index) { int page = index / myPageSize; int offset = index % myPageSize; return getBuffer(page).get(offset); } public void put(int index, byte value) { isDirty = true; int page = index / myPageSize; int offset = index % myPageSize; getBuffer(page).put(offset, value); } public void get(int index, byte[] dst, int offset, int length) { int i = index; int o = offset; int l = length; while (l > 0) { int page = i / myPageSize; int page_offset = i % myPageSize; int page_len = Math.min(l, myPageSize - page_offset); final ByteBuffer buffer = getBuffer(page); try { buffer.position(page_offset); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("can't position buffer to offset " + page_offset + ", " + "buffer.limit=" + buffer.limit() + ", " + "page=" + page + ", " + "file=" + myFile.getName() + ", "+ "file.length=" + mySize); } buffer.get(dst, o, page_len); l -= page_len; o += page_len; i += page_len; } } public void put(int index, byte[] src, int offset, int length) { isDirty = true; int i = index; int o = offset; int l = length; while (l > 0) { int page = i / myPageSize; int page_offset = i % myPageSize; int page_len = Math.min(l, myPageSize - page_offset); final ByteBuffer buffer = getBuffer(page); try { buffer.position(page_offset); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("can't position buffer to offset " + page_offset); } buffer.put(src, o, page_len); l -= page_len; o += page_len; i += page_len; } } public void close() { try { force(); } finally { unmapAll(); } } private void unmapAll() { for (Map.Entry<PageKey, MappedBufferWrapper> entry : myLock.myBuffersCache.entrySet()) { if (entry.getKey().owner == this) { myLock.myBuffersCache.remove(entry.getKey()); } } } public void resize(int newSize) throws IOException { int oldSize = (int)myFile.length(); if (oldSize == newSize) return; final long started = IOStatistics.DEBUG ? System.currentTimeMillis():0; unmapAll(); final long unmapAllFinished = IOStatistics.DEBUG ? 
System.currentTimeMillis():0; resizeFile(newSize); // it is not guaranteed that new partition will consist of null // after resize, so we should fill it manually int delta = newSize - oldSize; if (delta > 0) fillWithZeros(oldSize, delta); if (IOStatistics.DEBUG) { long finished = System.currentTimeMillis(); if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) { IOStatistics.dump("Resized "+myFile + " from " + oldSize + " to " + newSize + " for " + (finished - started) + ", unmap all:" + (finished - unmapAllFinished)); } } } private void resizeFile(int newSize) throws IOException { RandomAccessFile raf = new RandomAccessFile(myFile, RW); try { raf.setLength(newSize); } finally { raf.close(); } mySize = newSize; } private final static int MAX_FILLER_SIZE = 8192; private void fillWithZeros(int from, int length) { byte[] buff = new byte[MAX_FILLER_SIZE]; Arrays.fill(buff, (byte)0); while (length > 0) { final int filled = Math.min(length, MAX_FILLER_SIZE); put(from, buff, 0, filled); length -= filled; from += filled; } } public final long length() { if (mySize == -1) { mySize = myFile.length(); } return mySize; } private ByteBuffer getBuffer(int page) { try { return myLock.myBuffersCache.get(new PageKey(this, page)).buf(); } catch (IOException e) { throw new MappingFailedException("Cannot map buffer", e); } } public void force() { long started = IOStatistics.DEBUG ? System.currentTimeMillis():0; for (Map.Entry<PageKey,MappedBufferWrapper> entry : myLock.myBuffersCache.entrySet()) { if (entry.getKey().owner == this) { entry.getValue().flush(); } } isDirty = false; if (IOStatistics.DEBUG) { long finished = System.currentTimeMillis(); if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) { IOStatistics.dump("Flushed "+myFile + " for " + (finished - started)); } } } public boolean isDirty() { return isDirty; } private static abstract class MyCache { private final LinkedHashMap<PageKey, MappedBufferWrapper> myMap; protected long mySizeLimit; private long mySize; protected MyCache(long sizeLimit) { mySizeLimit = sizeLimit; myMap = new LinkedHashMap<PageKey, MappedBufferWrapper>(10) { @Override protected boolean removeEldestEntry(Map.Entry<PageKey, MappedBufferWrapper> eldest) { return mySize > mySizeLimit; } @Nullable @Override public MappedBufferWrapper remove(Object key) { // this method can be called after removeEldestEntry MappedBufferWrapper wrapper = super.remove(key); if (wrapper != null) { mySize -= wrapper.myLength; onDropFromCache((PageKey)key, wrapper); } return wrapper; } }; } public MappedBufferWrapper get(PageKey key) { MappedBufferWrapper wrapper = myMap.get(key); if (wrapper != null) { return wrapper; } long started = IOStatistics.DEBUG ? 
System.currentTimeMillis() : 0; wrapper = createValue(key); mySize += wrapper.myLength; if (IOStatistics.DEBUG) { long finished = System.currentTimeMillis(); if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) { IOStatistics.dump("Mapping " + wrapper.myLength + " from " + wrapper.myPosition + " file:"+wrapper.myFile); } } myMap.put(key, wrapper); ensureSize(mySizeLimit); return wrapper; } protected void ensureSize(long sizeLimit) { while (mySize > sizeLimit) { // we still have to drop something myMap.doRemoveEldestEntry(); } } public long getSize() { return mySize; } public Set<Map.Entry<PageKey, MappedBufferWrapper>> entrySet() { return myMap.entrySet(); } public void remove(PageKey key) { myMap.remove(key); } protected abstract MappedBufferWrapper createValue(PageKey key); protected abstract void onDropFromCache(PageKey key, MappedBufferWrapper wrapper); } }
report time of long mapping
platform/util/src/com/intellij/util/io/PagedFileStorage.java
report time of long mapping
Java
apache-2.0
35945d7fbaca24086bed63d4b8f103b402d15e4b
0
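The commit recorded above ("report time of long mapping") extends the IOStatistics.dump line emitted when a page is mapped so that it also reports the elapsed time. The sketch below illustrates the underlying pattern, measuring wall-clock time around a potentially slow operation and reporting it only when it exceeds a threshold, under the assumption that this captures the intent of the change; the class name, threshold constant, and simulated work are hypothetical stand-ins, not part of the IntelliJ codebase (which uses IOStatistics.DEBUG and IOStatistics.MIN_IO_TIME_TO_REPORT).

// Minimal, self-contained sketch of the timing pattern: time an operation and
// only log it when it was slow. Names and the threshold are hypothetical.
public final class SlowOperationTimer {

    // Hypothetical threshold; the IntelliJ code reads IOStatistics.MIN_IO_TIME_TO_REPORT.
    private static final long MIN_TIME_TO_REPORT_MS = 100;

    public static void main(String[] args) throws Exception {
        byte[] data = mapPage(3);
        System.out.println("mapped " + data.length + " bytes");
    }

    // Stand-in for BuffersCache.get(): do the work, time it, report only if slow.
    static byte[] mapPage(int page) throws InterruptedException {
        long started = System.currentTimeMillis();

        // Simulated expensive work (the real code memory-maps a region of a file).
        Thread.sleep(150);
        byte[] buffer = new byte[10 * 1024 * 1024];

        long finished = System.currentTimeMillis();
        if (finished - started > MIN_TIME_TO_REPORT_MS) {
            // The commit corresponds to adding the elapsed time to this kind of line.
            System.out.println("Mapping " + buffer.length + " bytes for page " + page
                    + " took " + (finished - started) + " ms");
        }
        return buffer;
    }
}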
argv-minus-one/fop,argv-minus-one/fop,argv-minus-one/fop,StrategyObject/fop,StrategyObject/fop,argv-minus-one/fop,StrategyObject/fop,argv-minus-one/fop,StrategyObject/fop,StrategyObject/fop
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.apps.io; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** * A factory class for {@link ResourceResolver}s. */ public final class ResourceResolverFactory { private ResourceResolverFactory() { } /** * Returns the default resource resolver, this is most basic resolver which can be used when * no there are no I/O or file access restrictions. * * @return the default resource resolver */ public static ResourceResolver createDefaultResourceResolver() { return DefaultResourceResolver.INSTANCE; } /** * A helper merthod that creates an internal resource resolver using the default resover: * {@link ResourceResolverFactory#createDefaultResourceResolver()}. * * @param baseURI the base URI from which to resolve URIs * @return the default internal resource resolver */ public static InternalResourceResolver createDefaultInternalResourceResolver(URI baseURI) { return new InternalResourceResolver(baseURI, createDefaultResourceResolver()); } /** * Creates an interal resource resolver given a base URI and a resource resolver. * * @param baseURI the base URI from which to resolve URIs * @param resolver the resource resolver * @return the internal resource resolver */ public static InternalResourceResolver createInternalResourceResolver(URI baseURI, ResourceResolver resolver) { return new InternalResourceResolver(baseURI, resolver); } /** * Creates a temporary-resource-schema aware resource resolver. Temporary resource URIs are * created by {@link TempResourceURIGenerator}. * * @param tempResourceResolver the temporary-resource-schema resolver to use * @param defaultResourceResolver the default resource resolver to use * @return the ressource resolver */ public static ResourceResolver createTempAwareResourceResolver( TempResourceResolver tempResourceResolver, ResourceResolver defaultResourceResolver) { return new TempAwareResourceResolver(tempResourceResolver, defaultResourceResolver); } /** * This creates the builder class for binding URI schemas to implementations of * {@link ResourceResolver}. This allows users to define their own URI schemas such that they * have finer control over the acquisition of resources. 
* * @param defaultResolver the default resource resolver that should be used in the event that * none of the other registered resolvers match the schema * @return the schema aware {@link ResourceResolver} builder */ public static SchemaAwareResourceResolverBuilder createSchemaAwareResourceResolverBuilder( ResourceResolver defaultResolver) { return new SchemaAwareResourceResolverBuilderImpl(defaultResolver); } private static final class DefaultResourceResolver implements ResourceResolver { private static final ResourceResolver INSTANCE = new DefaultResourceResolver(); private final TempAwareResourceResolver delegate; private DefaultResourceResolver() { delegate = new TempAwareResourceResolver(new DefaultTempResourceResolver(), new NormalResourceResolver()); } /** {@inheritDoc} */ public Resource getResource(URI uri) throws IOException { return delegate.getResource(uri); } /** {@inheritDoc} */ public OutputStream getOutputStream(URI uri) throws IOException { return delegate.getOutputStream(uri); } } private static final class TempAwareResourceResolver implements ResourceResolver { private final TempResourceResolver tempResourceResolver; private final ResourceResolver defaultResourceResolver; public TempAwareResourceResolver(TempResourceResolver tempResourceHandler, ResourceResolver defaultResourceResolver) { this.tempResourceResolver = tempResourceHandler; this.defaultResourceResolver = defaultResourceResolver; } private static boolean isTempUri(URI uri) { return TempResourceURIGenerator.isTempUri(uri); } /** {@inheritDoc} */ public Resource getResource(URI uri) throws IOException { if (isTempUri(uri)) { return tempResourceResolver.getResource(uri.getPath()); } else { return defaultResourceResolver.getResource(uri); } } /** {@inheritDoc} */ public OutputStream getOutputStream(URI uri) throws IOException { if (isTempUri(uri)) { return tempResourceResolver.getOutputStream(uri.getPath()); } else { return defaultResourceResolver.getOutputStream(uri); } } } private static class DefaultTempResourceResolver implements TempResourceResolver { private static File getTempFile(String path) throws IOException { File file = new File(System.getProperty("java.io.tmpdir"), path); file.deleteOnExit(); return file; } /** {@inheritDoc} */ public Resource getResource(String id) throws IOException { return new Resource(getTempFile(id).toURI().toURL().openStream()); } /** {@inheritDoc} */ public OutputStream getOutputStream(String id) throws IOException { File file = getTempFile(id); if (file.createNewFile()) { return new FileOutputStream(file); } else { throw new IOException("Filed to create temporary file: " + id); } } } private static class NormalResourceResolver implements ResourceResolver { public Resource getResource(URI uri) throws IOException { return new Resource(uri.toURL().openStream()); } public OutputStream getOutputStream(URI uri) throws IOException { return new FileOutputStream(new File(uri)); } } private static final class SchemaAwareResourceResolver implements ResourceResolver { private final Map<String, ResourceResolver> schemaHandlingResourceResolvers; private final ResourceResolver defaultResolver; private SchemaAwareResourceResolver( Map<String, ResourceResolver> schemaHandlingResourceResolvers, ResourceResolver defaultResolver) { this.schemaHandlingResourceResolvers = schemaHandlingResourceResolvers; this.defaultResolver = defaultResolver; } private ResourceResolver getResourceResolverForSchema(URI uri) { String schema = uri.getScheme(); if 
(schemaHandlingResourceResolvers.containsKey(schema)) { return schemaHandlingResourceResolvers.get(schema); } else { return defaultResolver; } } /** {@inheritDoc} */ public Resource getResource(URI uri) throws IOException { return getResourceResolverForSchema(uri).getResource(uri); } /** {@inheritDoc} */ public OutputStream getOutputStream(URI uri) throws IOException { return getResourceResolverForSchema(uri).getOutputStream(uri); } } /** * Implementations of this interface will be builders for {@link ResourceResolver}, they bind * URI schemas to their respective resolver. This gives users more control over the mechanisms * by which URIs are resolved. * <p> * Here is an example of how this could be used: * </p> * <p><code> * SchemaAwareResourceResolverBuilder builder * = ResourceResolverFactory.createSchemaAwareResourceResolverBuilder(defaultResolver); * builder.registerResourceResolverForSchema("test", testResolver); * builder.registerResourceResolverForSchema("anotherTest", test2Resolver); * ResourceResolver resolver = builder.build(); * </code></p> * This will result in all URIs for the form "test:///..." will be resolved using the * <code>testResolver</code> object; URIs of the form "anotherTest:///..." will be resolved * using <code>test2Resolver</code>; all other URIs will be resolved from the defaultResolver. */ public interface SchemaAwareResourceResolverBuilder { /** * Register a schema with its respective {@link ResourceResolver}. This resolver will be * used as the only resolver for the specified schema. * * @param schema the schema to be used with the given resolver * @param resourceResolver the resource resolver */ void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver); /** * Builds a {@link ResourceResolver} that will delegate to the respective resource resolver * when a registered URI schema is given * * @return a resolver that delegates to the appropriate schema resolver */ ResourceResolver build(); } private static final class CompletedSchemaAwareResourceResolverBuilder implements SchemaAwareResourceResolverBuilder { private static final SchemaAwareResourceResolverBuilder INSTANCE = new CompletedSchemaAwareResourceResolverBuilder(); /** {@inheritDoc} */ public ResourceResolver build() { throw new IllegalStateException("Resource resolver already built"); } /** {@inheritDoc} */ public void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver) { throw new IllegalStateException("Resource resolver already built"); } } private static final class ActiveSchemaAwareResourceResolverBuilder implements SchemaAwareResourceResolverBuilder { private final Map<String, ResourceResolver> schemaHandlingResourceResolvers = new HashMap<String, ResourceResolver>(); private final ResourceResolver defaultResolver; private ActiveSchemaAwareResourceResolverBuilder(ResourceResolver defaultResolver) { this.defaultResolver = defaultResolver; } /** {@inheritDoc} */ public void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver) { schemaHandlingResourceResolvers.put(schema, resourceResolver); } /** {@inheritDoc} */ public ResourceResolver build() { return new SchemaAwareResourceResolver( Collections.unmodifiableMap(schemaHandlingResourceResolvers), defaultResolver); } } private static final class SchemaAwareResourceResolverBuilderImpl implements SchemaAwareResourceResolverBuilder { private SchemaAwareResourceResolverBuilder delegate; private SchemaAwareResourceResolverBuilderImpl(ResourceResolver 
defaultResolver) { this.delegate = new ActiveSchemaAwareResourceResolverBuilder(defaultResolver); } /** {@inheritDoc} */ public void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver) { delegate.registerResourceResolverForSchema(schema, resourceResolver); } /** {@inheritDoc} */ public ResourceResolver build() { ResourceResolver resourceResolver = delegate.build(); delegate = CompletedSchemaAwareResourceResolverBuilder.INSTANCE; return resourceResolver; } } }
src/java/org/apache/fop/apps/io/ResourceResolverFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.apps.io; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** * A factory class for {@link ResourceResolver}s. */ public final class ResourceResolverFactory { private ResourceResolverFactory() { } /** * Returns the default resource resolver, this is most basic resolver which can be used when * no there are no I/O or file access restrictions. * * @return the default resource resolver */ public static ResourceResolver createDefaultResourceResolver() { return DefaultResourceResolver.INSTANCE; } /** * A helper merthod that creates an internal resource resolver using the default resover: * {@link ResourceResolverFactory#createDefaultResourceResolver()}. * * @param baseURI the base URI from which to resolve URIs * @return the default internal resource resolver */ public static InternalResourceResolver createDefaultInternalResourceResolver(URI baseURI) { return new InternalResourceResolver(baseURI, createDefaultResourceResolver()); } /** * Creates an interal resource resolver given a base URI and a resource resolver. * * @param baseURI the base URI from which to resolve URIs * @param resolver the resource resolver * @return the internal resource resolver */ public static InternalResourceResolver createInternalResourceResolver(URI baseURI, ResourceResolver resolver) { return new InternalResourceResolver(baseURI, resolver); } /** * Creates a temporary-resource-schema aware resource resolver. Temporary resource URIs are * created by {@link TempResourceURIGenerator}. 
* * @param tempResourceResolver the temporary-resource-schema resolver to use * @param defaultResourceResolver the default resource resolver to use * @return the ressource resolver */ public static ResourceResolver createTempAwareResourceResolver( TempResourceResolver tempResourceResolver, ResourceResolver defaultResourceResolver) { return new TempAwareResourceResolver(tempResourceResolver, defaultResourceResolver); } public static SchemaAwareResourceResolverBuilder createSchemaAwareResourceResolverBuilder( ResourceResolver defaultResolver) { return new SchemaAwareResourceResolverBuilderImpl(defaultResolver); } private static final class DefaultResourceResolver implements ResourceResolver { private static final ResourceResolver INSTANCE = new DefaultResourceResolver(); private final TempAwareResourceResolver delegate; private DefaultResourceResolver() { delegate = new TempAwareResourceResolver(new DefaultTempResourceResolver(), new NormalResourceResolver()); } public Resource getResource(URI uri) throws IOException { return delegate.getResource(uri); } public OutputStream getOutputStream(URI uri) throws IOException { return delegate.getOutputStream(uri); } } private static final class TempAwareResourceResolver implements ResourceResolver { private final TempResourceResolver tempResourceResolver; private final ResourceResolver defaultResourceResolver; public TempAwareResourceResolver(TempResourceResolver tempResourceHandler, ResourceResolver defaultResourceResolver) { this.tempResourceResolver = tempResourceHandler; this.defaultResourceResolver = defaultResourceResolver; } private static boolean isTempUri(URI uri) { return TempResourceURIGenerator.isTempUri(uri); } public Resource getResource(URI uri) throws IOException { if (isTempUri(uri)) { return tempResourceResolver.getResource(uri.getPath()); } else { return defaultResourceResolver.getResource(uri); } } public OutputStream getOutputStream(URI uri) throws IOException { if (isTempUri(uri)) { return tempResourceResolver.getOutputStream(uri.getPath()); } else { return defaultResourceResolver.getOutputStream(uri); } } } private static class DefaultTempResourceResolver implements TempResourceResolver { private static File getTempFile(String path) throws IOException { File file = new File(System.getProperty("java.io.tmpdir"), path); file.deleteOnExit(); return file; } public Resource getResource(String id) throws IOException { return new Resource(getTempFile(id).toURI().toURL().openStream()); } public OutputStream getOutputStream(String id) throws IOException { File file = getTempFile(id); if (file.createNewFile()) { return new FileOutputStream(file); } else { throw new IOException("Filed to create temporary file: " + id); } } } private static class NormalResourceResolver implements ResourceResolver { public Resource getResource(URI uri) throws IOException { return new Resource(uri.toURL().openStream()); } public OutputStream getOutputStream(URI uri) throws IOException { return new FileOutputStream(new File(uri)); } } private static final class SchemaAwareResourceResolver implements ResourceResolver { private final Map<String, ResourceResolver> schemaHandlingResourceResolvers; private final ResourceResolver defaultResolver; private SchemaAwareResourceResolver( Map<String, ResourceResolver> schemaHandlingResourceResolvers, ResourceResolver defaultResolver) { this.schemaHandlingResourceResolvers = schemaHandlingResourceResolvers; this.defaultResolver = defaultResolver; } private ResourceResolver getResourceResolverForSchema(URI uri) { String 
schema = uri.getScheme(); if (schemaHandlingResourceResolvers.containsKey(schema)) { return schemaHandlingResourceResolvers.get(schema); } else { return defaultResolver; } } public Resource getResource(URI uri) throws IOException { return getResourceResolverForSchema(uri).getResource(uri); } public OutputStream getOutputStream(URI uri) throws IOException { return getResourceResolverForSchema(uri).getOutputStream(uri); } } public interface SchemaAwareResourceResolverBuilder { void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver); ResourceResolver build(); } private static final class CompletedSchemaAwareResourceResolverBuilder implements SchemaAwareResourceResolverBuilder { private static final SchemaAwareResourceResolverBuilder INSTANCE = new CompletedSchemaAwareResourceResolverBuilder(); public ResourceResolver build() { throw new IllegalStateException("Resource resolver already built"); } public void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver) { throw new IllegalStateException("Resource resolver already built"); } } private static final class ActiveSchemaAwareResourceResolverBuilder implements SchemaAwareResourceResolverBuilder { private final Map<String, ResourceResolver> schemaHandlingResourceResolvers = new HashMap<String, ResourceResolver>(); private final ResourceResolver defaultResolver; private ActiveSchemaAwareResourceResolverBuilder(ResourceResolver defaultResolver) { this.defaultResolver = defaultResolver; } public void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver) { schemaHandlingResourceResolvers.put(schema, resourceResolver); } public ResourceResolver build() { return new SchemaAwareResourceResolver( Collections.unmodifiableMap(schemaHandlingResourceResolvers), defaultResolver); } } private static final class SchemaAwareResourceResolverBuilderImpl implements SchemaAwareResourceResolverBuilder { private SchemaAwareResourceResolverBuilder delegate; private SchemaAwareResourceResolverBuilderImpl(ResourceResolver defaultResolver) { this.delegate = new ActiveSchemaAwareResourceResolverBuilder(defaultResolver); } public void registerResourceResolverForSchema(String schema, ResourceResolver resourceResolver) { delegate.registerResourceResolverForSchema(schema, resourceResolver); } public ResourceResolver build() { ResourceResolver resourceResolver = delegate.build(); delegate = CompletedSchemaAwareResourceResolverBuilder.INSTANCE; return resourceResolver; } } }
Added javadocs to the URI schema resolution mechanisms available git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@1366000 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/fop/apps/io/ResourceResolverFactory.java
Added javadocs to the URI schema resolution mechanisms available
Java
apache-2.0
23c19221f46c3e16e432c9a526a6d46d1db8168c
0
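As context for the schema-resolution mechanism documented in the FOP record above, here is a minimal usage sketch (it is not part of the commit itself): it registers a resolver for one URI scheme with the builder returned by createSchemaAwareResourceResolverBuilder and leaves every other scheme to the default resolver. The example class name and the choice of the "ftp" scheme are illustrative assumptions; the factory, builder and Resource types are taken from the record.

import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;

import org.apache.fop.apps.io.Resource;
import org.apache.fop.apps.io.ResourceResolver;
import org.apache.fop.apps.io.ResourceResolverFactory;

public class SchemaAwareResolverExample {

    public static void main(String[] args) throws IOException {
        // Hypothetical resolver that only handles "ftp" URIs in this sketch.
        ResourceResolver ftpResolver = new ResourceResolver() {
            public Resource getResource(URI uri) throws IOException {
                // Delegate to the plain URL machinery for the sake of the example.
                return new Resource(uri.toURL().openStream());
            }

            public OutputStream getOutputStream(URI uri) throws IOException {
                throw new IOException("Writing to ftp is not supported in this sketch");
            }
        };

        // Build a scheme-aware resolver that falls back to the default resolver.
        ResourceResolverFactory.SchemaAwareResourceResolverBuilder builder =
                ResourceResolverFactory.createSchemaAwareResourceResolverBuilder(
                        ResourceResolverFactory.createDefaultResourceResolver());
        builder.registerResourceResolverForSchema("ftp", ftpResolver);
        ResourceResolver resolver = builder.build();

        // "ftp" URIs are dispatched to ftpResolver; a file URI uses the default resolver.
        OutputStream out = resolver.getOutputStream(URI.create("file:///tmp/example-output.fo"));
        out.close();
    }
}

Note that build() may only be called once: the builder then swaps in a completed delegate that throws IllegalStateException on further use, as shown in the record.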
gsheldon/optaplanner,droolsjbpm/optaplanner,baldimir/optaplanner,eshen1991/optaplanner,bernardator/optaplanner,kunallimaye/optaplanner,glamperi/optaplanner,tomasdavidorg/optaplanner,gsheldon/optaplanner,tkobayas/optaplanner,elsam/drools-planner-old,oskopek/optaplanner,glamperi/optaplanner,tomasdavidorg/optaplanner,DieterDePaepe/optaplanner,bernardator/optaplanner,glamperi/optaplanner,kunallimaye/optaplanner,baldimir/optaplanner,codeaudit/optaplanner,bernardator/optaplanner,tkobayas/optaplanner,tomasdavidorg/optaplanner,netinept/Court-Scheduler,tkobayas/optaplanner,gsheldon/optaplanner,eshen1991/optaplanner,netinept/Court-Scheduler,eshen1991/optaplanner,droolsjbpm/optaplanner,DieterDePaepe/optaplanner,kunallimaye/optaplanner,oskopek/optaplanner,snurkabill/optaplanner,baldimir/optaplanner,oskopek/optaplanner,codeaudit/optaplanner,codeaudit/optaplanner,gsheldon/optaplanner,snurkabill/optaplanner,snurkabill/optaplanner,tkobayas/optaplanner,droolsjbpm/optaplanner,elsam/drools-planner-old,elsam/drools-planner-old,baldimir/optaplanner,oskopek/optaplanner,droolsjbpm/optaplanner,netinept/Court-Scheduler
package org.drools.solver.examples.pas.solver.solution.initializer; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Comparator; import org.drools.FactHandle; import org.drools.WorkingMemory; import org.drools.solver.core.localsearch.LocalSearchSolverScope; import org.drools.solver.core.score.DefaultHardAndSoftScore; import org.drools.solver.core.score.Score; import org.drools.solver.core.solution.initializer.AbstractStartingSolutionInitializer; import org.drools.solver.examples.common.domain.PersistableIdComparator; import org.drools.solver.examples.pas.domain.AdmissionPart; import org.drools.solver.examples.pas.domain.Bed; import org.drools.solver.examples.pas.domain.BedDesignation; import org.drools.solver.examples.pas.domain.PatientAdmissionSchedule; import org.drools.solver.examples.pas.domain.Room; import org.drools.solver.examples.itc2007.examination.domain.Period; import org.apache.commons.lang.builder.CompareToBuilder; /** * @author Geoffrey De Smet */ public class PatientAdmissionScheduleStartingSolutionInitializer extends AbstractStartingSolutionInitializer { @Override public boolean isSolutionInitialized(LocalSearchSolverScope localSearchSolverScope) { PatientAdmissionSchedule patientAdmissionSchedule = (PatientAdmissionSchedule) localSearchSolverScope.getWorkingSolution(); return patientAdmissionSchedule.isInitialized(); } public void initializeSolution(LocalSearchSolverScope localSearchSolverScope) { PatientAdmissionSchedule patientAdmissionSchedule = (PatientAdmissionSchedule) localSearchSolverScope.getWorkingSolution(); initializeBedDesignationList(localSearchSolverScope, patientAdmissionSchedule); } private void initializeBedDesignationList(LocalSearchSolverScope localSearchSolverScope, PatientAdmissionSchedule patientAdmissionSchedule) { WorkingMemory workingMemory = localSearchSolverScope.getWorkingMemory(); List<BedDesignation> bedDesignationList = createBedDesignationList(patientAdmissionSchedule); // Assign one admissionPart at a time List<Bed> bedListInPriority = new ArrayList(patientAdmissionSchedule.getBedList()); // TODO try LinkedList int stillRunningCounter = 0; // TODO https://jira.jboss.org/jira/browse/JBRULES-2145 for (BedDesignation bedDesignation : bedDesignationList) { System.out.println("Trunk is bugged " + ++stillRunningCounter +"/" + bedDesignationList.size() + " but we do not use trunk. 
See JBRULES-2145."); Score unscheduledScore = localSearchSolverScope.calculateScoreFromWorkingMemory(); boolean perfectMatch = false; Score bestScore = DefaultHardAndSoftScore.valueOf(Integer.MIN_VALUE); Bed bestBed = null; FactHandle bedDesignationHandle = null; // Try every bed for that admissionPart // TODO by reordening the beds so index 0 has a different table then index 1 and so on, // this will probably be faster because perfectMatch will be true sooner for (Bed bed : bedListInPriority) { if (bed.allowsAdmissionPart(bedDesignation.getAdmissionPart())) { if (bedDesignationHandle == null) { bedDesignation.setBed(bed); bedDesignationHandle = workingMemory.insert(bedDesignation); } else { workingMemory.modifyRetract(bedDesignationHandle); bedDesignation.setBed(bed); workingMemory.modifyInsert(bedDesignationHandle, bedDesignation); } Score score = localSearchSolverScope.calculateScoreFromWorkingMemory(); if (score.compareTo(unscheduledScore) < 0) { if (score.compareTo(bestScore) > 0) { bestScore = score; bestBed = bed; } } else if (score.equals(unscheduledScore)) { perfectMatch = true; bestScore = score; bestBed = bed; break; } else { throw new IllegalStateException("The score (" + score + ") cannot be higher than unscheduledScore (" + unscheduledScore + ")."); } } if (perfectMatch) { break; } } if (bestBed == null) { throw new IllegalStateException("The bestBed (" + bestBed + ") cannot be null."); } if (!perfectMatch) { workingMemory.modifyRetract(bedDesignationHandle); bedDesignation.setBed(bestBed); workingMemory.modifyInsert(bedDesignationHandle, bedDesignation); } // put the occupied bed at the end of the list bedListInPriority.remove(bestBed); bedListInPriority.add(bestBed); } // For the GUI's combobox list mainly, not really needed Collections.sort(bedDesignationList, new PersistableIdComparator()); patientAdmissionSchedule.setBedDesignationList(bedDesignationList); } private List<BedDesignation> createBedDesignationList(PatientAdmissionSchedule patientAdmissionSchedule) { List<BedDesignationInitializationWeight> initializationWeightList = new ArrayList<BedDesignationInitializationWeight>( patientAdmissionSchedule.getAdmissionPartList().size()); for (AdmissionPart admissionPart : patientAdmissionSchedule.getAdmissionPartList()) { BedDesignation bedDesignation = new BedDesignation(); bedDesignation.setId(admissionPart.getId()); bedDesignation.setAdmissionPart(admissionPart); int disallowedCount = 0; for (Room room : patientAdmissionSchedule.getRoomList()) { disallowedCount += (room.getCapacity() * room.countDisallowedAdmissionPart(admissionPart)); } initializationWeightList.add(new BedDesignationInitializationWeight(bedDesignation, disallowedCount, bedDesignation.getAdmissionPart().getNightCount())); } Collections.sort(initializationWeightList); List<BedDesignation> bedDesignationList = new ArrayList<BedDesignation>( patientAdmissionSchedule.getAdmissionPartList().size()); for (BedDesignationInitializationWeight bedDesignationInitializationWeight : initializationWeightList) { bedDesignationList.add(bedDesignationInitializationWeight.getBedDesignation()); } return bedDesignationList; } private class BedDesignationInitializationWeight implements Comparable<BedDesignationInitializationWeight> { private BedDesignation bedDesignation; private int disallowedCount; private int nightCount; private BedDesignationInitializationWeight(BedDesignation bedDesignation, int disallowedCount, int nightCount) { this.bedDesignation = bedDesignation; this.disallowedCount = disallowedCount; 
this.nightCount = nightCount; } public BedDesignation getBedDesignation() { return bedDesignation; } public int compareTo(BedDesignationInitializationWeight other) { return -new CompareToBuilder() .append(disallowedCount, other.disallowedCount) .append(nightCount, other.nightCount) .toComparison(); } } }
drools-solver/drools-solver-examples/src/main/java/org/drools/solver/examples/pas/solver/solution/initializer/PatientAdmissionScheduleStartingSolutionInitializer.java
package org.drools.solver.examples.pas.solver.solution.initializer; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Comparator; import org.drools.FactHandle; import org.drools.WorkingMemory; import org.drools.solver.core.localsearch.LocalSearchSolverScope; import org.drools.solver.core.score.DefaultHardAndSoftScore; import org.drools.solver.core.score.Score; import org.drools.solver.core.solution.initializer.AbstractStartingSolutionInitializer; import org.drools.solver.examples.common.domain.PersistableIdComparator; import org.drools.solver.examples.pas.domain.AdmissionPart; import org.drools.solver.examples.pas.domain.Bed; import org.drools.solver.examples.pas.domain.BedDesignation; import org.drools.solver.examples.pas.domain.PatientAdmissionSchedule; import org.drools.solver.examples.pas.domain.Room; import org.drools.solver.examples.itc2007.examination.domain.Period; import org.apache.commons.lang.builder.CompareToBuilder; /** * @author Geoffrey De Smet */ public class PatientAdmissionScheduleStartingSolutionInitializer extends AbstractStartingSolutionInitializer { @Override public boolean isSolutionInitialized(LocalSearchSolverScope localSearchSolverScope) { PatientAdmissionSchedule patientAdmissionSchedule = (PatientAdmissionSchedule) localSearchSolverScope.getWorkingSolution(); return patientAdmissionSchedule.isInitialized(); } public void initializeSolution(LocalSearchSolverScope localSearchSolverScope) { PatientAdmissionSchedule patientAdmissionSchedule = (PatientAdmissionSchedule) localSearchSolverScope.getWorkingSolution(); initializeBedDesignationList(localSearchSolverScope, patientAdmissionSchedule); } private void initializeBedDesignationList(LocalSearchSolverScope localSearchSolverScope, PatientAdmissionSchedule patientAdmissionSchedule) { WorkingMemory workingMemory = localSearchSolverScope.getWorkingMemory(); List<BedDesignation> bedDesignationList = createBedDesignationList(patientAdmissionSchedule); // Assign one admissionPart at a time List<Bed> bedListInPriority = new ArrayList(patientAdmissionSchedule.getBedList()); // TODO try LinkedList int stillRunningCounter = 0; // TODO https://jira.jboss.org/jira/browse/JBRULES-2145 for (BedDesignation bedDesignation : bedDesignationList) { System.out.println("Trunk is bugged " + ++stillRunningCounter +"/" + bedDesignationList.size() + " but we do not use trunk. 
See JBRULES-2145."); Score unscheduledScore = localSearchSolverScope.calculateScoreFromWorkingMemory(); boolean perfectMatch = false; Score bestScore = DefaultHardAndSoftScore.valueOf(Integer.MIN_VALUE); Bed bestBed = null; FactHandle bedDesignationHandle = null; // Try every bed for that admissionPart // TODO by reordening the beds so index 0 has a different table then index 1 and so on, // this will probably be faster because perfectMatch will be true sooner for (Bed bed : bedListInPriority) { if (bed.allowsAdmissionPart(bedDesignation.getAdmissionPart())) { if (bedDesignationHandle == null) { bedDesignation.setBed(bed); bedDesignationHandle = workingMemory.insert(bedDesignation); } else { workingMemory.modifyRetract(bedDesignationHandle); bedDesignation.setBed(bed); workingMemory.modifyInsert(bedDesignationHandle, bedDesignation); } Score score = localSearchSolverScope.calculateScoreFromWorkingMemory(); if (score.compareTo(unscheduledScore) < 0) { if (score.compareTo(bestScore) > 0) { bestScore = score; bestBed = bed; } } else if (score.equals(unscheduledScore)) { perfectMatch = true; bestScore = score; bestBed = bed; break; } else { throw new IllegalStateException("The score (" + score + ") cannot be higher than unscheduledScore (" + unscheduledScore + ")."); } } if (perfectMatch) { break; } } if (bestBed == null) { throw new IllegalStateException("The bestBed (" + bestBed + ") cannot be null."); } if (!perfectMatch) { workingMemory.modifyRetract(bedDesignationHandle); bedDesignation.setBed(bestBed); workingMemory.modifyInsert(bedDesignationHandle, bedDesignation); } // put the occupied bed at the end of the list bedListInPriority.remove(bestBed); bedListInPriority.add(bestBed); } // For the GUI's combobox list mainly, not really needed Collections.sort(bedDesignationList, new PersistableIdComparator()); patientAdmissionSchedule.setBedDesignationList(bedDesignationList); } private List<BedDesignation> createBedDesignationList(PatientAdmissionSchedule patientAdmissionSchedule) { List<BedDesignationInitializationWeight> initializationWeightList = new ArrayList<BedDesignationInitializationWeight>( patientAdmissionSchedule.getAdmissionPartList().size()); for (AdmissionPart admissionPart : patientAdmissionSchedule.getAdmissionPartList()) { BedDesignation bedDesignation = new BedDesignation(); bedDesignation.setId(admissionPart.getId()); bedDesignation.setAdmissionPart(admissionPart); int weight = 0; for (Room room : patientAdmissionSchedule.getRoomList()) { weight += (room.getCapacity() * room.countDisallowedAdmissionPart(admissionPart)); } weight *= 1000; weight += bedDesignation.getAdmissionPart().getNightCount(); initializationWeightList.add(new BedDesignationInitializationWeight(bedDesignation, weight)); } Collections.sort(initializationWeightList); List<BedDesignation> bedDesignationList = new ArrayList<BedDesignation>( patientAdmissionSchedule.getAdmissionPartList().size()); for (BedDesignationInitializationWeight bedDesignationInitializationWeight : initializationWeightList) { bedDesignationList.add(bedDesignationInitializationWeight.getBedDesignation()); } return bedDesignationList; } private class BedDesignationInitializationWeight implements Comparable<BedDesignationInitializationWeight> { private BedDesignation bedDesignation; private int weight; private BedDesignationInitializationWeight(BedDesignation bedDesignation, int weight) { this.bedDesignation = bedDesignation; this.weight = weight; } public BedDesignation getBedDesignation() { return bedDesignation; } public int 
compareTo(BedDesignationInitializationWeight other) { return -new CompareToBuilder() .append(weight, other.weight) .toComparison(); } } }
pas: initializer weight sorting yet another improvement 2 git-svn-id: a243bed356d289ca0d1b6d299a0597bdc4ecaa09@27802 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
drools-solver/drools-solver-examples/src/main/java/org/drools/solver/examples/pas/solver/solution/initializer/PatientAdmissionScheduleStartingSolutionInitializer.java
pas: initializer weight sorting yet another improvement 2
Java
apache-2.0
15ad0f73df0a8d0efec5167a2141cbd53afd862d
0
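The optaplanner record above replaces a single combined int weight with a two-key comparison (disallowed-bed count first, night count as tie breaker, both descending). The following standalone sketch illustrates that comparator pattern; the InitializationWeight class and the sample values are hypothetical stand-ins, and only the CompareToBuilder usage is taken from the record.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.commons.lang.builder.CompareToBuilder;

public class DescendingTwoKeySortExample {

    // Hypothetical stand-in for BedDesignationInitializationWeight: the hardest
    // entries (most disallowed beds, longest stay) should be initialized first.
    static class InitializationWeight implements Comparable<InitializationWeight> {
        final String label;
        final int disallowedCount;
        final int nightCount;

        InitializationWeight(String label, int disallowedCount, int nightCount) {
            this.label = label;
            this.disallowedCount = disallowedCount;
            this.nightCount = nightCount;
        }

        public int compareTo(InitializationWeight other) {
            // Negating the ascending comparison yields a descending order on
            // disallowedCount, with nightCount as the secondary (descending) key.
            return -new CompareToBuilder()
                    .append(disallowedCount, other.disallowedCount)
                    .append(nightCount, other.nightCount)
                    .toComparison();
        }
    }

    public static void main(String[] args) {
        List<InitializationWeight> weights = new ArrayList<InitializationWeight>();
        weights.add(new InitializationWeight("a", 2, 3));
        weights.add(new InitializationWeight("b", 5, 1));
        weights.add(new InitializationWeight("c", 5, 4));
        Collections.sort(weights);
        // Resulting order: c (5,4), b (5,1), a (2,3)
        for (InitializationWeight w : weights) {
            System.out.println(w.label + " " + w.disallowedCount + " " + w.nightCount);
        }
    }
}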
sjamesr/jfreesane,sjamesr/jfreesane
package au.com.southsky.jfreesane; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.io.Closeables; import com.google.common.io.Files; import com.google.common.net.HostAndPort; import com.google.common.util.concurrent.SettableFuture; import java.awt.Color; import java.awt.image.BufferedImage; import java.awt.image.Raster; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Logger; import javax.imageio.ImageIO; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * Tests JFreeSane's interactions with the backend. * * <p> * This test assumes a sane daemon is listening on port 6566 on the local host. The daemon must have * a password-protected device named 'test'. The username should be 'testuser' and the password * should be 'goodpass'. * * <p> * If you cannot run a SANE server locally, you can set the {@code SANE_TEST_SERVER_ADDRESS} * environment variable to the address of a SANE server in {@link HostAndPort} format. * * <p> * If you can't create this test environment, feel free to add the {@link org.junit.Ignore} * annotation to the test class. * * @author James Ring ([email protected]) */ @RunWith(JUnit4.class) public class SaneSessionTest { private static final Logger log = Logger.getLogger(SaneSessionTest.class.getName()); private SaneSession session; private SanePasswordProvider correctPasswordProvider = SanePasswordProvider.forUsernameAndPassword("testuser", "goodpass"); @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Rule public ExpectedException expectedException = ExpectedException.none(); @Before public void initSession() throws Exception { HostAndPort hostAndPort; String address = System.getenv("SANE_TEST_SERVER_ADDRESS"); if (address == null) { address = "localhost"; } hostAndPort = HostAndPort.fromString(address); this.session = SaneSession.withRemoteSane( InetAddress.getByName(hostAndPort.getHostText()), hostAndPort.getPortOrDefault(6566)); session.setPasswordProvider(correctPasswordProvider); } @After public void closeSession() throws Exception { Closeables.close(session, false); } @Test public void listDevicesSucceeds() throws Exception { List<SaneDevice> devices = session.listDevices(); log.info("Got " + devices.size() + " device(s): " + devices); // Sadly the test device apparently does not show up in the device list. 
// assertThat(devices).isNotEmpty(); } @Test public void openDeviceSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); } } @Test public void optionGroupsArePopulated() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); assertThat(device.getOptionGroups()).isNotEmpty(); } } @Test public void imageAcquisitionSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); BufferedImage image = device.acquireImage(); File file = File.createTempFile("image", ".png", tempFolder.getRoot()); ImageIO.write(image, "png", file); System.out.println("Successfully wrote " + file); } } @Test public void listOptionsSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); List<SaneOption> options = device.listOptions(); Assert.assertTrue("Expect multiple SaneOptions", options.size() > 0); System.out.println("We found " + options.size() + " options"); for (SaneOption option : options) { System.out.println(option.toString()); if (option.getType() != OptionValueType.BUTTON) { System.out.println(option.getValueCount()); } } } } @Test public void getOptionValueSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); List<SaneOption> options = device.listOptions(); Assert.assertTrue("Expect multiple SaneOptions", options.size() > 0); // option 0 is always "Number of options" // must be greater than zero int optionCount = options.get(0).getIntegerValue(); Assert.assertTrue("Option count must be > 0", optionCount > 0); // print out the value of all integer-valued options for (SaneOption option : options) { System.out.print(option.getTitle()); if (!option.isActive()) { System.out.print(" [inactive]"); } else { if (option.getType() == OptionValueType.INT && option.getValueCount() == 1 && option.isActive()) { System.out.print("=" + option.getIntegerValue()); } else if (option.getType() == OptionValueType.STRING) { System.out.print("=" + option.getStringValue(Charsets.US_ASCII)); } } System.out.println(); } } } @Test public void setOptionValueSucceedsForString() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption modeOption = device.getOption("mode"); assertThat(modeOption.setStringValue("Gray")).isEqualTo("Gray"); } } @Test public void adfAcquisitionSucceeds() throws Exception { SaneDevice device = session.getDevice("test"); device.open(); assertThat(device.getOption("source").getStringConstraints()) .contains("Automatic Document Feeder"); device.getOption("source").setStringValue("Automatic Document Feeder"); for (int i = 0; i < 20; i++) { try { device.acquireImage(); } catch (SaneException e) { if (e.getStatus() == SaneStatus.STATUS_NO_DOCS) { // out of documents to read, that's fine break; } else { throw e; } } } } @Test public void acquireImageSucceedsAfterOutOfPaperCondition() throws Exception { SaneDevice device = session.getDevice("test"); device.open(); assertThat(device.getOption("source").getStringConstraints()) .contains("Automatic Document Feeder"); device.getOption("source").setStringValue("Automatic Document Feeder"); expectedException.expect(SaneException.class); expectedException.expectMessage("STATUS_NO_DOCS"); for (int i = 0; i < 20; i++) { device.acquireImage(); } } @Test public void acquireMonoImage() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption modeOption = device.getOption("mode"); 
assertEquals("Gray", modeOption.setStringValue("Gray")); BufferedImage image = device.acquireImage(); File file = File.createTempFile("mono-image", ".png", tempFolder.getRoot()); ImageIO.write(image, "png", file); System.out.println("Successfully wrote " + file); } } /** * Tests that this SANE client produces images that match * {@link "http://www.meier-geinitz.de/sane/test-backend/test-pictures.html"} . */ @Test public void producesCorrectImages() throws Exception { // Solid black and white try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("br-x").setFixedValue(200); device.getOption("br-y").setFixedValue(200); /* * assertProducesCorrectImage(device, "Gray", 1, "Solid white"); * assertProducesCorrectImage(device, "Gray", 8, "Solid white"); * assertProducesCorrectImage(device, "Gray", 16, "Solid white"); * assertProducesCorrectImage(device, "Gray", 1, "Solid black"); * assertProducesCorrectImage(device, "Gray", 8, "Solid black"); * assertProducesCorrectImage(device, "Gray", 16, "Solid black"); * * assertProducesCorrectImage(device, "Color", 1, "Solid white"); * assertProducesCorrectImage(device, "Color", 8, "Solid white"); * assertProducesCorrectImage(device, "Color", 16, "Solid white"); * assertProducesCorrectImage(device, "Color", 1, "Solid black"); * assertProducesCorrectImage(device, "Color", 8, "Solid black"); * assertProducesCorrectImage(device, "Color", 16, "Solid black"); * * assertProducesCorrectImage(device, "Gray", 1, "Color pattern"); * assertProducesCorrectImage(device, "Color", 1, "Color pattern"); * * assertProducesCorrectImage(device, "Gray", 8, "Color pattern"); * assertProducesCorrectImage(device, "Color", 8, "Color pattern"); */ assertProducesCorrectImage(device, "Gray", 1, "Grid"); // assertProducesCorrectImage(device, "Color", 1, "Color pattern"); assertProducesCorrectImage(device, "Color", 8, "Color pattern"); assertProducesCorrectImage(device, "Color", 16, "Color pattern"); } } @Test public void readsAndSetsStringsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); assertThat(device.getOption("mode").getStringValue(Charsets.US_ASCII)).matches("Gray|Color"); assertThat(device.getOption("mode").setStringValue("Gray")).isEqualTo("Gray"); assertThat(device.getOption("mode").getStringValue(Charsets.US_ASCII)).isEqualTo("Gray"); assertThat(device.getOption("read-return-value").getStringValue(Charsets.US_ASCII)) .isEqualTo("Default"); } } @Test public void readsFixedPrecisionCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); // this option gets rounded to the nearest whole number by the backend assertEquals(123, device.getOption("br-x").setFixedValue(123.456), 0.0001); assertEquals(123, device.getOption("br-x").getFixedValue(), 0.0001); } } @Test public void readsBooleanOptionsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("hand-scanner"); assertThat(option.setBooleanValue(true)).isTrue(); assertThat(option.getBooleanValue()).isTrue(); assertThat(option.setBooleanValue(false)).isFalse(); assertThat(option.getBooleanValue()).isFalse(); } } @Test public void readsStringListConstraintsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("string-constraint-string-list"); assertThat(option).isNotNull(); assertThat(option.getConstraintType()) 
.isEqualTo(OptionValueConstraintType.STRING_LIST_CONSTRAINT); assertThat(option.getStringConstraints()) .has() .exactly( "First entry", "Second entry", "This is the very long third entry. Maybe the frontend has an idea how to display it"); } } @Test public void readIntegerValueListConstraintsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("int-constraint-word-list"); assertNotNull(option); assertEquals(OptionValueConstraintType.VALUE_LIST_CONSTRAINT, option.getConstraintType()); assertEquals( ImmutableList.of(-42, -8, 0, 17, 42, 256, 65536, 16777216, 1073741824), option.getIntegerValueListConstraint()); } } @Test public void readFixedValueListConstraintsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("fixed-constraint-word-list"); assertNotNull(option); assertEquals(OptionValueConstraintType.VALUE_LIST_CONSTRAINT, option.getConstraintType()); List<Double> expected = ImmutableList.of(-32.7d, 12.1d, 42d, 129.5d); List<Double> actual = option.getFixedValueListConstraint(); assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { assertEquals(expected.get(i), actual.get(i), 0.00001); } } } @Test public void readIntegerConstraintRangeCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("int-constraint-range"); assertNotNull(option); assertEquals(OptionValueConstraintType.RANGE_CONSTRAINT, option.getConstraintType()); assertEquals(4, option.getRangeConstraints().getMinimumInteger()); assertEquals(192, option.getRangeConstraints().getMaximumInteger()); assertEquals(2, option.getRangeConstraints().getQuantumInteger()); } } @Test public void readFixedConstraintRangeCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("fixed-constraint-range"); assertNotNull(option); assertEquals(OptionValueConstraintType.RANGE_CONSTRAINT, option.getConstraintType()); assertEquals(-42.17, option.getRangeConstraints().getMinimumFixed(), 0.00001); assertEquals(32767.9999, option.getRangeConstraints().getMaximumFixed(), 0.00001); assertEquals(2.0, option.getRangeConstraints().getQuantumFixed(), 0.00001); } } @Test public void arrayOption() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("enable-test-options").setBooleanValue(true); SaneOption option = device.getOption("int-constraint-array-constraint-range"); assertNotNull(option); assertThat(option.isConstrained()).isTrue(); assertThat(option.getConstraintType()).isEqualTo(OptionValueConstraintType.RANGE_CONSTRAINT); assertEquals(OptionValueType.INT, option.getType()); List<Integer> values = Lists.newArrayList(); RangeConstraint constraints = option.getRangeConstraints(); for (int i = 0; i < option.getValueCount(); i++) { values.add(constraints.getMinimumInteger() + i * constraints.getQuantumInteger()); } assertEquals(values, option.setIntegerValue(values)); assertEquals(values, option.getIntegerArrayValue()); } } @Test @Ignore // This test fails on Travis with UNSUPPORTED. 
public void multipleListDevicesCalls() throws Exception { session.listDevices(); session.listDevices(); } @Test public void multipleGetDeviceCalls() throws Exception { session.getDevice("test"); session.getDevice("test"); } @Test public void multipleOpenDeviceCalls() throws Exception { { SaneDevice device = session.getDevice("test"); openAndCloseDevice(device); } { SaneDevice device = session.getDevice("test"); openAndCloseDevice(device); } } @Test public void handScanning() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("hand-scanner").setBooleanValue(true); device.acquireImage(); } } @Test public void threePassScanning() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); assertEquals( "Color pattern", device.getOption("test-picture").setStringValue("Color pattern")); assertEquals("Color", device.getOption("mode").setStringValue("Color")); assertEquals(true, device.getOption("three-pass").setBooleanValue(true)); for (int i = 0; i < 5; i++) { File file = File.createTempFile("three-pass", ".png", tempFolder.getRoot()); ImageIO.write(device.acquireImage(), "png", file); System.out.println("Wrote three-pass test to " + file); } } } @Test public void reducedArea() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("mode").setStringValue("Color"); device.getOption("resolution").setFixedValue(200); device.getOption("tl-x").setFixedValue(0.0); device.getOption("tl-y").setFixedValue(0.0); device.getOption("br-x").setFixedValue(105.0); device.getOption("br-y").setFixedValue(149.0); device.acquireImage(); } } @Test public void passwordAuthentication() throws Exception { // assumes that test is a password-authenticated device SaneDevice device = session.getDevice("test"); device.open(); device.acquireImage(); } /** * This test assumes that you have protected the "test" device with a username of "testuser" and a * password other than "badpassword". */ @Test public void invalidPasswordCausesAccessDeniedError() throws Exception { session.setPasswordProvider( SanePasswordProvider.forUsernameAndPassword("testuser", "badpassword")); try (SaneDevice device = session.getDevice("test")) { expectedException.expect(SaneException.class); expectedException.expectMessage("STATUS_ACCESS_DENIED"); device.open(); } } /** * Checks to ensure a STATUS_ACCESS_DENIED exception is raised if the authenticator is unable to * authenticate. 
*/ @Test public void cannotAuthenticateThrowsAccessDeniedError() throws Exception { session.setPasswordProvider( new SanePasswordProvider() { @Override public String getUsername(String resource) { return null; } @Override public String getPassword(String resource) { return null; } @Override public boolean canAuthenticate(String resource) { return false; } }); try (SaneDevice device = session.getDevice("test")) { expectedException.expect(SaneException.class); expectedException.expectMessage("STATUS_ACCESS_DENIED"); device.open(); } } @Test public void passwordAuthenticationFromLocalFileSpecified() throws Exception { File passwordFile = tempFolder.newFile("sane.pass"); Files.write("testuser:goodpass:test", passwordFile, Charsets.ISO_8859_1); session.setPasswordProvider( SanePasswordProvider.usingSanePassFile(passwordFile.getAbsolutePath())); SaneDevice device = session.getDevice("test"); device.open(); device.acquireImage(); } @Test public void listenerReceivesScanStartedEvent() throws Exception { final SettableFuture<SaneDevice> notifiedDevice = SettableFuture.create(); final AtomicInteger frameCount = new AtomicInteger(); final Set<FrameType> framesSeen = EnumSet.noneOf(FrameType.class); ScanListener listener = new ScanListenerAdapter() { @Override public void scanningStarted(SaneDevice device) { notifiedDevice.set(device); } @Override public void frameAcquisitionStarted( SaneDevice device, SaneParameters parameters, int currentFrame, int likelyTotalFrames) { frameCount.incrementAndGet(); framesSeen.add(parameters.getFrameType()); } }; SaneDevice device = session.getDevice("test"); device.open(); device.getOption("resolution").setFixedValue(1200); device.getOption("mode").setStringValue("Color"); device.getOption("three-pass").setBooleanValue(true); device.acquireImage(listener); assertThat(notifiedDevice.get()).isSameAs(device); assertThat(frameCount.get()).isEqualTo(3); assertThat(framesSeen).containsExactly(FrameType.RED, FrameType.GREEN, FrameType.BLUE); } private void openAndCloseDevice(SaneDevice device) throws Exception { try { device.open(); device.listOptions(); } finally { device.close(); } } private void assertProducesCorrectImage( SaneDevice device, String mode, int sampleDepth, String testPicture) throws IOException, SaneException { BufferedImage actualImage = acquireImage(device, mode, sampleDepth, testPicture); writeImage(mode, sampleDepth, testPicture, actualImage); if (testPicture.startsWith("Solid")) { assertImageSolidColor(testPicture.endsWith("black") ? Color.black : Color.white, actualImage); } // TODO(sjr): compare with reference images. 
} private void writeImage( String mode, int sampleDepth, String testPicture, BufferedImage actualImage) throws IOException { File file = File.createTempFile( String.format("image-%s-%d-%s", mode, sampleDepth, testPicture.replace(' ', '_')), ".png", tempFolder.getRoot()); ImageIO.write(actualImage, "png", file); System.out.println("Successfully wrote " + file); } private void assertImageSolidColor(Color color, BufferedImage image) { for (int x = 0; x < image.getWidth(); x++) { for (int y = 0; y < image.getHeight(); y++) { assertEquals(color.getRGB(), image.getRGB(x, y)); } } } private BufferedImage acquireImage( SaneDevice device, String mode, int sampleDepth, String testPicture) throws IOException, SaneException { device.getOption("mode").setStringValue(mode); device.getOption("depth").setIntegerValue(sampleDepth); device.getOption("test-picture").setStringValue(testPicture); return device.acquireImage(); } private void assertImagesEqual(BufferedImage expected, BufferedImage actual) { assertEquals("image widths differ", expected.getWidth(), actual.getWidth()); assertEquals("image heights differ", expected.getHeight(), actual.getHeight()); Raster expectedRaster = expected.getRaster(); Raster actualRaster = actual.getRaster(); for (int x = 0; x < expected.getWidth(); x++) { for (int y = 0; y < expected.getHeight(); y++) { int[] expectedPixels = expectedRaster.getPixel(x, y, (int[]) null); int[] actualPixels = actualRaster.getPixel(x, y, (int[]) null); // assert that all the samples are the same for the given pixel Assert.assertArrayEquals(expectedPixels, actualPixels); } } } }
src/test/java/au/com/southsky/jfreesane/SaneSessionTest.java
package au.com.southsky.jfreesane; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.io.Closeables; import com.google.common.io.Files; import com.google.common.net.HostAndPort; import com.google.common.util.concurrent.SettableFuture; import java.awt.Color; import java.awt.image.BufferedImage; import java.awt.image.Raster; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Logger; import javax.imageio.ImageIO; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * Tests JFreeSane's interactions with the backend. * * <p> * This test assumes a sane daemon is listening on port 6566 on the local host. The daemon must have * a password-protected device named 'test'. The username should be 'testuser' and the password * should be 'goodpass'. * * <p> * If you cannot run a SANE server locally, you can set the {@code SANE_TEST_SERVER_ADDRESS} * environment variable to the address of a SANE server in {@link HostAndPort} format. * * <p> * If you can't create this test environment, feel free to add the {@link org.junit.Ignore} * annotation to the test class. * * @author James Ring ([email protected]) */ @RunWith(JUnit4.class) public class SaneSessionTest { private static final Logger log = Logger.getLogger(SaneSessionTest.class.getName()); private SaneSession session; private SanePasswordProvider correctPasswordProvider = SanePasswordProvider.forUsernameAndPassword("testuser", "goodpass"); @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Rule public ExpectedException expectedException = ExpectedException.none(); @Before public void initSession() throws Exception { HostAndPort hostAndPort; String address = System.getenv("SANE_TEST_SERVER_ADDRESS"); if (address == null) { address = "localhost"; } hostAndPort = HostAndPort.fromString(address); this.session = SaneSession.withRemoteSane( InetAddress.getByName(hostAndPort.getHostText()), hostAndPort.getPortOrDefault(6566)); session.setPasswordProvider(correctPasswordProvider); } @After public void closeSession() throws Exception { Closeables.close(session, false); } @Test public void listDevicesSucceeds() throws Exception { List<SaneDevice> devices = session.listDevices(); log.info("Got " + devices.size() + " device(s): " + devices); // Sadly the test device apparently does not show up in the device list. 
// assertThat(devices).isNotEmpty(); } @Test public void openDeviceSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); } } @Test public void optionGroupsArePopulated() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); assertThat(device.getOptionGroups()).isNotEmpty(); } } @Test public void imageAcquisitionSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); BufferedImage image = device.acquireImage(); File file = File.createTempFile("image", ".png", tempFolder.getRoot()); ImageIO.write(image, "png", file); System.out.println("Successfully wrote " + file); } } @Test public void listOptionsSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); List<SaneOption> options = device.listOptions(); Assert.assertTrue("Expect multiple SaneOptions", options.size() > 0); System.out.println("We found " + options.size() + " options"); for (SaneOption option : options) { System.out.println(option.toString()); if (option.getType() != OptionValueType.BUTTON) { System.out.println(option.getValueCount()); } } } } @Test public void getOptionValueSucceeds() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); List<SaneOption> options = device.listOptions(); Assert.assertTrue("Expect multiple SaneOptions", options.size() > 0); // option 0 is always "Number of options" // must be greater than zero int optionCount = options.get(0).getIntegerValue(); Assert.assertTrue("Option count must be > 0", optionCount > 0); // print out the value of all integer-valued options for (SaneOption option : options) { System.out.print(option.getTitle()); if (!option.isActive()) { System.out.print(" [inactive]"); } else { if (option.getType() == OptionValueType.INT && option.getValueCount() == 1 && option.isActive()) { System.out.print("=" + option.getIntegerValue()); } else if (option.getType() == OptionValueType.STRING) { System.out.print("=" + option.getStringValue(Charsets.US_ASCII)); } } System.out.println(); } } } @Test public void setOptionValueSucceedsForString() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption modeOption = device.getOption("mode"); assertThat(modeOption.setStringValue("Gray")).isEqualTo("Gray"); } } @Test public void adfAcquisitionSucceeds() throws Exception { SaneDevice device = session.getDevice("test"); device.open(); assertThat(device.getOption("source").getStringConstraints()) .contains("Automatic Document Feeder"); device.getOption("source").setStringValue("Automatic Document Feeder"); for (int i = 0; i < 20; i++) { try { device.acquireImage(); } catch (SaneException e) { if (e.getStatus() == SaneStatus.STATUS_NO_DOCS) { // out of documents to read, that's fine break; } else { throw e; } } } } @Test public void acquireImageSucceedsAfterOutOfPaperCondition() throws Exception { SaneDevice device = session.getDevice("test"); device.open(); assertThat(device.getOption("source").getStringConstraints()) .contains("Automatic Document Feeder"); device.getOption("source").setStringValue("Automatic Document Feeder"); expectedException.expect(SaneException.class); expectedException.expectMessage("STATUS_NO_DOCS"); for (int i = 0; i < 20; i++) { device.acquireImage(); } } @Test public void acquireMonoImage() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption modeOption = device.getOption("mode"); 
assertEquals("Gray", modeOption.setStringValue("Gray")); BufferedImage image = device.acquireImage(); File file = File.createTempFile("mono-image", ".png", tempFolder.getRoot()); ImageIO.write(image, "png", file); System.out.println("Successfully wrote " + file); } } /** * Tests that this SANE client produces images that match * {@link "http://www.meier-geinitz.de/sane/test-backend/test-pictures.html"} . */ @Test public void producesCorrectImages() throws Exception { // Solid black and white try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("br-x").setFixedValue(200); device.getOption("br-y").setFixedValue(200); /* * assertProducesCorrectImage(device, "Gray", 1, "Solid white"); * assertProducesCorrectImage(device, "Gray", 8, "Solid white"); * assertProducesCorrectImage(device, "Gray", 16, "Solid white"); * assertProducesCorrectImage(device, "Gray", 1, "Solid black"); * assertProducesCorrectImage(device, "Gray", 8, "Solid black"); * assertProducesCorrectImage(device, "Gray", 16, "Solid black"); * * assertProducesCorrectImage(device, "Color", 1, "Solid white"); * assertProducesCorrectImage(device, "Color", 8, "Solid white"); * assertProducesCorrectImage(device, "Color", 16, "Solid white"); * assertProducesCorrectImage(device, "Color", 1, "Solid black"); * assertProducesCorrectImage(device, "Color", 8, "Solid black"); * assertProducesCorrectImage(device, "Color", 16, "Solid black"); * * assertProducesCorrectImage(device, "Gray", 1, "Color pattern"); * assertProducesCorrectImage(device, "Color", 1, "Color pattern"); * * assertProducesCorrectImage(device, "Gray", 8, "Color pattern"); * assertProducesCorrectImage(device, "Color", 8, "Color pattern"); */ assertProducesCorrectImage(device, "Gray", 1, "Grid"); // assertProducesCorrectImage(device, "Color", 1, "Color pattern"); assertProducesCorrectImage(device, "Color", 8, "Color pattern"); assertProducesCorrectImage(device, "Color", 16, "Color pattern"); } } @Test public void readsAndSetsStringsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); assertThat(device.getOption("mode").getStringValue(Charsets.US_ASCII)).matches("Gray|Color"); assertThat(device.getOption("mode").setStringValue("Gray")).isEqualTo("Gray"); assertThat(device.getOption("mode").getStringValue(Charsets.US_ASCII)).isEqualTo("Gray"); assertThat(device.getOption("read-return-value").getStringValue(Charsets.US_ASCII)) .isEqualTo("Default"); } } @Test public void readsFixedPrecisionCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); // this option gets rounded to the nearest whole number by the backend assertEquals(123, device.getOption("br-x").setFixedValue(123.456), 0.0001); assertEquals(123, device.getOption("br-x").getFixedValue(), 0.0001); } } @Test public void readsBooleanOptionsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("hand-scanner"); assertThat(option.setBooleanValue(true)).isTrue(); assertThat(option.getBooleanValue()).isTrue(); assertThat(option.setBooleanValue(false)).isFalse(); assertThat(option.getBooleanValue()).isFalse(); } } @Test public void readsStringListConstraintsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("string-constraint-string-list"); assertThat(option).isNotNull(); assertThat(option.getConstraintType()) 
.isEqualTo(OptionValueConstraintType.STRING_LIST_CONSTRAINT); assertThat(option.getStringConstraints()) .has() .exactly( "First entry", "Second entry", "This is the very long third entry. Maybe the frontend has an idea how to display it"); } } @Test public void readIntegerValueListConstraintsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("int-constraint-word-list"); assertNotNull(option); assertEquals(OptionValueConstraintType.VALUE_LIST_CONSTRAINT, option.getConstraintType()); assertEquals( ImmutableList.of(-42, -8, 0, 17, 42, 256, 65536, 16777216, 1073741824), option.getIntegerValueListConstraint()); } } @Test public void readFixedValueListConstraintsCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("fixed-constraint-word-list"); assertNotNull(option); assertEquals(OptionValueConstraintType.VALUE_LIST_CONSTRAINT, option.getConstraintType()); List<Double> expected = ImmutableList.of(-32.7d, 12.1d, 42d, 129.5d); List<Double> actual = option.getFixedValueListConstraint(); assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { assertEquals(expected.get(i), actual.get(i), 0.00001); } } } @Test public void readIntegerConstraintRangeCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("int-constraint-range"); assertNotNull(option); assertEquals(OptionValueConstraintType.RANGE_CONSTRAINT, option.getConstraintType()); assertEquals(4, option.getRangeConstraints().getMinimumInteger()); assertEquals(192, option.getRangeConstraints().getMaximumInteger()); assertEquals(2, option.getRangeConstraints().getQuantumInteger()); } } @Test public void readFixedConstraintRangeCorrectly() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); SaneOption option = device.getOption("fixed-constraint-range"); assertNotNull(option); assertEquals(OptionValueConstraintType.RANGE_CONSTRAINT, option.getConstraintType()); assertEquals(-42.17, option.getRangeConstraints().getMinimumFixed(), 0.00001); assertEquals(32767.9999, option.getRangeConstraints().getMaximumFixed(), 0.00001); assertEquals(2.0, option.getRangeConstraints().getQuantumFixed(), 0.00001); } } @Test public void arrayOption() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("enable-test-options").setBooleanValue(true); SaneOption option = device.getOption("int-constraint-array-constraint-range"); assertNotNull(option); assertThat(option.isConstrained()).isTrue(); assertThat(option.getConstraintType()).isEqualTo(OptionValueConstraintType.RANGE_CONSTRAINT); assertEquals(OptionValueType.INT, option.getType()); List<Integer> values = Lists.newArrayList(); RangeConstraint constraints = option.getRangeConstraints(); for (int i = 0; i < option.getValueCount(); i++) { values.add(constraints.getMinimumInteger() + i * constraints.getQuantumInteger()); } assertEquals(values, option.setIntegerValue(values)); assertEquals(values, option.getIntegerArrayValue()); } } @Test @Ignore // This test fails on Travis with UNSUPPORTED. 
public void multipleListDevicesCalls() throws Exception { session.listDevices(); session.listDevices(); } @Test public void multipleGetDeviceCalls() throws Exception { session.getDevice("test"); session.getDevice("test"); } @Test public void multipleOpenDeviceCalls() throws Exception { { SaneDevice device = session.getDevice("test"); openAndCloseDevice(device); } { SaneDevice device = session.getDevice("test"); openAndCloseDevice(device); } } @Test public void handScanning() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("hand-scanner").setBooleanValue(true); device.acquireImage(); } } @Test public void threePassScanning() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); assertEquals( "Color pattern", device.getOption("test-picture").setStringValue("Color pattern")); assertEquals("Color", device.getOption("mode").setStringValue("Color")); assertEquals(true, device.getOption("three-pass").setBooleanValue(true)); for (int i = 0; i < 5; i++) { File file = File.createTempFile("three-pass", ".png", tempFolder.getRoot()); ImageIO.write(device.acquireImage(), "png", file); System.out.println("Wrote three-pass test to " + file); } } } @Test public void reducedArea() throws Exception { try (SaneDevice device = session.getDevice("test")) { device.open(); device.getOption("mode").setStringValue("Color"); device.getOption("resolution").setFixedValue(200); device.getOption("tl-x").setFixedValue(0.0); device.getOption("tl-y").setFixedValue(0.0); device.getOption("br-x").setFixedValue(105.0); device.getOption("br-y").setFixedValue(149.0); device.acquireImage(); } } @Test public void passwordAuthentication() throws Exception { // assumes that test is a password-authenticated device SaneDevice device = session.getDevice("test"); device.open(); device.acquireImage(); } /** * This test assumes that you have protected the "test" device with a username of "testuser" and a * password other than "badpassword". */ @Test public void invalidPasswordCausesAccessDeniedError() throws Exception { session.setPasswordProvider( SanePasswordProvider.forUsernameAndPassword("testuser", "badpassword")); try (SaneDevice device = session.getDevice("test")) { expectedException.expect(SaneException.class); expectedException.expectMessage("STATUS_ACCESS_DENIED"); device.open(); } } /** * Checks to ensure a STATUS_ACCESS_DENIED exception is raised if the authenticator is unable to * authenticate. 
*/ @Test public void cannotAuthenticateThrowsAccessDeniedError() throws Exception { session.setPasswordProvider( new SanePasswordProvider() { @Override public String getUsername(String resource) { return null; } @Override public String getPassword(String resource) { return null; } @Override public boolean canAuthenticate(String resource) { return false; } }); try (SaneDevice device = session.getDevice("test")) { expectedException.expect(SaneException.class); expectedException.expectMessage("STATUS_ACCESS_DENIED"); device.open(); } } @Test public void passwordAuthenticationFromLocalFileSpecified() throws Exception { File passwordFile = tempFolder.newFile("sane.pass"); Files.write("testuser:goodpass:test", passwordFile, Charsets.ISO_8859_1); session.setPasswordProvider( SanePasswordProvider.usingSanePassFile(passwordFile.getAbsolutePath())); SaneDevice device = session.getDevice("test"); device.open(); device.acquireImage(); } @Test public void listenerReceivesScanStartedEvent() throws Exception { final SettableFuture<SaneDevice> notifiedDevice = SettableFuture.create(); final AtomicInteger frameCount = new AtomicInteger(); ScanListener listener = new ScanListenerAdapter() { @Override public void scanningStarted(SaneDevice device) { notifiedDevice.set(device); } @Override public void frameAcquisitionStarted( SaneDevice device, SaneParameters parameters, int currentFrame, int likelyTotalFrames) { frameCount.incrementAndGet(); } }; SaneDevice device = session.getDevice("test"); device.open(); device.getOption("resolution").setFixedValue(1200); device.getOption("mode").setStringValue("Color"); device.getOption("three-pass").setBooleanValue(true); device.acquireImage(listener); assertThat(notifiedDevice.get()).isSameAs(device); assertThat(frameCount.get()).isEqualTo(3); } private void openAndCloseDevice(SaneDevice device) throws Exception { try { device.open(); device.listOptions(); } finally { device.close(); } } private void assertProducesCorrectImage( SaneDevice device, String mode, int sampleDepth, String testPicture) throws IOException, SaneException { BufferedImage actualImage = acquireImage(device, mode, sampleDepth, testPicture); writeImage(mode, sampleDepth, testPicture, actualImage); if (testPicture.startsWith("Solid")) { assertImageSolidColor(testPicture.endsWith("black") ? Color.black : Color.white, actualImage); } // TODO(sjr): compare with reference images. 
} private void writeImage( String mode, int sampleDepth, String testPicture, BufferedImage actualImage) throws IOException { File file = File.createTempFile( String.format("image-%s-%d-%s", mode, sampleDepth, testPicture.replace(' ', '_')), ".png", tempFolder.getRoot()); ImageIO.write(actualImage, "png", file); System.out.println("Successfully wrote " + file); } private void assertImageSolidColor(Color color, BufferedImage image) { for (int x = 0; x < image.getWidth(); x++) { for (int y = 0; y < image.getHeight(); y++) { assertEquals(color.getRGB(), image.getRGB(x, y)); } } } private BufferedImage acquireImage( SaneDevice device, String mode, int sampleDepth, String testPicture) throws IOException, SaneException { device.getOption("mode").setStringValue(mode); device.getOption("depth").setIntegerValue(sampleDepth); device.getOption("test-picture").setStringValue(testPicture); return device.acquireImage(); } private void assertImagesEqual(BufferedImage expected, BufferedImage actual) { assertEquals("image widths differ", expected.getWidth(), actual.getWidth()); assertEquals("image heights differ", expected.getHeight(), actual.getHeight()); Raster expectedRaster = expected.getRaster(); Raster actualRaster = actual.getRaster(); for (int x = 0; x < expected.getWidth(); x++) { for (int y = 0; y < expected.getHeight(); y++) { int[] expectedPixels = expectedRaster.getPixel(x, y, (int[]) null); int[] actualPixels = actualRaster.getPixel(x, y, (int[]) null); // assert that all the samples are the same for the given pixel Assert.assertArrayEquals(expectedPixels, actualPixels); } } } }
Add frame type test (#83)
src/test/java/au/com/southsky/jfreesane/SaneSessionTest.java
Add frame type test (#83)
Java
apache-2.0
4773026fb0c2520095e05988e942bc19c373e3dd
0
allotria/intellij-community
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.uiDesigner.i18n; import com.intellij.codeInspection.BatchQuickFix; import com.intellij.codeInspection.CommonProblemDescriptor; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.codeInspection.i18n.JavaI18nUtil; import com.intellij.codeInspection.i18n.batch.I18nizeMultipleStringsDialog; import com.intellij.codeInspection.i18n.batch.I18nizedPropertyData; import com.intellij.ide.highlighter.XmlFileType; import com.intellij.java.i18n.JavaI18nBundle; import com.intellij.lang.properties.psi.PropertiesFile; import com.intellij.lang.properties.references.I18nizeQuickFixDialog; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.ui.TitledSeparator; import com.intellij.ui.components.labels.LinkLabel; import com.intellij.uiDesigner.*; import com.intellij.uiDesigner.compiler.Utils; import com.intellij.uiDesigner.inspections.FormElementProblemDescriptor; import com.intellij.uiDesigner.lw.*; import com.intellij.uiDesigner.propertyInspector.IntrospectedProperty; import com.intellij.uiDesigner.propertyInspector.properties.BorderProperty; import com.intellij.uiDesigner.radComponents.RadComponent; import com.intellij.uiDesigner.radComponents.RadContainer; import com.intellij.uiDesigner.radComponents.RadRootContainer; import com.intellij.usageView.UsageInfo; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.UniqueNameGenerator; import icons.UIDesignerIcons; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.*; public class I18nizeFormBatchFix implements LocalQuickFix, BatchQuickFix<CommonProblemDescriptor> { private static final Logger LOG = Logger.getInstance(I18nizeFormBatchFix.class); private static final List<DefaultPrefixSuggestion> PREFIX_SUGGESTIONS = Arrays.asList( new DefaultPrefixSuggestion(LinkLabel.class, "text", "link"), new DefaultPrefixSuggestion(JLabel.class, "text", "label"), new DefaultPrefixSuggestion(JLabel.class, "toolTipText", "tooltip"), new DefaultPrefixSuggestion(JRadioButton.class, "text", "radio.button"), new DefaultPrefixSuggestion(JCheckBox.class, "text", "checkbox"), new DefaultPrefixSuggestion(JButton.class, "text", "button"), new DefaultPrefixSuggestion(TitledSeparator.class, "text", "separator.title") ); @Override public void applyFix(@NotNull Project project, CommonProblemDescriptor @NotNull [] descriptors, @NotNull List<PsiElement> psiElementsToIgnore, @Nullable Runnable refreshViews) { List<I18nizedPropertyData<HardcodedStringInFormData>> dataList = new ArrayList<>(); HashSet<PsiFile> contextFiles = new HashSet<>(); Map<VirtualFile, RadRootContainer> containerMap = new HashMap<>(); UniqueNameGenerator uniqueNameGenerator = new UniqueNameGenerator(); Map<String, 
List<I18nizedPropertyData<HardcodedStringInFormData>>> duplicates = new HashMap<>(); for (CommonProblemDescriptor descriptor : descriptors) { FormElementProblemDescriptor formElementProblemDescriptor = (FormElementProblemDescriptor)descriptor; PsiFile containingFile = formElementProblemDescriptor.getPsiElement().getContainingFile(); contextFiles.add(containingFile); VirtualFile virtualFile = containingFile.getVirtualFile(); final RadRootContainer rootContainer = containerMap.computeIfAbsent(virtualFile, f -> { try { final ClassLoader classLoader = LoaderFactory.getInstance(project).getLoader(virtualFile); LwRootContainer lwRootContainer = Utils.getRootContainer(containingFile.getText(), new CompiledClassPropertiesProvider(classLoader)); Module module = ModuleUtilCore.findModuleForFile(virtualFile, project); ModuleProvider moduleProvider = new ModuleProvider() { @Override public Module getModule() { return module; } @Override public Project getProject() { return project; } }; return XmlReader.createRoot(moduleProvider, lwRootContainer, LoaderFactory.getInstance(project).getLoader(virtualFile), null); } catch (Exception e) { LOG.error(e); return null; } }); if (rootContainer == null) continue; RadComponent component = (RadComponent)FormEditingUtil.findComponent(rootContainer, formElementProblemDescriptor.getComponentId()); if (component == null) continue; String propertyName = formElementProblemDescriptor.getPropertyName(); String value = getValue(component, propertyName); if (value == null) continue; String keyPrefix = suggestPropertyKeyPrefix(component, propertyName); String defaultKey = keyPrefix != null ? keyPrefix + "." + I18nizeQuickFixDialog.generateDefaultPropertyKey(value) : null; String key = uniqueNameGenerator.generateUniqueName(I18nizeQuickFixDialog.suggestUniquePropertyKey(value, defaultKey, null)); I18nizedPropertyData<HardcodedStringInFormData> data = new I18nizedPropertyData<>(key, value, new HardcodedStringInFormData(component, propertyName, containingFile)); if (duplicates.containsKey(value)) { duplicates.computeIfAbsent(value, k -> new ArrayList<>(1)).add(data); } else { dataList.add(data); duplicates.put(value, null); } } I18nizeMultipleStringsDialog<HardcodedStringInFormData> dialog = new I18nizeMultipleStringsDialog<>(project, dataList, contextFiles, I18nizeFormBatchFix::createUsageInfo, UIDesignerIcons.InspectionSuppression); if (dialog.showAndGet()) { PropertiesFile propertiesFile = dialog.getPropertiesFile(); PsiManager manager = PsiManager.getInstance(project); Set<PsiFile> files = new HashSet<>(); for (VirtualFile file : containerMap.keySet()) { ContainerUtil.addIfNotNull(files, manager.findFile(file)); } if (files.isEmpty()) { return; } files.add(propertiesFile.getContainingFile()); String bundleName = I18nizeFormQuickFix.getBundleName(project, propertiesFile); if (bundleName == null) { return; } WriteCommandAction.runWriteCommandAction(project, getFamilyName(), null, () -> { for (I18nizedPropertyData<HardcodedStringInFormData> data : dataList) { StringDescriptor valueDescriptor; if (!data.isMarkAsNonNls()) { JavaI18nUtil.DEFAULT_PROPERTY_CREATION_HANDLER.createProperty(project, Collections.singletonList(propertiesFile), data.getKey(), data.getValue(), PsiExpression.EMPTY_ARRAY); valueDescriptor = new StringDescriptor(bundleName, data.getKey()); } else { valueDescriptor = StringDescriptor.create(data.getValue()); valueDescriptor.setNoI18n(true); } setPropertyValue(data.getContextData().getComponent(), data.getContextData().getPropertyName(), valueDescriptor); 
List<I18nizedPropertyData<HardcodedStringInFormData>> duplicateValues = duplicates.get(data.getValue()); if (duplicateValues != null) { for (I18nizedPropertyData<HardcodedStringInFormData> duplicateBean : duplicateValues) { setPropertyValue(duplicateBean.getContextData().getComponent(), duplicateBean.getContextData().getPropertyName(), valueDescriptor); } } } for (Map.Entry<VirtualFile, RadRootContainer> entry : containerMap.entrySet()) { try { final XmlWriter writer = new XmlWriter(); entry.getValue().write(writer); VfsUtil.saveText(entry.getKey(), writer.getText()); } catch (Exception e) { LOG.error(e); } } }, files.toArray(PsiFile.EMPTY_ARRAY)); } } @Nullable private static String suggestPropertyKeyPrefix(@NotNull RadComponent component, @NotNull String propertyName) { Class<?> componentClass = component.getComponentClass(); for (DefaultPrefixSuggestion suggestion : PREFIX_SUGGESTIONS) { if (suggestion.getPropertyName().equals(propertyName) && suggestion.getComponentClass().isAssignableFrom(componentClass)) { return suggestion.getDefaultPrefix(); } } if (BorderProperty.NAME.equals(propertyName)) { return "border.title"; } return null; } private static List<UsageInfo> createUsageInfo(HardcodedStringInFormData data) { RadComponent component = data.getComponent(); PsiFile file = data.getContainingFile(); TextRange range = getComponentRange(component, file); UsageInfo usageInfo = range != null ? new UsageInfo(file, range.getStartOffset(), range.getEndOffset()) : new UsageInfo(file); return Collections.singletonList(usageInfo); } private static TextRange getComponentRange(RadComponent component, PsiFile file) { CharSequence contents = file.getViewProvider().getContents(); int componentId = StringUtil.indexOf(contents, "id=\"" + component.getId() + "\""); if (componentId == -1) return null; PsiFileFactory fileFactory = PsiFileFactory.getInstance(file.getProject()); XmlFile xmlFile = (XmlFile)fileFactory.createFileFromText("form.xml", XmlFileType.INSTANCE, contents); XmlTag componentTag = PsiTreeUtil.getParentOfType(xmlFile.findElementAt(componentId), XmlTag.class); return componentTag != null ? 
componentTag.getTextRange() : null; } private static void setPropertyValue(RadComponent component, String propertyName, StringDescriptor stringDescriptor) { if (BorderProperty.NAME.equals(propertyName)) { ((RadContainer)component).setBorderTitle(stringDescriptor); } else if (propertyName.equals(ITabbedPane.TAB_TITLE_PROPERTY) || propertyName.equals(ITabbedPane.TAB_TOOLTIP_PROPERTY)) { try { new TabTitleStringDescriptorAccessor(component, propertyName).setStringDescriptorValue(stringDescriptor); } catch (Exception e) { LOG.error(e); } } else { IProperty property = ContainerUtil.find(component.getModifiedProperties(), p -> propertyName.equals(p.getName())); if (property != null) { try { new FormPropertyStringDescriptorAccessor(component, (IntrospectedProperty<?>)property).setStringDescriptorValue(stringDescriptor); } catch (Exception e) { LOG.error(e); } } else { LOG.error("Property '" + propertyName + "' not found in modified properties for component " + component.getId()); } } } private static String getValue(IComponent component, String propertyName) { if (BorderProperty.NAME.equals(propertyName)) { return ((IContainer)component).getBorderTitle().getValue(); } else if (propertyName.equals(ITabbedPane.TAB_TITLE_PROPERTY) || propertyName.equals(ITabbedPane.TAB_TOOLTIP_PROPERTY)) { return ((ITabbedPane)component.getParentContainer()).getTabProperty(component, propertyName).getValue(); } for (IProperty property : component.getModifiedProperties()) { if (property.getName().equals(propertyName)) { return ((StringDescriptor)property.getPropertyValue(component)).getValue(); } } return null; } @Override public @Nls(capitalization = Nls.Capitalization.Sentence) @NotNull String getFamilyName() { return JavaI18nBundle.message("inspection.i18n.quickfix"); } @Override public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) { } private static class HardcodedStringInFormData { private final RadComponent myComponent; private final String myPropertyName; private final PsiFile myContainingFile; private HardcodedStringInFormData(@NotNull RadComponent component, @NotNull String propertyName, @NotNull PsiFile containingFile) { myComponent = component; myPropertyName = propertyName; myContainingFile = containingFile; } private RadComponent getComponent() { return myComponent; } private String getPropertyName() { return myPropertyName; } private PsiFile getContainingFile() { return myContainingFile; } } private static class DefaultPrefixSuggestion { private final Class<?> myComponentClass; private final String myPropertyName; private final String myDefaultPrefix; private DefaultPrefixSuggestion(Class<?> componentClass, String propertyName, String defaultPrefix) { myComponentClass = componentClass; myPropertyName = propertyName; myDefaultPrefix = defaultPrefix; } private Class<?> getComponentClass() { return myComponentClass; } private String getPropertyName() { return myPropertyName; } private String getDefaultPrefix() { return myDefaultPrefix; } } }
plugins/ui-designer/src/com/intellij/uiDesigner/i18n/I18nizeFormBatchFix.java
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.uiDesigner.i18n; import com.intellij.codeInspection.BatchQuickFix; import com.intellij.codeInspection.CommonProblemDescriptor; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.codeInspection.i18n.JavaI18nUtil; import com.intellij.codeInspection.i18n.batch.I18nizeMultipleStringsDialog; import com.intellij.codeInspection.i18n.batch.I18nizedPropertyData; import com.intellij.ide.highlighter.XmlFileType; import com.intellij.java.i18n.JavaI18nBundle; import com.intellij.lang.properties.psi.PropertiesFile; import com.intellij.lang.properties.references.I18nizeQuickFixDialog; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.ui.TitledSeparator; import com.intellij.ui.components.labels.LinkLabel; import com.intellij.uiDesigner.*; import com.intellij.uiDesigner.compiler.Utils; import com.intellij.uiDesigner.editor.UIFormEditor; import com.intellij.uiDesigner.inspections.FormElementProblemDescriptor; import com.intellij.uiDesigner.lw.*; import com.intellij.uiDesigner.propertyInspector.IntrospectedProperty; import com.intellij.uiDesigner.propertyInspector.properties.BorderProperty; import com.intellij.uiDesigner.radComponents.RadComponent; import com.intellij.uiDesigner.radComponents.RadContainer; import com.intellij.uiDesigner.radComponents.RadRootContainer; import com.intellij.usageView.UsageInfo; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.UniqueNameGenerator; import icons.UIDesignerIcons; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.*; public class I18nizeFormBatchFix implements LocalQuickFix, BatchQuickFix<CommonProblemDescriptor> { private static final Logger LOG = Logger.getInstance(I18nizeFormBatchFix.class); private static final List<DefaultPrefixSuggestion> PREFIX_SUGGESTIONS = Arrays.asList( new DefaultPrefixSuggestion(LinkLabel.class, "text", "link"), new DefaultPrefixSuggestion(JLabel.class, "text", "label"), new DefaultPrefixSuggestion(JLabel.class, "toolTipText", "tooltip"), new DefaultPrefixSuggestion(JRadioButton.class, "text", "radio.button"), new DefaultPrefixSuggestion(JCheckBox.class, "text", "checkbox"), new DefaultPrefixSuggestion(JButton.class, "text", "button"), new DefaultPrefixSuggestion(TitledSeparator.class, "text", "separator.title") ); @Override public void applyFix(@NotNull Project project, CommonProblemDescriptor @NotNull [] descriptors, @NotNull List<PsiElement> psiElementsToIgnore, @Nullable Runnable refreshViews) { List<I18nizedPropertyData<HardcodedStringInFormData>> dataList = new ArrayList<>(); 
HashSet<PsiFile> contextFiles = new HashSet<>(); Map<VirtualFile, RadRootContainer> containerMap = new HashMap<>(); UniqueNameGenerator uniqueNameGenerator = new UniqueNameGenerator(); Map<String, List<I18nizedPropertyData<HardcodedStringInFormData>>> duplicates = new HashMap<>(); for (CommonProblemDescriptor descriptor : descriptors) { FormElementProblemDescriptor formElementProblemDescriptor = (FormElementProblemDescriptor)descriptor; PsiFile containingFile = formElementProblemDescriptor.getPsiElement().getContainingFile(); contextFiles.add(containingFile); VirtualFile virtualFile = containingFile.getVirtualFile(); final RadRootContainer rootContainer = containerMap.computeIfAbsent(virtualFile, f -> { try { final ClassLoader classLoader = LoaderFactory.getInstance(project).getLoader(virtualFile); LwRootContainer lwRootContainer = Utils.getRootContainer(containingFile.getText(), new CompiledClassPropertiesProvider(classLoader)); Module module = ModuleUtilCore.findModuleForFile(virtualFile, project); ModuleProvider moduleProvider = new ModuleProvider() { @Override public Module getModule() { return module; } @Override public Project getProject() { return project; } }; return XmlReader.createRoot(moduleProvider, lwRootContainer, LoaderFactory.getInstance(project).getLoader(virtualFile), null); } catch (Exception e) { LOG.error(e); return null; } }); if (rootContainer == null) continue; RadComponent component = (RadComponent)FormEditingUtil.findComponent(rootContainer, formElementProblemDescriptor.getComponentId()); if (component == null) continue; String propertyName = formElementProblemDescriptor.getPropertyName(); String value = getValue(component, propertyName); if (value == null) continue; String keyPrefix = suggestPropertyKeyPrefix(component, propertyName); String defaultKey = keyPrefix != null ? keyPrefix + "." 
+ I18nizeQuickFixDialog.generateDefaultPropertyKey(value) : null; String key = uniqueNameGenerator.generateUniqueName(I18nizeQuickFixDialog.suggestUniquePropertyKey(value, defaultKey, null)); I18nizedPropertyData<HardcodedStringInFormData> data = new I18nizedPropertyData<>(key, value, new HardcodedStringInFormData(component, propertyName, containingFile)); if (duplicates.containsKey(value)) { duplicates.computeIfAbsent(value, k -> new ArrayList<>(1)).add(data); } else { dataList.add(data); duplicates.put(value, null); } } I18nizeMultipleStringsDialog<HardcodedStringInFormData> dialog = new I18nizeMultipleStringsDialog<>(project, dataList, contextFiles, I18nizeFormBatchFix::createUsageInfo, UIDesignerIcons.InspectionSuppression); if (dialog.showAndGet()) { PropertiesFile propertiesFile = dialog.getPropertiesFile(); PsiManager manager = PsiManager.getInstance(project); Set<PsiFile> files = new HashSet<>(); for (VirtualFile file : containerMap.keySet()) { ContainerUtil.addIfNotNull(files, manager.findFile(file)); } if (files.isEmpty()) { return; } files.add(propertiesFile.getContainingFile()); String bundleName = I18nizeFormQuickFix.getBundleName(project, propertiesFile); if (bundleName == null) { return; } WriteCommandAction.runWriteCommandAction(project, getFamilyName(), null, () -> { for (I18nizedPropertyData<HardcodedStringInFormData> data : dataList) { StringDescriptor valueDescriptor; if (!data.isMarkAsNonNls()) { JavaI18nUtil.DEFAULT_PROPERTY_CREATION_HANDLER.createProperty(project, Collections.singletonList(propertiesFile), data.getKey(), data.getValue(), PsiExpression.EMPTY_ARRAY); valueDescriptor = new StringDescriptor(bundleName, data.getKey()); } else { valueDescriptor = StringDescriptor.create(data.getValue()); valueDescriptor.setNoI18n(true); } setPropertyValue(data.getContextData().getComponent(), data.getContextData().getPropertyName(), valueDescriptor); List<I18nizedPropertyData<HardcodedStringInFormData>> duplicateValues = duplicates.get(data.getValue()); if (duplicateValues != null) { for (I18nizedPropertyData<HardcodedStringInFormData> duplicateBean : duplicateValues) { setPropertyValue(duplicateBean.getContextData().getComponent(), duplicateBean.getContextData().getPropertyName(), valueDescriptor); } } } for (Map.Entry<VirtualFile, RadRootContainer> entry : containerMap.entrySet()) { try { final XmlWriter writer = new XmlWriter(); entry.getValue().write(writer); FileUtil.writeToFile(VfsUtilCore.virtualToIoFile(entry.getKey()), writer.getText()); FileEditor[] editors = FileEditorManager.getInstance(project).getAllEditors(entry.getKey()); for (FileEditor editor : editors) { if (editor instanceof UIFormEditor) { ((UIFormEditor)editor).getEditor().refresh(); } } } catch (Exception e) { LOG.error(e); } } }, files.toArray(PsiFile.EMPTY_ARRAY)); } } @Nullable private static String suggestPropertyKeyPrefix(@NotNull RadComponent component, @NotNull String propertyName) { Class<?> componentClass = component.getComponentClass(); for (DefaultPrefixSuggestion suggestion : PREFIX_SUGGESTIONS) { if (suggestion.getPropertyName().equals(propertyName) && suggestion.getComponentClass().isAssignableFrom(componentClass)) { return suggestion.getDefaultPrefix(); } } if (BorderProperty.NAME.equals(propertyName)) { return "border.title"; } return null; } private static List<UsageInfo> createUsageInfo(HardcodedStringInFormData data) { RadComponent component = data.getComponent(); PsiFile file = data.getContainingFile(); TextRange range = getComponentRange(component, file); UsageInfo usageInfo = 
range != null ? new UsageInfo(file, range.getStartOffset(), range.getEndOffset()) : new UsageInfo(file); return Collections.singletonList(usageInfo); } private static TextRange getComponentRange(RadComponent component, PsiFile file) { CharSequence contents = file.getViewProvider().getContents(); int componentId = StringUtil.indexOf(contents, "id=\"" + component.getId() + "\""); if (componentId == -1) return null; PsiFileFactory fileFactory = PsiFileFactory.getInstance(file.getProject()); XmlFile xmlFile = (XmlFile)fileFactory.createFileFromText("form.xml", XmlFileType.INSTANCE, contents); XmlTag componentTag = PsiTreeUtil.getParentOfType(xmlFile.findElementAt(componentId), XmlTag.class); return componentTag != null ? componentTag.getTextRange() : null; } private static void setPropertyValue(RadComponent component, String propertyName, StringDescriptor stringDescriptor) { if (BorderProperty.NAME.equals(propertyName)) { ((RadContainer)component).setBorderTitle(stringDescriptor); } else if (propertyName.equals(ITabbedPane.TAB_TITLE_PROPERTY) || propertyName.equals(ITabbedPane.TAB_TOOLTIP_PROPERTY)) { try { new TabTitleStringDescriptorAccessor(component, propertyName).setStringDescriptorValue(stringDescriptor); } catch (Exception e) { LOG.error(e); } } else { IProperty property = ContainerUtil.find(component.getModifiedProperties(), p -> propertyName.equals(p.getName())); if (property != null) { try { new FormPropertyStringDescriptorAccessor(component, (IntrospectedProperty<?>)property).setStringDescriptorValue(stringDescriptor); } catch (Exception e) { LOG.error(e); } } else { LOG.error("Property '" + propertyName + "' not found in modified properties for component " + component.getId()); } } } private static String getValue(IComponent component, String propertyName) { if (BorderProperty.NAME.equals(propertyName)) { return ((IContainer)component).getBorderTitle().getValue(); } else if (propertyName.equals(ITabbedPane.TAB_TITLE_PROPERTY) || propertyName.equals(ITabbedPane.TAB_TOOLTIP_PROPERTY)) { return ((ITabbedPane)component.getParentContainer()).getTabProperty(component, propertyName).getValue(); } for (IProperty property : component.getModifiedProperties()) { if (property.getName().equals(propertyName)) { return ((StringDescriptor)property.getPropertyValue(component)).getValue(); } } return null; } @Override public @Nls(capitalization = Nls.Capitalization.Sentence) @NotNull String getFamilyName() { return JavaI18nBundle.message("inspection.i18n.quickfix"); } @Override public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) { } private static class HardcodedStringInFormData { private final RadComponent myComponent; private final String myPropertyName; private final PsiFile myContainingFile; private HardcodedStringInFormData(@NotNull RadComponent component, @NotNull String propertyName, @NotNull PsiFile containingFile) { myComponent = component; myPropertyName = propertyName; myContainingFile = containingFile; } private RadComponent getComponent() { return myComponent; } private String getPropertyName() { return myPropertyName; } private PsiFile getContainingFile() { return myContainingFile; } } private static class DefaultPrefixSuggestion { private final Class<?> myComponentClass; private final String myPropertyName; private final String myDefaultPrefix; private DefaultPrefixSuggestion(Class<?> componentClass, String propertyName, String defaultPrefix) { myComponentClass = componentClass; myPropertyName = propertyName; myDefaultPrefix = defaultPrefix; } private 
Class<?> getComponentClass() { return myComponentClass; } private String getPropertyName() { return myPropertyName; } private String getDefaultPrefix() { return myDefaultPrefix; } } }
[i18n plugin] update form file via VFS in batch quick fix for hardcoded strings in UI forms Otherwise modifications aren't shown until File | Reload All From Disk is invoked. Also we don't need to manually refresh GUI editor anymore. GitOrigin-RevId: 7e0ed9b251b4582ce86d7da013bc186ca81b58a6
plugins/ui-designer/src/com/intellij/uiDesigner/i18n/I18nizeFormBatchFix.java
[i18n plugin] update form file via VFS in batch quick fix for hardcoded strings in UI forms
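The change recorded above replaces a raw java.io write of the .form file with a write through the IntelliJ VFS, which is why modifications become visible in open GUI form editors without File | Reload All From Disk or a manual editor refresh. Below is a minimal sketch of that pattern only, not code from the commit; it assumes an open Project and the form's VirtualFile, and the class and method names (FormWriteSketch, writeViaJavaIo, writeViaVfs) are illustrative. The platform calls themselves (FileUtil.writeToFile, VfsUtilCore.virtualToIoFile, VfsUtil.saveText, WriteCommandAction.runWriteCommandAction) all appear in the record.

// Sketch only: contrasts the old java.io write with the VFS write adopted by the commit.
// Assumes an open Project and the form file's VirtualFile; names here are illustrative.
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import java.io.IOException;

final class FormWriteSketch {

  // Old approach: writes to the file on disk behind the VFS, so open editors keep showing
  // stale content until the file is reloaded or the form editor is refreshed manually.
  static void writeViaJavaIo(VirtualFile formFile, String newText) throws IOException {
    FileUtil.writeToFile(VfsUtilCore.virtualToIoFile(formFile), newText);
  }

  // New approach: write through the VFS inside a write command, so the platform propagates
  // the change to any editor that has the form open.
  static void writeViaVfs(Project project, VirtualFile formFile, String newText) {
    WriteCommandAction.runWriteCommandAction(project, () -> {
      try {
        VfsUtil.saveText(formFile, newText);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    });
  }
}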
Java
apache-2.0
0936ad10b8cca70dc676a7b870a8dda2cd816de8
0
caot/intellij-community,vladmm/intellij-community,ryano144/intellij-community,fitermay/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,allotria/intellij-community,kdwink/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,dslomov/intellij-community,supersven/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,ernestp/consulo,kool79/intellij-community,clumsy/intellij-community,samthor/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,jexp/idea2,slisson/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,signed/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,consulo/consulo,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,FHannes/intellij-community
package org.jetbrains.plugins.groovy.lang.dynamic; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.PsiType; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.testFramework.fixtures.CodeInsightTestFixture; import com.intellij.testFramework.fixtures.IdeaProjectTestFixture; import com.intellij.testFramework.fixtures.IdeaTestFixtureFactory; import com.intellij.testFramework.fixtures.TestFixtureBuilder; import com.intellij.util.IncorrectOperationException; import junit.framework.Test; import org.jetbrains.plugins.groovy.annotator.intentions.QuickfixUtil; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.DynamicFix; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.DynamicManager; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.MyPair; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.elements.*; import org.jetbrains.plugins.groovy.lang.psi.GroovyFile; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition; import org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil; import org.jetbrains.plugins.groovy.testcases.GroovyFileSetTestCase; import org.jetbrains.plugins.groovy.util.TestUtils; import java.io.File; import java.util.List; /** * User: Dmitry.Krasilschikov * Date: 01.04.2008 */ public class DynamicTest extends GroovyFileSetTestCase { private CodeInsightTestFixture myCodeInsightFixture; public DynamicTest() { super(TestUtils.getTestDataPath() + "/dynamic/"); } public String getSearchPattern() { return "(.*)\\.groovy"; } protected void runTest(final File file) throws Throwable { final Ref<Pair<String, DItemElement>> result = new Ref<Pair<String, DItemElement>>(); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { try { result.set(doDynamicFix(myProject, file.getName(), file)); } catch (IncorrectOperationException e) { e.printStackTrace(); } catch (Throwable throwable) { throwable.printStackTrace(); } } }); } }, null, null); final Pair<String, DItemElement> pair = result.get(); final DRootElement rootElement = DynamicManager.getInstance(myProject).getRootElement(); final DClassElement classElement = rootElement.getClassElement(pair.getFirst()); assert classElement != null; final DItemElement itemElement = pair.getSecond(); if (itemElement instanceof DMethodElement) { final String[] types = QuickfixUtil.getArgumentsTypes(((DMethodElement) itemElement).getPairs()); assert classElement.getMethod(itemElement.getName(), types) != null; } else { assert classElement.getPropertyByName(itemElement.getName()) != null; } // return fileText; } private Pair<String, DItemElement> doDynamicFix(Project project, String relPath, File file) throws Throwable { final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByIoFile(file); assert virtualFile != null; final PsiFile myFile = 
PsiManager.getInstance(project).findFile(virtualFile); assert myFile != null; final TestFixtureBuilder<IdeaProjectTestFixture> projectBuilder = IdeaTestFixtureFactory.getFixtureFactory().createFixtureBuilder(); myCodeInsightFixture = IdeaTestFixtureFactory.getFixtureFactory().createCodeInsightFixture(projectBuilder.getFixture()); myCodeInsightFixture.setTestDataPath(TestUtils.getTestDataPath()); myCodeInsightFixture.setUp(); final List<IntentionAction> actions = myCodeInsightFixture.getAvailableIntentions("/dynamic/" + relPath); DynamicFix dynamicFix = null; for (IntentionAction action : actions) { if (action instanceof DynamicFix) { dynamicFix = ((DynamicFix) action); break; } } if (dynamicFix == null) return null; dynamicFix.invoke(project); final GroovyFile groovyFile = (GroovyFile) myFile; final GrTypeDefinition[] grTypeDefinitions = groovyFile.getTypeDefinitions(); final PsiClass classDefinition; if (!groovyFile.isScript()) { classDefinition = grTypeDefinitions[0]; } else { classDefinition = groovyFile.getScriptClass(); } assert classDefinition != null; DItemElement itemElement; final GrReferenceExpression referenceExpression = dynamicFix.getReferenceExpression(); if (dynamicFix.isMethod()) { final PsiType[] psiTypes = PsiUtil.getArgumentTypes(referenceExpression, false, false); final String[] methodArgumentsNames = QuickfixUtil.getMethodArgumentsNames(project, psiTypes); final List<MyPair> pairs = QuickfixUtil.swapArgumentsAndTypes(methodArgumentsNames, psiTypes); itemElement = new DMethodElement(false, referenceExpression.getName(), "java.lang.Object", pairs); } else { itemElement = new DPropertyElement(false, referenceExpression.getName(), "java.lang.Object"); } return new Pair<String, DItemElement>(classDefinition.getQualifiedName(), itemElement); } protected void tearDown() { try { CodeStyleSettingsManager.getInstance(myProject).dropTemporarySettings(); myCodeInsightFixture.tearDown(); myCodeInsightFixture = null; myFixture.tearDown(); } catch (Exception e) { e.printStackTrace(); } super.tearDown(); } public static Test suite() { return new DynamicTest(); } }
plugins/groovy/test/org/jetbrains/plugins/groovy/lang/dynamic/DynamicTest.java
package org.jetbrains.plugins.groovy.lang.dynamic; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.PsiType; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.testFramework.fixtures.CodeInsightTestFixture; import com.intellij.testFramework.fixtures.IdeaProjectTestFixture; import com.intellij.testFramework.fixtures.IdeaTestFixtureFactory; import com.intellij.testFramework.fixtures.TestFixtureBuilder; import com.intellij.util.IncorrectOperationException; import junit.framework.Test; import org.jetbrains.plugins.groovy.annotator.intentions.QuickfixUtil; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.DynamicFix; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.DynamicManager; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.MyPair; import org.jetbrains.plugins.groovy.annotator.intentions.dynamic.elements.*; import org.jetbrains.plugins.groovy.lang.psi.GroovyFile; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition; import org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil; import org.jetbrains.plugins.groovy.testcases.GroovyFileSetTestCase; import org.jetbrains.plugins.groovy.util.TestUtils; import java.io.File; import java.util.List; /** * User: Dmitry.Krasilschikov * Date: 01.04.2008 */ public class DynamicTest extends GroovyFileSetTestCase { private CodeInsightTestFixture myCodeInsightFixture; public DynamicTest() { super(TestUtils.getTestDataPath() + "/dynamic/"); } public String getSearchPattern() { return "(.*)\\.groovy"; } protected void runTest(final File file) throws Throwable { final Ref<Pair<String, DItemElement>> result = new Ref<Pair<String, DItemElement>>(); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { try { result.set(doDynamicFix(myProject, file.getName(), file)); } catch (IncorrectOperationException e) { e.printStackTrace(); } catch (Throwable throwable) { throwable.printStackTrace(); } } }); } }, null, null); final Pair<String, DItemElement> pair = result.get(); final DRootElement rootElement = DynamicManager.getInstance(myProject).getRootElement(); final DClassElement classElement = rootElement.getClassElement(pair.getFirst()); assert classElement != null; final DItemElement itemElement = pair.getSecond(); if (itemElement instanceof DMethodElement) { final String[] types = QuickfixUtil.getArgumentsTypes(((DMethodElement) itemElement).getPairs()); assert classElement.getMethod(itemElement.getName(), types) != null; } else { assert classElement.getPropertyByName(itemElement.getName()) != null; } // return fileText; } private Pair<String, DItemElement> doDynamicFix(Project project, String relPath, File file) throws Throwable { final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByIoFile(file); assert virtualFile != null; final PsiFile myFile = 
PsiManager.getInstance(project).findFile(virtualFile); assert myFile != null; final TestFixtureBuilder<IdeaProjectTestFixture> projectBuilder = IdeaTestFixtureFactory.getFixtureFactory().createFixtureBuilder(); myCodeInsightFixture = IdeaTestFixtureFactory.getFixtureFactory().createCodeInsightFixture(projectBuilder.getFixture()); myCodeInsightFixture.setTestDataPath(TestUtils.getTestDataPath()); myCodeInsightFixture.setUp(); final List<IntentionAction> actions = myCodeInsightFixture.getAvailableIntentions("/dynamic/" + relPath); DynamicFix dynamicFix = null; for (IntentionAction action : actions) { if (action instanceof DynamicFix) { dynamicFix = ((DynamicFix) action); break; } } if (dynamicFix == null) return null; dynamicFix.invoke(project); final GroovyFile groovyFile = (GroovyFile) myFile; final GrTypeDefinition[] grTypeDefinitions = groovyFile.getTypeDefinitions(); final PsiClass classDefinition; if (!groovyFile.isScript()) { classDefinition = grTypeDefinitions[0]; } else { classDefinition = groovyFile.getScriptClass(); } assert classDefinition != null; DItemElement itemElement; final GrReferenceExpression referenceExpression = dynamicFix.getReferenceExpression(); if (dynamicFix.isMethod()) { final PsiType[] psiTypes = PsiUtil.getArgumentTypes(referenceExpression, false, false); final String[] methodArgumentsNames = QuickfixUtil.getMethodArgumentsNames(project, psiTypes); final List<MyPair> pairs = QuickfixUtil.swapArgumentsAndTypes(methodArgumentsNames, psiTypes); itemElement = new DMethodElement(false, referenceExpression.getName(), "java.lang.Object", pairs); } else { itemElement = new DPropertyElement(false, referenceExpression.getName(), "java.lang.Object"); } return new Pair<String, DItemElement>(classDefinition.getQualifiedName(), itemElement); } protected void tearDown() { try { final CodeStyleSettingsManager manager = CodeStyleSettingsManager.getInstance(myProject); manager.dropTemporarySettings(); myCodeInsightFixture.tearDown(); myCodeInsightFixture = null; myFixture.tearDown(); } catch (Exception e) { e.printStackTrace(); } super.tearDown(); } public static Test suite() { return new DynamicTest(); } }
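In runTest above, the fix is applied inside a Runnable handed to CommandProcessor and runWriteAction, and the outcome is carried out of that callback through a Ref holding a Pair. Outside the IntelliJ platform the same hand-off can be sketched with java.util.concurrent.atomic.AtomicReference; the snippet below is purely illustrative, with made-up names, and does not use any plugin APIs.

import java.util.AbstractMap.SimpleEntry;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicReference;

// Illustrative only: capture a result produced inside a Runnable that is executed
// by some framework callback, mirroring the Ref<Pair<...>> usage in the test above.
public class CapturedResultExample {

    static void runInFramework(Runnable r) {
        r.run(); // stand-in for CommandProcessor / write-action execution
    }

    public static void main(String[] args) {
        AtomicReference<Entry<String, Integer>> result = new AtomicReference<>();
        runInFramework(() -> result.set(new SimpleEntry<>("className", 42)));
        Entry<String, Integer> pair = result.get();
        System.out.println(pair.getKey() + " -> " + pair.getValue());
    }
}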
default PsiElementFinder moved to the first place
plugins/groovy/test/org/jetbrains/plugins/groovy/lang/dynamic/DynamicTest.java
default PsiElementFinder moved to the first place
Java
apache-2.0
b83fd0c0521d6dbc62bbfe1754c13b26711bcc4b
0
troopson/cc,troopson/cc,troopson/cc
src/main/java/org/wertx/container/Singleton.java
/**
 *
 */
package org.wertx.container;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @author 瞿建军 Email: [email protected] 2017年2月15日
 */
public final class Singleton {

    private static Singleton holder;

    @SuppressWarnings("rawtypes")
    private Map<Class, Object> map;

    private Singleton() {
        map = new ConcurrentHashMap<>();
    }

    private static Singleton getSelf() {
        if (holder == null) {
            synchronized (Singleton.class) {
                if (holder == null)
                    holder = new Singleton();
            }
        }
        return holder;
    }

    @SuppressWarnings("unchecked")
    public static <T> T getInstance(Class<T> c) {
        Singleton s = getSelf();
        try {
            T o = (T) s.map.get(c);
            if (o == null) {
                synchronized (c) {
                    if (s.map.get(c) == null) {
                        o = c.newInstance();
                        s.map.put(c, o);
                    }
                }
            }
            return o;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
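The class above is a lazily initialized, double-checked-locking registry that keeps one instance per requested class. Note that when a competing thread creates the instance first, the inner if (s.map.get(c) == null) guard is skipped, the local o is never re-read from the map, and the caller receives null. Below is a minimal stand-alone sketch of the same per-class registry built on ConcurrentHashMap.computeIfAbsent, which makes the create-if-missing step atomic without explicit locks; the class and package names here are illustrative and not part of the original repository.

package example.container; // hypothetical package, not from org.wertx

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative sketch: one lazily created instance per requested class,
// with the create-if-missing step performed atomically by the map itself.
public final class InstanceRegistry {

    private static final Map<Class<?>, Object> INSTANCES = new ConcurrentHashMap<>();

    private InstanceRegistry() {
    }

    @SuppressWarnings("unchecked")
    public static <T> T getInstance(Class<T> c) {
        return (T) INSTANCES.computeIfAbsent(c, key -> {
            try {
                // Requires a public no-arg constructor, like the original container.
                return key.getDeclaredConstructor().newInstance();
            } catch (ReflectiveOperationException e) {
                throw new RuntimeException(e);
            }
        });
    }
}

// Usage (with a hypothetical service class): MyService s = InstanceRegistry.getInstance(MyService.class);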
remove container
src/main/java/org/wertx/container/Singleton.java
remove container
Java
apache-2.0
17098e8da84347c88c8cce60d54e0d8bf4ecce24
0
tylerparsons/surfdep
/* ###################################### SurfaceGrowth.java @author Tyler Parsons @created 7 May 2014 A runnable class that manages instant- iation of models, visualization, data analysis, UI and I/O of parameters. ###################################### */ package ch13; import org.opensourcephysics.controls.AbstractSimulation; import org.opensourcephysics.controls.SimulationControl; import org.opensourcephysics.frames.PlotFrame; import org.opensourcephysics.frames.LatticeFrame; import java.awt.Color; import ch13.Parameter; import ch13.LinearRegression.Function; import java.util.ArrayList; import java.util.Scanner; public class SurfaceGrowth extends AbstractSimulation { private LatticeFrame lattice; private PlotFrame width_vs_time, width_vs_length; private LinearRegression lnw_vs_lnL; private ArrayList<Deposition> models; private Deposition model; private DepositionDataManager dataManager; /************************** * Initialization Methods * **************************/ public SurfaceGrowth() { //set up visualizations model = new BallisticDeposition(); models = new ArrayList<Deposition>(); lattice = new LatticeFrame(model.getClass().getName()); width_vs_time = new PlotFrame("ln t (t in steps)", "ln w", "ln w = b*ln t + C"); width_vs_time.setAutoscaleX(true); width_vs_time.setAutoscaleY(true); width_vs_length = new PlotFrame("ln L", "ln w_avg", "ln w = a*ln L + C (After Saturation)"); width_vs_length.setAutoscaleX(true); width_vs_length.setAutoscaleY(true); dataManager = new DepositionDataManager( "C:\\Users\\Tyler\\Documents\\Classes\\CurrentClasses\\PHYS436\\workspace\\csm\\data\\id_log.txt", "C:\\Users\\Tyler\\Documents\\Classes\\CurrentClasses\\PHYS436\\workspace\\csm\\data\\deposition_data.txt", "C:\\Users\\Tyler\\Documents\\Classes\\CurrentClasses\\PHYS436\\workspace\\csm\\data\\deposition_data.csv" ); dataManager.startTrial(); } public void initialize() { //Create a new model for each simulation model = new BallisticDeposition(); lattice.clearDrawables(); lattice.setVisible(false); width_vs_length.setVisible(false); //Set Parameters ArrayList<Parameter> params = model.parameters(); Parameter p; for (int i = 0; i < params.size(); i++) { p = params.get(i); p.value = control.getDouble(p.name); } model.init(); if (control.getBoolean("Enable Visualizations")) { lattice.addDrawable(model); lattice.setVisible(true); lattice.setPreferredMinMax( 0, model.getLength()*model.getXSpacing(), 0, model.getHeight()*model.getYSpacing() ); } } /***************** * Control Setup * *****************/ public void reset() { //Add Parameters to control ArrayList<Parameter> params = model.parameters(); Parameter p; for (int i = 0; i < params.size(); i++) { p = params.get(i); control.setValue(p.name, p.defaultValue); } //Control Values control.setValue("Save Data", true); control.setValue("Plot All", false); control.setValue("Enable Visualizations", false); enableStepsPerDisplay(true); } /**************** * Calculations * ****************/ protected void doStep() { if(model.getAverageHeight() > 0.9*model.getHeight()) { stopSimulation(); return; } try { model.step(); } catch(ArrayIndexOutOfBoundsException e) { stopSimulation(); return; } int time = model.getTime(); if(time%pointModulus(time) == 0) { width_vs_time.append(model.getLength(), Math.log(time), Math.log(model.getWidth(time))); } } public void stopRunning() { //Save model for later use, avoiding duplicate entries in model set if (!exists(model)) models.add(model); //Estimate t_cross Scanner in = new Scanner(System.in); System.out.println("Define 
regions over which to run the regression:"); System.out.print("ln(t_0) = "); int t_0 = (int)Math.exp(in.nextDouble()); System.out.print("ln(t_cross1) = "); int t_cross1 = (int)Math.exp(in.nextDouble()); System.out.print("ln(t_cross2) = "); int t_cross2 = (int)Math.exp(in.nextDouble()); //Run calculations model.calculateBeta(t_0, t_cross1); model.calculatelnw_avg(t_cross2); double beta_avg = calculateAverageBeta(); double alpha = calculateAlpha(); //Wrap data in Parameters to pass to dataManager as list ArrayList<Parameter> addlParams = new ArrayList<Parameter>(); addlParams.add(new Parameter("averageHeight", model.getAverageHeight())); addlParams.add(new Parameter("width", model.getWidth(model.getTime()))); addlParams.add(new Parameter("numsteps", model.getTime())); addlParams.add(new Parameter("t_cross1", t_cross1)); addlParams.add(new Parameter("t_cross2", t_cross2)); addlParams.add(new Parameter("lnw_avg", model.getlnw_avg())); addlParams.add(new Parameter("beta", model.getBeta())); addlParams.add(new Parameter("beta_avg", beta_avg)); addlParams.add(new Parameter("alpha", alpha)); addlParams.add(new Parameter("R2", lnw_vs_lnL.R2())); //Print params to control for(Parameter p: model.parameters()) control.println(p.name + " = " + p.value); for(Parameter p: addlParams) control.println(p.name + " = " + p.value); //Save, display data if (control.getBoolean("Save Data")) { dataManager.saveToTxt(model, addlParams); dataManager.saveToCSV(model, addlParams); String fileName = "L"+model.getLength()+"H"+model.getHeight(); dataManager.saveImage(lattice, "lattices", fileName + ".jpeg"); dataManager.saveImage(width_vs_time, "plots", fileName + ".jpeg"); } if (control.getBoolean("Plot All")) { plotAll(); dataManager.saveImage(width_vs_time, ".", "masterPlot.jpeg"); dataManager.saveImage(width_vs_length, ".", "alphaPlot.jpeg"); } } private double calculateAverageBeta() { double sum = 0; for(Deposition m: models) { sum += m.getBeta(); } return sum/(double)models.size(); } // Runs regression of lnw_avg vs lnL private double calculateAlpha() { //wrap lnL, lnw_avg in Functions Function lnw_avg = new Function() { public double val(double x) { return models.get((int)x).getlnw_avg(); } }; Function lnL = new Function() { public double val(double x) { return Math.log(models.get((int)x).getLength()); } }; //Pass functions to regression lnw_vs_lnL = new LinearRegression(lnL, lnw_avg, 0, (double)models.size()-1, 1); return lnw_vs_lnL.m(); } public boolean exists(Deposition m) { for (Deposition mod: models) if (m.equals(mod)) return true; return false; } /*************************** * Visualization Functions * ***************************/ /* * Point Density Calculator * - Determines density of plotted points * based on time elapsed and N_max, the * plot's point capacity. 
* - Density decays such that lim t->inf- * inity N(t) = N_max * - Returns a max of 1000 to continue p- * lotting for very large t * */ final static int N_max = 10000; final static int mod_max = 10000; private int pointModulus(int t) { if (t > N_max*((int)(Math.log(mod_max)))) return mod_max; return (int)(Math.exp(((double)t)/((double)N_max))) + 1; } /* * Plots all models on two different plots * - width_vs_time * -> used to measure beta * -> plots entire width array up to * total run time for each model * - lnw_vs_lnA * -> plots ln of avgerage width ag- * ainst ln L * -> runs linear regression for alpha * * */ private void plotAll() { // width_vs_time width_vs_time.clearDrawables(); for (int i = 0; i < models.size(); i++) { Deposition m = models.get(i); //plot entire width array, set color width_vs_time.setMarkerColor(i, colors[i%colors.length]); int time = m.getTime(); for (int t = 1; t <= time; t++) { long mod = pointModulus(t)*models.size(); if (t % mod == 0) { width_vs_time.append(i, Math.log(t), Math.log(m.getWidth(t))); } } } // width_vs_length width_vs_length.clearDrawables(); for (int i = 0; i < models.size(); i++) { Deposition m = models.get(i); width_vs_length.append(i, Math.log(m.getLength()), m.getlnw_avg()); } // Draw linear regression width_vs_length.addDrawable(lnw_vs_lnL); } // Color palette for plotting models private Color[] colors = {Color.CYAN, Color.ORANGE, Color.MAGENTA, Color.PINK, Color.YELLOW, Color.RED, Color.GREEN, Color.BLUE}; /******** * Main * ********/ public static void main(String[] args) { SimulationControl.createApp(new SurfaceGrowth()); } }
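stopRunning above estimates the growth exponent beta from the slope of ln w versus ln t over [t_0, t_cross1], and the roughness exponent alpha from the slope of ln w_avg versus ln L across models. The repository's LinearRegression class is not reproduced here; as a rough stand-alone sketch under the assumption of an ordinary least-squares fit, the slope of log-log data can be computed as follows (class and method names are illustrative).

// Minimal sketch (not from the repo): ordinary least-squares slope of ln(w) vs ln(t),
// the quantity the simulation uses to estimate the growth exponent beta.
public final class LogLogSlope {

    // x[i] = ln(t_i), y[i] = ln(w_i); returns the fitted slope.
    public static double slope(double[] x, double[] y) {
        int n = x.length;
        double sx = 0, sy = 0, sxx = 0, sxy = 0;
        for (int i = 0; i < n; i++) {
            sx += x[i];
            sy += y[i];
            sxx += x[i] * x[i];
            sxy += x[i] * y[i];
        }
        return (n * sxy - sx * sy) / (n * sxx - sx * sx);
    }

    public static void main(String[] args) {
        // Synthetic data with w ~ t^0.5, so the recovered slope should be close to 0.5.
        int n = 100;
        double[] lnT = new double[n];
        double[] lnW = new double[n];
        for (int i = 0; i < n; i++) {
            double t = i + 1;
            lnT[i] = Math.log(t);
            lnW[i] = 0.5 * Math.log(t);
        }
        System.out.println("estimated beta = " + slope(lnT, lnW));
    }
}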
SurfaceGrowth.java
/* ###################################### SurfaceGrowth.java @author Tyler Parsons @created 7 May 2014 A runnable class that manages instant- iation of models, visualization, data analysis, UI and I/O of parameters. ###################################### */ package ch13; import org.opensourcephysics.controls.AbstractSimulation; import org.opensourcephysics.controls.SimulationControl; import org.opensourcephysics.frames.PlotFrame; import org.opensourcephysics.frames.LatticeFrame; import java.awt.Color; import ch13.Parameter; import ch13.LinearRegression.Function; import java.util.ArrayList; import java.util.Scanner; public class SurfaceGrowth extends AbstractSimulation { private LatticeFrame lattice; private PlotFrame width_vs_time, width_vs_length; private LinearRegression lnw_vs_lnL; private ArrayList<Deposition> models; private Deposition model; private DepositionDataManager dataManager; /************************** * Initialization Methods * **************************/ public SurfaceGrowth() { //set up visualizations model = new BallisticDiffusionModel(); models = new ArrayList<Deposition>(); lattice = new LatticeFrame(model.getClass().getName()); width_vs_time = new PlotFrame("ln t (t in steps)", "ln w", "ln w = b*ln t + C"); width_vs_time.setAutoscaleX(true); width_vs_time.setAutoscaleY(true); width_vs_length = new PlotFrame("ln L", "ln w_avg", "ln w = a*ln L + C (After Saturation)"); width_vs_length.setAutoscaleX(true); width_vs_length.setAutoscaleY(true); dataManager = new DepositionDataManager( "C:\\Users\\Tyler\\Documents\\Classes\\CurrentClasses\\PHYS436\\workspace\\csm\\data\\id_log.txt", "C:\\Users\\Tyler\\Documents\\Classes\\CurrentClasses\\PHYS436\\workspace\\csm\\data\\deposition_data.txt", "C:\\Users\\Tyler\\Documents\\Classes\\CurrentClasses\\PHYS436\\workspace\\csm\\data\\deposition_data.csv" ); dataManager.startTrial(); } public void initialize() { //Create a new model for each simulation model = new BallisticDiffusionModel(); lattice.clearDrawables(); lattice.setVisible(false); width_vs_length.setVisible(false); //Set Parameters ArrayList<Parameter> params = model.parameters(); Parameter p; for (int i = 0; i < params.size(); i++) { p = params.get(i); p.value = control.getDouble(p.name); } model.init(); if (control.getBoolean("Enable Visualizations")) { lattice.addDrawable(model); lattice.setVisible(true); lattice.setPreferredMinMax( 0, model.getLength()*model.getXSpacing(), 0, model.getHeight()*model.getYSpacing() ); } } /***************** * Control Setup * *****************/ public void reset() { //Add Parameters to control ArrayList<Parameter> params = model.parameters(); Parameter p; for (int i = 0; i < params.size(); i++) { p = params.get(i); control.setValue(p.name, p.defaultValue); } //Control Values control.setValue("Save Data", true); control.setValue("Plot All", false); control.setValue("Enable Visualizations", false); enableStepsPerDisplay(true); } /**************** * Calculations * ****************/ protected void doStep() { if(model.getAverageHeight() > 0.9*model.getHeight()) { stopSimulation(); return; } try { model.step(); } catch(ArrayIndexOutOfBoundsException e) { stopSimulation(); return; } int time = model.getTime(); if(time%pointModulus(time) == 0) { width_vs_time.append(model.getLength(), Math.log(time), Math.log(model.getWidth(time))); } } public void stopRunning() { //Save model for later use, avoiding duplicate entries in model set if (!exists(model)) models.add(model); //Estimate t_cross Scanner in = new Scanner(System.in); 
System.out.println("Define regions over which to run the regression:"); System.out.print("ln(t_0) = "); int t_0 = (int)Math.exp(in.nextDouble()); System.out.print("ln(t_cross1) = "); int t_cross1 = (int)Math.exp(in.nextDouble()); System.out.print("ln(t_cross2) = "); int t_cross2 = (int)Math.exp(in.nextDouble()); //Run calculations model.calculateBeta(t_0, t_cross1); model.calculatelnw_avg(t_cross2); double beta_avg = calculateAverageBeta(); double alpha = calculateAlpha(); //Wrap data in Parameters to pass to dataManager as list ArrayList<Parameter> addlParams = new ArrayList<Parameter>(); addlParams.add(new Parameter("averageHeight", model.getAverageHeight())); addlParams.add(new Parameter("width", model.getWidth(model.getTime()))); addlParams.add(new Parameter("numsteps", model.getTime())); addlParams.add(new Parameter("t_cross1", t_cross1)); addlParams.add(new Parameter("t_cross2", t_cross2)); addlParams.add(new Parameter("lnw_avg", model.getlnw_avg())); addlParams.add(new Parameter("beta", model.getBeta())); addlParams.add(new Parameter("beta_avg", beta_avg)); addlParams.add(new Parameter("alpha", alpha)); addlParams.add(new Parameter("R2", lnw_vs_lnL.R2())); //Print params to control for(Parameter p: model.parameters()) control.println(p.name + " = " + p.value); for(Parameter p: addlParams) control.println(p.name + " = " + p.value); //Save, display data if (control.getBoolean("Save Data")) { dataManager.saveToTxt(model, addlParams); dataManager.saveToCSV(model, addlParams); String fileName = "L"+model.getLength()+"H"+model.getHeight(); dataManager.saveImage(lattice, "lattices", fileName + ".jpeg"); dataManager.saveImage(width_vs_time, "plots", fileName + ".jpeg"); } if (control.getBoolean("Plot All")) { plotAll(); dataManager.saveImage(width_vs_time, ".", "masterPlot.jpeg"); dataManager.saveImage(width_vs_length, ".", "alphaPlot.jpeg"); } } private double calculateAverageBeta() { double sum = 0; for(Deposition m: models) { sum += m.getBeta(); } return sum/(double)models.size(); } // Runs regression of lnw_avg vs lnL private double calculateAlpha() { //wrap lnL, lnw_avg in Functions Function lnw_avg = new Function() { public double val(double x) { return models.get((int)x).getlnw_avg(); } }; Function lnL = new Function() { public double val(double x) { return Math.log(models.get((int)x).getLength()); } }; //Pass functions to regression lnw_vs_lnL = new LinearRegression(lnL, lnw_avg, 0, (double)models.size()-1, 1); return lnw_vs_lnL.m(); } public boolean exists(Deposition m) { for (Deposition mod: models) if (m.equals(mod)) return true; return false; } /*************************** * Visualization Functions * ***************************/ /* * Point Density Calculator * - Determines density of plotted points * based on time elapsed and N_max, the * plot's point capacity. 
* - Density decays such that lim t->inf- * inity N(t) = N_max * - Returns a max of 1000 to continue p- * lotting for very large t * */ final static int N_max = 10000; final static int mod_max = 10000; private int pointModulus(int t) { if (t > N_max*((int)(Math.log(mod_max)))) return mod_max; return (int)(Math.exp(((double)t)/((double)N_max))) + 1; } /* * Plots all models on two different plots * - width_vs_time * -> used to measure beta * -> plots entire width array up to * total run time for each model * - lnw_vs_lnA * -> plots ln of avgerage width ag- * ainst ln L * -> runs linear regression for alpha * * */ private void plotAll() { // width_vs_time width_vs_time.clearDrawables(); for (int i = 0; i < models.size(); i++) { Deposition m = models.get(i); //plot entire width array, set color width_vs_time.setMarkerColor(i, colors[i%colors.length]); int time = m.getTime(); for (int t = 1; t <= time; t++) { long mod = pointModulus(t)*models.size(); if (t % mod == 0) { width_vs_time.append(i, Math.log(t), Math.log(m.getWidth(t))); } } } // width_vs_length width_vs_length.clearDrawables(); for (int i = 0; i < models.size(); i++) { Deposition m = models.get(i); width_vs_length.append(i, Math.log(m.getLength()), m.getlnw_avg()); } // Draw linear regression width_vs_length.addDrawable(lnw_vs_lnL); } // Color palette for plotting models private Color[] colors = {Color.CYAN, Color.ORANGE, Color.MAGENTA, Color.PINK, Color.YELLOW, Color.RED, Color.GREEN, Color.BLUE}; /******** * Main * ********/ public static void main(String[] args) { SimulationControl.createApp(new SurfaceGrowth()); } }
Obscuring implementation of Ballistic Diffusion Model
SurfaceGrowth.java
Obscuring implementation of Ballistic Diffusion Model
Java
apache-2.0
3d7ea2c955bb4886ffc1954badd98ce0eff9a8a4
0
shinfan/gcloud-java,tangiel/google-cloud-java,shinfan/gcloud-java,mbrukman/gcloud-java,aozarov/gcloud-java,FirebasePrivate/google-cloud-java,jabubake/google-cloud-java,rborer/google-cloud-java,vam-google/google-cloud-java,aozarov/gcloud-java,tangiel/google-cloud-java,vam-google/google-cloud-java,mbrukman/gcloud-java,vam-google/google-cloud-java,FirebasePrivate/google-cloud-java,tangiel/google-cloud-java,vam-google/google-cloud-java,omaray/google-cloud-java,shinfan/gcloud-java,FirebasePrivate/google-cloud-java,rborer/google-cloud-java,vam-google/google-cloud-java,jabubake/google-cloud-java,rborer/google-cloud-java,jabubake/google-cloud-java,vam-google/google-cloud-java,jabubake/google-cloud-java,aozarov/gcloud-java,shinfan/gcloud-java,mbrukman/gcloud-java,shinfan/gcloud-java,FirebasePrivate/google-cloud-java,jabubake/google-cloud-java,mbrukman/gcloud-java,rborer/google-cloud-java,FirebasePrivate/google-cloud-java,omaray/google-cloud-java,omaray/google-cloud-java,tangiel/google-cloud-java,rborer/google-cloud-java,tangiel/google-cloud-java,omaray/google-cloud-java,omaray/google-cloud-java,aozarov/gcloud-java
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.datastore;

import java.util.Iterator;

/**
 * The result of a Google Cloud Datastore query submission.
 * When the result is not typed it is possible to cast it to its appropriate type according to
 * the {@link #resultClass} value.
 * Results are loaded lazily; therefore it is possible to get a {@code DatastoreException}
 * upon {@link Iterator#hasNext hasNext} or {@link Iterator#next next} calls.
 *
 * @param <V> the type of the results value.
 */
public interface QueryResults<V> extends Iterator<V> {

  /**
   * Returns the actual class of the result's values.
   */
  Class<?> resultClass();

  /**
   * Returns the Cursor for the point after the value returned in the last {@link #next} call. This
   * cursor can be used to issue subsequent queries (with the same constraints) that may return
   * additional results.
   *
   * <p>A simple use case:
   * <pre> {@code
   * Query<Entity> query = Query.entityQueryBuilder()
   *     .kind("Person")
   *     .filter(PropertyFilter.eq("favoriteFood", "pizza"))
   *     .build();
   * QueryResults<Entity> results = datastore.run(query);
   * // Consume some results (using results.next()) and do any other actions as necessary.
   * query = query.toBuilder().startCursor(results.cursorAfter()).build();
   * results = datastore.run(query); // now we will iterate over all entities not yet consumed
   * </pre>
   */
  Cursor cursorAfter();
}
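The cursorAfter javadoc above describes cursor-based paging: consume part of a result set, then re-issue the same query with startCursor set to cursorAfter(). A hedged sketch of a full paging loop built only from the API shapes shown in that javadoc follows; datastore and process(...) are placeholders, so this is an illustration rather than a complete program.

// Drain a query in pages of at most 100 entities, restarting from cursorAfter() each time.
Query<Entity> query = Query.entityQueryBuilder()
    .kind("Person")
    .filter(PropertyFilter.eq("favoriteFood", "pizza"))
    .build();
while (true) {
  QueryResults<Entity> results = datastore.run(query);
  int consumed = 0;
  while (results.hasNext() && consumed < 100) {
    Entity entity = results.next();
    process(entity);                // placeholder for application logic
    consumed++;
  }
  if (consumed == 0) {
    break;                          // nothing left to read
  }
  // Re-run the same query starting just past the last entity consumed above.
  query = query.toBuilder().startCursor(results.cursorAfter()).build();
}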
gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.datastore;

import java.util.Iterator;

/**
 * The result of a Google Cloud Datastore query submission.
 * When the result is not typed it is possible to cast it to its appropriate type according to
 * the {@link #resultClass} value.
 * Results are loaded lazily; therefore it is possible to get a {@code DatastoreException}
 * upon {@link Iterator#hasNext hasNext} or {@link Iterator#next next} calls.
 *
 * @param <V> the type of the results value.
 */
public interface QueryResults<V> extends Iterator<V> {

  /**
   * Returns the actual class of the result's values.
   */
  Class<?> resultClass();

  /**
   * Returns the Cursor for point after the value returned in the last {@link #next} call.
   */
  Cursor cursorAfter();
}
update afterCursor javadoc
gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java
update afterCursor javadoc
Java
apache-2.0
c8b6dd91c1f0ea4d8b7f7330c00105c26d9c9d4d
0
crate/crate,crate/crate,crate/crate
/* * Licensed to Crate.io GmbH ("Crate") under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. Crate licenses * this file to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * However, if you have executed another commercial license agreement * with Crate these terms will supersede the license and you may use the * software solely pursuant to the terms of the relevant commercial agreement. */ package io.crate.integrationtests; import static io.crate.protocols.postgres.PostgresNetty.PSQL_PORT_SETTING; import static org.assertj.core.api.Assertions.assertThat; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.annotation.Documented; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.InetSocketAddress; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.ConfigurationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Netty4Plugin; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.rules.TestName; import org.junit.rules.Timeout; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.annotations.Listeners; import com.carrotsearch.randomizedtesting.annotations.TestGroup; 
import com.carrotsearch.randomizedtesting.generators.RandomStrings; import io.crate.Constants; import io.crate.action.sql.SQLOperations; import io.crate.action.sql.Session; import io.crate.action.sql.SessionContext; import io.crate.analyze.Analyzer; import io.crate.analyze.ParamTypeHints; import io.crate.common.collections.Lists2; import io.crate.common.unit.TimeValue; import io.crate.data.Paging; import io.crate.data.Row; import io.crate.execution.dml.TransportShardAction; import io.crate.execution.dml.delete.TransportShardDeleteAction; import io.crate.execution.dml.upsert.TransportShardUpsertAction; import io.crate.execution.engine.collect.stats.JobsLogService; import io.crate.execution.engine.collect.stats.JobsLogs; import io.crate.execution.jobs.NodeLimits; import io.crate.execution.jobs.RootTask; import io.crate.execution.jobs.TasksService; import io.crate.execution.jobs.kill.KillableCallable; import io.crate.expression.symbol.Literal; import io.crate.expression.symbol.Symbol; import io.crate.expression.symbol.Symbols; import io.crate.metadata.ColumnIdent; import io.crate.metadata.CoordinatorTxnCtx; import io.crate.metadata.FunctionImplementation; import io.crate.metadata.Functions; import io.crate.metadata.NodeContext; import io.crate.metadata.RelationName; import io.crate.metadata.RoutingProvider; import io.crate.metadata.Schemas; import io.crate.metadata.SearchPath; import io.crate.metadata.settings.SessionSettings; import io.crate.metadata.table.TableInfo; import io.crate.planner.DependencyCarrier; import io.crate.planner.Plan; import io.crate.planner.Planner; import io.crate.planner.PlannerContext; import io.crate.planner.operators.SubQueryResults; import io.crate.protocols.postgres.PostgresNetty; import io.crate.sql.Identifiers; import io.crate.sql.parser.SqlParser; import io.crate.test.integration.SystemPropsTestLoggingListener; import io.crate.testing.SQLResponse; import io.crate.testing.SQLTransportExecutor; import io.crate.testing.TestExecutionConfig; import io.crate.testing.TestingRowConsumer; import io.crate.testing.UseHashJoins; import io.crate.testing.UseJdbc; import io.crate.testing.UseRandomizedSchema; import io.crate.types.DataType; import io.crate.user.User; import io.crate.user.UserLookup; @Listeners({SystemPropsTestLoggingListener.class}) @UseJdbc @UseHashJoins @UseRandomizedSchema public abstract class SQLIntegrationTestCase extends ESIntegTestCase { private static final Logger LOGGER = LogManager.getLogger(SQLIntegrationTestCase.class); private static final int ORIGINAL_PAGE_SIZE = Paging.PAGE_SIZE; protected static SessionSettings DUMMY_SESSION_INFO = new SessionSettings( "dummyUser", SearchPath.createSearchPathFrom("dummySchema")); /** * Annotation for tests that are slow. Slow tests do not run by default but can be * enabled. 
*/ @Documented @Inherited @Retention(RetentionPolicy.RUNTIME) @TestGroup(enabled = false, sysProperty = RUN_SLOW_TESTS_PROP) public @interface Slow {} public static final String RUN_SLOW_TESTS_PROP = "tests.crate.slow"; @Rule public Timeout globalTimeout = new Timeout(5, TimeUnit.MINUTES); @Rule public TestName testName = new TestName(); protected final SQLTransportExecutor sqlExecutor; @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(SETTING_HTTP_COMPRESSION.getKey(), false) .put(PSQL_PORT_SETTING.getKey(), 0); if (randomBoolean()) { builder.put("memory.allocation.type", "off-heap"); } return builder.build(); } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return List.of(Netty4Plugin.class); } protected SQLResponse response; public SQLIntegrationTestCase() { this(false); } public SQLIntegrationTestCase(boolean useSSL) { this(new SQLTransportExecutor( new SQLTransportExecutor.ClientProvider() { @Override public Client client() { return ESIntegTestCase.client(); } @Override public String pgUrl() { PostgresNetty postgresNetty = internalCluster().getInstance(PostgresNetty.class); BoundTransportAddress boundTransportAddress = postgresNetty.boundAddress(); if (boundTransportAddress != null) { InetSocketAddress address = boundTransportAddress.publishAddress().address(); return String.format(Locale.ENGLISH, "jdbc:postgresql://%s:%d/?ssl=%s&sslmode=%s", address.getHostName(), address.getPort(), useSSL, useSSL ? "require" : "disable"); } return null; } @Override public SQLOperations sqlOperations() { return internalCluster().getInstance(SQLOperations.class); } })); } public static SQLTransportExecutor executor(String nodeName) { return executor(nodeName, false); } public static SQLTransportExecutor executor(String nodeName, boolean useSSL) { return new SQLTransportExecutor( new SQLTransportExecutor.ClientProvider() { @Override public Client client() { return ESIntegTestCase.client(nodeName); } @Override public String pgUrl() { PostgresNetty postgresNetty = internalCluster().getInstance(PostgresNetty.class, nodeName); BoundTransportAddress boundTransportAddress = postgresNetty.boundAddress(); if (boundTransportAddress != null) { InetSocketAddress address = boundTransportAddress.publishAddress().address(); return String.format(Locale.ENGLISH, "jdbc:postgresql://%s:%d/?ssl=%s&sslmode=%s", address.getHostName(), address.getPort(), useSSL, useSSL ? 
"require" : "disable"); } return null; } @Override public SQLOperations sqlOperations() { return internalCluster().getInstance(SQLOperations.class); } }); } public String getFqn(String schema, String tableName) { if (schema.equals(Schemas.DOC_SCHEMA_NAME)) { return tableName; } return String.format(Locale.ENGLISH, "%s.%s", schema, tableName); } public String getFqn(String tableName) { return getFqn(sqlExecutor.getCurrentSchema(), tableName); } @Before public void setSearchPath() throws Exception { sqlExecutor.setSearchPath(randomizedSchema()); } @After public void resetPageSize() { Paging.PAGE_SIZE = ORIGINAL_PAGE_SIZE; } public SQLIntegrationTestCase(SQLTransportExecutor sqlExecutor) { this.sqlExecutor = sqlExecutor; } @Override public Settings indexSettings() { // set number of replicas to 0 for getting a green cluster when using only one node return Settings.builder().put("number_of_replicas", 0).build(); } @After public void assertNoTasksAreLeftOpen() throws Exception { final Field activeTasks = TasksService.class.getDeclaredField("activeTasks"); final Field activeOperationsSb = TransportShardAction.class.getDeclaredField("activeOperations"); activeTasks.setAccessible(true); activeOperationsSb.setAccessible(true); try { assertBusy(() -> { for (TasksService tasksService : internalCluster().getInstances(TasksService.class)) { try { //noinspection unchecked Map<UUID, RootTask> contexts = (Map<UUID, RootTask>) activeTasks.get(tasksService); assertThat(contexts).isEmpty(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } for (TransportShardUpsertAction action : internalCluster().getInstances(TransportShardUpsertAction.class)) { try { @SuppressWarnings("unchecked") ConcurrentHashMap<TaskId, KillableCallable<?>> operations = (ConcurrentHashMap<TaskId, KillableCallable<?>>) activeOperationsSb.get(action); assertThat(operations).isEmpty(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } for (TransportShardDeleteAction action : internalCluster().getInstances(TransportShardDeleteAction.class)) { try { @SuppressWarnings("unchecked") ConcurrentHashMap<TaskId, KillableCallable<?>> operations = (ConcurrentHashMap<TaskId, KillableCallable<?>>) activeOperationsSb.get(action); assertThat(operations).isEmpty(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } }, 10L, TimeUnit.SECONDS); } catch (AssertionError e) { StringBuilder errorMessageBuilder = new StringBuilder(); errorMessageBuilder.append("Open jobs:\n"); for (var jobsLogService : internalCluster().getInstances(JobsLogService.class)) { JobsLogs jobsLogs = jobsLogService.get(); for (var jobContent : jobsLogs.activeJobs()) { errorMessageBuilder.append(jobContent.toString()).append("\n"); } } errorMessageBuilder.append("Active tasks:\n"); String[] nodeNames = internalCluster().getNodeNames(); for (String nodeName : nodeNames) { TasksService tasksService = internalCluster().getInstance(TasksService.class, nodeName); try { //noinspection unchecked Map<UUID, RootTask> contexts = (Map<UUID, RootTask>) activeTasks.get(tasksService); String contextsString = contexts.toString(); if (!"{}".equals(contextsString)) { errorMessageBuilder.append("## node: "); errorMessageBuilder.append(nodeName); errorMessageBuilder.append("\n"); errorMessageBuilder.append(contextsString); errorMessageBuilder.append("\n"); } contexts.clear(); } catch (IllegalAccessException ex) { throw new RuntimeException(ex); } } throw new AssertionError(errorMessageBuilder.toString(), e); } } @After public void 
ensureNoInflightRequestsLeft() throws Exception { assertBusy(() -> { for (var nodeLimits : internalCluster().getInstances(NodeLimits.class)) { assertThat(nodeLimits.totalNumInflight()).isEqualTo(0L); } }); } @After public void ensure_one_node_limit_instance_per_node() { Iterable<NodeLimits> nodeLimitsInstances = internalCluster().getInstances(NodeLimits.class); int numInstances = 0; for (var nodeLimits : nodeLimitsInstances) { numInstances++; } assertThat(numInstances) .as("There must only be as many NodeLimits instances as there are nodes in the cluster") .isEqualTo(internalCluster().numNodes()); } public void waitUntilShardOperationsFinished() throws Exception { assertBusy(() -> { Iterable<IndicesService> indexServices = internalCluster().getInstances(IndicesService.class); for (IndicesService indicesService : indexServices) { for (IndexService indexService : indicesService) { for (IndexShard indexShard : indexService) { assertThat(indexShard.getActiveOperationsCount()).isEqualTo(0); } } } }, 5, TimeUnit.SECONDS); } public void waitUntilThreadPoolTasksFinished(final String name) throws Exception { assertBusy(() -> { Iterable<ThreadPool> threadPools = internalCluster().getInstances(ThreadPool.class); for (ThreadPool threadPool : threadPools) { ThreadPoolExecutor executor = (ThreadPoolExecutor) threadPool.executor(name); assertThat(executor.getActiveCount()).isEqualTo(0); } }, 5, TimeUnit.SECONDS); } /** * Execute a SQL statement as system query on a specific node in the cluster * * @param stmt the SQL statement * @param schema the schema that should be used for this statement * schema is nullable, which means the default schema ("doc") is used * @param node the name of the node on which the stmt is executed * @return the SQL Response */ public SQLResponse systemExecute(String stmt, @Nullable String schema, String node) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class, node); UserLookup userLookup; try { userLookup = internalCluster().getInstance(UserLookup.class, node); } catch (ConfigurationException ignored) { // If enterprise is not enabled there is no UserLookup instance bound in guice userLookup = () -> List.of(User.CRATE_USER); } try (Session session = sqlOperations.createSession(schema, userLookup.findUser("crate"))) { response = sqlExecutor.exec(stmt, session); } return response; } private void dumpActiveTasks() { for (var tasksService : internalCluster().getInstances(TasksService.class)) { tasksService.logActiveTasksToError(); } } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param args the arguments to replace placeholders ("?") in the statement * @return the SQLResponse */ public SQLResponse execute(String stmt, Object[] args) { try { SQLResponse response = sqlExecutor.exec(new TestExecutionConfig(isJdbcEnabled(), isHashJoinEnabled()), stmt, args); this.response = response; return response; } catch (ElasticsearchTimeoutException e) { LOGGER.error("Timeout on SQL statement: {} {}", stmt, e); dumpActiveTasks(); throw e; } } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param args the arguments of the statement * @param timeout internal timeout of the statement * @return the SQLResponse */ public SQLResponse execute(String stmt, Object[] args, TimeValue timeout) { try { SQLResponse response = sqlExecutor.exec(new TestExecutionConfig(isJdbcEnabled(), isHashJoinEnabled()), stmt, args, timeout); this.response = response; return response; } catch 
(ElasticsearchTimeoutException e) { LOGGER.error("Timeout on SQL statement: {} {}", stmt, e); dumpActiveTasks(); throw e; } } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL statement * @param schema the schema that should be used for this statement * schema is nullable, which means default schema ("doc") is used * @return the SQLResponse */ public SQLResponse execute(String stmt, @Nullable String schema) { return execute(stmt, null, createSession(schema)); } public static class PlanForNode { public final Plan plan; final String nodeName; public final PlannerContext plannerContext; private PlanForNode(Plan plan, String nodeName, PlannerContext plannerContext) { this.plan = plan; this.nodeName = nodeName; this.plannerContext = plannerContext; } } public PlanForNode plan(String stmt) { String[] nodeNames = internalCluster().getNodeNames(); String nodeName = nodeNames[randomIntBetween(1, nodeNames.length) - 1]; Analyzer analyzer = internalCluster().getInstance(Analyzer.class, nodeName); Planner planner = internalCluster().getInstance(Planner.class, nodeName); NodeContext nodeCtx = internalCluster().getInstance(NodeContext.class, nodeName); SessionContext sessionContext = new SessionContext( User.CRATE_USER, sqlExecutor.getCurrentSchema() ); CoordinatorTxnCtx coordinatorTxnCtx = new CoordinatorTxnCtx(sessionContext); RoutingProvider routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes()); PlannerContext plannerContext = new PlannerContext( planner.currentClusterState(), routingProvider, UUID.randomUUID(), coordinatorTxnCtx, nodeCtx, 0, null ); Plan plan = planner.plan( analyzer.analyze( SqlParser.createStatement(stmt), coordinatorTxnCtx.sessionContext(), ParamTypeHints.EMPTY), plannerContext); return new PlanForNode(plan, nodeName, plannerContext); } public TestingRowConsumer execute(PlanForNode planForNode) { DependencyCarrier dependencyCarrier = internalCluster().getInstance(DependencyCarrier.class, planForNode.nodeName); TestingRowConsumer downstream = new TestingRowConsumer(); planForNode.plan.execute( dependencyCarrier, planForNode.plannerContext, downstream, Row.EMPTY, SubQueryResults.EMPTY ); return downstream; } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param bulkArgs the bulk arguments of the statement * @return the SQLResponse */ public long[] execute(String stmt, Object[][] bulkArgs) { return sqlExecutor.execBulk(stmt, bulkArgs); } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param bulkArgs the bulk arguments of the statement * @return the SQLResponse */ public long[] execute(String stmt, Object[][] bulkArgs, TimeValue timeout) { return sqlExecutor.execBulk(stmt, bulkArgs, timeout); } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @return the SQLResponse */ public SQLResponse execute(String stmt) { return execute(stmt, (Object[]) null); } /** * Execute an SQL Statement using a specific {@link Session} * This is useful to execute a query on a specific node or to test using * session options like default schema. 
* * @param stmt the SQL Statement * @param session the Session to use * @return the SQLResponse */ public SQLResponse execute(String stmt, Session session) { return execute(stmt, null, session); } /** * Execute an SQL Statement using a specific {@link Session} * This is useful to execute a query on a specific node or to test using * session options like default schema. * * @param stmt the SQL Statement * @param session the Session to use * @return the SQLResponse */ public SQLResponse execute(String stmt, Object[] args, Session session) { var response = sqlExecutor.exec(stmt, args, session); this.response = response; return response; } public SQLResponse execute(String stmt, Object[] args, String node) { return execute(stmt, args, node, SQLTransportExecutor.REQUEST_TIMEOUT); } public SQLResponse execute(String stmt, Object[] args, String node, TimeValue timeout) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class, node); try (Session session = sqlOperations.createSession(sqlExecutor.getCurrentSchema(), User.CRATE_USER)) { SQLResponse response = sqlExecutor.exec(stmt, args, session, timeout); this.response = response; return response; } } /** * Get all mappings from an index as JSON String * * @param index the name of the index * @return the index mapping as String * @throws IOException */ protected String getIndexMapping(String index) throws IOException { ClusterStateRequest request = new ClusterStateRequest() .routingTable(false) .nodes(false) .metadata(true) .indices(index); ClusterStateResponse response = FutureUtils.get(client().admin().cluster().state(request)); Metadata metadata = response.getState().metadata(); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); IndexMetadata indexMetadata = metadata.iterator().next(); builder.field(Constants.DEFAULT_MAPPING_TYPE); builder.map(indexMetadata.mapping().sourceAsMap()); builder.endObject(); return Strings.toString(builder); } public void waitForMappingUpdateOnAll(final RelationName relationName, final String... 
fieldNames) throws Exception { assertBusy(() -> { Iterable<Schemas> referenceInfosIterable = internalCluster().getInstances(Schemas.class); for (Schemas schemas : referenceInfosIterable) { TableInfo tableInfo = schemas.getTableInfo(relationName); assertThat(tableInfo).isNotNull(); for (String fieldName : fieldNames) { ColumnIdent columnIdent = ColumnIdent.fromPath(fieldName); assertThat(tableInfo.getReference(columnIdent)).isNotNull(); } } }, 20L, TimeUnit.SECONDS); } public void assertFunctionIsCreatedOnAll(String schema, String name, List<DataType<?>> argTypes) throws Exception { SearchPath searchPath = SearchPath.pathWithPGCatalogAndDoc(); assertBusy(() -> { Iterable<Functions> functions = internalCluster().getInstances(Functions.class); for (Functions function : functions) { FunctionImplementation func = function.get( schema, name, Lists2.map(argTypes, t -> Literal.of(t, null)), searchPath); assertThat(func).isNotNull(); assertThat(func.boundSignature().getArgumentDataTypes()).isEqualTo(argTypes); } }, 20L, TimeUnit.SECONDS); } public void assertFunctionIsDeletedOnAll(String schema, String name, List<Symbol> arguments) throws Exception { assertBusy(() -> { Iterable<Functions> functions = internalCluster().getInstances(Functions.class); for (Functions function : functions) { try { var func = function.get(schema, name, arguments, SearchPath.createSearchPathFrom(schema)); if (func != null) { // if no exact function match is found for given arguments, // the function with arguments that can be casted to provided // arguments will be returned. Therefore, we have to assert that // the provided arguments do not match the arguments of the resolved // function if the function was deleted. assertThat(func.boundSignature().getArgumentDataTypes()).isNotEqualTo(Symbols.typeView(arguments)); } } catch (UnsupportedOperationException e) { assertThat(e.getMessage()).startsWith("Unknown function"); } } }, 20L, TimeUnit.SECONDS); } public void waitForMappingUpdateOnAll(final String tableOrPartition, final String... fieldNames) throws Exception { waitForMappingUpdateOnAll(new RelationName(sqlExecutor.getCurrentSchema(), tableOrPartition), fieldNames); } /** * Get the IndexSettings as JSON String * * @param index the name of the index * @return the IndexSettings as JSON String * @throws IOException */ protected String getIndexSettings(String index) throws IOException { ClusterStateRequest request = new ClusterStateRequest() .routingTable(false) .nodes(false) .metadata(true) .indices(index); ClusterStateResponse response = FutureUtils.get(client().admin().cluster().state(request)); Metadata metadata = response.getState().metadata(); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); for (IndexMetadata indexMetadata : metadata) { builder.startObject(indexMetadata.getIndex().getName()); builder.startObject("settings"); Settings settings = indexMetadata.getSettings(); for (String settingName : settings.keySet()) { builder.field(settingName, settings.get(settingName)); } builder.endObject(); builder.endObject(); } builder.endObject(); return Strings.toString(builder); } /** * Creates an {@link Session} on a specific node. * This can be used to ensure that a request is performed on a specific node. 
* * @param nodeName The name of the node to create the session * @return The created session */ Session createSessionOnNode(String nodeName) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class, nodeName); return sqlOperations.createSession( sqlExecutor.getCurrentSchema(), User.CRATE_USER); } /** * Creates a {@link Session} with the given default schema * and an options list. This is useful if you require a session which differs * from the default one. * * @param defaultSchema The default schema to use. Can be null. * @return The created session */ Session createSession(@Nullable String defaultSchema) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class); return sqlOperations.createSession(defaultSchema, User.CRATE_USER); } /** * If the Test class or method contains a @UseJdbc annotation then, * based on the ratio provided, a random value of true or false is returned. * For more details on the ratio see {@link UseJdbc} * <p> * Method annotations have higher priority than class annotations. */ private boolean isJdbcEnabled() { UseJdbc useJdbc = getTestAnnotation(UseJdbc.class); if (useJdbc == null) { return false; } return isFeatureEnabled(useJdbc.value()); } /** * If the Test class or method is annotated with {@link UseHashJoins} then, * based on the provided ratio, a random value of true or false is returned. * For more details on the ratio see {@link UseHashJoins} * <p> * Method annotations have higher priority than class annotations. */ private boolean isHashJoinEnabled() { UseHashJoins useHashJoins = getTestAnnotation(UseHashJoins.class); if (useHashJoins == null) { return false; } return isFeatureEnabled(useHashJoins.value()); } /** * Checks if the current test method or test class is annotated with the provided {@param annotationClass} * * @return the annotation if one is present or null otherwise */ @Nullable private <T extends Annotation> T getTestAnnotation(Class<T> annotationClass) { try { Class<?> clazz = this.getClass(); String testMethodName = testName.getMethodName(); String[] split = testName.getMethodName().split(" "); if (split.length > 1) { // When we annotate tests with @Repeat the test method name gets augmented with a seed and we won't // be able to find it in the class methods, so just grab the method name. testMethodName = split[0]; } Method method = clazz.getMethod(testMethodName); T annotation = method.getAnnotation(annotationClass); if (annotation == null) { annotation = clazz.getAnnotation(annotationClass); } return annotation; } catch (NoSuchMethodException e) { return null; } } /** * We sometimes randomize the use of features in tests. * This method verifies the provided ratio and based on it and a random number * indicates if a feature should be enabled or not. * * @param ratio a number between [0, 1] that indicates a "randomness factor" * (1 = always enabled, 0 = always disabled) * * @return true if a feature should be active/used and false otherwise */ private boolean isFeatureEnabled(double ratio) { if (ratio == 0) { return false; } assert ratio >= 0.0 && ratio <= 1.0; return ratio == 1 || RandomizedContext.current().getRandom().nextDouble() < ratio; } /** * If the Test class or method contains a @UseRandomizedSchema annotation then, * based on the schema argument, a random (unquoted) schema name is returned. The schema name consists * of a 1-20 character long ASCII string. 
* For more details on the schema parameter see {@link UseRandomizedSchema} * <p> * Method annotations have higher priority than class annotations. */ private String randomizedSchema() { UseRandomizedSchema annotation = getTestAnnotation(UseRandomizedSchema.class); if (annotation == null || annotation.random() == false) { return Schemas.DOC_SCHEMA_NAME; } Random random = RandomizedContext.current().getRandom(); while (true) { String schemaName = RandomStrings.randomAsciiLettersOfLengthBetween(random, 1, 20).toLowerCase(); if (!Schemas.READ_ONLY_SYSTEM_SCHEMAS.contains(schemaName) && !Identifiers.isKeyWord(schemaName) && !containsExtendedAsciiChars(schemaName)) { return schemaName; } } } private boolean containsExtendedAsciiChars(String value) { for (char c : value.toCharArray()) { if ((short) c > 127) { return true; } } return false; } public static Index resolveIndex(String index) { ClusterService clusterService = internalCluster().getInstance(ClusterService.class); IndexMetadata indexMetadata = clusterService.state().metadata().index(index); return new Index(index, indexMetadata.getIndexUUID()); } }
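isFeatureEnabled above turns a ratio in [0, 1] into a per-run decision for toggles such as UseJdbc and UseHashJoins: 0 means never, 1 means always, and anything in between is compared against a random draw. A stand-alone sketch of the same idea, using an explicit seed instead of RandomizedContext and illustrative names, looks like this.

import java.util.Random;

// Stand-alone sketch: decide whether an optional test feature is active for this run,
// given a "randomness factor" ratio in [0, 1] (1 = always on, 0 = always off).
public final class FeatureToggle {

    private final Random random;

    public FeatureToggle(long seed) {
        this.random = new Random(seed);
    }

    public boolean isEnabled(double ratio) {
        if (ratio == 0) {
            return false;
        }
        assert ratio >= 0.0 && ratio <= 1.0;
        return ratio == 1 || random.nextDouble() < ratio;
    }

    public static void main(String[] args) {
        FeatureToggle toggle = new FeatureToggle(42L);
        System.out.println("jdbc enabled for this run: " + toggle.isEnabled(0.5));
    }
}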
server/src/testFixtures/java/io/crate/integrationtests/SQLIntegrationTestCase.java
/* * Licensed to Crate.io GmbH ("Crate") under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. Crate licenses * this file to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * However, if you have executed another commercial license agreement * with Crate these terms will supersede the license and you may use the * software solely pursuant to the terms of the relevant commercial agreement. */ package io.crate.integrationtests; import static io.crate.protocols.postgres.PostgresNetty.PSQL_PORT_SETTING; import static org.assertj.core.api.Assertions.assertThat; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.annotation.Documented; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.InetSocketAddress; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import javax.annotation.Nullable; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.ConfigurationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Netty4Plugin; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.rules.TestName; import org.junit.rules.Timeout; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.annotations.Listeners; import 
com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import io.crate.Constants; import io.crate.action.sql.SQLOperations; import io.crate.action.sql.Session; import io.crate.action.sql.SessionContext; import io.crate.analyze.Analyzer; import io.crate.analyze.ParamTypeHints; import io.crate.common.collections.Lists2; import io.crate.common.unit.TimeValue; import io.crate.data.Paging; import io.crate.data.Row; import io.crate.execution.dml.TransportShardAction; import io.crate.execution.dml.delete.TransportShardDeleteAction; import io.crate.execution.dml.upsert.TransportShardUpsertAction; import io.crate.execution.engine.collect.stats.JobsLogService; import io.crate.execution.engine.collect.stats.JobsLogs; import io.crate.execution.jobs.NodeLimits; import io.crate.execution.jobs.RootTask; import io.crate.execution.jobs.TasksService; import io.crate.execution.jobs.kill.KillableCallable; import io.crate.expression.symbol.Literal; import io.crate.expression.symbol.Symbol; import io.crate.expression.symbol.Symbols; import io.crate.metadata.ColumnIdent; import io.crate.metadata.CoordinatorTxnCtx; import io.crate.metadata.FunctionImplementation; import io.crate.metadata.Functions; import io.crate.metadata.NodeContext; import io.crate.metadata.RelationName; import io.crate.metadata.RoutingProvider; import io.crate.metadata.Schemas; import io.crate.metadata.SearchPath; import io.crate.metadata.settings.SessionSettings; import io.crate.metadata.table.TableInfo; import io.crate.planner.DependencyCarrier; import io.crate.planner.Plan; import io.crate.planner.Planner; import io.crate.planner.PlannerContext; import io.crate.planner.operators.SubQueryResults; import io.crate.protocols.postgres.PostgresNetty; import io.crate.sql.Identifiers; import io.crate.sql.parser.SqlParser; import io.crate.test.integration.SystemPropsTestLoggingListener; import io.crate.testing.SQLResponse; import io.crate.testing.SQLTransportExecutor; import io.crate.testing.TestExecutionConfig; import io.crate.testing.TestingRowConsumer; import io.crate.testing.UseHashJoins; import io.crate.testing.UseJdbc; import io.crate.testing.UseRandomizedSchema; import io.crate.types.DataType; import io.crate.user.User; import io.crate.user.UserLookup; @Listeners({SystemPropsTestLoggingListener.class}) @UseJdbc @UseHashJoins @UseRandomizedSchema public abstract class SQLIntegrationTestCase extends ESIntegTestCase { private static final Logger LOGGER = LogManager.getLogger(SQLIntegrationTestCase.class); private static final int ORIGINAL_PAGE_SIZE = Paging.PAGE_SIZE; protected static SessionSettings DUMMY_SESSION_INFO = new SessionSettings( "dummyUser", SearchPath.createSearchPathFrom("dummySchema")); /** * Annotation for tests that are slow. Slow tests do not run by default but can be * enabled. 
*/ @Documented @Inherited @Retention(RetentionPolicy.RUNTIME) @TestGroup(enabled = false, sysProperty = RUN_SLOW_TESTS_PROP) public @interface Slow {} public static final String RUN_SLOW_TESTS_PROP = "tests.crate.slow"; @Rule public Timeout globalTimeout = new Timeout(5, TimeUnit.MINUTES); @Rule public TestName testName = new TestName(); protected final SQLTransportExecutor sqlExecutor; @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put(SETTING_HTTP_COMPRESSION.getKey(), false) .put(PSQL_PORT_SETTING.getKey(), 0); if (randomBoolean()) { builder.put("memory.allocation.type", "off-heap"); } return builder.build(); } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return List.of(Netty4Plugin.class); } protected SQLResponse response; public SQLIntegrationTestCase() { this(false); } public SQLIntegrationTestCase(boolean useSSL) { this(new SQLTransportExecutor( new SQLTransportExecutor.ClientProvider() { @Override public Client client() { return ESIntegTestCase.client(); } @Override public String pgUrl() { PostgresNetty postgresNetty = internalCluster().getInstance(PostgresNetty.class); BoundTransportAddress boundTransportAddress = postgresNetty.boundAddress(); if (boundTransportAddress != null) { InetSocketAddress address = boundTransportAddress.publishAddress().address(); return String.format(Locale.ENGLISH, "jdbc:postgresql://%s:%d/?ssl=%s&sslmode=%s", address.getHostName(), address.getPort(), useSSL, useSSL ? "require" : "disable"); } return null; } @Override public SQLOperations sqlOperations() { return internalCluster().getInstance(SQLOperations.class); } })); } public static SQLTransportExecutor executor(String nodeName) { return executor(nodeName, false); } public static SQLTransportExecutor executor(String nodeName, boolean useSSL) { return new SQLTransportExecutor( new SQLTransportExecutor.ClientProvider() { @Override public Client client() { return ESIntegTestCase.client(nodeName); } @Override public String pgUrl() { PostgresNetty postgresNetty = internalCluster().getInstance(PostgresNetty.class, nodeName); BoundTransportAddress boundTransportAddress = postgresNetty.boundAddress(); if (boundTransportAddress != null) { InetSocketAddress address = boundTransportAddress.publishAddress().address(); return String.format(Locale.ENGLISH, "jdbc:postgresql://%s:%d/?ssl=%s&sslmode=%s", address.getHostName(), address.getPort(), useSSL, useSSL ? 
"require" : "disable"); } return null; } @Override public SQLOperations sqlOperations() { return internalCluster().getInstance(SQLOperations.class); } }); } public String getFqn(String schema, String tableName) { if (schema.equals(Schemas.DOC_SCHEMA_NAME)) { return tableName; } return String.format(Locale.ENGLISH, "%s.%s", schema, tableName); } public String getFqn(String tableName) { return getFqn(sqlExecutor.getCurrentSchema(), tableName); } @Before public void setSearchPath() throws Exception { sqlExecutor.setSearchPath(randomizedSchema()); } @After public void resetPageSize() { Paging.PAGE_SIZE = ORIGINAL_PAGE_SIZE; } public SQLIntegrationTestCase(SQLTransportExecutor sqlExecutor) { this.sqlExecutor = sqlExecutor; } @Override public Settings indexSettings() { // set number of replicas to 0 for getting a green cluster when using only one node return Settings.builder().put("number_of_replicas", 0).build(); } @After public void assertNoTasksAreLeftOpen() throws Exception { final Field activeTasks = TasksService.class.getDeclaredField("activeTasks"); final Field activeOperationsSb = TransportShardAction.class.getDeclaredField("activeOperations"); activeTasks.setAccessible(true); activeOperationsSb.setAccessible(true); try { assertBusy(() -> { for (TasksService tasksService : internalCluster().getInstances(TasksService.class)) { try { //noinspection unchecked Map<UUID, RootTask> contexts = (Map<UUID, RootTask>) activeTasks.get(tasksService); assertThat(contexts).isEmpty(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } for (TransportShardUpsertAction action : internalCluster().getInstances(TransportShardUpsertAction.class)) { try { @SuppressWarnings("unchecked") ConcurrentHashMap<TaskId, KillableCallable<?>> operations = (ConcurrentHashMap<TaskId, KillableCallable<?>>) activeOperationsSb.get(action); assertThat(operations).isEmpty(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } for (TransportShardDeleteAction action : internalCluster().getInstances(TransportShardDeleteAction.class)) { try { @SuppressWarnings("unchecked") ConcurrentHashMap<TaskId, KillableCallable<?>> operations = (ConcurrentHashMap<TaskId, KillableCallable<?>>) activeOperationsSb.get(action); assertThat(operations).isEmpty(); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } }, 10L, TimeUnit.SECONDS); } catch (AssertionError e) { StringBuilder errorMessageBuilder = new StringBuilder(); errorMessageBuilder.append("Open jobs:\n"); for (var jobsLogService : internalCluster().getInstances(JobsLogService.class)) { JobsLogs jobsLogs = jobsLogService.get(); for (var jobContent : jobsLogs.activeJobs()) { errorMessageBuilder.append(jobContent.toString()).append("\n"); } } errorMessageBuilder.append("Active tasks:\n"); String[] nodeNames = internalCluster().getNodeNames(); for (String nodeName : nodeNames) { TasksService tasksService = internalCluster().getInstance(TasksService.class, nodeName); try { //noinspection unchecked Map<UUID, RootTask> contexts = (Map<UUID, RootTask>) activeTasks.get(tasksService); String contextsString = contexts.toString(); if (!"{}".equals(contextsString)) { errorMessageBuilder.append("## node: "); errorMessageBuilder.append(nodeName); errorMessageBuilder.append("\n"); errorMessageBuilder.append(contextsString); errorMessageBuilder.append("\n"); } contexts.clear(); } catch (IllegalAccessException ex) { throw new RuntimeException(ex); } } throw new AssertionError(errorMessageBuilder.toString(), e); } } @After public void 
ensureNoInflightRequestsLeft() throws Exception { assertBusy(() -> { for (var nodeLimits : internalCluster().getInstances(NodeLimits.class)) { assertThat(nodeLimits.totalNumInflight()).isEqualTo(0L); } }); } @After public void ensure_one_node_limit_instance_per_node() { Iterable<NodeLimits> nodeLimitsInstances = internalCluster().getInstances(NodeLimits.class); int numInstances = 0; for (var nodeLimits : nodeLimitsInstances) { numInstances++; } assertThat(numInstances) .as("There must only be as many NodeLimits instances as there are nodes in the cluster") .isEqualTo(internalCluster().numNodes()); } public void waitUntilShardOperationsFinished() throws Exception { assertBusy(() -> { Iterable<IndicesService> indexServices = internalCluster().getInstances(IndicesService.class); for (IndicesService indicesService : indexServices) { for (IndexService indexService : indicesService) { for (IndexShard indexShard : indexService) { assertThat(indexShard.getActiveOperationsCount()).isEqualTo(0); } } } }, 5, TimeUnit.SECONDS); } public void waitUntilThreadPoolTasksFinished(final String name) throws Exception { assertBusy(() -> { Iterable<ThreadPool> threadPools = internalCluster().getInstances(ThreadPool.class); for (ThreadPool threadPool : threadPools) { ThreadPoolExecutor executor = (ThreadPoolExecutor) threadPool.executor(name); assertThat(executor.getActiveCount()).isEqualTo(0); } }, 5, TimeUnit.SECONDS); } /** * Execute a SQL statement as system query on a specific node in the cluster * * @param stmt the SQL statement * @param schema the schema that should be used for this statement * schema is nullable, which means the default schema ("doc") is used * @param node the name of the node on which the stmt is executed * @return the SQL Response */ public SQLResponse systemExecute(String stmt, @Nullable String schema, String node) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class, node); UserLookup userLookup; try { userLookup = internalCluster().getInstance(UserLookup.class, node); } catch (ConfigurationException ignored) { // If enterprise is not enabled there is no UserLookup instance bound in guice userLookup = () -> List.of(User.CRATE_USER); } try (Session session = sqlOperations.createSession(schema, userLookup.findUser("crate"))) { response = sqlExecutor.exec(stmt, session); } return response; } private void dumpActiveTasks() { for (var tasksService : internalCluster().getInstances(TasksService.class)) { tasksService.logActiveTasksToError(); } } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param args the arguments to replace placeholders ("?") in the statement * @return the SQLResponse */ public SQLResponse execute(String stmt, Object[] args) { try { SQLResponse response = sqlExecutor.exec(new TestExecutionConfig(isJdbcEnabled(), isHashJoinEnabled()), stmt, args); this.response = response; return response; } catch (ElasticsearchTimeoutException e) { LOGGER.error("Timeout on SQL statement: {} {}", stmt, e); dumpActiveTasks(); throw e; } } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param args the arguments of the statement * @param timeout internal timeout of the statement * @return the SQLResponse */ public SQLResponse execute(String stmt, Object[] args, TimeValue timeout) { try { SQLResponse response = sqlExecutor.exec(new TestExecutionConfig(isJdbcEnabled(), isHashJoinEnabled()), stmt, args, timeout); this.response = response; return response; } catch 
(ElasticsearchTimeoutException e) { LOGGER.error("Timeout on SQL statement: {} {}", stmt, e); dumpActiveTasks(); throw e; } } /** * Executes {@code statement} once for each entry in {@code setSessionStatementsList} * * The inner lists of {@code setSessionStatementsList} will be executed before the statement is executed. * This is intended to change session settings using `SET ..` statements * * @param matcher matcher used to assert the result of {@code statement} */ public void executeWith(List<List<String>> setSessionStatementsList, String statement, Consumer<SQLResponse> matcher) { for (List<String> setSessionStatements : setSessionStatementsList) { try (Session session = sqlExecutor.newSession()) { for (String setSessionStatement : setSessionStatements) { sqlExecutor.exec(setSessionStatement, session); } SQLResponse resp = sqlExecutor.exec(statement, session); assertThat(resp) .as("The query:\n\t" + statement + "\nwith session statements: [" + String.join(", ", setSessionStatements) + "] must produce correct result") .satisfies(matcher); } } } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL statement * @param schema the schema that should be used for this statement * schema is nullable, which means default schema ("doc") is used * @return the SQLResponse */ public SQLResponse execute(String stmt, @Nullable String schema) { return execute(stmt, null, createSession(schema)); } public static class PlanForNode { public final Plan plan; final String nodeName; public final PlannerContext plannerContext; private PlanForNode(Plan plan, String nodeName, PlannerContext plannerContext) { this.plan = plan; this.nodeName = nodeName; this.plannerContext = plannerContext; } } public PlanForNode plan(String stmt) { String[] nodeNames = internalCluster().getNodeNames(); String nodeName = nodeNames[randomIntBetween(1, nodeNames.length) - 1]; Analyzer analyzer = internalCluster().getInstance(Analyzer.class, nodeName); Planner planner = internalCluster().getInstance(Planner.class, nodeName); NodeContext nodeCtx = internalCluster().getInstance(NodeContext.class, nodeName); SessionContext sessionContext = new SessionContext( User.CRATE_USER, sqlExecutor.getCurrentSchema() ); CoordinatorTxnCtx coordinatorTxnCtx = new CoordinatorTxnCtx(sessionContext); RoutingProvider routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes()); PlannerContext plannerContext = new PlannerContext( planner.currentClusterState(), routingProvider, UUID.randomUUID(), coordinatorTxnCtx, nodeCtx, 0, null ); Plan plan = planner.plan( analyzer.analyze( SqlParser.createStatement(stmt), coordinatorTxnCtx.sessionContext(), ParamTypeHints.EMPTY), plannerContext); return new PlanForNode(plan, nodeName, plannerContext); } public TestingRowConsumer execute(PlanForNode planForNode) { DependencyCarrier dependencyCarrier = internalCluster().getInstance(DependencyCarrier.class, planForNode.nodeName); TestingRowConsumer downstream = new TestingRowConsumer(); planForNode.plan.execute( dependencyCarrier, planForNode.plannerContext, downstream, Row.EMPTY, SubQueryResults.EMPTY ); return downstream; } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @param bulkArgs the bulk arguments of the statement * @return the SQLResponse */ public long[] execute(String stmt, Object[][] bulkArgs) { return sqlExecutor.execBulk(stmt, bulkArgs); } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL 
Statement * @param bulkArgs the bulk arguments of the statement * @return the SQLResponse */ public long[] execute(String stmt, Object[][] bulkArgs, TimeValue timeout) { return sqlExecutor.execBulk(stmt, bulkArgs, timeout); } /** * Execute an SQL Statement on a random node of the cluster * * @param stmt the SQL Statement * @return the SQLResponse */ public SQLResponse execute(String stmt) { return execute(stmt, (Object[]) null); } /** * Execute an SQL Statement using a specific {@link Session} * This is useful to execute a query on a specific node or to test using * session options like default schema. * * @param stmt the SQL Statement * @param session the Session to use * @return the SQLResponse */ public SQLResponse execute(String stmt, Session session) { return execute(stmt, null, session); } /** * Execute an SQL Statement using a specific {@link Session} * This is useful to execute a query on a specific node or to test using * session options like default schema. * * @param stmt the SQL Statement * @param session the Session to use * @return the SQLResponse */ public SQLResponse execute(String stmt, Object[] args, Session session) { var response = sqlExecutor.exec(stmt, args, session); this.response = response; return response; } public SQLResponse execute(String stmt, Object[] args, String node) { return execute(stmt, args, node, SQLTransportExecutor.REQUEST_TIMEOUT); } public SQLResponse execute(String stmt, Object[] args, String node, TimeValue timeout) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class, node); try (Session session = sqlOperations.createSession(sqlExecutor.getCurrentSchema(), User.CRATE_USER)) { SQLResponse response = sqlExecutor.exec(stmt, args, session, timeout); this.response = response; return response; } } /** * Get all mappings from an index as JSON String * * @param index the name of the index * @return the index mapping as String * @throws IOException */ protected String getIndexMapping(String index) throws IOException { ClusterStateRequest request = new ClusterStateRequest() .routingTable(false) .nodes(false) .metadata(true) .indices(index); ClusterStateResponse response = FutureUtils.get(client().admin().cluster().state(request)); Metadata metadata = response.getState().metadata(); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); IndexMetadata indexMetadata = metadata.iterator().next(); builder.field(Constants.DEFAULT_MAPPING_TYPE); builder.map(indexMetadata.mapping().sourceAsMap()); builder.endObject(); return Strings.toString(builder); } public void waitForMappingUpdateOnAll(final RelationName relationName, final String... 
fieldNames) throws Exception { assertBusy(() -> { Iterable<Schemas> referenceInfosIterable = internalCluster().getInstances(Schemas.class); for (Schemas schemas : referenceInfosIterable) { TableInfo tableInfo = schemas.getTableInfo(relationName); assertThat(tableInfo).isNotNull(); for (String fieldName : fieldNames) { ColumnIdent columnIdent = ColumnIdent.fromPath(fieldName); assertThat(tableInfo.getReference(columnIdent)).isNotNull(); } } }, 20L, TimeUnit.SECONDS); } public void assertFunctionIsCreatedOnAll(String schema, String name, List<DataType<?>> argTypes) throws Exception { SearchPath searchPath = SearchPath.pathWithPGCatalogAndDoc(); assertBusy(() -> { Iterable<Functions> functions = internalCluster().getInstances(Functions.class); for (Functions function : functions) { FunctionImplementation func = function.get( schema, name, Lists2.map(argTypes, t -> Literal.of(t, null)), searchPath); assertThat(func).isNotNull(); assertThat(func.boundSignature().getArgumentDataTypes()).isEqualTo(argTypes); } }, 20L, TimeUnit.SECONDS); } public void assertFunctionIsDeletedOnAll(String schema, String name, List<Symbol> arguments) throws Exception { assertBusy(() -> { Iterable<Functions> functions = internalCluster().getInstances(Functions.class); for (Functions function : functions) { try { var func = function.get(schema, name, arguments, SearchPath.createSearchPathFrom(schema)); if (func != null) { // if no exact function match is found for given arguments, // the function with arguments that can be casted to provided // arguments will be returned. Therefore, we have to assert that // the provided arguments do not match the arguments of the resolved // function if the function was deleted. assertThat(func.boundSignature().getArgumentDataTypes()).isNotEqualTo(Symbols.typeView(arguments)); } } catch (UnsupportedOperationException e) { assertThat(e.getMessage()).startsWith("Unknown function"); } } }, 20L, TimeUnit.SECONDS); } public void waitForMappingUpdateOnAll(final String tableOrPartition, final String... fieldNames) throws Exception { waitForMappingUpdateOnAll(new RelationName(sqlExecutor.getCurrentSchema(), tableOrPartition), fieldNames); } /** * Get the IndexSettings as JSON String * * @param index the name of the index * @return the IndexSettings as JSON String * @throws IOException */ protected String getIndexSettings(String index) throws IOException { ClusterStateRequest request = new ClusterStateRequest() .routingTable(false) .nodes(false) .metadata(true) .indices(index); ClusterStateResponse response = FutureUtils.get(client().admin().cluster().state(request)); Metadata metadata = response.getState().metadata(); XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); for (IndexMetadata indexMetadata : metadata) { builder.startObject(indexMetadata.getIndex().getName()); builder.startObject("settings"); Settings settings = indexMetadata.getSettings(); for (String settingName : settings.keySet()) { builder.field(settingName, settings.get(settingName)); } builder.endObject(); builder.endObject(); } builder.endObject(); return Strings.toString(builder); } /** * Creates an {@link Session} on a specific node. * This can be used to ensure that a request is performed on a specific node. 
* * @param nodeName The name of the node to create the session * @return The created session */ Session createSessionOnNode(String nodeName) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class, nodeName); return sqlOperations.createSession( sqlExecutor.getCurrentSchema(), User.CRATE_USER); } /** * Creates a {@link Session} with the given default schema * and an options list. This is useful if you require a session which differs * from the default one. * * @param defaultSchema The default schema to use. Can be null. * @return The created session */ Session createSession(@Nullable String defaultSchema) { SQLOperations sqlOperations = internalCluster().getInstance(SQLOperations.class); return sqlOperations.createSession(defaultSchema, User.CRATE_USER); } /** * If the Test class or method contains a @UseJdbc annotation then, * based on the ratio provided, a random value of true or false is returned. * For more details on the ratio see {@link UseJdbc} * <p> * Method annotations have higher priority than class annotations. */ private boolean isJdbcEnabled() { UseJdbc useJdbc = getTestAnnotation(UseJdbc.class); if (useJdbc == null) { return false; } return isFeatureEnabled(useJdbc.value()); } /** * If the Test class or method is annotated with {@link UseHashJoins} then, * based on the provided ratio, a random value of true or false is returned. * For more details on the ratio see {@link UseHashJoins} * <p> * Method annotations have higher priority than class annotations. */ private boolean isHashJoinEnabled() { UseHashJoins useHashJoins = getTestAnnotation(UseHashJoins.class); if (useHashJoins == null) { return false; } return isFeatureEnabled(useHashJoins.value()); } /** * Checks if the current test method or test class is annotated with the provided {@param annotationClass} * * @return the annotation if one is present or null otherwise */ @Nullable private <T extends Annotation> T getTestAnnotation(Class<T> annotationClass) { try { Class<?> clazz = this.getClass(); String testMethodName = testName.getMethodName(); String[] split = testName.getMethodName().split(" "); if (split.length > 1) { // When we annotate tests with @Repeat the test method name gets augmented with a seed and we won't // be able to find it in the class methods, so just grab the method name. testMethodName = split[0]; } Method method = clazz.getMethod(testMethodName); T annotation = method.getAnnotation(annotationClass); if (annotation == null) { annotation = clazz.getAnnotation(annotationClass); } return annotation; } catch (NoSuchMethodException e) { return null; } } /** * We sometimes randomize the use of features in tests. * This method verifies the provided ratio and based on it and a random number * indicates if a feature should be enabled or not. * * @param ratio a number between [0, 1] that indicates a "randomness factor" * (1 = always enabled, 0 = always disabled) * * @return true if a feature should be active/used and false otherwise */ private boolean isFeatureEnabled(double ratio) { if (ratio == 0) { return false; } assert ratio >= 0.0 && ratio <= 1.0; return ratio == 1 || RandomizedContext.current().getRandom().nextDouble() < ratio; } /** * If the Test class or method contains a @UseRandomizedSchema annotation then, * based on the schema argument, a random (unquoted) schema name is returned. The schema name consists * of a 1-20 character long ASCII string. 
* For more details on the schema parameter see {@link UseRandomizedSchema} * <p> * Method annotations have higher priority than class annotations. */ private String randomizedSchema() { UseRandomizedSchema annotation = getTestAnnotation(UseRandomizedSchema.class); if (annotation == null || annotation.random() == false) { return Schemas.DOC_SCHEMA_NAME; } Random random = RandomizedContext.current().getRandom(); while (true) { String schemaName = RandomStrings.randomAsciiLettersOfLengthBetween(random, 1, 20).toLowerCase(); if (!Schemas.READ_ONLY_SYSTEM_SCHEMAS.contains(schemaName) && !Identifiers.isKeyWord(schemaName) && !containsExtendedAsciiChars(schemaName)) { return schemaName; } } } private boolean containsExtendedAsciiChars(String value) { for (char c : value.toCharArray()) { if ((short) c > 127) { return true; } } return false; } public static Index resolveIndex(String index) { ClusterService clusterService = internalCluster().getInstance(ClusterService.class); IndexMetadata indexMetadata = clusterService.state().metadata().index(index); return new Index(index, indexMetadata.getIndexUUID()); } }
tests: Remove unused `executeWith` from `SQLIntegrationTestCase`
server/src/testFixtures/java/io/crate/integrationtests/SQLIntegrationTestCase.java
tests: Remove unused `executeWith` from `SQLIntegrationTestCase`
Java
apache-2.0
112a80463a30a79e98dff2f80f461591eb3d5aa6
0
xzel23/meja,xzel23/meja
/* * Copyright 2016 Axel Howind. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.dua3.meja.ui; import java.util.concurrent.locks.Lock; import com.dua3.meja.model.BorderStyle; import com.dua3.meja.model.Cell; import com.dua3.meja.model.CellStyle; import com.dua3.meja.model.Direction; import com.dua3.meja.model.FillPattern; import com.dua3.meja.model.Row; import com.dua3.meja.model.Sheet; import com.dua3.meja.util.MejaConfig; import com.dua3.utility.Color; /** * A helper class that implements the actual drawing algorithm. * * @param <SV> the concrete class implementing SheetView * @param <GC> the concrete class implementing GraphicsContext */ public abstract class SheetPainterBase<SV extends SheetView, GC extends GraphicsContext> { enum CellDrawMode { /** * */ DRAW_CELL_BACKGROUND, /** * */ DRAW_CELL_BORDER, /** * */ DRAW_CELL_FOREGROUND } /** * Horizontal padding. */ protected static final int PADDING_X = 2; /** * Vertical padding. */ protected static final int PADDING_Y = 1; /** * Color used to draw the selection rectangle. */ protected static final Color SELECTION_COLOR = Color.BLACK; /** * Width of the selection rectangle borders. */ protected static final int SELECTION_STROKE_WIDTH = 4; /** * Test whether style uses text wrapping. While there is a property for text * wrapping, the alignment settings have to be taken into account too. * * @param style style * @return true if cell content should be displayed with text wrapping */ static boolean isWrapping(CellStyle style) { return style.isWrap() || style.getHAlign().isWrap() || style.getVAlign().isWrap(); } protected final SV sheetView; /** * Reference to the sheet. */ private Sheet sheet = null; /** * Array with column positions (x-axis) in pixels. */ private double[] columnPos = { 0 }; /** * Array with column positions (y-axis) in pixels. */ private double[] rowPos = { 0 }; private double sheetHeightInPoints = 0; private double sheetWidthInPoints = 0; protected SheetPainterBase(SV sheetView) { this.sheetView = sheetView; } public void drawSheet(GC gc) { if (sheet == null) { return; } Lock readLock = sheet.readLock(); readLock.lock(); try { beginDraw(gc); drawBackground(gc); drawLabels(gc); drawCells(gc, CellDrawMode.DRAW_CELL_BACKGROUND); drawCells(gc, CellDrawMode.DRAW_CELL_BORDER); drawCells(gc, CellDrawMode.DRAW_CELL_FOREGROUND); drawSelection(gc); endDraw(gc); } finally { readLock.unlock(); } } /** * Calculate the rectangle the cell occupies on screen. * * @param cell the cell whose area is requested * @return rectangle the rectangle the cell takes up in screen coordinates */ public Rectangle getCellRect(Cell cell) { final int i = cell.getRowNumber(); final int j = cell.getColumnNumber(); final double x = getColumnPos(j); final double w = getColumnPos(j + cell.getHorizontalSpan()) - x; final double y = getRowPos(i); final double h = getRowPos(i + cell.getVerticalSpan()) - y; return new Rectangle(x, y, w, h); } /** * Get number of columns for the currently loaded sheet. 
* * @return number of columns */ public int getColumnCount() { return columnPos.length - 1; } /** * Get the column number that the given x-coordinate belongs to. * * @param x x-coordinate * * @return * <ul> * <li>-1, if the first column is displayed to the right of the given * coordinate * <li>number of columns, if the right edge of the last column is * displayed to the left of the given coordinate * <li>the number of the column that belongs to the given coordinate * </ul> */ public int getColumnNumberFromX(double x) { if (columnPos.length == 0) { return 0; } // guess position int j = (int) (columnPos.length * x / sheetWidthInPoints); if (j < 0) { j = 0; } else if (j >= columnPos.length) { j = columnPos.length - 1; } // linear search from here if (getColumnPos(j) > x) { while (j > 0 && getColumnPos(j - 1) > x) { j--; } } else { while (j < columnPos.length && getColumnPos(j) <= x) { j++; } } return j - 1; } /** * @param j the column number * @return the columnPos */ public double getColumnPos(int j) { return columnPos[Math.min(columnPos.length - 1, j)]; } /** * Get number of rows for the currently loaded sheet. * * @return number of rows */ public int getRowCount() { return rowPos.length - 1; } /** * Get the row number that the given y-coordinate belongs to. * * @param y y-coordinate * * @return * <ul> * <li>-1, if the first row is displayed below the given coordinate * <li>number of rows, if the lower edge of the last row is displayed * above the given coordinate * <li>the number of the row that belongs to the given coordinate * </ul> */ public int getRowNumberFromY(double y) { if (rowPos.length == 0) { return 0; } // guess position int i = (int) (rowPos.length * y / sheetHeightInPoints); if (i < 0) { i = 0; } else if (i >= rowPos.length) { i = rowPos.length - 1; } // linear search from here if (getRowPos(i) > y) { while (i > 0 && getRowPos(i - 1) > y) { i--; } } else { while (i < rowPos.length && getRowPos(i) <= y) { i++; } } return i - 1; } /** * @param i the row number * @return the rowPos */ public double getRowPos(int i) { return rowPos[Math.min(rowPos.length - 1, i)]; } /** * Get display coordinates of selection rectangle. 
* * @param cell the selected cell * @return selection rectangle in display coordinates */ public Rectangle getSelectionRect(Cell cell) { Rectangle cellRect = getCellRect(cell.getLogicalCell()); double extra = (getSelectionStrokeWidth() + 1) / 2; return new Rectangle(cellRect.getX() - extra, cellRect.getY() - extra, cellRect.getW() + 2 * extra, cellRect.getH() + 2 * extra); } public double getSheetHeightInPoints() { return sheetHeightInPoints; } public double getSheetWidthInPoints() { return sheetWidthInPoints; } public double getSplitX() { return getColumnPos(sheet.getSplitColumn()); } public double getSplitY() { return getRowPos(sheet.getSplitRow()); } public void update(Sheet sheet) { if (sheet != this.sheet) { this.sheet = sheet; } // determine sheet dimensions if (sheet == null) { sheetWidthInPoints = 0; sheetHeightInPoints = 0; rowPos = new double[] { 0 }; columnPos = new double[] { 0 }; return; } Lock readLock = sheet.readLock(); readLock.lock(); try { sheetHeightInPoints = 0; rowPos = new double[2 + sheet.getLastRowNum()]; rowPos[0] = 0; for (int i = 1; i < rowPos.length; i++) { sheetHeightInPoints += sheet.getRowHeight(i - 1); rowPos[i] = sheetHeightInPoints; } sheetWidthInPoints = 0; columnPos = new double[2 + sheet.getLastColNum()]; columnPos[0] = 0; for (int j = 1; j < columnPos.length; j++) { sheetWidthInPoints += sheet.getColumnWidth(j - 1); columnPos[j] = sheetWidthInPoints; } } finally { readLock.unlock(); } } /** * Draw cell background. * * @param g the graphics context to use * @param cell cell to draw */ private void drawCellBackground(GC g, Cell cell) { Rectangle cr = getCellRect(cell); // draw grid lines g.setColor(getGridColor()); g.drawRect(cr.getX(), cr.getY(), cr.getW(), cr.getH()); CellStyle style = cell.getCellStyle(); FillPattern pattern = style.getFillPattern(); if (pattern == FillPattern.NONE) { return; } if (pattern != FillPattern.SOLID) { Color fillBgColor = style.getFillBgColor(); if (fillBgColor != null) { g.setColor(fillBgColor); g.fillRect(cr.getX(), cr.getY(), cr.getW(), cr.getH()); } } if (pattern != FillPattern.NONE) { Color fillFgColor = style.getFillFgColor(); if (fillFgColor != null) { g.setColor(fillFgColor); g.fillRect(cr.getX(), cr.getY(), cr.getW(), cr.getH()); } } } /** * Draw cell border. * * @param g the graphics context to use * @param cell cell to draw */ private void drawCellBorder(GC g, Cell cell) { CellStyle styleTopLeft = cell.getCellStyle(); Cell cellBottomRight = sheet.getRow(cell.getRowNumber() + cell.getVerticalSpan() - 1) .getCell(cell.getColumnNumber() + cell.getHorizontalSpan() - 1); CellStyle styleBottomRight = cellBottomRight.getCellStyle(); Rectangle cr = getCellRect(cell); // draw border for (Direction d : Direction.values()) { boolean isTopLeft = d == Direction.NORTH || d == Direction.WEST; CellStyle style = isTopLeft ? styleTopLeft : styleBottomRight; BorderStyle b = style.getBorderStyle(d); if (b.getWidth() == 0) { continue; } Color color = b.getColor(); if (color == null) { color = Color.BLACK; } g.setStroke(color, b.getWidth()); switch (d) { case NORTH: g.drawLine(cr.getLeft(), cr.getTop(), cr.getRight(), cr.getTop()); break; case EAST: g.drawLine(cr.getRight(), cr.getTop(), cr.getRight(), cr.getBottom()); break; case SOUTH: g.drawLine(cr.getLeft(), cr.getBottom(), cr.getRight(), cr.getBottom()); break; case WEST: g.drawLine(cr.getLeft(), cr.getTop(), cr.getLeft(), cr.getBottom()); break; } } } /** * Draw cell foreground. 
* * @param g the graphics context to use * @param cell cell to draw */ private void drawCellForeground(GC g, Cell cell) { if (cell.isEmpty()) { return; } double paddingX = getPaddingX(); double paddingY = getPaddingY(); // the rectangle used for positioning the text Rectangle textRect = getCellRect(cell); textRect = new Rectangle(textRect.getX() + paddingX, textRect.getY() + paddingY, textRect.getW() - 2 * paddingX, textRect.getH() - 2 * paddingY); // the clipping rectangle final Rectangle clipRect; final CellStyle style = cell.getCellStyle(); if (isWrapping(style)) { clipRect = textRect; } else { Row row = cell.getRow(); double clipXMin = textRect.getX(); for (int j = cell.getColumnNumber() - 1; j > 0; j--) { if (!row.getCell(j).isEmpty()) { break; } clipXMin = getColumnPos(j) + paddingX; } double clipXMax = textRect.getRight(); for (int j = cell.getColumnNumber() + 1; j < getColumnCount(); j++) { if (!row.getCell(j).isEmpty()) { break; } clipXMax = getColumnPos(j + 1) - paddingX; } clipRect = new Rectangle(clipXMin, textRect.getY(), clipXMax - clipXMin, textRect.getH()); } render(g, cell, textRect, clipRect); } /** * Draw frame around current selection. * * @param gc graphics object used for drawing */ private void drawSelection(GC gc) { // no sheet, no drawing if (sheet == null) { return; } Cell logicalCell = sheet.getCurrentCell().getLogicalCell(); Rectangle rect = getCellRect(logicalCell); gc.setXOR(MejaConfig.isXorDrawModeEnabled()); gc.setStroke(getSelectionColor(), getSelectionStrokeWidth()); gc.drawRect(rect.getX(), rect.getY(), rect.getW(), rect.getH()); gc.setXOR(false); } private String getColumnName(int j) { return sheetView.getColumnName(j); } private String getRowName(int i) { return sheetView.getRowName(i); } protected void beginDraw(GC gc) { // nop } protected abstract void drawBackground(GC gc); protected abstract void drawLabel(GC gc, Rectangle rect, String text); protected void drawLabels(GC gc) { // determine visible rows and columns Rectangle clipBounds = gc.getClipBounds(); int startRow = Math.max(0, getRowNumberFromY(clipBounds.getTop())); int endRow = Math.min(getRowCount(), 1 + getRowNumberFromY(clipBounds.getBottom())); int startColumn = Math.max(0, getColumnNumberFromX(clipBounds.getLeft())); int endColumn = Math.min(getColumnCount(), 1 + getColumnNumberFromX(clipBounds.getRight())); // draw row labels Rectangle r = new Rectangle(-getRowLabelWidth(), 0, getRowLabelWidth(), 0); for (int i = startRow; i < endRow; i++) { r.setY(getRowPos(i)); r.setH(getRowPos(i + 1) - r.getY()); String text = getRowName(i); drawLabel(gc, r, text); } // draw column labels r = new Rectangle(0, -getColumnLabelHeight(), 0, getColumnLabelHeight()); for (int j = startColumn; j < endColumn; j++) { r.setX(getColumnPos(j)); r.setW(getColumnPos(j + 1) - r.getX()); String text = getColumnName(j); drawLabel(gc, r, text); } } protected void endDraw(GC gc) { // nop } protected abstract double getColumnLabelHeight(); protected Color getGridColor() { return sheetView.getGridColor(); } protected double getPaddingX() { return PADDING_X; } protected double getPaddingY() { return PADDING_Y; } protected abstract double getRowLabelWidth(); protected Color getSelectionColor() { return SELECTION_COLOR; } protected double getSelectionStrokeWidth() { return SELECTION_STROKE_WIDTH; } protected abstract void render(GC g, Cell cell, Rectangle textRect, Rectangle clipRect); /** * Draw cells. 
* * Since borders can be draw over by the background of adjacent cells and text * can overlap, drawing is done in three steps: * <ul> * <li>draw background for <em>all</em> cells * <li>draw borders for <em>all</em> cells * <li>draw foreground <em>all</em> cells * </ul> * This is controlled by {@code cellDrawMode}. * * @param g the graphics object to use * @param cellDrawMode the draw mode to use */ void drawCells(GC g, CellDrawMode cellDrawMode) { // no sheet, no drawing if (sheet == null) { return; } double maxWidth = SheetView.MAX_COLUMN_WIDTH; Rectangle clipBounds = g.getClipBounds(); // determine visible rows and columns int startRow = Math.max(0, getRowNumberFromY(clipBounds.getTop())); int endRow = Math.min(getRowCount(), 1 + getRowNumberFromY(clipBounds.getBottom())); int startColumn = Math.max(0, getColumnNumberFromX(clipBounds.getLeft())); int endColumn = Math.min(getColumnCount(), 1 + getColumnNumberFromX(clipBounds.getRight())); // Collect cells to be drawn for (int i = startRow; i < endRow; i++) { Row row = sheet.getRow(i); if (row == null) { continue; } // if first/last displayed cell of row is empty, start drawing at // the first non-empty cell to the left/right to make sure // overflowing text is visible. int first = startColumn; while (first > 0 && getColumnPos(first) + maxWidth > clipBounds.getLeft() && row.getCell(first).isEmpty()) { first--; } int end = endColumn; while (end < getColumnCount() && getColumnPos(end) - maxWidth < clipBounds.getRight() && (end <= 0 || row.getCell(end - 1).isEmpty())) { end++; } for (int j = first; j < end; j++) { Cell cell = row.getCell(j); Cell logicalCell = cell.getLogicalCell(); final boolean visible; if (cell == logicalCell) { // if cell is not merged or the topleft cell of the // merged region, then it is visible visible = true; } else { // otherwise calculate row and column numbers of the // first visible cell of the merged region int iCell = Math.max(startRow, logicalCell.getRowNumber()); int jCell = Math.max(first, logicalCell.getColumnNumber()); visible = i == iCell && j == jCell; // skip the other cells of this row that belong to the same // merged region j = logicalCell.getColumnNumber() + logicalCell.getHorizontalSpan() - 1; // filter out cells that cannot overflow into the visible // region if (j < startColumn && isWrapping(cell.getCellStyle())) { continue; } } // draw cell if (visible) { switch (cellDrawMode) { case DRAW_CELL_BACKGROUND: drawCellBackground(g, logicalCell); break; case DRAW_CELL_BORDER: drawCellBorder(g, logicalCell); break; case DRAW_CELL_FOREGROUND: drawCellForeground(g, logicalCell); break; } } } } } }
src/main/java/com/dua3/meja/ui/SheetPainterBase.java
/* * Copyright 2016 Axel Howind. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.dua3.meja.ui; import java.util.concurrent.locks.Lock; import com.dua3.meja.model.BorderStyle; import com.dua3.meja.model.Cell; import com.dua3.meja.model.CellStyle; import com.dua3.meja.model.Direction; import com.dua3.meja.model.FillPattern; import com.dua3.meja.model.Row; import com.dua3.meja.model.Sheet; import com.dua3.meja.util.MejaConfig; import com.dua3.utility.Color; /** * A helper class that implements the actual drawing algorithm. * * @param <SV> the concrete class implementing SheetView * @param <GC> the concrete class implementing GraphicsContext */ public abstract class SheetPainterBase<SV extends SheetView, GC extends GraphicsContext> { enum CellDrawMode { /** * */ DRAW_CELL_BACKGROUND, /** * */ DRAW_CELL_BORDER, /** * */ DRAW_CELL_FOREGROUND } /** * Horizontal padding. */ protected static final int PADDING_X = 2; /** * Vertical padding. */ protected static final int PADDING_Y = 1; /** * Color used to draw the selection rectangle. */ protected static final Color SELECTION_COLOR = Color.BLACK; /** * Width of the selection rectangle borders. */ protected static final int SELECTION_STROKE_WIDTH = 4; /** * Test whether style uses text wrapping. While there is a property for text * wrapping, the alignment settings have to be taken into account too. * * @param style style * @return true if cell content should be displayed with text wrapping */ static boolean isWrapping(CellStyle style) { return style.isWrap() || style.getHAlign().isWrap() || style.getVAlign().isWrap(); } protected final SV sheetView; /** * Reference to the sheet. */ private Sheet sheet = null; /** * Array with column positions (x-axis) in pixels. */ private double[] columnPos; /** * Array with column positions (y-axis) in pixels. */ private double[] rowPos; private double sheetHeightInPoints = 0; private double sheetWidthInPoints = 0; protected SheetPainterBase(SV sheetView) { this.sheetView = sheetView; } public void drawSheet(GC gc) { if (sheet == null) { return; } Lock readLock = sheet.readLock(); readLock.lock(); try { beginDraw(gc); drawBackground(gc); drawLabels(gc); drawCells(gc, CellDrawMode.DRAW_CELL_BACKGROUND); drawCells(gc, CellDrawMode.DRAW_CELL_BORDER); drawCells(gc, CellDrawMode.DRAW_CELL_FOREGROUND); drawSelection(gc); endDraw(gc); } finally { readLock.unlock(); } } /** * Calculate the rectangle the cell occupies on screen. * * @param cell the cell whose area is requested * @return rectangle the rectangle the cell takes up in screen coordinates */ public Rectangle getCellRect(Cell cell) { final int i = cell.getRowNumber(); final int j = cell.getColumnNumber(); final double x = getColumnPos(j); final double w = getColumnPos(j + cell.getHorizontalSpan()) - x; final double y = getRowPos(i); final double h = getRowPos(i + cell.getVerticalSpan()) - y; return new Rectangle(x, y, w, h); } /** * Get number of columns for the currently loaded sheet. 
* * @return number of columns */ public int getColumnCount() { return columnPos.length - 1; } /** * Get the column number that the given x-coordinate belongs to. * * @param x x-coordinate * * @return * <ul> * <li>-1, if the first column is displayed to the right of the given * coordinate * <li>number of columns, if the right edge of the last column is * displayed to the left of the given coordinate * <li>the number of the column that belongs to the given coordinate * </ul> */ public int getColumnNumberFromX(double x) { if (columnPos.length == 0) { return 0; } // guess position int j = (int) (columnPos.length * x / sheetWidthInPoints); if (j < 0) { j = 0; } else if (j >= columnPos.length) { j = columnPos.length - 1; } // linear search from here if (getColumnPos(j) > x) { while (j > 0 && getColumnPos(j - 1) > x) { j--; } } else { while (j < columnPos.length && getColumnPos(j) <= x) { j++; } } return j - 1; } /** * @param j the column number * @return the columnPos */ public double getColumnPos(int j) { return columnPos[Math.min(columnPos.length - 1, j)]; } /** * Get number of rows for the currently loaded sheet. * * @return number of rows */ public int getRowCount() { return rowPos.length - 1; } /** * Get the row number that the given y-coordinate belongs to. * * @param y y-coordinate * * @return * <ul> * <li>-1, if the first row is displayed below the given coordinate * <li>number of rows, if the lower edge of the last row is displayed * above the given coordinate * <li>the number of the row that belongs to the given coordinate * </ul> */ public int getRowNumberFromY(double y) { if (rowPos.length == 0) { return 0; } // guess position int i = (int) (rowPos.length * y / sheetHeightInPoints); if (i < 0) { i = 0; } else if (i >= rowPos.length) { i = rowPos.length - 1; } // linear search from here if (getRowPos(i) > y) { while (i > 0 && getRowPos(i - 1) > y) { i--; } } else { while (i < rowPos.length && getRowPos(i) <= y) { i++; } } return i - 1; } /** * @param i the row number * @return the rowPos */ public double getRowPos(int i) { return rowPos[Math.min(rowPos.length - 1, i)]; } /** * Get display coordinates of selection rectangle. 
* * @param cell the selected cell * @return selection rectangle in display coordinates */ public Rectangle getSelectionRect(Cell cell) { Rectangle cellRect = getCellRect(cell.getLogicalCell()); double extra = (getSelectionStrokeWidth() + 1) / 2; return new Rectangle(cellRect.getX() - extra, cellRect.getY() - extra, cellRect.getW() + 2 * extra, cellRect.getH() + 2 * extra); } public double getSheetHeightInPoints() { return sheetHeightInPoints; } public double getSheetWidthInPoints() { return sheetWidthInPoints; } public double getSplitX() { return getColumnPos(sheet.getSplitColumn()); } public double getSplitY() { return getRowPos(sheet.getSplitRow()); } public void update(Sheet sheet) { if (sheet != this.sheet) { this.sheet = sheet; } // determine sheet dimensions if (sheet == null) { sheetWidthInPoints = 0; sheetHeightInPoints = 0; rowPos = new double[] { 0 }; columnPos = new double[] { 0 }; return; } Lock readLock = sheet.readLock(); readLock.lock(); try { sheetHeightInPoints = 0; rowPos = new double[2 + sheet.getLastRowNum()]; rowPos[0] = 0; for (int i = 1; i < rowPos.length; i++) { sheetHeightInPoints += sheet.getRowHeight(i - 1); rowPos[i] = sheetHeightInPoints; } sheetWidthInPoints = 0; columnPos = new double[2 + sheet.getLastColNum()]; columnPos[0] = 0; for (int j = 1; j < columnPos.length; j++) { sheetWidthInPoints += sheet.getColumnWidth(j - 1); columnPos[j] = sheetWidthInPoints; } } finally { readLock.unlock(); } } /** * Draw cell background. * * @param g the graphics context to use * @param cell cell to draw */ private void drawCellBackground(GC g, Cell cell) { Rectangle cr = getCellRect(cell); // draw grid lines g.setColor(getGridColor()); g.drawRect(cr.getX(), cr.getY(), cr.getW(), cr.getH()); CellStyle style = cell.getCellStyle(); FillPattern pattern = style.getFillPattern(); if (pattern == FillPattern.NONE) { return; } if (pattern != FillPattern.SOLID) { Color fillBgColor = style.getFillBgColor(); if (fillBgColor != null) { g.setColor(fillBgColor); g.fillRect(cr.getX(), cr.getY(), cr.getW(), cr.getH()); } } if (pattern != FillPattern.NONE) { Color fillFgColor = style.getFillFgColor(); if (fillFgColor != null) { g.setColor(fillFgColor); g.fillRect(cr.getX(), cr.getY(), cr.getW(), cr.getH()); } } } /** * Draw cell border. * * @param g the graphics context to use * @param cell cell to draw */ private void drawCellBorder(GC g, Cell cell) { CellStyle styleTopLeft = cell.getCellStyle(); Cell cellBottomRight = sheet.getRow(cell.getRowNumber() + cell.getVerticalSpan() - 1) .getCell(cell.getColumnNumber() + cell.getHorizontalSpan() - 1); CellStyle styleBottomRight = cellBottomRight.getCellStyle(); Rectangle cr = getCellRect(cell); // draw border for (Direction d : Direction.values()) { boolean isTopLeft = d == Direction.NORTH || d == Direction.WEST; CellStyle style = isTopLeft ? styleTopLeft : styleBottomRight; BorderStyle b = style.getBorderStyle(d); if (b.getWidth() == 0) { continue; } Color color = b.getColor(); if (color == null) { color = Color.BLACK; } g.setStroke(color, b.getWidth()); switch (d) { case NORTH: g.drawLine(cr.getLeft(), cr.getTop(), cr.getRight(), cr.getTop()); break; case EAST: g.drawLine(cr.getRight(), cr.getTop(), cr.getRight(), cr.getBottom()); break; case SOUTH: g.drawLine(cr.getLeft(), cr.getBottom(), cr.getRight(), cr.getBottom()); break; case WEST: g.drawLine(cr.getLeft(), cr.getTop(), cr.getLeft(), cr.getBottom()); break; } } } /** * Draw cell foreground. 
* * @param g the graphics context to use * @param cell cell to draw */ private void drawCellForeground(GC g, Cell cell) { if (cell.isEmpty()) { return; } double paddingX = getPaddingX(); double paddingY = getPaddingY(); // the rectangle used for positioning the text Rectangle textRect = getCellRect(cell); textRect = new Rectangle(textRect.getX() + paddingX, textRect.getY() + paddingY, textRect.getW() - 2 * paddingX, textRect.getH() - 2 * paddingY); // the clipping rectangle final Rectangle clipRect; final CellStyle style = cell.getCellStyle(); if (isWrapping(style)) { clipRect = textRect; } else { Row row = cell.getRow(); double clipXMin = textRect.getX(); for (int j = cell.getColumnNumber() - 1; j > 0; j--) { if (!row.getCell(j).isEmpty()) { break; } clipXMin = getColumnPos(j) + paddingX; } double clipXMax = textRect.getRight(); for (int j = cell.getColumnNumber() + 1; j < getColumnCount(); j++) { if (!row.getCell(j).isEmpty()) { break; } clipXMax = getColumnPos(j + 1) - paddingX; } clipRect = new Rectangle(clipXMin, textRect.getY(), clipXMax - clipXMin, textRect.getH()); } render(g, cell, textRect, clipRect); } /** * Draw frame around current selection. * * @param gc graphics object used for drawing */ private void drawSelection(GC gc) { // no sheet, no drawing if (sheet == null) { return; } Cell logicalCell = sheet.getCurrentCell().getLogicalCell(); Rectangle rect = getCellRect(logicalCell); gc.setXOR(MejaConfig.isXorDrawModeEnabled()); gc.setStroke(getSelectionColor(), getSelectionStrokeWidth()); gc.drawRect(rect.getX(), rect.getY(), rect.getW(), rect.getH()); gc.setXOR(false); } private String getColumnName(int j) { return sheetView.getColumnName(j); } private String getRowName(int i) { return sheetView.getRowName(i); } protected void beginDraw(GC gc) { // nop } protected abstract void drawBackground(GC gc); protected abstract void drawLabel(GC gc, Rectangle rect, String text); protected void drawLabels(GC gc) { // determine visible rows and columns Rectangle clipBounds = gc.getClipBounds(); int startRow = Math.max(0, getRowNumberFromY(clipBounds.getTop())); int endRow = Math.min(getRowCount(), 1 + getRowNumberFromY(clipBounds.getBottom())); int startColumn = Math.max(0, getColumnNumberFromX(clipBounds.getLeft())); int endColumn = Math.min(getColumnCount(), 1 + getColumnNumberFromX(clipBounds.getRight())); // draw row labels Rectangle r = new Rectangle(-getRowLabelWidth(), 0, getRowLabelWidth(), 0); for (int i = startRow; i < endRow; i++) { r.setY(getRowPos(i)); r.setH(getRowPos(i + 1) - r.getY()); String text = getRowName(i); drawLabel(gc, r, text); } // draw column labels r = new Rectangle(0, -getColumnLabelHeight(), 0, getColumnLabelHeight()); for (int j = startColumn; j < endColumn; j++) { r.setX(getColumnPos(j)); r.setW(getColumnPos(j + 1) - r.getX()); String text = getColumnName(j); drawLabel(gc, r, text); } } protected void endDraw(GC gc) { // nop } protected abstract double getColumnLabelHeight(); protected Color getGridColor() { return sheetView.getGridColor(); } protected double getPaddingX() { return PADDING_X; } protected double getPaddingY() { return PADDING_Y; } protected abstract double getRowLabelWidth(); protected Color getSelectionColor() { return SELECTION_COLOR; } protected double getSelectionStrokeWidth() { return SELECTION_STROKE_WIDTH; } protected abstract void render(GC g, Cell cell, Rectangle textRect, Rectangle clipRect); /** * Draw cells. 
* * Since borders can be draw over by the background of adjacent cells and text * can overlap, drawing is done in three steps: * <ul> * <li>draw background for <em>all</em> cells * <li>draw borders for <em>all</em> cells * <li>draw foreground <em>all</em> cells * </ul> * This is controlled by {@code cellDrawMode}. * * @param g the graphics object to use * @param cellDrawMode the draw mode to use */ void drawCells(GC g, CellDrawMode cellDrawMode) { // no sheet, no drawing if (sheet == null) { return; } double maxWidth = SheetView.MAX_COLUMN_WIDTH; Rectangle clipBounds = g.getClipBounds(); // determine visible rows and columns int startRow = Math.max(0, getRowNumberFromY(clipBounds.getTop())); int endRow = Math.min(getRowCount(), 1 + getRowNumberFromY(clipBounds.getBottom())); int startColumn = Math.max(0, getColumnNumberFromX(clipBounds.getLeft())); int endColumn = Math.min(getColumnCount(), 1 + getColumnNumberFromX(clipBounds.getRight())); // Collect cells to be drawn for (int i = startRow; i < endRow; i++) { Row row = sheet.getRow(i); if (row == null) { continue; } // if first/last displayed cell of row is empty, start drawing at // the first non-empty cell to the left/right to make sure // overflowing text is visible. int first = startColumn; while (first > 0 && getColumnPos(first) + maxWidth > clipBounds.getLeft() && row.getCell(first).isEmpty()) { first--; } int end = endColumn; while (end < getColumnCount() && getColumnPos(end) - maxWidth < clipBounds.getRight() && (end <= 0 || row.getCell(end - 1).isEmpty())) { end++; } for (int j = first; j < end; j++) { Cell cell = row.getCell(j); Cell logicalCell = cell.getLogicalCell(); final boolean visible; if (cell == logicalCell) { // if cell is not merged or the topleft cell of the // merged region, then it is visible visible = true; } else { // otherwise calculate row and column numbers of the // first visible cell of the merged region int iCell = Math.max(startRow, logicalCell.getRowNumber()); int jCell = Math.max(first, logicalCell.getColumnNumber()); visible = i == iCell && j == jCell; // skip the other cells of this row that belong to the same // merged region j = logicalCell.getColumnNumber() + logicalCell.getHorizontalSpan() - 1; // filter out cells that cannot overflow into the visible // region if (j < startColumn && isWrapping(cell.getCellStyle())) { continue; } } // draw cell if (visible) { switch (cellDrawMode) { case DRAW_CELL_BACKGROUND: drawCellBackground(g, logicalCell); break; case DRAW_CELL_BORDER: drawCellBorder(g, logicalCell); break; case DRAW_CELL_FOREGROUND: drawCellForeground(g, logicalCell); break; } } } } } }
Fix NPE caused by a race condition: guard the drawing methods against the sheet being null while painting.
src/main/java/com/dua3/meja/ui/SheetPainterBase.java
Fix NPE caused by a race condition
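The fix in this commit is the `if (sheet == null) { return; }` guard ("no sheet, no drawing") at the top of the drawing methods, since the sheet reference can be cleared or replaced on another thread while a repaint is still running. A minimal sketch of that defensive pattern, using simplified placeholder types (`Sheet`, `GC`) rather than the real com.dua3.meja API:

// Sketch of the null guard added by the commit above; Sheet and GC are
// hypothetical stand-ins, not the actual Meja interfaces.
final class SheetPainterSketch {
    interface Sheet { /* ... */ }
    interface GC { void drawRect(double x, double y, double w, double h); }

    private volatile Sheet sheet; // may be set to null concurrently

    void drawSelection(GC gc) {
        // no sheet, no drawing - the check that prevents the NPE
        if (sheet == null) {
            return;
        }
        gc.drawRect(0, 0, 100, 100); // placeholder for the real selection rectangle
    }
}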
Java
apache-2.0
9711d5b953c9c0e6f6d70200fef02eb64e20a874
0
thinkbigthings/java9
import java.io.*; import java.util.*; public class Main { public static void main(String[] args) throws Exception { // resources may be declared outside the try statement Reader reader = new InputStreamReader(new FileInputStream("Main.java")); BufferedReader in = new BufferedReader(reader); List<String> lines = new ArrayList<>(); try(in) { String line; while ((line = in.readLine()) != null) { lines.add(line); } } ListProcessor processor = new ListProcessor() {}; int numOriginal = lines.size(); int numFlat = processor.flatten(lines).size(); System.out.println("number of duplicate lines: " + (numOriginal - numFlat)); } interface ListProcessor { default List<String> flatten(List<String>... lists) { return flattenStrings(lists); } // @SafeVarargs can be put on a private method // interfaces can have private methods @SafeVarargs private List<String> flattenStrings(List<String>... lists) { // anonymous classes can use type inference // single underscore now can NOT be used as variable name Set<String> _strings = new HashSet<>(){}; for(List<String> list : lists) { _strings.addAll(list); } return new ArrayList<>(_strings); } } } // http://stackoverflow.com/questions/7214069/compile-error-cannot-be-used-with-anonymous-classes /* Main.java:7: error: cannot infer type arguments for ArrayList<E> List<String> strings = new ArrayList<>(){}; ^ reason: cannot use '<>' with anonymous inner classes where E is a type-variable: E extends Object declared in class ArrayList 1 error ELSPHIM-4170403:~ young1$ */
coin/Main.java
import java.io.*; import java.util.*; public class Main { public static void main(String[] args) throws Exception { // resources may be declared outside the try statement Reader reader = new InputStreamReader(new FileInputStream("Main.java")); BufferedReader in = new BufferedReader(reader); try(in) { String line; while ((line = in.readLine()) != null) { System.out.println(line); } } } interface ListProcessor { default List<String> flatten(List<String>... lists) { return flattenStrings(lists); } // @SafeVarargs can be put on a private method // interfaces can have private methods @SafeVarargs private List<String> flattenStrings(List<String>... lists) { // anonymous classes can use type inference // single underscore now can NOT be used as variable name Set<String> _strings = new HashSet<>(){}; for(List<String> list : lists) { _strings.addAll(list); } return new ArrayList<>(_strings); } } } // http://stackoverflow.com/questions/7214069/compile-error-cannot-be-used-with-anonymous-classes /* Main.java:7: error: cannot infer type arguments for ArrayList<E> List<String> strings = new ArrayList<>(){}; ^ reason: cannot use '<>' with anonymous inner classes where E is a type-variable: E extends Object declared in class ArrayList 1 error ELSPHIM-4170403:~ young1$ */
Made the example actually do something: it now collects the file's lines, deduplicates them through the ListProcessor, and prints the number of duplicate lines.
coin/Main.java
made the example actually do something
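The trailing comment block in the example above preserves the pre-Java-9 compiler error for `new ArrayList<>(){}`. Under JEP 213 ("Milling Project Coin", Java 9) the diamond operator is accepted with anonymous inner classes as long as the inferred type is denotable, which is what lets the `new HashSet<>(){}` line in the example compile. A minimal sketch, assuming a Java 9 or later compiler:

// Compiles on Java 9+; rejected by javac 7/8 with
// "cannot infer type arguments ... cannot use '<>' with anonymous inner classes".
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DiamondWithAnonymousClasses {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>() { };   // anonymous subclass, element type inferred as String
        List<String> items = new ArrayList<>() { };
        names.add("a");
        items.add("b");
        System.out.println(names + " " + items);
    }
}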
Java
apache-2.0
4ff75f0247e19bff1f4da93f68060569cff7cd44
0
androidx/media,amzn/exoplayer-amazon-port,google/ExoPlayer,amzn/exoplayer-amazon-port,google/ExoPlayer,google/ExoPlayer,androidx/media,androidx/media,amzn/exoplayer-amazon-port
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.ext.opus; import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.decoder.CryptoException; import com.google.android.exoplayer2.decoder.CryptoInfo; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.SimpleDecoder; import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.List; /** Opus decoder. */ @VisibleForTesting(otherwise = PACKAGE_PRIVATE) public final class OpusDecoder extends SimpleDecoder<DecoderInputBuffer, SimpleDecoderOutputBuffer, OpusDecoderException> { /** Opus streams are always 48000 Hz. */ /* package */ static final int SAMPLE_RATE = 48_000; private static final int DEFAULT_SEEK_PRE_ROLL_SAMPLES = 3840; private static final int FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT = 3; private static final int NO_ERROR = 0; private static final int DECODE_ERROR = -1; private static final int DRM_ERROR = -2; public final boolean outputFloat; public final int channelCount; @Nullable private final CryptoConfig cryptoConfig; private final int preSkipSamples; private final int seekPreRollSamples; private final long nativeDecoderContext; private int skipSamples; /** * Creates an Opus decoder. * * @param numInputBuffers The number of input buffers. * @param numOutputBuffers The number of output buffers. * @param initialInputBufferSize The initial size of each input buffer. * @param initializationData Codec-specific initialization data. The first element must contain an * opus header. Optionally, the list may contain two additional buffers, which must contain * the encoder delay and seek pre roll values in nanoseconds, encoded as longs. * @param cryptoConfig The {@link CryptoConfig} object required for decoding encrypted content. * May be null and can be ignored if decoder does not handle encrypted content. * @param outputFloat Forces the decoder to output float PCM samples when set * @throws OpusDecoderException Thrown if an exception occurs when initializing the decoder. 
*/ public OpusDecoder( int numInputBuffers, int numOutputBuffers, int initialInputBufferSize, List<byte[]> initializationData, @Nullable CryptoConfig cryptoConfig, boolean outputFloat) throws OpusDecoderException { super(new DecoderInputBuffer[numInputBuffers], new SimpleDecoderOutputBuffer[numOutputBuffers]); if (!OpusLibrary.isAvailable()) { throw new OpusDecoderException("Failed to load decoder native libraries"); } this.cryptoConfig = cryptoConfig; if (cryptoConfig != null && !OpusLibrary.opusIsSecureDecodeSupported()) { throw new OpusDecoderException("Opus decoder does not support secure decode"); } int initializationDataSize = initializationData.size(); if (initializationDataSize != 1 && initializationDataSize != 3) { throw new OpusDecoderException("Invalid initialization data size"); } if (initializationDataSize == 3 && (initializationData.get(1).length != 8 || initializationData.get(2).length != 8)) { throw new OpusDecoderException("Invalid pre-skip or seek pre-roll"); } preSkipSamples = getPreSkipSamples(initializationData); seekPreRollSamples = getSeekPreRollSamples(initializationData); byte[] headerBytes = initializationData.get(0); if (headerBytes.length < 19) { throw new OpusDecoderException("Invalid header length"); } channelCount = getChannelCount(headerBytes); if (channelCount > 8) { throw new OpusDecoderException("Invalid channel count: " + channelCount); } int gain = readSignedLittleEndian16(headerBytes, 16); byte[] streamMap = new byte[8]; int numStreams; int numCoupled; if (headerBytes[18] == 0) { // Channel mapping // If there is no channel mapping, use the defaults. if (channelCount > 2) { // Maximum channel count with default layout. throw new OpusDecoderException("Invalid header, missing stream map"); } numStreams = 1; numCoupled = (channelCount == 2) ? 1 : 0; streamMap[0] = 0; streamMap[1] = 1; } else { if (headerBytes.length < 21 + channelCount) { throw new OpusDecoderException("Invalid header length"); } // Read the channel mapping. numStreams = headerBytes[19] & 0xFF; numCoupled = headerBytes[20] & 0xFF; System.arraycopy(headerBytes, 21, streamMap, 0, channelCount); } nativeDecoderContext = opusInit(SAMPLE_RATE, channelCount, numStreams, numCoupled, gain, streamMap); if (nativeDecoderContext == 0) { throw new OpusDecoderException("Failed to initialize decoder"); } setInitialInputBufferSize(initialInputBufferSize); this.outputFloat = outputFloat; if (outputFloat) { opusSetFloatOutput(); } } @Override public String getName() { return "libopus" + OpusLibrary.getVersion(); } @Override protected DecoderInputBuffer createInputBuffer() { return new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT); } @Override protected SimpleDecoderOutputBuffer createOutputBuffer() { return new SimpleDecoderOutputBuffer(this::releaseOutputBuffer); } @Override protected OpusDecoderException createUnexpectedDecodeException(Throwable error) { return new OpusDecoderException("Unexpected decode error", error); } @Override @Nullable protected OpusDecoderException decode( DecoderInputBuffer inputBuffer, SimpleDecoderOutputBuffer outputBuffer, boolean reset) { if (reset) { opusReset(nativeDecoderContext); // When seeking to 0, skip number of samples as specified in opus header. When seeking to // any other time, skip number of samples as specified by seek preroll. skipSamples = (inputBuffer.timeUs == 0) ? 
preSkipSamples : seekPreRollSamples; } ByteBuffer inputData = Util.castNonNull(inputBuffer.data); CryptoInfo cryptoInfo = inputBuffer.cryptoInfo; int result = inputBuffer.isEncrypted() ? opusSecureDecode( nativeDecoderContext, inputBuffer.timeUs, inputData, inputData.limit(), outputBuffer, SAMPLE_RATE, cryptoConfig, cryptoInfo.mode, Assertions.checkNotNull(cryptoInfo.key), Assertions.checkNotNull(cryptoInfo.iv), cryptoInfo.numSubSamples, cryptoInfo.numBytesOfClearData, cryptoInfo.numBytesOfEncryptedData) : opusDecode( nativeDecoderContext, inputBuffer.timeUs, inputData, inputData.limit(), outputBuffer); if (result < 0) { if (result == DRM_ERROR) { String message = "Drm error: " + opusGetErrorMessage(nativeDecoderContext); CryptoException cause = new CryptoException(opusGetErrorCode(nativeDecoderContext), message); return new OpusDecoderException(message, cause); } else { return new OpusDecoderException("Decode error: " + opusGetErrorMessage(result)); } } ByteBuffer outputData = Util.castNonNull(outputBuffer.data); outputData.position(0); outputData.limit(result); if (skipSamples > 0) { int bytesPerSample = samplesToBytes(1, channelCount, outputFloat); int skipBytes = skipSamples * bytesPerSample; if (result <= skipBytes) { skipSamples -= result / bytesPerSample; outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY); outputData.position(result); } else { skipSamples = 0; outputData.position(skipBytes); } } return null; } @Override public void release() { super.release(); opusClose(nativeDecoderContext); } /** * Parses the channel count from an Opus Identification Header. * * @param header An Opus Identification Header, as defined by RFC 7845. * @return The parsed channel count. */ @VisibleForTesting /* package */ static int getChannelCount(byte[] header) { return header[9] & 0xFF; } /** * Returns the number of pre-skip samples specified by the given Opus codec initialization data. * * @param initializationData The codec initialization data. * @return The number of pre-skip samples. */ @VisibleForTesting /* package */ static int getPreSkipSamples(List<byte[]> initializationData) { if (initializationData.size() == FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT) { long codecDelayNs = ByteBuffer.wrap(initializationData.get(1)).order(ByteOrder.nativeOrder()).getLong(); return (int) ((codecDelayNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); } // Fall back to parsing directly from the Opus Identification header. byte[] headerData = initializationData.get(0); return ((headerData[11] & 0xFF) << 8) | (headerData[10] & 0xFF); } /** * Returns the number of seek per-roll samples specified by the given Opus codec initialization * data. * * @param initializationData The codec initialization data. * @return The number of seek pre-roll samples. */ @VisibleForTesting /* package */ static int getSeekPreRollSamples(List<byte[]> initializationData) { if (initializationData.size() == FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT) { long seekPreRollNs = ByteBuffer.wrap(initializationData.get(2)).order(ByteOrder.nativeOrder()).getLong(); return (int) ((seekPreRollNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); } // Fall back to returning the default seek pre-roll. return DEFAULT_SEEK_PRE_ROLL_SAMPLES; } /** Returns number of bytes to represent {@code samples}. */ private static int samplesToBytes(int samples, int channelCount, boolean outputFloat) { int bytesPerChannel = outputFloat ? 
4 : 2; return samples * channelCount * bytesPerChannel; } private static int readSignedLittleEndian16(byte[] input, int offset) { int value = input[offset] & 0xFF; value |= (input[offset + 1] & 0xFF) << 8; return (short) value; } private native long opusInit( int sampleRate, int channelCount, int numStreams, int numCoupled, int gain, byte[] streamMap); private native int opusDecode( long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize, SimpleDecoderOutputBuffer outputBuffer); private native int opusSecureDecode( long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize, SimpleDecoderOutputBuffer outputBuffer, int sampleRate, @Nullable CryptoConfig mediaCrypto, int inputMode, byte[] key, byte[] iv, int numSubSamples, @Nullable int[] numBytesOfClearData, @Nullable int[] numBytesOfEncryptedData); private native void opusClose(long decoder); private native void opusReset(long decoder); private native int opusGetErrorCode(long decoder); private native String opusGetErrorMessage(long decoder); private native void opusSetFloatOutput(); }
extensions/opus/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoder.java
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.ext.opus; import static androidx.annotation.VisibleForTesting.PACKAGE_PRIVATE; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.decoder.CryptoConfig; import com.google.android.exoplayer2.decoder.CryptoException; import com.google.android.exoplayer2.decoder.CryptoInfo; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.SimpleDecoder; import com.google.android.exoplayer2.decoder.SimpleDecoderOutputBuffer; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.List; /** Opus decoder. */ @VisibleForTesting(otherwise = PACKAGE_PRIVATE) public final class OpusDecoder extends SimpleDecoder<DecoderInputBuffer, SimpleDecoderOutputBuffer, OpusDecoderException> { /** Opus streams are always 48000 Hz. */ /* package */ static final int SAMPLE_RATE = 48_000; private static final int DEFAULT_SEEK_PRE_ROLL_SAMPLES = 3840; private static final int FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT = 3; private static final int NO_ERROR = 0; private static final int DECODE_ERROR = -1; private static final int DRM_ERROR = -2; public final boolean outputFloat; public final int channelCount; @Nullable private final CryptoConfig cryptoConfig; private final int preSkipSamples; private final int seekPreRollSamples; private final long nativeDecoderContext; private int skipSamples; /** * Creates an Opus decoder. * * @param numInputBuffers The number of input buffers. * @param numOutputBuffers The number of output buffers. * @param initialInputBufferSize The initial size of each input buffer. * @param initializationData Codec-specific initialization data. The first element must contain an * opus header. Optionally, the list may contain two additional buffers, which must contain * the encoder delay and seek pre roll values in nanoseconds, encoded as longs. * @param cryptoConfig The {@link CryptoConfig} object required for decoding encrypted content. * May be null and can be ignored if decoder does not handle encrypted content. * @param outputFloat Forces the decoder to output float PCM samples when set * @throws OpusDecoderException Thrown if an exception occurs when initializing the decoder. 
*/ public OpusDecoder( int numInputBuffers, int numOutputBuffers, int initialInputBufferSize, List<byte[]> initializationData, @Nullable CryptoConfig cryptoConfig, boolean outputFloat) throws OpusDecoderException { super(new DecoderInputBuffer[numInputBuffers], new SimpleDecoderOutputBuffer[numOutputBuffers]); if (!OpusLibrary.isAvailable()) { throw new OpusDecoderException("Failed to load decoder native libraries"); } this.cryptoConfig = cryptoConfig; if (cryptoConfig != null && !OpusLibrary.opusIsSecureDecodeSupported()) { throw new OpusDecoderException("Opus decoder does not support secure decode"); } int initializationDataSize = initializationData.size(); if (initializationDataSize != 1 && initializationDataSize != 3) { throw new OpusDecoderException("Invalid initialization data size"); } if (initializationDataSize == 3 && (initializationData.get(1).length != 8 || initializationData.get(2).length != 8)) { throw new OpusDecoderException("Invalid pre-skip or seek pre-roll"); } preSkipSamples = getPreSkipSamples(initializationData); seekPreRollSamples = getSeekPreRollSamples(initializationData); byte[] headerBytes = initializationData.get(0); if (headerBytes.length < 19) { throw new OpusDecoderException("Invalid header length"); } channelCount = getChannelCount(headerBytes); if (channelCount > 8) { throw new OpusDecoderException("Invalid channel count: " + channelCount); } int gain = readSignedLittleEndian16(headerBytes, 16); byte[] streamMap = new byte[8]; int numStreams; int numCoupled; if (headerBytes[18] == 0) { // Channel mapping // If there is no channel mapping, use the defaults. if (channelCount > 2) { // Maximum channel count with default layout. throw new OpusDecoderException("Invalid header, missing stream map"); } numStreams = 1; numCoupled = (channelCount == 2) ? 1 : 0; streamMap[0] = 0; streamMap[1] = 1; } else { if (headerBytes.length < 21 + channelCount) { throw new OpusDecoderException("Invalid header length"); } // Read the channel mapping. numStreams = headerBytes[19] & 0xFF; numCoupled = headerBytes[20] & 0xFF; System.arraycopy(headerBytes, 21, streamMap, 0, channelCount); } nativeDecoderContext = opusInit(SAMPLE_RATE, channelCount, numStreams, numCoupled, gain, streamMap); if (nativeDecoderContext == 0) { throw new OpusDecoderException("Failed to initialize decoder"); } setInitialInputBufferSize(initialInputBufferSize); this.outputFloat = outputFloat; if (outputFloat) { opusSetFloatOutput(); } } @Override public String getName() { return "libopus" + OpusLibrary.getVersion(); } @Override protected DecoderInputBuffer createInputBuffer() { return new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DIRECT); } @Override protected SimpleDecoderOutputBuffer createOutputBuffer() { return new SimpleDecoderOutputBuffer(this::releaseOutputBuffer); } @Override protected OpusDecoderException createUnexpectedDecodeException(Throwable error) { return new OpusDecoderException("Unexpected decode error", error); } @Override @Nullable protected OpusDecoderException decode( DecoderInputBuffer inputBuffer, SimpleDecoderOutputBuffer outputBuffer, boolean reset) { if (reset) { opusReset(nativeDecoderContext); // When seeking to 0, skip number of samples as specified in opus header. When seeking to // any other time, skip number of samples as specified by seek preroll. skipSamples = (inputBuffer.timeUs == 0) ? 
preSkipSamples : seekPreRollSamples; } ByteBuffer inputData = Util.castNonNull(inputBuffer.data); CryptoInfo cryptoInfo = inputBuffer.cryptoInfo; int result = inputBuffer.isEncrypted() ? opusSecureDecode( nativeDecoderContext, inputBuffer.timeUs, inputData, inputData.limit(), outputBuffer, SAMPLE_RATE, cryptoConfig, cryptoInfo.mode, Assertions.checkNotNull(cryptoInfo.key), Assertions.checkNotNull(cryptoInfo.iv), cryptoInfo.numSubSamples, cryptoInfo.numBytesOfClearData, cryptoInfo.numBytesOfEncryptedData) : opusDecode( nativeDecoderContext, inputBuffer.timeUs, inputData, inputData.limit(), outputBuffer); if (result < 0) { if (result == DRM_ERROR) { String message = "Drm error: " + opusGetErrorMessage(nativeDecoderContext); CryptoException cause = new CryptoException(opusGetErrorCode(nativeDecoderContext), message); return new OpusDecoderException(message, cause); } else { return new OpusDecoderException("Decode error: " + opusGetErrorMessage(result)); } } ByteBuffer outputData = Util.castNonNull(outputBuffer.data); outputData.position(0); outputData.limit(result); if (skipSamples > 0) { int bytesPerSample = channelCount * 2; int skipBytes = skipSamples * bytesPerSample; if (result <= skipBytes) { skipSamples -= result / bytesPerSample; outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY); outputData.position(result); } else { skipSamples = 0; outputData.position(skipBytes); } } return null; } @Override public void release() { super.release(); opusClose(nativeDecoderContext); } /** * Parses the channel count from an Opus Identification Header. * * @param header An Opus Identification Header, as defined by RFC 7845. * @return The parsed channel count. */ @VisibleForTesting /* package */ static int getChannelCount(byte[] header) { return header[9] & 0xFF; } /** * Returns the number of pre-skip samples specified by the given Opus codec initialization data. * * @param initializationData The codec initialization data. * @return The number of pre-skip samples. */ @VisibleForTesting /* package */ static int getPreSkipSamples(List<byte[]> initializationData) { if (initializationData.size() == FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT) { long codecDelayNs = ByteBuffer.wrap(initializationData.get(1)).order(ByteOrder.nativeOrder()).getLong(); return (int) ((codecDelayNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); } // Fall back to parsing directly from the Opus Identification header. byte[] headerData = initializationData.get(0); return ((headerData[11] & 0xFF) << 8) | (headerData[10] & 0xFF); } /** * Returns the number of seek per-roll samples specified by the given Opus codec initialization * data. * * @param initializationData The codec initialization data. * @return The number of seek pre-roll samples. */ @VisibleForTesting /* package */ static int getSeekPreRollSamples(List<byte[]> initializationData) { if (initializationData.size() == FULL_CODEC_INITIALIZATION_DATA_BUFFER_COUNT) { long seekPreRollNs = ByteBuffer.wrap(initializationData.get(2)).order(ByteOrder.nativeOrder()).getLong(); return (int) ((seekPreRollNs * SAMPLE_RATE) / C.NANOS_PER_SECOND); } // Fall back to returning the default seek pre-roll. 
return DEFAULT_SEEK_PRE_ROLL_SAMPLES; } private static int readSignedLittleEndian16(byte[] input, int offset) { int value = input[offset] & 0xFF; value |= (input[offset + 1] & 0xFF) << 8; return (short) value; } private native long opusInit( int sampleRate, int channelCount, int numStreams, int numCoupled, int gain, byte[] streamMap); private native int opusDecode( long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize, SimpleDecoderOutputBuffer outputBuffer); private native int opusSecureDecode( long decoder, long timeUs, ByteBuffer inputBuffer, int inputSize, SimpleDecoderOutputBuffer outputBuffer, int sampleRate, @Nullable CryptoConfig mediaCrypto, int inputMode, byte[] key, byte[] iv, int numSubSamples, @Nullable int[] numBytesOfClearData, @Nullable int[] numBytesOfEncryptedData); private native void opusClose(long decoder); private native void opusReset(long decoder); private native int opusGetErrorCode(long decoder); private native String opusGetErrorMessage(long decoder); private native void opusSetFloatOutput(); }
Fix for sample-to-bytes calculation in OpusDecoder. Align the byte size calculation with the sizes defined in opus_jni.cc for the `outputFloat=true` case. #minor-release PiperOrigin-RevId: 427028982
extensions/opus/src/main/java/com/google/android/exoplayer2/ext/opus/OpusDecoder.java
Fix for sample-to-bytes calculation in OpusDecoder
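The change above replaces the hard-coded `channelCount * 2` bytes-per-sample factor with the `samplesToBytes` helper, so the skip calculation matches opus_jni.cc when float output is enabled: 4 bytes per sample per channel for float PCM versus 2 for 16-bit PCM. A minimal sketch of the corrected arithmetic with a small usage example; the class name is only for illustration:

// Sketch of the corrected sample-to-bytes conversion introduced by the commit above.
public final class OpusSampleMath {

    /** Returns the number of bytes needed to hold {@code samples} per-channel samples. */
    static int samplesToBytes(int samples, int channelCount, boolean outputFloat) {
        int bytesPerChannel = outputFloat ? 4 : 2; // float PCM vs 16-bit PCM
        return samples * channelCount * bytesPerChannel;
    }

    public static void main(String[] args) {
        // Skipping the default 3840 seek pre-roll samples of stereo audio:
        System.out.println(samplesToBytes(3840, 2, false)); // 15360 bytes (16-bit PCM)
        System.out.println(samplesToBytes(3840, 2, true));  // 30720 bytes (float PCM)
    }
}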