Dataset schema (column, type, and min/max string length or value range):

  id                int32            0 .. 165k
  repo              string (length)  7 .. 58
  path              string (length)  12 .. 218
  func_name         string (length)  3 .. 140
  original_string   string (length)  73 .. 34.1k
  language          string           1 class (java)
  code              string (length)  73 .. 34.1k
  code_tokens       sequence
  docstring         string (length)  3 .. 16k
  docstring_tokens  sequence
  sha               string (length)  40 .. 40
  url               string (length)  105 .. 339
2,600
windy1/google-places-api-java
src/main/java/se/walkercrou/places/PlaceBuilder.java
PlaceBuilder.toJson
public JSONObject toJson() {
    JSONObject obj = new JSONObject()
            .put(OBJECT_LOCATION, new JSONObject().put("lat", lat).put("lng", lng))
            .put(STRING_NAME, name)
            .put(STRING_TYPES, new JSONArray(types));
    if (accuracy != -1)
        obj.put(INTEGER_ACCURACY, accuracy);
    if (phoneNumber != null)
        obj.put(STRING_PHONE_NUMBER, phoneNumber);
    if (address != null)
        obj.put(STRING_ADDRESS, address);
    if (website != null)
        obj.put(STRING_WEBSITE, website);
    if (locale != null)
        obj.put(STRING_LANGUAGE, locale.getLanguage());
    return obj;
}
java
Returns a Google formatted JSON object to be sent to Google's servers. @return Google formatted JSON
[ "Returns", "a", "Google", "formatted", "JSON", "object", "to", "be", "sent", "to", "Google", "s", "servers", "." ]
a5f2a18a7d1ca03fc0480637eae255fe92fc8b86
https://github.com/windy1/google-places-api-java/blob/a5f2a18a7d1ca03fc0480637eae255fe92fc8b86/src/main/java/se/walkercrou/places/PlaceBuilder.java#L120-L134
2,601
windy1/google-places-api-java
src/main/java/se/walkercrou/places/Photo.java
Photo.download
public Photo download(int maxWidth, int maxHeight, Param... extraParams) {
    image = place.getClient().downloadPhoto(this, maxWidth, maxHeight, extraParams);
    return this;
}
java
Downloads the photo and caches it within the photo. @param maxWidth of photo @param maxHeight of photo @param extraParams to append to request url @return this
[ "Downloads", "the", "photo", "and", "caches", "it", "within", "the", "photo", "." ]
a5f2a18a7d1ca03fc0480637eae255fe92fc8b86
https://github.com/windy1/google-places-api-java/blob/a5f2a18a7d1ca03fc0480637eae255fe92fc8b86/src/main/java/se/walkercrou/places/Photo.java#L35-L38
2,602
windy1/google-places-api-java
src/main/java/se/walkercrou/places/exception/GooglePlacesException.java
GooglePlacesException.parse
public static GooglePlacesException parse(String statusCode, String errorMessage) {
    Class<?> clazz = statusClassMap.get(statusCode);
    if (clazz == null)
        return null;
    try {
        if (errorMessage == null || errorMessage.isEmpty())
            return (GooglePlacesException) clazz.newInstance();
        else {
            Constructor<?> constructor = clazz.getConstructor(String.class);
            return (GooglePlacesException) constructor.newInstance(errorMessage);
        }
    } catch (Exception e) {
        // Should never happen!
        throw new GooglePlacesException(e);
    }
}
java
Returns the correct exception from a server-given status code and error message. @param statusCode to find exception for @param errorMessage error message from server @return exception that matches the specified status code
[ "Returns", "the", "correct", "exception", "from", "a", "server", "-", "given", "status", "code", "and", "error", "message", "." ]
a5f2a18a7d1ca03fc0480637eae255fe92fc8b86
https://github.com/windy1/google-places-api-java/blob/a5f2a18a7d1ca03fc0480637eae255fe92fc8b86/src/main/java/se/walkercrou/places/exception/GooglePlacesException.java#L81-L96
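A minimal usage sketch for parse; the status code below is an assumed example key (the real keys live in the library's private statusClassMap), and the message text is illustrative:

    // parse returns null for unknown status codes, so guard before throwing
    GooglePlacesException e = GooglePlacesException.parse(
            "OVER_QUERY_LIMIT", "You have exceeded your daily request quota.");
    if (e != null) {
        throw e; // a typed subclass matching the status code
    }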
2,603
windy1/google-places-api-java
src/main/java/se/walkercrou/places/TypeParam.java
TypeParam.value
public Param value(List<String> values) {
    StringBuilder valuesSb = new StringBuilder();
    for (int i = 0; i < values.size(); i++) {
        valuesSb.append(values.get(i));
        if (i != (values.size() - 1)) {
            valuesSb.append("%7C"); // URL-encoded pipe character '|'
        }
    }
    this.value = valuesSb.toString();
    return this;
}
java
Sets the values of the Param. @param values the values of the param @return this Param
[ "Sets", "the", "values", "of", "the", "Param", "." ]
a5f2a18a7d1ca03fc0480637eae255fe92fc8b86
https://github.com/windy1/google-places-api-java/blob/a5f2a18a7d1ca03fc0480637eae255fe92fc8b86/src/main/java/se/walkercrou/places/TypeParam.java#L30-L40
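A standalone sketch of the join this method performs, with illustrative type values; no library types are needed:

    // import java.util.Arrays; import java.util.List;
    List<String> values = Arrays.asList("restaurant", "cafe", "bar");
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < values.size(); i++) {
        sb.append(values.get(i));
        if (i != values.size() - 1) {
            sb.append("%7C"); // URL-encoded '|', the separator used between values
        }
    }
    // sb.toString() -> "restaurant%7Ccafe%7Cbar"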
2,604
bootique/bootique-jersey
bootique-jersey/src/main/java/io/bootique/jersey/JerseyModuleExtender.java
JerseyModuleExtender.setProperty
public JerseyModuleExtender setProperty(String name, Object value) {
    contributeProperties().addBinding(name).toInstance(value);
    return this;
}
java
Sets a Jersey container property. This allows setting ResourceConfig properties that cannot be set via JAX-RS features. @param name property name @param value property value @return this extender @see org.glassfish.jersey.server.ServerProperties @since 0.22
[ "Sets", "Jersey", "container", "property", ".", "This", "allows", "setting", "ResourceConfig", "properties", "that", "can", "not", "be", "set", "via", "JAX", "RS", "features", "." ]
8def056158f0ad1914e975625eb5ed3e4712648c
https://github.com/bootique/bootique-jersey/blob/8def056158f0ad1914e975625eb5ed3e4712648c/bootique-jersey/src/main/java/io/bootique/jersey/JerseyModuleExtender.java#L104-L107
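A hedged usage sketch from inside a Bootique module's configure method; JerseyModule.extend(binder) is the usual Bootique extender entry point and is assumed here, and the key is a standard Jersey ServerProperties name:

    public void configure(Binder binder) {
        // disable WADL generation via a ResourceConfig property
        JerseyModule.extend(binder)
                .setProperty("jersey.config.server.wadl.disableWadl", true);
    }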
2,605
mediathekview/MServer
src/main/java/mServer/crawler/CrawlerTool.java
CrawlerTool.startMsg
public static synchronized void startMsg() {
    Log.startZeit.setTime(System.currentTimeMillis());
    Log.versionMsg(Const.PROGRAMMNAME);
    Log.sysLog(Log.LILNE);
    Log.sysLog("");
    Log.sysLog("Programmpfad: " + Functions.getPathJar());
    Log.sysLog("Filmliste: " + getPathFilmlist_json_akt(true /*aktDate*/));
    Log.sysLog("Useragent: " + Config.getUserAgent());
    Log.sysLog("");
    Log.sysLog(Log.LILNE);
    Log.sysLog("");
    if (loadLongMax()) {
        Log.sysLog("Laden: alles");
    } else {
        Log.sysLog("Laden: nur update");
    }
    if (CrawlerConfig.updateFilmliste) {
        Log.sysLog("Filmliste: nur updaten");
    } else {
        Log.sysLog("Filmliste: neu erstellen");
    }
    Log.sysLog("ImportURL 1: " + CrawlerConfig.importUrl_1__anhaengen);
    Log.sysLog("ImportURL 2: " + CrawlerConfig.importUrl_2__anhaengen);
    Log.sysLog("ImportOLD: " + CrawlerConfig.importOld);
    Log.sysLog("ImportAkt: " + CrawlerConfig.importAkt);
    if (CrawlerConfig.nurSenderLaden != null) {
        Log.sysLog("Nur Sender laden: " + StringUtils.join(CrawlerConfig.nurSenderLaden, ','));
    }
    Log.sysLog("");
    Log.sysLog(Log.LILNE);
}
java
The current film list, xz-compressed.
[ "ist", "die", "aktuelle", "Filmliste", "xz", "komprimiert" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/CrawlerTool.java#L33-L63
2,606
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.addDomainIfMissing
public static String addDomainIfMissing(final String aUrl, final String aDomain) {
    if (aUrl != null && !aUrl.isEmpty() && aUrl.startsWith("/")) {
        return aDomain + aUrl;
    }
    return aUrl;
}
java
adds the domain if missing. @param aUrl the url to check @param aDomain the domain to add @return the url including the domain
[ "adds", "the", "domain", "if", "missing", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L37-L43
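A quick sketch; the domain is illustrative:

    UrlUtils.addDomainIfMissing("/video/123.mp4", "https://www.example.de");
    // -> "https://www.example.de/video/123.mp4"
    UrlUtils.addDomainIfMissing("https://other.de/a.mp4", "https://www.example.de");
    // -> unchanged: the URL does not start with "/"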
2,607
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.addProtocolIfMissing
public static String addProtocolIfMissing(final String aUrl, final String aProtocol) {
    if (aUrl != null && aUrl.startsWith("//")) {
        return aProtocol + aUrl;
    }
    return aUrl;
}
java
adds the protocol if missing. @param aUrl the url to check @param aProtocol the protocol to add @return the url including the protocol
[ "adds", "the", "protocol", "if", "missing", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L52-L58
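The companion sketch for protocol-relative URLs:

    UrlUtils.addProtocolIfMissing("//www.example.de/video.mp4", "https:");
    // -> "https://www.example.de/video.mp4"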
2,608
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.changeOrAddParameter
public static String changeOrAddParameter(final String aUrl, final String aParameter, final String aValue) {
    final StringBuilder newUrlBuilder = new StringBuilder();
    final String[] splittedUrl = aUrl.split(REGEX_ESCAPOR + URL_TO_PARAMETERS_SPLITTERATOR);
    newUrlBuilder.append(splittedUrl[0]);
    if (splittedUrl.length == 2) {
        // Note: due to operator precedence, the replaceAll calls apply to
        // URL_TO_PARAMETERS_SPLITTERATOR alone, not to the concatenation.
        final String cleanedParameters = splittedUrl[1]
                + URL_TO_PARAMETERS_SPLITTERATOR
                        .replaceAll(String.format(URL_PARAMETER_REPLACEMENT_REGEX_PATTERN, aParameter), "")
                        .replaceAll(REGEX_ESCAPOR + WRONG_PARAMETER_START, URL_TO_PARAMETERS_SPLITTERATOR);
        newUrlBuilder.append(URL_TO_PARAMETERS_SPLITTERATOR);
        newUrlBuilder.append(cleanedParameters);
        if (!cleanedParameters.endsWith(URL_PARAMETER_SEPPERATOR) && !cleanedParameters.isEmpty()) {
            newUrlBuilder.append(URL_PARAMETER_SEPPERATOR);
        }
    } else {
        newUrlBuilder.append(URL_TO_PARAMETERS_SPLITTERATOR);
    }
    newUrlBuilder.append(String.format(PARAMETER_PATTERN, aParameter, aValue));
    return newUrlBuilder.toString();
}
java
Changes or adds a URL parameter. @param aUrl The URL whose parameter should be changed or which gets the parameter added. @param aParameter The parameter to change or add. @param aValue The parameter value. @return The changed URL.
[ "Changes", "or", "adds", "an", "URL", "parameter", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L68-L90
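Given the precedence note above, a hedged corrected sketch of what the cleaning step presumably intends (strip an existing occurrence of the parameter before re-appending); the constant names are kept from the class:

    final String cleanedParameters = (splittedUrl[1] + URL_TO_PARAMETERS_SPLITTERATOR)
            .replaceAll(String.format(URL_PARAMETER_REPLACEMENT_REGEX_PATTERN, aParameter), "")
            .replaceAll(REGEX_ESCAPOR + WRONG_PARAMETER_START, URL_TO_PARAMETERS_SPLITTERATOR);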
2,609
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.existsUrl
public static boolean existsUrl(final String aUrl) {
    try {
        final URL url = new URL(aUrl);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("HEAD");
        connection.connect();
        return connection.getResponseCode() == 200;
    } catch (IOException e) {
        return false;
    }
}
java
Checks whether a URL exists, using a HEAD request. @param aUrl the url to check @return true if the url exists, else false
[ "checks", "whether", "an", "url", "exists", ".", "uses", "head", "request", "to", "check", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L98-L108
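A usage sketch with an illustrative URL; note that only HTTP 200 counts as existing:

    if (UrlUtils.existsUrl("https://example.org/video.mp4")) {
        // safe to queue for download
    }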
2,610
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.getFileName
public static Optional<String> getFileName(final String aUrl) {
    if (aUrl != null) {
        int index = aUrl.lastIndexOf('/');
        if (index > 0) {
            final String file = aUrl.substring(index + 1);
            if (file.contains(".")) {
                return Optional.of(file);
            }
        }
    }
    return Optional.empty();
}
java
returns the file name of the url. @param aUrl the url @return the name of the file
[ "returns", "the", "file", "name", "of", "the", "url", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L139-L151
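A quick sketch with illustrative URLs:

    UrlUtils.getFileName("https://example.org/media/video.mp4"); // Optional[video.mp4]
    UrlUtils.getFileName("https://example.org/media/");          // Optional.empty: no dot after the last '/'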
2,611
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.getFileType
public static Optional<String> getFileType(final String aUrl) {
    if (aUrl != null) {
        int index = aUrl.lastIndexOf('.');
        if (index > 0) {
            return Optional.of(aUrl.substring(index + 1));
        }
    }
    return Optional.empty();
}
java
returns the file type of the url. @param aUrl the url @return the type of the file
[ "returns", "the", "file", "type", "of", "the", "url", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L159-L168
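The same for the file type; the caveat in the second line follows directly from the code, which does not strip a query string first:

    UrlUtils.getFileType("https://example.org/media/video.mp4");         // Optional[mp4]
    UrlUtils.getFileType("https://example.org/media/video.mp4?size=hd"); // Optional[mp4?size=hd]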
2,612
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.getProtocol
public static Optional<String> getProtocol(final String aUrl) {
    if (aUrl != null) {
        int index = aUrl.indexOf("//");
        if (index > 0) {
            String protocol = aUrl.substring(0, index);
            return Optional.of(protocol);
        }
    }
    return Optional.empty();
}
java
returns the protocol of the url. @param aUrl the url @return the protocol of the url (e.g. "http:")
[ "returns", "the", "protocol", "of", "the", "url", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L176-L186
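A quick sketch; the returned value keeps the trailing colon, and a protocol-relative URL yields empty because indexOf returns 0:

    UrlUtils.getProtocol("https://example.org/x"); // Optional[https:]
    UrlUtils.getProtocol("//example.org/x");       // Optional.empty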
2,613
mediathekview/MServer
src/main/java/mServer/crawler/sender/base/UrlUtils.java
UrlUtils.getUrlParameterValue
public static Optional<String> getUrlParameterValue(final String aUrl, final String aParameterName)
        throws UrlParseException {
    if (aUrl != null) {
        Map<String, String> parameters = getUrlParameters(aUrl);
        if (parameters.containsKey(aParameterName)) {
            return Optional.of(parameters.get(aParameterName));
        }
    }
    return Optional.empty();
}
java
Returns the value of a URL parameter. @param aUrl the url @param aParameterName the name of the url parameter @return the parameter value
[ "returns", "the", "value", "of", "an", "url", "parameter", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/base/UrlUtils.java#L195-L204
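A hedged sketch with an illustrative URL; per the signature, the call can throw UrlParseException when the query string cannot be parsed:

    Optional<String> quality =
            UrlUtils.getUrlParameterValue("https://example.org/api?quality=hd", "quality");
    // -> Optional[hd]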
2,614
mediathekview/MServer
src/main/java/mServer/crawler/sender/arte/ArteVideoDetailsDeserializer.java
ArteVideoDetailsDeserializer.getBroadcastDate
private String getBroadcastDate(JsonArray broadcastArray) {
    String broadcastDate = "";
    String broadcastBeginFirst = "";
    String broadcastBeginMajor = "";
    String broadcastBeginMinor = "";

    // Search for the relevant entry by broadcast-type priority:
    // FIRST_BROADCAST => MAJOR_REBROADCAST => MINOR_REBROADCAST,
    // using the "most current" broadcast.
    for (int i = 0; i < broadcastArray.size(); i++) {
        JsonObject broadcastObject = broadcastArray.get(i).getAsJsonObject();
        if (broadcastObject.has(JSON_ELEMENT_BROADCASTTYPE)
                && broadcastObject.has(JSON_ELEMENT_BROADCAST)) {
            String value = this.getBroadcastDateConsideringCatchupRights(broadcastObject);
            if (!value.isEmpty()) {
                String type = broadcastObject.get(JSON_ELEMENT_BROADCASTTYPE).getAsString();
                switch (type) {
                    case BROADCASTTTYPE_FIRST:
                        broadcastBeginFirst = value;
                        break;
                    case BROADCASTTTYPE_MAJOR_RE:
                        broadcastBeginMajor = value;
                        break;
                    case BROADCASTTTYPE_MINOR_RE:
                        broadcastBeginMinor = value;
                        break;
                    default:
                        LOG.debug("New broadcasttype: " + type);
                }
            }
        }
    }

    if (!broadcastBeginFirst.isEmpty()) {
        broadcastDate = broadcastBeginFirst;
    } else if (!broadcastBeginMajor.isEmpty()) {
        broadcastDate = broadcastBeginMajor;
    } else if (!broadcastBeginMinor.isEmpty()) {
        broadcastDate = broadcastBeginMinor;
    }

    // If no broadcast date is available, take the first broadcast regardless
    // of where the CatchupRights period lies, so that a "sensible" date exists.
    if (broadcastDate.isEmpty()) {
        broadcastDate = getBroadcastDateIgnoringCatchupRights(broadcastArray, BROADCASTTTYPE_FIRST);
    }

    // If still empty, use the major rebroadcast.
    if (broadcastDate.isEmpty()) {
        broadcastDate = getBroadcastDateIgnoringCatchupRights(broadcastArray, BROADCASTTTYPE_MAJOR_RE);
    }

    return broadcastDate;
}
java
Determines the broadcast date from the list of broadcasts. @param broadcastArray @return
[ "ermittelt", "Ausstrahlungsdatum", "aus", "der", "Liste", "der", "Ausstrahlungen" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/arte/ArteVideoDetailsDeserializer.java#L176-L230
2,615
mediathekview/MServer
src/main/java/mServer/crawler/sender/arte/ArteVideoDetailsDeserializer.java
ArteVideoDetailsDeserializer.getBroadcastDateConsideringCatchupRights
private String getBroadcastDateConsideringCatchupRights(JsonObject broadcastObject) {
    String broadcastDate = "";

    JsonElement elementBegin = broadcastObject.get(JSON_ELEMENT_BROADCAST_CATCHUPRIGHTS_BEGIN);
    JsonElement elementEnd = broadcastObject.get(JSON_ELEMENT_BROADCAST_CATCHUPRIGHTS_END);
    if (!elementBegin.isJsonNull() && !elementEnd.isJsonNull()) {
        String begin = elementBegin.getAsString();
        String end = elementEnd.getAsString();

        try {
            Calendar beginDate = Calendar.getInstance();
            beginDate.setTime(broadcastDateFormat.parse(begin));
            Calendar endDate = Calendar.getInstance();
            endDate.setTime(broadcastDateFormat.parse(end));

            if ((DateWithoutTimeComparer.compare(today, beginDate) >= 0
                    && DateWithoutTimeComparer.compare(today, endDate) <= 0)
                    || (DateWithoutTimeComparer.compare(today, beginDate) < 0)) {
                // If today lies between begin and end,
                // then this is the current broadcast.
                broadcastDate = broadcastObject.get(JSON_ELEMENT_BROADCAST).getAsString();
            }
        } catch (ParseException ex) {
            LOG.debug(ex);
        }
    } else {
        String broadcast = broadcastObject.get(JSON_ELEMENT_BROADCAST).getAsString();
        try {
            Calendar broadcastCal = Calendar.getInstance();
            broadcastCal.setTime(broadcastDateFormat.parse(broadcast));
            broadcastDate = broadcast;
        } catch (ParseException ex) {
            LOG.debug(ex);
        }
    }

    return broadcastDate;
}
java
Returns the start of the broadcast if today lies within the CatchupRights period, today is before that period, or CatchupRights is not set. @param broadcastObject @return the start of the broadcast, or ""
[ "Liefert", "den", "Beginn", "der", "Ausstrahlung", "wenn", "-", "heute", "im", "Zeitraum", "von", "CatchUpRights", "liegt", "-", "oder", "heute", "vor", "dem", "Zeitraum", "liegt", "-", "oder", "CatchUpRights", "nicht", "gesetzt", "ist" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/arte/ArteVideoDetailsDeserializer.java#L241-L280
2,616
mediathekview/MServer
src/main/java/mServer/crawler/sender/orf/JsonUtils.java
JsonUtils.getAttributeAsString
public static Optional<String> getAttributeAsString(final JsonObject aJsonObject, final String aAttributeName) {
    if (aJsonObject.has(aAttributeName)) {
        final JsonElement aElement = aJsonObject.get(aAttributeName);
        if (!aElement.isJsonNull()) {
            return Optional.of(aElement.getAsString());
        }
    }
    return Optional.empty();
}
java
Gets the value of an attribute @param aJsonObject the object @param aAttributeName the name of the attribute @return the value of the attribute, if it exists, else Optional.empty
[ "Gets", "the", "value", "of", "an", "attribute" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/orf/JsonUtils.java#L20-L30
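A short sketch with Gson, whose JsonObject this helper takes; JsonParser.parseString is available in recent Gson versions:

    // import com.google.gson.JsonObject; import com.google.gson.JsonParser;
    JsonObject obj = JsonParser.parseString("{\"title\":\"Report\",\"subtitle\":null}").getAsJsonObject();
    JsonUtils.getAttributeAsString(obj, "title");    // Optional[Report]
    JsonUtils.getAttributeAsString(obj, "subtitle"); // Optional.empty: JSON null
    JsonUtils.getAttributeAsString(obj, "missing");  // Optional.empty: attribute absent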
2,617
mediathekview/MServer
src/main/java/mServer/tool/MserverDatumZeit.java
MserverDatumZeit.formatTime
public static String formatTime(String dateValue, FastDateFormat sdf) {
    try {
        return FDF_OUT_TIME.format(sdf.parse(dateValue));
    } catch (ParseException ex) {
        LOG.debug(String.format("Fehler beim Parsen des Datums %s: %s", dateValue, ex.getMessage()));
    }
    return "";
}
java
formats a datetime string to the time format used in DatenFilm @param dateValue the datetime value @param sdf the format of dateValue @return the formatted time string
[ "formats", "a", "datetime", "string", "to", "the", "time", "format", "used", "in", "DatenFilm" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/tool/MserverDatumZeit.java#L98-L106
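A hedged call-shape sketch; FastDateFormat is the Apache Commons Lang formatter, the input pattern and value are illustrative, and FDF_OUT_TIME (the class's private output format) determines the shape of the result:

    // import org.apache.commons.lang3.time.FastDateFormat;
    FastDateFormat inFormat = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss");
    String time = MserverDatumZeit.formatTime("2019-03-01T20:15:00", inFormat);
    // -> the time portion in the DatenFilm format, or "" on a parse error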
2,618
mediathekview/MServer
src/main/java/mServer/crawler/FilmeSuchen.java
FilmeSuchen.filmeBeimSenderLaden
public synchronized void filmeBeimSenderLaden(ListeFilme listeFilme) {
    initStart(listeFilme);
    // start the MediathekReader instances by priority
    mrStarten(0);
    if (!Config.getStop()) {
        // wait, and if the search run has not been aborted yet, continue with the rest
        mrWarten();
        mrStarten(1);
        allStarted = true;
    }
}
java
Searches for all films. @param listeFilme
[ "es", "werden", "alle", "Filme", "gesucht" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/FilmeSuchen.java#L119-L129
2,619
mediathekview/MServer
src/main/java/mServer/crawler/FilmeSuchen.java
FilmeSuchen.updateSender
public void updateSender(String[] nameSender, ListeFilme listeFilme) {
    // only used for the context-menu action "update sender"
    boolean starten = false;
    initStart(listeFilme);
    for (MediathekReader reader : mediathekListe) {
        for (String s : nameSender) {
            if (reader.checkNameSenderFilmliste(s)) {
                starten = true;
                new Thread(reader).start();
                //reader.start();
            }
        }
    }
    allStarted = true;
    if (!starten) {
        // then we are already done
        meldenFertig("");
    }
}
java
Updates only selected senders. @param nameSender @param listeFilme
[ "es", "werden", "nur", "einige", "Sender", "aktualisiert" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/FilmeSuchen.java#L137-L155
2,620
mediathekview/MServer
src/main/java/mServer/crawler/sender/MediathekArd.java
MediathekArd.searchForUrlsWithM3U8
private Map<Qualities, String> searchForUrlsWithM3U8(final MSStringBuilder aSeiteStringExtractor) {
    final Map<Qualities, String> urls = new EnumMap<>(Qualities.class);

    final ArrayList<String> patternMatches = new ArrayList<>();
    aSeiteStringExtractor.extractList(M3U8_PATTERN_START, M3U8_PATTERN_END, patternMatches);

    String m3u8Url = null;
    for (final String patternMatch : patternMatches) {
        if (patternMatch.startsWith(TEXT_START_HTTP)) {
            m3u8Url = patternMatch;
            break;
        }
    }

    if (m3u8Url != null) {
        m3u8Url = m3u8Url.replaceAll(URL_GET_PARAMETER, "");
        if (m3u8Url.contains(M3U8Utils.M3U8_WDR_URL_BEGIN)) {
            urls.putAll(M3U8Utils.gatherUrlsFromWdrM3U8(m3u8Url));
        } else {
            urls.put(Qualities.NORMAL, m3u8Url);
        }
    }

    return urls;
}
java
Searches the Seite for a quality "auto" to get an M3U8 URL. If the URL is from WDR, it searches for the URLs of the MP4 files. @param aSeiteStringExtractor The Seite. @return A Map containing the URLs and Qualities that were found; an empty Map if nothing was found.
[ "Searches", "the", "Seite", "for", "a", "quality", "auto", "to", "get", "a", "M3U8", "URL", ".", "If", "the", "URL", "is", "from", "WRD", "it", "searches", "for", "the", "URLs", "of", "the", "MP4", "files", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/MediathekArd.java#L563-L588
2,621
mediathekview/MServer
src/main/java/mServer/crawler/sender/MediathekBr.java
MediathekBr.getUrl
private String getUrl(MSStringBuilder seiteXml, String pattern) {
    return seiteXml.extract(pattern, PATTERN_DLURL, PATTERN_END);
}
java
gets the url for the specified pattern @param seiteXml The xml site where to extract the url @param pattern The pattern used to identify the url type @return The extracted url
[ "gets", "the", "url", "for", "the", "specified", "pattern" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/MediathekBr.java#L585-L587
2,622
mediathekview/MServer
src/main/java/mServer/crawler/sender/MediathekReader.java
MediathekReader.addFilm
protected void addFilm(DatenFilm film) {
    film.setFileSize();
    upgradeUrl(film);
    film.setUrlHistory();
    setGeo(film);
    if (mlibFilmeSuchen.listeFilmeNeu.addFilmVomSender(film)) {
        // then the film is new
        FilmeSuchen.listeSenderLaufen.inc(film.arr[DatenFilm.FILM_SENDER], RunSender.Count.FILME);
    }
}
java
Sorts the found films into the list. @param film the film to insert
[ "Es", "werden", "die", "gefundenen", "Filme", "in", "die", "Liste", "einsortiert", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/MediathekReader.java#L218-L229
2,623
mediathekview/MServer
src/main/java/mServer/crawler/sender/orf/tasks/OrfHelper.java
OrfHelper.parseLetterLinks
public static List<String> parseLetterLinks(Document aDocument) {
    final List<String> results = new ArrayList<>();

    Elements links = aDocument.select(LETTER_URL_SELECTOR);
    links.forEach(element -> {
        if (element.hasAttr(ATTRIBUTE_HREF)) {
            String subpage = element.attr(ATTRIBUTE_HREF);
            results.add(OrfConstants.URL_BASE + subpage);
        }
    });

    return results;
}
java
determines the links to the letter pages @param aDocument the html document with letter links @return list with urls
[ "determines", "the", "links", "to", "the", "letter", "pages" ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/crawler/sender/orf/tasks/OrfHelper.java#L49-L61
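A hedged sketch with jsoup, whose Document this helper takes; the ORF overview URL is illustrative, not taken from the source:

    // import org.jsoup.Jsoup; import org.jsoup.nodes.Document;
    Document doc = Jsoup.connect("https://tvthek.orf.at/profiles/letter/A").get(); // throws IOException
    List<String> letterPages = OrfHelper.parseLetterLinks(doc);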
2,624
mediathekview/MServer
src/main/java/mServer/tool/HashFileWriter.java
HashFileWriter.writeHash
public void writeHash(String id) {
    try (BufferedWriter fileWriter =
            Files.newBufferedWriter(baseDir.resolve(FILE_NAME), StandardCharsets.UTF_8)) {
        fileWriter.write(id);
    } catch (IOException ioException) {
        Log.errorLog(494461668, ioException, "Der Filmlisten Hash konnte nicht geschrieben werden.");
    }
}
java
Writes the given ID to the filmlist hash file. @param id the ID to write.
[ "Schreibt", "die", "gegebene", "ID", "in", "die", "Filmlist", "Hash", "Datei", "." ]
ba8d03e6a1a303db3807a1327f553f1decd30388
https://github.com/mediathekview/MServer/blob/ba8d03e6a1a303db3807a1327f553f1decd30388/src/main/java/mServer/tool/HashFileWriter.java#L32-L38
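writeHash relies on the java.nio try-with-resources idiom for writing a small UTF-8 text file; here is a self-contained sketch of the same pattern with a hypothetical path and hash value (not the HashFileWriter API itself):

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class HashWriteSketch {
  public static void main(String[] args) {
    Path target = Paths.get("/tmp/filmliste.hash"); // hypothetical location
    try (BufferedWriter writer = Files.newBufferedWriter(target, StandardCharsets.UTF_8)) {
      writer.write("d41d8cd98f00b204e9800998ecf8427e"); // hypothetical hash id
    } catch (IOException e) {
      // the writer is closed automatically; only the failure needs handling here
      System.err.println("could not write hash file: " + e.getMessage());
    }
  }
}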
2,625
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/JSONUtil.java
JSONUtil.writeJson
public static void writeJson(Writer writer, Object object) throws IOException { ObjectMapper om = ObjectMapperProvider.createCustomMapper(); om.configure(SerializationConfig.Feature.INDENT_OUTPUT, true); om.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); writer.write(om.writeValueAsString(object)); writer.write("\n"); writer.flush(); }
java
public static void writeJson(Writer writer, Object object) throws IOException { ObjectMapper om = ObjectMapperProvider.createCustomMapper(); om.configure(SerializationConfig.Feature.INDENT_OUTPUT, true); om.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false); writer.write(om.writeValueAsString(object)); writer.write("\n"); writer.flush(); }
[ "public", "static", "void", "writeJson", "(", "Writer", "writer", ",", "Object", "object", ")", "throws", "IOException", "{", "ObjectMapper", "om", "=", "ObjectMapperProvider", ".", "createCustomMapper", "(", ")", ";", "om", ".", "configure", "(", "SerializationConfig", ".", "Feature", ".", "INDENT_OUTPUT", ",", "true", ")", ";", "om", ".", "configure", "(", "SerializationConfig", ".", "Feature", ".", "FAIL_ON_EMPTY_BEANS", ",", "false", ")", ";", "writer", ".", "write", "(", "om", ".", "writeValueAsString", "(", "object", ")", ")", ";", "writer", ".", "write", "(", "\"\\n\"", ")", ";", "writer", ".", "flush", "(", ")", ";", "}" ]
Writes object to the writer as JSON using Jackson and adds a new-line before flushing. @param writer the writer to write the JSON to @param object the object to write as JSON @throws IOException if the object can't be serialized as JSON or written to the writer
[ "Writes", "object", "to", "the", "writer", "as", "JSON", "using", "Jackson", "and", "adds", "a", "new", "-", "line", "before", "flushing", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/JSONUtil.java#L44-L53
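A small usage sketch for writeJson, assuming hraven-core and Jackson are on the classpath; the payload map is invented for illustration:

import java.io.IOException;
import java.io.StringWriter;
import java.util.LinkedHashMap;
import java.util.Map;
import com.twitter.hraven.util.JSONUtil;

public class JsonWriteExample {
  public static void main(String[] args) throws IOException {
    Map<String, Object> payload = new LinkedHashMap<String, Object>();
    payload.put("cluster", "test@local"); // hypothetical values
    payload.put("jobCount", 42);
    StringWriter writer = new StringWriter();
    // serializes with indentation, appends a newline, and flushes the writer
    JSONUtil.writeJson(writer, payload);
    System.out.print(writer.toString());
  }
}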
2,626
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsService.java
HdfsStatsService.getAllDirs
public List<HdfsStats> getAllDirs(String cluster, String pathPrefix, int limit, long runId) throws IOException { long encodedRunId = getEncodedRunId(runId); String rowPrefixStr = Long.toString(encodedRunId) + HdfsConstants.SEP + cluster; if (StringUtils.isNotEmpty(pathPrefix)) { // path expected to be cleansed at collection/storage time as well rowPrefixStr += HdfsConstants.SEP + StringUtil.cleanseToken(pathPrefix); } LOG.info(" Getting all dirs for cluster " + cluster + " with pathPrefix: " + pathPrefix + " for runId " + runId + " encodedRunId: " + encodedRunId + " limit: " + limit + " row prefix : " + rowPrefixStr); byte[] rowPrefix = Bytes.toBytes(rowPrefixStr); Scan scan = createScanWithAllColumns(); scan.setStartRow(rowPrefix); // require that all rows match the prefix we're looking for Filter prefixFilter = new WhileMatchFilter(new PrefixFilter(rowPrefix)); scan.setFilter(prefixFilter); // using a large scanner caching value with a small limit can mean we scan a // lot more data than // necessary, so lower the caching for low limits scan.setCaching(Math.min(limit, defaultScannerCaching)); // we need only the latest cell version scan.setMaxVersions(1); return createFromScanResults(cluster, null, scan, limit, Boolean.FALSE, 0l, 0l); }
java
public List<HdfsStats> getAllDirs(String cluster, String pathPrefix, int limit, long runId) throws IOException { long encodedRunId = getEncodedRunId(runId); String rowPrefixStr = Long.toString(encodedRunId) + HdfsConstants.SEP + cluster; if (StringUtils.isNotEmpty(pathPrefix)) { // path expected to be cleansed at collection/storage time as well rowPrefixStr += HdfsConstants.SEP + StringUtil.cleanseToken(pathPrefix); } LOG.info(" Getting all dirs for cluster " + cluster + " with pathPrefix: " + pathPrefix + " for runId " + runId + " encodedRunId: " + encodedRunId + " limit: " + limit + " row prefix : " + rowPrefixStr); byte[] rowPrefix = Bytes.toBytes(rowPrefixStr); Scan scan = createScanWithAllColumns(); scan.setStartRow(rowPrefix); // require that all rows match the prefix we're looking for Filter prefixFilter = new WhileMatchFilter(new PrefixFilter(rowPrefix)); scan.setFilter(prefixFilter); // using a large scanner caching value with a small limit can mean we scan a // lot more data than // necessary, so lower the caching for low limits scan.setCaching(Math.min(limit, defaultScannerCaching)); // we need only the latest cell version scan.setMaxVersions(1); return createFromScanResults(cluster, null, scan, limit, Boolean.FALSE, 0l, 0l); }
[ "public", "List", "<", "HdfsStats", ">", "getAllDirs", "(", "String", "cluster", ",", "String", "pathPrefix", ",", "int", "limit", ",", "long", "runId", ")", "throws", "IOException", "{", "long", "encodedRunId", "=", "getEncodedRunId", "(", "runId", ")", ";", "String", "rowPrefixStr", "=", "Long", ".", "toString", "(", "encodedRunId", ")", "+", "HdfsConstants", ".", "SEP", "+", "cluster", ";", "if", "(", "StringUtils", ".", "isNotEmpty", "(", "pathPrefix", ")", ")", "{", "// path expected to be cleansed at collection/storage time as well", "rowPrefixStr", "+=", "HdfsConstants", ".", "SEP", "+", "StringUtil", ".", "cleanseToken", "(", "pathPrefix", ")", ";", "}", "LOG", ".", "info", "(", "\" Getting all dirs for cluster \"", "+", "cluster", "+", "\" with pathPrefix: \"", "+", "pathPrefix", "+", "\" for runId \"", "+", "runId", "+", "\" encodedRunId: \"", "+", "encodedRunId", "+", "\" limit: \"", "+", "limit", "+", "\" row prefix : \"", "+", "rowPrefixStr", ")", ";", "byte", "[", "]", "rowPrefix", "=", "Bytes", ".", "toBytes", "(", "rowPrefixStr", ")", ";", "Scan", "scan", "=", "createScanWithAllColumns", "(", ")", ";", "scan", ".", "setStartRow", "(", "rowPrefix", ")", ";", "// require that all rows match the prefix we're looking for", "Filter", "prefixFilter", "=", "new", "WhileMatchFilter", "(", "new", "PrefixFilter", "(", "rowPrefix", ")", ")", ";", "scan", ".", "setFilter", "(", "prefixFilter", ")", ";", "// using a large scanner caching value with a small limit can mean we scan a", "// lot more data than", "// necessary, so lower the caching for low limits", "scan", ".", "setCaching", "(", "Math", ".", "min", "(", "limit", ",", "defaultScannerCaching", ")", ")", ";", "// we need only the latest cell version", "scan", ".", "setMaxVersions", "(", "1", ")", ";", "return", "createFromScanResults", "(", "cluster", ",", "null", ",", "scan", ",", "limit", ",", "Boolean", ".", "FALSE", ",", "0l", ",", "0l", ")", ";", "}" ]
Gets hdfs stats about all dirs on the given cluster @param cluster the cluster to query @param pathPrefix optional path prefix used to narrow the scan @param limit maximum number of rows to return @param runId the run id whose encoded value prefixes the row keys @return list of hdfs stats @throws IOException if the scan fails
[ "Gets", "hdfs", "stats", "about", "all", "dirs", "on", "the", "given", "cluster" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsService.java#L103-L132
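The scan setup in getAllDirs is the standard HBase prefix-scan idiom: start the scan at the prefix and wrap a PrefixFilter in a WhileMatchFilter so the scan stops at the first non-matching row instead of reading to the end of the table. A condensed sketch of just that idiom, with hypothetical prefix and caching values:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefixScanSketch {
  static Scan buildPrefixScan(String rowPrefixStr, int limit, int defaultCaching) {
    byte[] rowPrefix = Bytes.toBytes(rowPrefixStr);
    Scan scan = new Scan();
    scan.setStartRow(rowPrefix);           // jump directly to the first prefixed row
    Filter stopAtPrefixEnd = new WhileMatchFilter(new PrefixFilter(rowPrefix));
    scan.setFilter(stopAtPrefixEnd);       // terminate once rows stop matching
    scan.setCaching(Math.min(limit, defaultCaching)); // avoid over-fetching on small limits
    scan.setMaxVersions(1);                // only the latest cell version is needed
    return scan;
  }
}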
2,627
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsService.java
HdfsStatsService.createFromScanResults
private List<HdfsStats> createFromScanResults(String cluster, String path, Scan scan, int maxCount, boolean checkPath, long starttime, long endtime) throws IOException { Map<HdfsStatsKey, HdfsStats> hdfsStats = new HashMap<HdfsStatsKey, HdfsStats>(); ResultScanner scanner = null; Stopwatch timer = new Stopwatch().start(); int rowCount = 0; long colCount = 0; long resultSize = 0; Table hdfsUsageTable = null; try { hdfsUsageTable = hbaseConnection .getTable(TableName.valueOf(HdfsConstants.HDFS_USAGE_TABLE)); scanner = hdfsUsageTable.getScanner(scan); for (Result result : scanner) { if (result != null && !result.isEmpty()) { colCount += result.size(); // TODO dogpiledays resultSize += result.getWritableSize(); rowCount = populateHdfsStats(result, hdfsStats, checkPath, path, starttime, endtime, rowCount); // return if we've already hit the limit if (rowCount >= maxCount) { break; } } } timer.stop(); LOG.info("In createFromScanResults For cluster " + cluster + " Fetched from hbase " + rowCount + " rows, " + colCount + " columns, " + resultSize + " bytes ( " + resultSize / (1024 * 1024) + ") MB, in total time of " + timer); } finally { try { if (scanner != null) { scanner.close(); } } finally { if (hdfsUsageTable != null) { hdfsUsageTable.close(); } } } List<HdfsStats> values = new ArrayList<HdfsStats>(hdfsStats.values()); // sort so that timestamps are arranged in descending order Collections.sort(values); return values; }
java
private List<HdfsStats> createFromScanResults(String cluster, String path, Scan scan, int maxCount, boolean checkPath, long starttime, long endtime) throws IOException { Map<HdfsStatsKey, HdfsStats> hdfsStats = new HashMap<HdfsStatsKey, HdfsStats>(); ResultScanner scanner = null; Stopwatch timer = new Stopwatch().start(); int rowCount = 0; long colCount = 0; long resultSize = 0; Table hdfsUsageTable = null; try { hdfsUsageTable = hbaseConnection .getTable(TableName.valueOf(HdfsConstants.HDFS_USAGE_TABLE)); scanner = hdfsUsageTable.getScanner(scan); for (Result result : scanner) { if (result != null && !result.isEmpty()) { colCount += result.size(); // TODO dogpiledays resultSize += result.getWritableSize(); rowCount = populateHdfsStats(result, hdfsStats, checkPath, path, starttime, endtime, rowCount); // return if we've already hit the limit if (rowCount >= maxCount) { break; } } } timer.stop(); LOG.info("In createFromScanResults For cluster " + cluster + " Fetched from hbase " + rowCount + " rows, " + colCount + " columns, " + resultSize + " bytes ( " + resultSize / (1024 * 1024) + ") MB, in total time of " + timer); } finally { try { if (scanner != null) { scanner.close(); } } finally { if (hdfsUsageTable != null) { hdfsUsageTable.close(); } } } List<HdfsStats> values = new ArrayList<HdfsStats>(hdfsStats.values()); // sort so that timestamps are arranged in descending order Collections.sort(values); return values; }
[ "private", "List", "<", "HdfsStats", ">", "createFromScanResults", "(", "String", "cluster", ",", "String", "path", ",", "Scan", "scan", ",", "int", "maxCount", ",", "boolean", "checkPath", ",", "long", "starttime", ",", "long", "endtime", ")", "throws", "IOException", "{", "Map", "<", "HdfsStatsKey", ",", "HdfsStats", ">", "hdfsStats", "=", "new", "HashMap", "<", "HdfsStatsKey", ",", "HdfsStats", ">", "(", ")", ";", "ResultScanner", "scanner", "=", "null", ";", "Stopwatch", "timer", "=", "new", "Stopwatch", "(", ")", ".", "start", "(", ")", ";", "int", "rowCount", "=", "0", ";", "long", "colCount", "=", "0", ";", "long", "resultSize", "=", "0", ";", "Table", "hdfsUsageTable", "=", "null", ";", "try", "{", "hdfsUsageTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "HdfsConstants", ".", "HDFS_USAGE_TABLE", ")", ")", ";", "scanner", "=", "hdfsUsageTable", ".", "getScanner", "(", "scan", ")", ";", "for", "(", "Result", "result", ":", "scanner", ")", "{", "if", "(", "result", "!=", "null", "&&", "!", "result", ".", "isEmpty", "(", ")", ")", "{", "colCount", "+=", "result", ".", "size", "(", ")", ";", "// TODO dogpiledays resultSize += result.getWritableSize();", "rowCount", "=", "populateHdfsStats", "(", "result", ",", "hdfsStats", ",", "checkPath", ",", "path", ",", "starttime", ",", "endtime", ",", "rowCount", ")", ";", "// return if we've already hit the limit", "if", "(", "rowCount", ">=", "maxCount", ")", "{", "break", ";", "}", "}", "}", "timer", ".", "stop", "(", ")", ";", "LOG", ".", "info", "(", "\"In createFromScanResults For cluster \"", "+", "cluster", "+", "\" Fetched from hbase \"", "+", "rowCount", "+", "\" rows, \"", "+", "colCount", "+", "\" columns, \"", "+", "resultSize", "+", "\" bytes ( \"", "+", "resultSize", "/", "(", "1024", "*", "1024", ")", "+", "\") MB, in total time of \"", "+", "timer", ")", ";", "}", "finally", "{", "try", "{", "if", "(", "scanner", "!=", "null", ")", "{", "scanner", ".", "close", "(", ")", ";", "}", "}", "finally", "{", "if", "(", "hdfsUsageTable", "!=", "null", ")", "{", "hdfsUsageTable", ".", "close", "(", ")", ";", "}", "}", "}", "List", "<", "HdfsStats", ">", "values", "=", "new", "ArrayList", "<", "HdfsStats", ">", "(", "hdfsStats", ".", "values", "(", ")", ")", ";", "// sort so that timestamps are arranged in descending order", "Collections", ".", "sort", "(", "values", ")", ";", "return", "values", ";", "}" ]
Scans the hbase table and populates the hdfs stats @param cluster the cluster being queried @param scan the scan to execute @param maxCount maximum number of rows to read before stopping @return list of hdfs stats sorted so that timestamps are in descending order @throws IOException if the scan fails
[ "Scans", "the", "hbase", "table", "and", "populates", "the", "hdfs", "stats" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsService.java#L174-L225
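The nested try/finally in createFromScanResults predates try-with-resources; since Table and ResultScanner both implement Closeable, the same cleanup can be written more compactly. A sketch under that assumption, with a hypothetical table name standing in for HdfsConstants.HDFS_USAGE_TABLE:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class ScanWithResourcesSketch {
  static int countRows(Connection connection, Scan scan) throws IOException {
    int rows = 0;
    // both resources are closed in reverse order, even if iteration throws
    try (Table table = connection.getTable(TableName.valueOf("hdfs_usage")); // hypothetical name
         ResultScanner scanner = table.getScanner(scan)) {
      for (Result result : scanner) {
        if (result != null && !result.isEmpty()) {
          rows++;
        }
      }
    }
    return rows;
  }
}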
2,628
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsService.java
HdfsStatsService.populateHdfsStats
private int populateHdfsStats(Result result, Map<HdfsStatsKey, HdfsStats> hdfsStats, boolean checkPath, String path, long starttime, long endtime, int rowCount) { HdfsStatsKey currentFullKey = hdfsStatsKeyConv.fromBytes(result.getRow()); QualifiedPathKey qpk = currentFullKey.getQualifiedPathKey(); // we check for exact match of path // since the scan does a prefix match, we need to filter out // other paths if (checkPath) { if (!qpk.getPath().equalsIgnoreCase(StringUtil.cleanseToken(path))) { return rowCount; } // sanity check if ((currentFullKey.getRunId() < endtime) || (currentFullKey.getRunId() > starttime)) { return rowCount; } } // create a hdfs stats key object per path without namespace // that will enable aggregating stats across all namespaces HdfsStatsKey currentKey = new HdfsStatsKey(qpk.getCluster(), qpk.getPath(), currentFullKey.getEncodedRunId()); HdfsStats currentHdfsStats = hdfsStats.get(currentKey); if (currentHdfsStats != null) { currentHdfsStats.populate(result); } else { currentHdfsStats = new HdfsStats(new HdfsStatsKey(currentKey)); currentHdfsStats.populate(result); hdfsStats.put(currentKey, currentHdfsStats); } return rowCount + 1; }
java
private int populateHdfsStats(Result result, Map<HdfsStatsKey, HdfsStats> hdfsStats, boolean checkPath, String path, long starttime, long endtime, int rowCount) { HdfsStatsKey currentFullKey = hdfsStatsKeyConv.fromBytes(result.getRow()); QualifiedPathKey qpk = currentFullKey.getQualifiedPathKey(); // we check for exact match of path // since the scan does a prefix match, we need to filter out // other paths if (checkPath) { if (!qpk.getPath().equalsIgnoreCase(StringUtil.cleanseToken(path))) { return rowCount; } // sanity check if ((currentFullKey.getRunId() < endtime) || (currentFullKey.getRunId() > starttime)) { return rowCount; } } // create a hdfs stats key object per path without namespace // that will enable aggregating stats across all namespaces HdfsStatsKey currentKey = new HdfsStatsKey(qpk.getCluster(), qpk.getPath(), currentFullKey.getEncodedRunId()); HdfsStats currentHdfsStats = hdfsStats.get(currentKey); if (currentHdfsStats != null) { currentHdfsStats.populate(result); } else { currentHdfsStats = new HdfsStats(new HdfsStatsKey(currentKey)); currentHdfsStats.populate(result); hdfsStats.put(currentKey, currentHdfsStats); } return rowCount + 1; }
[ "private", "int", "populateHdfsStats", "(", "Result", "result", ",", "Map", "<", "HdfsStatsKey", ",", "HdfsStats", ">", "hdfsStats", ",", "boolean", "checkPath", ",", "String", "path", ",", "long", "starttime", ",", "long", "endtime", ",", "int", "rowCount", ")", "{", "HdfsStatsKey", "currentFullKey", "=", "hdfsStatsKeyConv", ".", "fromBytes", "(", "result", ".", "getRow", "(", ")", ")", ";", "QualifiedPathKey", "qpk", "=", "currentFullKey", ".", "getQualifiedPathKey", "(", ")", ";", "// we check for exact match of path", "// since the scan does a prefix match, we need to filter out", "// other paths", "if", "(", "checkPath", ")", "{", "if", "(", "!", "qpk", ".", "getPath", "(", ")", ".", "equalsIgnoreCase", "(", "StringUtil", ".", "cleanseToken", "(", "path", ")", ")", ")", "{", "return", "rowCount", ";", "}", "// sanity check", "if", "(", "(", "currentFullKey", ".", "getRunId", "(", ")", "<", "endtime", ")", "||", "(", "currentFullKey", ".", "getRunId", "(", ")", ">", "starttime", ")", ")", "{", "return", "rowCount", ";", "}", "}", "// create a hdfs stats key object per path without namespace", "// that will enable aggregating stats across all namespaces", "HdfsStatsKey", "currentKey", "=", "new", "HdfsStatsKey", "(", "qpk", ".", "getCluster", "(", ")", ",", "qpk", ".", "getPath", "(", ")", ",", "currentFullKey", ".", "getEncodedRunId", "(", ")", ")", ";", "HdfsStats", "currentHdfsStats", "=", "hdfsStats", ".", "get", "(", "currentKey", ")", ";", "if", "(", "currentHdfsStats", "!=", "null", ")", "{", "currentHdfsStats", ".", "populate", "(", "result", ")", ";", "}", "else", "{", "currentHdfsStats", "=", "new", "HdfsStats", "(", "new", "HdfsStatsKey", "(", "currentKey", ")", ")", ";", "currentHdfsStats", ".", "populate", "(", "result", ")", ";", "hdfsStats", ".", "put", "(", "currentKey", ",", "currentHdfsStats", ")", ";", "}", "return", "rowCount", "+", "1", ";", "}" ]
Populates the hdfs stats for a cluster based on the hbase Result. For federated hadoop2 clusters, there are multiple namespaces. Since the namespace is part of the rowkey, we need to create an hdfs key without the namespace so that we can aggregate across namespaces. @param result the hbase scan result @param hdfsStats the map of hdfsStats being accumulated @return the updated row count
[ "Populates", "the", "hdfs", "stats", "for", "a", "cluster", "based", "on", "the", "hbase", "Result" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsService.java#L237-L269
2,629
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/TaskKeyConverter.java
TaskKeyConverter.toBytes
@Override public byte[] toBytes(TaskKey taskKey) { return Bytes.add(jobKeyConv.toBytes(taskKey), Constants.SEP_BYTES, Bytes.toBytes(taskKey.getTaskId())); }
java
@Override public byte[] toBytes(TaskKey taskKey) { return Bytes.add(jobKeyConv.toBytes(taskKey), Constants.SEP_BYTES, Bytes.toBytes(taskKey.getTaskId())); }
[ "@", "Override", "public", "byte", "[", "]", "toBytes", "(", "TaskKey", "taskKey", ")", "{", "return", "Bytes", ".", "add", "(", "jobKeyConv", ".", "toBytes", "(", "taskKey", ")", ",", "Constants", ".", "SEP_BYTES", ",", "Bytes", ".", "toBytes", "(", "taskKey", ".", "getTaskId", "(", ")", ")", ")", ";", "}" ]
Returns the bytes representation for a TaskKey. @param taskKey the TaskKey instance to serialize @return the serialized representation of the TaskKey
[ "Returns", "the", "bytes", "representation", "for", "a", "TaskKey", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/TaskKeyConverter.java#L36-L40
2,630
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/TaskKeyConverter.java
TaskKeyConverter.fromBytes
@Override public TaskKey fromBytes(byte[] bytes) { byte[][] keyComponents = JobKeyConverter.splitJobKey(bytes); JobKey jobKey = jobKeyConv.parseJobKey(keyComponents); return new TaskKey(jobKey, (keyComponents.length > 5 ? Bytes.toString(keyComponents[5]) : null)); }
java
@Override public TaskKey fromBytes(byte[] bytes) { byte[][] keyComponents = JobKeyConverter.splitJobKey(bytes); JobKey jobKey = jobKeyConv.parseJobKey(keyComponents); return new TaskKey(jobKey, (keyComponents.length > 5 ? Bytes.toString(keyComponents[5]) : null)); }
[ "@", "Override", "public", "TaskKey", "fromBytes", "(", "byte", "[", "]", "bytes", ")", "{", "byte", "[", "]", "[", "]", "keyComponents", "=", "JobKeyConverter", ".", "splitJobKey", "(", "bytes", ")", ";", "JobKey", "jobKey", "=", "jobKeyConv", ".", "parseJobKey", "(", "keyComponents", ")", ";", "return", "new", "TaskKey", "(", "jobKey", ",", "(", "keyComponents", ".", "length", ">", "5", "?", "Bytes", ".", "toString", "(", "keyComponents", "[", "5", "]", ")", ":", "null", ")", ")", ";", "}" ]
Generates a TaskKey from the byte encoded format. @param bytes the serialized version of a task key @return the deserialized TaskKey instance
[ "Generates", "a", "TaskKey", "from", "the", "byte", "encoded", "format", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/TaskKeyConverter.java#L48-L54
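toBytes and fromBytes are meant to round-trip. A sketch of that property, assuming TaskKeyConverter has a no-arg constructor and that TaskKey lives in com.twitter.hraven (JobKey construction is omitted since its signature is not shown here):

import com.twitter.hraven.TaskKey;
import com.twitter.hraven.datasource.TaskKeyConverter;

public class TaskKeyRoundTripSketch {
  static boolean roundTrips(TaskKey key) {
    TaskKeyConverter conv = new TaskKeyConverter(); // assumed no-arg constructor
    byte[] serialized = conv.toBytes(key);          // job key bytes + SEP + task id
    TaskKey back = conv.fromBytes(serialized);
    // the task id is component 5 of the split key and survives the trip
    return key.getTaskId().equals(back.getTaskId());
  }
}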
2,631
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java
JobHistoryRawService.getRawJobConfiguration
public Configuration getRawJobConfiguration(QualifiedJobId jobId) throws IOException { Configuration conf = null; byte[] rowKey = idConv.toBytes(jobId); Get get = new Get(rowKey); get.addColumn(Constants.RAW_FAM_BYTES, Constants.JOBCONF_COL_BYTES); Table rawTable = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); Result result = rawTable.get(get); if (result != null && !result.isEmpty()) { conf = createConfigurationFromResult(result); } } catch (MissingColumnInResultException e) { LOG.error( "Failed to retrieve configuration from row returned for " + jobId, e); } finally { if (rawTable != null) { rawTable.close(); } } return conf; }
java
public Configuration getRawJobConfiguration(QualifiedJobId jobId) throws IOException { Configuration conf = null; byte[] rowKey = idConv.toBytes(jobId); Get get = new Get(rowKey); get.addColumn(Constants.RAW_FAM_BYTES, Constants.JOBCONF_COL_BYTES); Table rawTable = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); Result result = rawTable.get(get); if (result != null && !result.isEmpty()) { conf = createConfigurationFromResult(result); } } catch (MissingColumnInResultException e) { LOG.error( "Failed to retrieve configuration from row returned for " + jobId, e); } finally { if (rawTable != null) { rawTable.close(); } } return conf; }
[ "public", "Configuration", "getRawJobConfiguration", "(", "QualifiedJobId", "jobId", ")", "throws", "IOException", "{", "Configuration", "conf", "=", "null", ";", "byte", "[", "]", "rowKey", "=", "idConv", ".", "toBytes", "(", "jobId", ")", ";", "Get", "get", "=", "new", "Get", "(", "rowKey", ")", ";", "get", ".", "addColumn", "(", "Constants", ".", "RAW_FAM_BYTES", ",", "Constants", ".", "JOBCONF_COL_BYTES", ")", ";", "Table", "rawTable", "=", "null", ";", "try", "{", "rawTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_RAW_TABLE", ")", ")", ";", "Result", "result", "=", "rawTable", ".", "get", "(", "get", ")", ";", "if", "(", "result", "!=", "null", "&&", "!", "result", ".", "isEmpty", "(", ")", ")", "{", "conf", "=", "createConfigurationFromResult", "(", "result", ")", ";", "}", "}", "catch", "(", "MissingColumnInResultException", "e", ")", "{", "LOG", ".", "error", "(", "\"Failed to retrieve configuration from row returned for \"", "+", "jobId", ",", "e", ")", ";", "}", "finally", "{", "if", "(", "rawTable", "!=", "null", ")", "{", "rawTable", ".", "close", "(", ")", ";", "}", "}", "return", "conf", ";", "}" ]
Returns the raw job configuration stored for the given cluster and job ID @param jobId the cluster and job ID to look up @return the stored job configuration, or {@code null} if no corresponding record was found @throws IOException if the get fails
[ "Returns", "the", "raw", "job", "configuration", "stored", "for", "the", "given", "cluster", "and", "job", "ID" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java#L295-L318
2,632
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java
JobHistoryRawService.getRawJobHistory
public String getRawJobHistory(QualifiedJobId jobId) throws IOException { String historyData = null; byte[] rowKey = idConv.toBytes(jobId); Get get = new Get(rowKey); get.addColumn(Constants.RAW_FAM_BYTES, Constants.JOBHISTORY_COL_BYTES); Table rawTable = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); Result result = rawTable.get(get); if (result != null && !result.isEmpty()) { historyData = Bytes.toString(result.getValue(Constants.RAW_FAM_BYTES, Constants.JOBHISTORY_COL_BYTES)); } } finally { if (rawTable != null) { rawTable.close(); } } return historyData; }
java
public String getRawJobHistory(QualifiedJobId jobId) throws IOException { String historyData = null; byte[] rowKey = idConv.toBytes(jobId); Get get = new Get(rowKey); get.addColumn(Constants.RAW_FAM_BYTES, Constants.JOBHISTORY_COL_BYTES); Table rawTable = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); Result result = rawTable.get(get); if (result != null && !result.isEmpty()) { historyData = Bytes.toString(result.getValue(Constants.RAW_FAM_BYTES, Constants.JOBHISTORY_COL_BYTES)); } } finally { if (rawTable != null) { rawTable.close(); } } return historyData; }
[ "public", "String", "getRawJobHistory", "(", "QualifiedJobId", "jobId", ")", "throws", "IOException", "{", "String", "historyData", "=", "null", ";", "byte", "[", "]", "rowKey", "=", "idConv", ".", "toBytes", "(", "jobId", ")", ";", "Get", "get", "=", "new", "Get", "(", "rowKey", ")", ";", "get", ".", "addColumn", "(", "Constants", ".", "RAW_FAM_BYTES", ",", "Constants", ".", "JOBHISTORY_COL_BYTES", ")", ";", "Table", "rawTable", "=", "null", ";", "try", "{", "rawTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_RAW_TABLE", ")", ")", ";", "Result", "result", "=", "rawTable", ".", "get", "(", "get", ")", ";", "if", "(", "result", "!=", "null", "&&", "!", "result", ".", "isEmpty", "(", ")", ")", "{", "historyData", "=", "Bytes", ".", "toString", "(", "result", ".", "getValue", "(", "Constants", ".", "RAW_FAM_BYTES", ",", "Constants", ".", "JOBHISTORY_COL_BYTES", ")", ")", ";", "}", "}", "finally", "{", "if", "(", "rawTable", "!=", "null", ")", "{", "rawTable", ".", "close", "(", ")", ";", "}", "}", "return", "historyData", ";", "}" ]
Returns the raw job history file stored for the given cluster and job ID. @param jobId the cluster and job ID to look up @return the stored job history file contents or {@code null} if no corresponding record was found @throws IOException
[ "Returns", "the", "raw", "job", "history", "file", "stored", "for", "the", "given", "cluster", "and", "job", "ID", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java#L327-L348
2,633
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java
JobHistoryRawService.getApproxSubmitTime
public long getApproxSubmitTime(Result value) throws MissingColumnInResultException { if (value == null) { throw new IllegalArgumentException( "Cannot get last modification time from " + "a null hbase result"); } Cell cell = value.getColumnLatestCell(Constants.INFO_FAM_BYTES, Constants.JOBHISTORY_LAST_MODIFIED_COL_BYTES); if (cell == null) { throw new MissingColumnInResultException(Constants.INFO_FAM_BYTES, Constants.JOBHISTORY_LAST_MODIFIED_COL_BYTES); } byte[] lastModTimeBytes = CellUtil.cloneValue(cell); // we try to approximately set the job submit time based on when the job // history file // was last modified and an average job duration long lastModTime = Bytes.toLong(lastModTimeBytes); long jobSubmitTimeMillis = lastModTime - Constants.AVERGAE_JOB_DURATION; LOG.debug("Approximate job submit time is " + jobSubmitTimeMillis + " based on " + lastModTime); return jobSubmitTimeMillis; }
java
public long getApproxSubmitTime(Result value) throws MissingColumnInResultException { if (value == null) { throw new IllegalArgumentException( "Cannot get last modification time from " + "a null hbase result"); } Cell cell = value.getColumnLatestCell(Constants.INFO_FAM_BYTES, Constants.JOBHISTORY_LAST_MODIFIED_COL_BYTES); if (cell == null) { throw new MissingColumnInResultException(Constants.INFO_FAM_BYTES, Constants.JOBHISTORY_LAST_MODIFIED_COL_BYTES); } byte[] lastModTimeBytes = CellUtil.cloneValue(cell); // we try to approximately set the job submit time based on when the job // history file // was last modified and an average job duration long lastModTime = Bytes.toLong(lastModTimeBytes); long jobSubmitTimeMillis = lastModTime - Constants.AVERGAE_JOB_DURATION; LOG.debug("Approximate job submit time is " + jobSubmitTimeMillis + " based on " + lastModTime); return jobSubmitTimeMillis; }
[ "public", "long", "getApproxSubmitTime", "(", "Result", "value", ")", "throws", "MissingColumnInResultException", "{", "if", "(", "value", "==", "null", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\"Cannot get last modification time from \"", "+", "\"a null hbase result\"", ")", ";", "}", "Cell", "cell", "=", "value", ".", "getColumnLatestCell", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "JOBHISTORY_LAST_MODIFIED_COL_BYTES", ")", ";", "if", "(", "cell", "==", "null", ")", "{", "throw", "new", "MissingColumnInResultException", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "JOBHISTORY_LAST_MODIFIED_COL_BYTES", ")", ";", "}", "byte", "[", "]", "lastModTimeBytes", "=", "CellUtil", ".", "cloneValue", "(", "cell", ")", ";", "// we try to approximately set the job submit time based on when the job", "// history file", "// was last modified and an average job duration", "long", "lastModTime", "=", "Bytes", ".", "toLong", "(", "lastModTimeBytes", ")", ";", "long", "jobSubmitTimeMillis", "=", "lastModTime", "-", "Constants", ".", "AVERGAE_JOB_DURATION", ";", "LOG", ".", "debug", "(", "\"Approximate job submit time is \"", "+", "jobSubmitTimeMillis", "+", "\" based on \"", "+", "lastModTime", ")", ";", "return", "jobSubmitTimeMillis", ";", "}" ]
attempts to approximately set the job submit time based on the last modification time of the job history file @param value the hbase result to read the last modification time from @return approximate job submit time in milliseconds @throws MissingColumnInResultException if the last modification column is missing
[ "attempts", "to", "approximately", "set", "the", "job", "submit", "time", "based", "on", "the", "last", "modification", "time", "of", "the", "job", "history", "file" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java#L500-L524
2,634
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java
JobHistoryRawService.markJobForReprocesssing
public void markJobForReprocesssing(QualifiedJobId jobId) throws IOException { Put p = new Put(idConv.toBytes(jobId)); p.addColumn(Constants.INFO_FAM_BYTES, Constants.RAW_COL_REPROCESS_BYTES, Bytes.toBytes(true)); Table rawTable = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); rawTable.put(p); } finally { if (rawTable != null) { rawTable.close(); } } }
java
public void markJobForReprocesssing(QualifiedJobId jobId) throws IOException { Put p = new Put(idConv.toBytes(jobId)); p.addColumn(Constants.INFO_FAM_BYTES, Constants.RAW_COL_REPROCESS_BYTES, Bytes.toBytes(true)); Table rawTable = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); rawTable.put(p); } finally { if (rawTable != null) { rawTable.close(); } } }
[ "public", "void", "markJobForReprocesssing", "(", "QualifiedJobId", "jobId", ")", "throws", "IOException", "{", "Put", "p", "=", "new", "Put", "(", "idConv", ".", "toBytes", "(", "jobId", ")", ")", ";", "p", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "RAW_COL_REPROCESS_BYTES", ",", "Bytes", ".", "toBytes", "(", "true", ")", ")", ";", "Table", "rawTable", "=", "null", ";", "try", "{", "rawTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_RAW_TABLE", ")", ")", ";", "rawTable", ".", "put", "(", "p", ")", ";", "}", "finally", "{", "if", "(", "rawTable", "!=", "null", ")", "{", "rawTable", ".", "close", "(", ")", ";", "}", "}", "}" ]
Flags a job's RAW record for reprocessing @param jobId the cluster and job ID of the record to flag
[ "Flags", "a", "job", "s", "RAW", "record", "for", "reprocessing" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java#L543-L557
2,635
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java
JobHistoryRawService.getAggregatedStatusPut
public Put getAggregatedStatusPut(byte[] row, byte[] col, Boolean status) { Put put = new Put(row); put.addColumn(Constants.INFO_FAM_BYTES, col, Bytes.toBytes(status)); try { LOG.info(" agg status " + status + " and put " + put.toJSON()); } catch (IOException e) { // ignore json exception } return put; }
java
public Put getAggregatedStatusPut(byte[] row, byte[] col, Boolean status) { Put put = new Put(row); put.addColumn(Constants.INFO_FAM_BYTES, col, Bytes.toBytes(status)); try { LOG.info(" agg status " + status + " and put " + put.toJSON()); } catch (IOException e) { // ignore json exception } return put; }
[ "public", "Put", "getAggregatedStatusPut", "(", "byte", "[", "]", "row", ",", "byte", "[", "]", "col", ",", "Boolean", "status", ")", "{", "Put", "put", "=", "new", "Put", "(", "row", ")", ";", "put", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "col", ",", "Bytes", ".", "toBytes", "(", "status", ")", ")", ";", "try", "{", "LOG", ".", "info", "(", "\" agg status \"", "+", "status", "+", "\" and put \"", "+", "put", ".", "toJSON", "(", ")", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "// ignore json exception", "}", "return", "put", ";", "}" ]
creates a put to be written to the RAW table for aggregation status @param row the row key @param col the status column qualifier @param status the aggregation status to store @return {@link Put}
[ "creates", "a", "put", "to", "be", "updated", "into", "the", "RAW", "table", "for", "aggregation", "status" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java#L591-L600
2,636
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java
JobHistoryRawService.getStatusAgg
public boolean getStatusAgg(byte[] row, byte[] col) throws IOException { Get g = new Get(row); g.addColumn(Constants.INFO_FAM_BYTES, col); Table rawTable = null; Cell cell = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); Result r = rawTable.get(g); cell = r.getColumnLatestCell(Constants.INFO_FAM_BYTES, col); } finally { if (rawTable != null) { rawTable.close(); } } boolean status = false; try { if (cell != null) { status = Bytes.toBoolean(CellUtil.cloneValue(cell)); } } catch (IllegalArgumentException iae) { LOG.error("Caught " + iae); } LOG.info("Returning from Raw, " + Bytes.toString(col) + " for this job=" + status); return status; }
java
public boolean getStatusAgg(byte[] row, byte[] col) throws IOException { Get g = new Get(row); g.addColumn(Constants.INFO_FAM_BYTES, col); Table rawTable = null; Cell cell = null; try { rawTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_RAW_TABLE)); Result r = rawTable.get(g); cell = r.getColumnLatestCell(Constants.INFO_FAM_BYTES, col); } finally { if (rawTable != null) { rawTable.close(); } } boolean status = false; try { if (cell != null) { status = Bytes.toBoolean(CellUtil.cloneValue(cell)); } } catch (IllegalArgumentException iae) { LOG.error("Caught " + iae); } LOG.info("Returning from Raw, " + Bytes.toString(col) + " for this job=" + status); return status; }
[ "public", "boolean", "getStatusAgg", "(", "byte", "[", "]", "row", ",", "byte", "[", "]", "col", ")", "throws", "IOException", "{", "Get", "g", "=", "new", "Get", "(", "row", ")", ";", "g", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "col", ")", ";", "Table", "rawTable", "=", "null", ";", "Cell", "cell", "=", "null", ";", "try", "{", "rawTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_RAW_TABLE", ")", ")", ";", "Result", "r", "=", "rawTable", ".", "get", "(", "g", ")", ";", "cell", "=", "r", ".", "getColumnLatestCell", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "col", ")", ";", "}", "finally", "{", "if", "(", "rawTable", "!=", "null", ")", "{", "rawTable", ".", "close", "(", ")", ";", "}", "}", "boolean", "status", "=", "false", ";", "try", "{", "if", "(", "cell", "!=", "null", ")", "{", "status", "=", "Bytes", ".", "toBoolean", "(", "CellUtil", ".", "cloneValue", "(", "cell", ")", ")", ";", "}", "}", "catch", "(", "IllegalArgumentException", "iae", ")", "{", "LOG", ".", "error", "(", "\"Caught \"", "+", "iae", ")", ";", "}", "LOG", ".", "info", "(", "\"Returning from Raw, \"", "+", "Bytes", ".", "toString", "(", "col", ")", "+", "\" for this job=\"", "+", "status", ")", ";", "return", "status", ";", "}" ]
creates a Get to fetch the daily aggregation status from the RAW table @param row the row key @param col the status column qualifier @return the stored aggregation status, false if the column is absent or unreadable @throws IOException if the get fails
[ "creates", "a", "Get", "to", "be", "fetch", "daily", "aggregation", "status", "from", "the", "RAW", "table" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryRawService.java#L608-L634
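Together, getAggregatedStatusPut and getStatusAgg implement a boolean flag column: write Bytes.toBytes(status) with a Put, read the latest cell back with a Get. A self-contained sketch of that round trip against any Table; the family and qualifier names are made up:

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BooleanFlagSketch {
  static final byte[] FAM = Bytes.toBytes("i");   // hypothetical column family
  static final byte[] COL = Bytes.toBytes("agg"); // hypothetical qualifier

  static void setFlag(Table table, byte[] row, boolean status) throws IOException {
    Put put = new Put(row);
    put.addColumn(FAM, COL, Bytes.toBytes(status));
    table.put(put);
  }

  static boolean getFlag(Table table, byte[] row) throws IOException {
    Get get = new Get(row);
    get.addColumn(FAM, COL);
    Result result = table.get(get);
    Cell cell = result.getColumnLatestCell(FAM, COL);
    // an absent cell means the flag was never written; default to false
    return cell != null && Bytes.toBoolean(CellUtil.cloneValue(cell));
  }
}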
2,637
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/FlowKey.java
FlowKey.compareTo
@Override public int compareTo(Object other) { if (other == null) { return -1; } FlowKey otherKey = (FlowKey)other; return new CompareToBuilder() .appendSuper(super.compareTo(other)) .append(getEncodedRunId(), otherKey.getEncodedRunId()) .toComparison(); }
java
@Override public int compareTo(Object other) { if (other == null) { return -1; } FlowKey otherKey = (FlowKey)other; return new CompareToBuilder() .appendSuper(super.compareTo(other)) .append(getEncodedRunId(), otherKey.getEncodedRunId()) .toComparison(); }
[ "@", "Override", "public", "int", "compareTo", "(", "Object", "other", ")", "{", "if", "(", "other", "==", "null", ")", "{", "return", "-", "1", ";", "}", "FlowKey", "otherKey", "=", "(", "FlowKey", ")", "other", ";", "return", "new", "CompareToBuilder", "(", ")", ".", "appendSuper", "(", "super", ".", "compareTo", "(", "other", ")", ")", ".", "append", "(", "getEncodedRunId", "(", ")", ",", "otherKey", ".", "getEncodedRunId", "(", ")", ")", ".", "toComparison", "(", ")", ";", "}" ]
Compares two FlowKey objects on the basis of their cluster, userName, appId and encodedRunId @param other the key to compare against @return 0 if this cluster, userName, appId and encodedRunId are equal to the other's; a positive value if this key orders after the other; a negative value if this key orders before the other; -1 if other is null
[ "Compares", "two", "FlowKey", "objects", "on", "the", "basis", "of", "their", "cluster", "userName", "appId", "and", "encodedRunId" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/FlowKey.java#L86-L96
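The CompareToBuilder chain short-circuits left to right: the appendSuper comparison (cluster, userName, appId) decides first, and encodedRunId only breaks ties. A standalone sketch of the same commons-lang idiom with a hypothetical two-field key:

import org.apache.commons.lang.builder.CompareToBuilder;

public class ChainedCompareSketch implements Comparable<ChainedCompareSketch> {
  private final String cluster;     // hypothetical fields standing in for the key parts
  private final long encodedRunId;

  ChainedCompareSketch(String cluster, long encodedRunId) {
    this.cluster = cluster;
    this.encodedRunId = encodedRunId;
  }

  @Override
  public int compareTo(ChainedCompareSketch other) {
    // earlier append() calls dominate; later ones only break ties
    return new CompareToBuilder()
        .append(this.cluster, other.cluster)
        .append(this.encodedRunId, other.encodedRunId)
        .toComparison();
  }
}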
2,638
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryByIdService.java
JobHistoryByIdService.writeIndexes
public void writeIndexes(JobKey jobKey) throws IOException { // Defensive coding if (jobKey != null) { Table historyByJobIdTable = null; try { historyByJobIdTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_BY_JOBID_TABLE)); byte[] jobKeyBytes = jobKeyConv.toBytes(jobKey); byte[] rowKeyBytes = jobIdConv.toBytes( new QualifiedJobId(jobKey.getCluster(), jobKey.getJobId())); // Insert (or update) row with jobid as the key Put p = new Put(rowKeyBytes); p.addColumn(Constants.INFO_FAM_BYTES, Constants.ROWKEY_COL_BYTES, jobKeyBytes); historyByJobIdTable.put(p); } finally { if (historyByJobIdTable != null) { historyByJobIdTable.close(); } } } }
java
public void writeIndexes(JobKey jobKey) throws IOException { // Defensive coding if (jobKey != null) { Table historyByJobIdTable = null; try { historyByJobIdTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_BY_JOBID_TABLE)); byte[] jobKeyBytes = jobKeyConv.toBytes(jobKey); byte[] rowKeyBytes = jobIdConv.toBytes( new QualifiedJobId(jobKey.getCluster(), jobKey.getJobId())); // Insert (or update) row with jobid as the key Put p = new Put(rowKeyBytes); p.addColumn(Constants.INFO_FAM_BYTES, Constants.ROWKEY_COL_BYTES, jobKeyBytes); historyByJobIdTable.put(p); } finally { if (historyByJobIdTable != null) { historyByJobIdTable.close(); } } } }
[ "public", "void", "writeIndexes", "(", "JobKey", "jobKey", ")", "throws", "IOException", "{", "// Defensive coding", "if", "(", "jobKey", "!=", "null", ")", "{", "Table", "historyByJobIdTable", "=", "null", ";", "try", "{", "historyByJobIdTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_BY_JOBID_TABLE", ")", ")", ";", "byte", "[", "]", "jobKeyBytes", "=", "jobKeyConv", ".", "toBytes", "(", "jobKey", ")", ";", "byte", "[", "]", "rowKeyBytes", "=", "jobIdConv", ".", "toBytes", "(", "new", "QualifiedJobId", "(", "jobKey", ".", "getCluster", "(", ")", ",", "jobKey", ".", "getJobId", "(", ")", ")", ")", ";", "// Insert (or update) row with jobid as the key", "Put", "p", "=", "new", "Put", "(", "rowKeyBytes", ")", ";", "p", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "ROWKEY_COL_BYTES", ",", "jobKeyBytes", ")", ";", "historyByJobIdTable", ".", "put", "(", "p", ")", ";", "}", "finally", "{", "if", "(", "historyByJobIdTable", "!=", "null", ")", "{", "historyByJobIdTable", ".", "close", "(", ")", ";", "}", "}", "}", "}" ]
Creates the secondary index records cluster!jobId->jobKey. @param jobKey the job key to index @throws IOException if the entry cannot be written.
[ "Create", "the", "secondary", "indexes", "records", "cluster!jobId", "-", ">", "jobKey", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryByIdService.java#L95-L118
2,639
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java
ByteUtil.join
public static byte[] join(byte[] separator, byte[]... components) { if (components == null || components.length == 0) { return Constants.EMPTY_BYTES; } int finalSize = 0; if (separator != null) { finalSize = separator.length * (components.length - 1); } for (byte[] comp : components) { finalSize += comp.length; } byte[] buf = new byte[finalSize]; int offset = 0; for (int i=0; i < components.length; i++) { System.arraycopy(components[i], 0, buf, offset, components[i].length); offset += components[i].length; if (i < (components.length-1) && separator != null && separator.length > 0) { System.arraycopy(separator, 0, buf, offset, separator.length); offset += separator.length; } } return buf; }
java
public static byte[] join(byte[] separator, byte[]... components) { if (components == null || components.length == 0) { return Constants.EMPTY_BYTES; } int finalSize = 0; if (separator != null) { finalSize = separator.length * (components.length - 1); } for (byte[] comp : components) { finalSize += comp.length; } byte[] buf = new byte[finalSize]; int offset = 0; for (int i=0; i < components.length; i++) { System.arraycopy(components[i], 0, buf, offset, components[i].length); offset += components[i].length; if (i < (components.length-1) && separator != null && separator.length > 0) { System.arraycopy(separator, 0, buf, offset, separator.length); offset += separator.length; } } return buf; }
[ "public", "static", "byte", "[", "]", "join", "(", "byte", "[", "]", "separator", ",", "byte", "[", "]", "...", "components", ")", "{", "if", "(", "components", "==", "null", "||", "components", ".", "length", "==", "0", ")", "{", "return", "Constants", ".", "EMPTY_BYTES", ";", "}", "int", "finalSize", "=", "0", ";", "if", "(", "separator", "!=", "null", ")", "{", "finalSize", "=", "separator", ".", "length", "*", "(", "components", ".", "length", "-", "1", ")", ";", "}", "for", "(", "byte", "[", "]", "comp", ":", "components", ")", "{", "finalSize", "+=", "comp", ".", "length", ";", "}", "byte", "[", "]", "buf", "=", "new", "byte", "[", "finalSize", "]", ";", "int", "offset", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "components", ".", "length", ";", "i", "++", ")", "{", "System", ".", "arraycopy", "(", "components", "[", "i", "]", ",", "0", ",", "buf", ",", "offset", ",", "components", "[", "i", "]", ".", "length", ")", ";", "offset", "+=", "components", "[", "i", "]", ".", "length", ";", "if", "(", "i", "<", "(", "components", ".", "length", "-", "1", ")", "&&", "separator", "!=", "null", "&&", "separator", ".", "length", ">", "0", ")", "{", "System", ".", "arraycopy", "(", "separator", ",", "0", ",", "buf", ",", "offset", ",", "separator", ".", "length", ")", ";", "offset", "+=", "separator", ".", "length", ";", "}", "}", "return", "buf", ";", "}" ]
Returns a single byte array containing all of the individual component arrays separated by the separator array. @param separator the bytes inserted between components, may be null or empty @param components the arrays to concatenate @return the joined array, or an empty array if components is null or empty
[ "Returns", "a", "single", "byte", "array", "containing", "all", "of", "the", "individual", "component", "arrays", "separated", "by", "the", "separator", "array", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java#L163-L187
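A quick usage sketch for join, assuming hraven-core and the HBase Bytes utility are available; with a one-byte separator and three components, the result is the components back to back with separators only between them:

import com.twitter.hraven.util.ByteUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class JoinSketch {
  public static void main(String[] args) {
    byte[] sep = Bytes.toBytes("!"); // hypothetical one-byte separator
    byte[] joined = ByteUtil.join(sep,
        Bytes.toBytes("cluster1"), Bytes.toBytes("user"), Bytes.toBytes("app"));
    // 8 + 1 + 4 + 1 + 3 = 17 bytes; no separator before the first or after the last
    System.out.println(Bytes.toString(joined)); // cluster1!user!app
  }
}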
2,640
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java
ByteUtil.safeCopy
public static byte[] safeCopy(byte[] source, int offset, int length) { if (length < 0 || source.length < offset+length) { return null; } byte[] copy = new byte[length]; System.arraycopy(source, offset, copy, 0, length); return copy; }
java
public static byte[] safeCopy(byte[] source, int offset, int length) { if (length < 0 || source.length < offset+length) { return null; } byte[] copy = new byte[length]; System.arraycopy(source, offset, copy, 0, length); return copy; }
[ "public", "static", "byte", "[", "]", "safeCopy", "(", "byte", "[", "]", "source", ",", "int", "offset", ",", "int", "length", ")", "{", "if", "(", "length", "<", "0", "||", "source", ".", "length", "<", "offset", "+", "length", ")", "{", "return", "null", ";", "}", "byte", "[", "]", "copy", "=", "new", "byte", "[", "length", "]", ";", "System", ".", "arraycopy", "(", "source", ",", "offset", ",", "copy", ",", "0", ",", "length", ")", ";", "return", "copy", ";", "}" ]
Returns a copy of the source byte array, starting at offset for the given length. If the offset + length is out of bounds for the array, returns null. @param source the array to copy from @param offset the position to start copying at @param length the number of bytes to copy @return the copied bytes, or null if the requested range is out of bounds
[ "Returns", "a", "copy", "of", "the", "source", "byte", "array", "starting", "at", "offset", "for", "the", "given", "length", ".", "If", "the", "offset", "+", "length", "is", "out", "of", "bounds", "for", "the", "array", "returns", "null", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java#L242-L249
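safeCopy differs from Arrays.copyOfRange by returning null instead of throwing when the requested range overruns the source array; a small sketch showing both outcomes:

import com.twitter.hraven.util.ByteUtil;

public class SafeCopySketch {
  public static void main(String[] args) {
    byte[] source = new byte[] {1, 2, 3, 4, 5};
    byte[] middle = ByteUtil.safeCopy(source, 1, 3); // {2, 3, 4}
    byte[] tooFar = ByteUtil.safeCopy(source, 3, 4); // offset + length = 7 > 5, so null
    System.out.println(middle.length);  // 3
    System.out.println(tooFar == null); // true
  }
}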
2,641
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java
ByteUtil.getValueAsLong
public static long getValueAsLong(final byte[] key, final Map<byte[], byte[]> taskValues) { byte[] value = taskValues.get(key); if (value != null) { try { long retValue = Bytes.toLong(value); return retValue; } catch (NumberFormatException nfe) { LOG.error("Caught NFE while converting to long ", nfe); return 0L; } catch (IllegalArgumentException iae ) { // for exceptions like java.lang.IllegalArgumentException: // offset (0) + length (8) exceed the capacity of the array: 7 LOG.error("Caught IAE while converting to long ", iae); return 0L; } } else { return 0L; } }
java
public static long getValueAsLong(final byte[] key, final Map<byte[], byte[]> taskValues) { byte[] value = taskValues.get(key); if (value != null) { try { long retValue = Bytes.toLong(value); return retValue; } catch (NumberFormatException nfe) { LOG.error("Caught NFE while converting to long ", nfe); return 0L; } catch (IllegalArgumentException iae ) { // for exceptions like java.lang.IllegalArgumentException: // offset (0) + length (8) exceed the capacity of the array: 7 LOG.error("Caught IAE while converting to long ", iae); return 0L; } } else { return 0L; } }
[ "public", "static", "long", "getValueAsLong", "(", "final", "byte", "[", "]", "key", ",", "final", "Map", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "taskValues", ")", "{", "byte", "[", "]", "value", "=", "taskValues", ".", "get", "(", "key", ")", ";", "if", "(", "value", "!=", "null", ")", "{", "try", "{", "long", "retValue", "=", "Bytes", ".", "toLong", "(", "value", ")", ";", "return", "retValue", ";", "}", "catch", "(", "NumberFormatException", "nfe", ")", "{", "LOG", ".", "error", "(", "\"Caught NFE while converting to long \"", ",", "nfe", ")", ";", "return", "0L", ";", "}", "catch", "(", "IllegalArgumentException", "iae", ")", "{", "// for exceptions like java.lang.IllegalArgumentException:", "// offset (0) + length (8) exceed the capacity of the array: 7", "LOG", ".", "error", "(", "\"Caught IAE while converting to long \"", ",", "iae", ")", ";", "return", "0L", ";", "}", "}", "else", "{", "return", "0L", ";", "}", "}" ]
return a value from the map as a long @param key the column qualifier to look up @param taskValues the map of column values @return value as Long or 0L
[ "return", "a", "value", "from", "the", "NavigableMap", "as", "a", "long" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java#L257-L276
2,642
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java
ByteUtil.getValueAsString
public static String getValueAsString(final byte[] key, final Map<byte[], byte[]> taskValues) { byte[] value = taskValues.get(key); if (value != null) { return Bytes.toString(value); } else { return ""; } }
java
public static String getValueAsString(final byte[] key, final Map<byte[], byte[]> taskValues) { byte[] value = taskValues.get(key); if (value != null) { return Bytes.toString(value); } else { return ""; } }
[ "public", "static", "String", "getValueAsString", "(", "final", "byte", "[", "]", "key", ",", "final", "Map", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "taskValues", ")", "{", "byte", "[", "]", "value", "=", "taskValues", ".", "get", "(", "key", ")", ";", "if", "(", "value", "!=", "null", ")", "{", "return", "Bytes", ".", "toString", "(", "value", ")", ";", "}", "else", "{", "return", "\"\"", ";", "}", "}" ]
return a value from the map as a String @param key the column qualifier to look up @param taskValues the map of column values @return value as a String or ""
[ "return", "a", "value", "from", "the", "NavigableMap", "as", "a", "String" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java#L284-L292
2,643
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java
ByteUtil.getValueAsDouble
public static double getValueAsDouble(byte[] key, NavigableMap<byte[], byte[]> infoValues) { byte[] value = infoValues.get(key); if (value != null) { return Bytes.toDouble(value); } else { return 0.0; } }
java
public static double getValueAsDouble(byte[] key, NavigableMap<byte[], byte[]> infoValues) { byte[] value = infoValues.get(key); if (value != null) { return Bytes.toDouble(value); } else { return 0.0; } }
[ "public", "static", "double", "getValueAsDouble", "(", "byte", "[", "]", "key", ",", "NavigableMap", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "infoValues", ")", "{", "byte", "[", "]", "value", "=", "infoValues", ".", "get", "(", "key", ")", ";", "if", "(", "value", "!=", "null", ")", "{", "return", "Bytes", ".", "toDouble", "(", "value", ")", ";", "}", "else", "{", "return", "0.0", ";", "}", "}" ]
return a value from the NavigableMap as a Double @param key to be looked up for the value @param infoValues - the map containing the key values @return value as Double or 0.0
[ "return", "a", "value", "from", "the", "NavigableMap", "as", "a", "Double" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java#L300-L308
2,644
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java
ByteUtil.getValueAsInt
public static int getValueAsInt(byte[] key, Map<byte[], byte[]> infoValues) { byte[] value = infoValues.get(key); if (value != null) { try { int retValue = Bytes.toInt(value); return retValue; } catch (NumberFormatException nfe) { LOG.error("Caught NFE while converting to int ", nfe); return 0; } catch (IllegalArgumentException iae) { // for exceptions like java.lang.IllegalArgumentException: // offset (0) + length (8) exceed the capacity of the array: 7 LOG.error("Caught IAE while converting to int ", iae); return 0; } } else { return 0; } }
java
public static int getValueAsInt(byte[] key, Map<byte[], byte[]> infoValues) { byte[] value = infoValues.get(key); if (value != null) { try { int retValue = Bytes.toInt(value); return retValue; } catch (NumberFormatException nfe) { LOG.error("Caught NFE while converting to int ", nfe); return 0; } catch (IllegalArgumentException iae) { // for exceptions like java.lang.IllegalArgumentException: // offset (0) + length (8) exceed the capacity of the array: 7 LOG.error("Caught IAE while converting to int ", iae); return 0; } } else { return 0; } }
[ "public", "static", "int", "getValueAsInt", "(", "byte", "[", "]", "key", ",", "Map", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "infoValues", ")", "{", "byte", "[", "]", "value", "=", "infoValues", ".", "get", "(", "key", ")", ";", "if", "(", "value", "!=", "null", ")", "{", "try", "{", "int", "retValue", "=", "Bytes", ".", "toInt", "(", "value", ")", ";", "return", "retValue", ";", "}", "catch", "(", "NumberFormatException", "nfe", ")", "{", "LOG", ".", "error", "(", "\"Caught NFE while converting to int \"", ",", "nfe", ")", ";", "return", "0", ";", "}", "catch", "(", "IllegalArgumentException", "iae", ")", "{", "// for exceptions like java.lang.IllegalArgumentException:", "// offset (0) + length (8) exceed the capacity of the array: 7", "LOG", ".", "error", "(", "\"Caught IAE while converting to int \"", ",", "iae", ")", ";", "return", "0", ";", "}", "}", "else", "{", "return", "0", ";", "}", "}" ]
get value from a map as an int @param key the column qualifier to look up @param infoValues the map of column values @return value as int or 0
[ "get", "value", "from", "a", "map", "as", "an", "int" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/ByteUtil.java#L316-L334
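All of the getValueAs* helpers take maps keyed by byte[], and arrays compare by identity, so a plain HashMap would never find the keys; the maps these helpers are typically fed (HBase family maps) are ordered by Bytes.BYTES_COMPARATOR. A sketch reproducing that setup with made-up column names:

import java.util.TreeMap;
import com.twitter.hraven.util.ByteUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class ValueMapSketch {
  public static void main(String[] args) {
    // byte[] keys need a content-based comparator, exactly what HBase results use
    TreeMap<byte[], byte[]> values = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
    values.put(Bytes.toBytes("total_size"), Bytes.toBytes(123456789L)); // hypothetical columns
    values.put(Bytes.toBytes("file_count"), Bytes.toBytes(42));
    values.put(Bytes.toBytes("owner"), Bytes.toBytes("hraven"));

    System.out.println(ByteUtil.getValueAsLong(Bytes.toBytes("total_size"), values)); // 123456789
    System.out.println(ByteUtil.getValueAsInt(Bytes.toBytes("file_count"), values));  // 42
    System.out.println(ByteUtil.getValueAsString(Bytes.toBytes("owner"), values));    // hraven
    System.out.println(ByteUtil.getValueAsLong(Bytes.toBytes("missing"), values));    // 0 for an absent key
  }
}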
2,645
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/AppKey.java
AppKey.compareTo
@Override public int compareTo(Object other) { if (other == null) { return -1; } AppKey otherKey = (AppKey) other; return new CompareToBuilder() .append(this.cluster, otherKey.getCluster()) .append(this.userName, otherKey.getUserName()) .append(this.appId, otherKey.getAppId()) .toComparison(); }
java
@Override public int compareTo(Object other) { if (other == null) { return -1; } AppKey otherKey = (AppKey) other; return new CompareToBuilder() .append(this.cluster, otherKey.getCluster()) .append(this.userName, otherKey.getUserName()) .append(this.appId, otherKey.getAppId()) .toComparison(); }
[ "@", "Override", "public", "int", "compareTo", "(", "Object", "other", ")", "{", "if", "(", "other", "==", "null", ")", "{", "return", "-", "1", ";", "}", "AppKey", "otherKey", "=", "(", "AppKey", ")", "other", ";", "return", "new", "CompareToBuilder", "(", ")", ".", "append", "(", "this", ".", "cluster", ",", "otherKey", ".", "getCluster", "(", ")", ")", ".", "append", "(", "this", ".", "userName", ",", "otherKey", ".", "getUserName", "(", ")", ")", ".", "append", "(", "this", ".", "appId", ",", "otherKey", ".", "getAppId", "(", ")", ")", ".", "toComparison", "(", ")", ";", "}" ]
Compares two AppKey objects on the basis of their cluster, userName and appId @param other @return 0 if this cluster, userName and appId are all equal to the other's, a positive value if this key is greater than the other key, a negative value if this key is less than the other key (-1 if other is null)
[ "Compares", "two", "AppKey", "objects", "on", "the", "basis", "of", "their", "cluster", "userName", "appId" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/AppKey.java#L84-L95
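To make the ordering defined by AppKey.compareTo above concrete, a small sorting sketch. The three-argument constructor is an assumption inferred from the fields the comparator reads; check the class for the actual signature.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.twitter.hraven.AppKey;

public class AppKeySortDemo {
  public static void main(String[] args) {
    // Constructor shape assumed: (cluster, userName, appId).
    List<AppKey> keys = new ArrayList<AppKey>();
    keys.add(new AppKey("cluster1", "bob", "wordcount"));
    keys.add(new AppKey("cluster1", "alice", "wordcount"));
    keys.add(new AppKey("cluster0", "bob", "sleepjob"));
    Collections.sort(keys);
    // Ordered by cluster, then userName, then appId:
    // cluster0/bob/sleepjob, cluster1/alice/wordcount, cluster1/bob/wordcount
    for (AppKey k : keys) {
      System.out.println(k.getCluster() + "/" + k.getUserName() + "/" + k.getAppId());
    }
  }
}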
2,646
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserFactory.java
JobHistoryFileParserFactory.getVersion
public static HadoopVersion getVersion(byte[] historyFileContents) { if(historyFileContents.length > HADOOP2_VERSION_LENGTH) { // the first 10 bytes in a hadoop2.0 history file contain Avro-Json String version2Part = new String(historyFileContents, 0, HADOOP2_VERSION_LENGTH); if (StringUtils.equalsIgnoreCase(version2Part, HADOOP2_VERSION_STRING)) { return HadoopVersion.TWO; } } // throw an exception if we did not find any matching version throw new IllegalArgumentException(" Unknown format of job history file: " + historyFileContents); }
java
public static HadoopVersion getVersion(byte[] historyFileContents) { if(historyFileContents.length > HADOOP2_VERSION_LENGTH) { // the first 10 bytes in a hadoop2.0 history file contain Avro-Json String version2Part = new String(historyFileContents, 0, HADOOP2_VERSION_LENGTH); if (StringUtils.equalsIgnoreCase(version2Part, HADOOP2_VERSION_STRING)) { return HadoopVersion.TWO; } } // throw an exception if we did not find any matching version throw new IllegalArgumentException(" Unknown format of job history file: " + historyFileContents); }
[ "public", "static", "HadoopVersion", "getVersion", "(", "byte", "[", "]", "historyFileContents", ")", "{", "if", "(", "historyFileContents", ".", "length", ">", "HADOOP2_VERSION_LENGTH", ")", "{", "// the first 10 bytes in a hadoop2.0 history file contain Avro-Json", "String", "version2Part", "=", "new", "String", "(", "historyFileContents", ",", "0", ",", "HADOOP2_VERSION_LENGTH", ")", ";", "if", "(", "StringUtils", ".", "equalsIgnoreCase", "(", "version2Part", ",", "HADOOP2_VERSION_STRING", ")", ")", "{", "return", "HadoopVersion", ".", "TWO", ";", "}", "}", "// throw an exception if we did not find any matching version", "throw", "new", "IllegalArgumentException", "(", "\" Unknown format of job history file: \"", "+", "historyFileContents", ")", ";", "}" ]
determines the version of hadoop that the history file belongs to @return HadoopVersion.TWO for newer job history files (newer job history files have "Avro-Json" as the signature at the start of the file, REFERENCE: https://issues.apache.org/jira/browse/MAPREDUCE-1016?focusedCommentId=12763160&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-12763160) @throws IllegalArgumentException if the signature does not match
[ "determines", "the", "version", "of", "hadoop", "that", "the", "history", "file", "belongs", "to" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserFactory.java#L50-L60
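A sketch of how the version sniffing above might be driven. The local file path is hypothetical, and the import location of HadoopVersion is assumed; note that, as written, the parser recognizes only the hadoop2 "Avro-Json" signature and throws for anything else.

import java.nio.file.Files;
import java.nio.file.Paths;
import com.twitter.hraven.HadoopVersion;
import com.twitter.hraven.etl.JobHistoryFileParserFactory;

public class VersionSniffDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical local copy of a job history file.
    byte[] contents = Files.readAllBytes(Paths.get("/tmp/job_1234_0001.jhist"));
    // Only the leading bytes are inspected for the "Avro-Json" signature.
    HadoopVersion version = JobHistoryFileParserFactory.getVersion(contents);
    System.out.println("Detected history file version: " + version);
  }
}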
2,647
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/CombineFileInputFormat.java
CombineFileInputFormat.createPool
protected void createPool(PathFilter... filters) { MultiPathFilter multi = new MultiPathFilter(); for (PathFilter f: filters) { multi.add(f); } pools.add(multi); }
java
protected void createPool(PathFilter... filters) { MultiPathFilter multi = new MultiPathFilter(); for (PathFilter f: filters) { multi.add(f); } pools.add(multi); }
[ "protected", "void", "createPool", "(", "PathFilter", "...", "filters", ")", "{", "MultiPathFilter", "multi", "=", "new", "MultiPathFilter", "(", ")", ";", "for", "(", "PathFilter", "f", ":", "filters", ")", "{", "multi", ".", "add", "(", "f", ")", ";", "}", "pools", ".", "add", "(", "multi", ")", ";", "}" ]
Create a new pool and add the filters to it. A pathname can satisfy any one of the specified filters. A split cannot have files from different pools.
[ "Create", "a", "new", "pool", "and", "add", "the", "filters", "to", "it", ".", "A", "pathname", "can", "satisfy", "any", "one", "of", "the", "specified", "filters", ".", "A", "split", "cannot", "have", "files", "from", "different", "pools", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/CombineFileInputFormat.java#L129-L135
2,648
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/CombineFileInputFormat.java
CombineFileInputFormat.addCreatedSplit
private void addCreatedSplit(List<InputSplit> splitList, List<String> locations, ArrayList<OneBlockInfo> validBlocks) { // create an input split Path[] fl = new Path[validBlocks.size()]; long[] offset = new long[validBlocks.size()]; long[] length = new long[validBlocks.size()]; for (int i = 0; i < validBlocks.size(); i++) { fl[i] = validBlocks.get(i).onepath; offset[i] = validBlocks.get(i).offset; length[i] = validBlocks.get(i).length; } // add this split to the list that is returned CombineFileSplit thissplit = new CombineFileSplit(fl, offset, length, locations.toArray(new String[0])); splitList.add(thissplit); }
java
private void addCreatedSplit(List<InputSplit> splitList, List<String> locations, ArrayList<OneBlockInfo> validBlocks) { // create an input split Path[] fl = new Path[validBlocks.size()]; long[] offset = new long[validBlocks.size()]; long[] length = new long[validBlocks.size()]; for (int i = 0; i < validBlocks.size(); i++) { fl[i] = validBlocks.get(i).onepath; offset[i] = validBlocks.get(i).offset; length[i] = validBlocks.get(i).length; } // add this split to the list that is returned CombineFileSplit thissplit = new CombineFileSplit(fl, offset, length, locations.toArray(new String[0])); splitList.add(thissplit); }
[ "private", "void", "addCreatedSplit", "(", "List", "<", "InputSplit", ">", "splitList", ",", "List", "<", "String", ">", "locations", ",", "ArrayList", "<", "OneBlockInfo", ">", "validBlocks", ")", "{", "// create an input split", "Path", "[", "]", "fl", "=", "new", "Path", "[", "validBlocks", ".", "size", "(", ")", "]", ";", "long", "[", "]", "offset", "=", "new", "long", "[", "validBlocks", ".", "size", "(", ")", "]", ";", "long", "[", "]", "length", "=", "new", "long", "[", "validBlocks", ".", "size", "(", ")", "]", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "validBlocks", ".", "size", "(", ")", ";", "i", "++", ")", "{", "fl", "[", "i", "]", "=", "validBlocks", ".", "get", "(", "i", ")", ".", "onepath", ";", "offset", "[", "i", "]", "=", "validBlocks", ".", "get", "(", "i", ")", ".", "offset", ";", "length", "[", "i", "]", "=", "validBlocks", ".", "get", "(", "i", ")", ".", "length", ";", "}", "// add this split to the list that is returned", "CombineFileSplit", "thissplit", "=", "new", "CombineFileSplit", "(", "fl", ",", "offset", ",", "length", ",", "locations", ".", "toArray", "(", "new", "String", "[", "0", "]", ")", ")", ";", "splitList", ".", "add", "(", "thissplit", ")", ";", "}" ]
Create a single split from the list of blocks specified in validBlocks. Add this new split into splitList.
[ "Create", "a", "single", "split", "from", "the", "list", "of", "blocks", "specified", "in", "validBlocks", "Add", "this", "new", "split", "into", "splitList", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/CombineFileInputFormat.java#L432-L449
2,649
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobKeyConverter.java
JobKeyConverter.toBytes
@Override public byte[] toBytes(JobKey jobKey) { if (jobKey == null) { return Constants.EMPTY_BYTES; } else { return ByteUtil.join(Constants.SEP_BYTES, Bytes.toBytes(jobKey.getCluster()), Bytes.toBytes(jobKey.getUserName()), Bytes.toBytes(jobKey.getAppId()), Bytes.toBytes(jobKey.getEncodedRunId()), idConv.toBytes(jobKey.getJobId())); } }
java
@Override public byte[] toBytes(JobKey jobKey) { if (jobKey == null) { return Constants.EMPTY_BYTES; } else { return ByteUtil.join(Constants.SEP_BYTES, Bytes.toBytes(jobKey.getCluster()), Bytes.toBytes(jobKey.getUserName()), Bytes.toBytes(jobKey.getAppId()), Bytes.toBytes(jobKey.getEncodedRunId()), idConv.toBytes(jobKey.getJobId())); } }
[ "@", "Override", "public", "byte", "[", "]", "toBytes", "(", "JobKey", "jobKey", ")", "{", "if", "(", "jobKey", "==", "null", ")", "{", "return", "Constants", ".", "EMPTY_BYTES", ";", "}", "else", "{", "return", "ByteUtil", ".", "join", "(", "Constants", ".", "SEP_BYTES", ",", "Bytes", ".", "toBytes", "(", "jobKey", ".", "getCluster", "(", ")", ")", ",", "Bytes", ".", "toBytes", "(", "jobKey", ".", "getUserName", "(", ")", ")", ",", "Bytes", ".", "toBytes", "(", "jobKey", ".", "getAppId", "(", ")", ")", ",", "Bytes", ".", "toBytes", "(", "jobKey", ".", "getEncodedRunId", "(", ")", ")", ",", "idConv", ".", "toBytes", "(", "jobKey", ".", "getJobId", "(", ")", ")", ")", ";", "}", "}" ]
Returns the byte encoded representation of a JobKey @param jobKey the JobKey to serialize @return the byte encoded representation of the JobKey
[ "Returns", "the", "byte", "encoded", "representation", "of", "a", "JobKey" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobKeyConverter.java#L36-L48
2,650
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobKeyConverter.java
JobKeyConverter.parseJobKey
public JobKey parseJobKey(byte[][] keyComponents) { // runId is inverted in the bytes representation so we get reverse // chronological order long encodedRunId = keyComponents.length > 3 ? Bytes.toLong(keyComponents[3]) : Long.MAX_VALUE; JobId jobId = keyComponents.length > 4 ? idConv.fromBytes(keyComponents[4]) : null; return new JobKey(Bytes.toString(keyComponents[0]), (keyComponents.length > 1 ? Bytes.toString(keyComponents[1]) : null), (keyComponents.length > 2 ? Bytes.toString(keyComponents[2]) : null), Long.MAX_VALUE - encodedRunId, jobId); }
java
public JobKey parseJobKey(byte[][] keyComponents) { // runId is inverted in the bytes representation so we get reverse // chronological order long encodedRunId = keyComponents.length > 3 ? Bytes.toLong(keyComponents[3]) : Long.MAX_VALUE; JobId jobId = keyComponents.length > 4 ? idConv.fromBytes(keyComponents[4]) : null; return new JobKey(Bytes.toString(keyComponents[0]), (keyComponents.length > 1 ? Bytes.toString(keyComponents[1]) : null), (keyComponents.length > 2 ? Bytes.toString(keyComponents[2]) : null), Long.MAX_VALUE - encodedRunId, jobId); }
[ "public", "JobKey", "parseJobKey", "(", "byte", "[", "]", "[", "]", "keyComponents", ")", "{", "// runId is inverted in the bytes representation so we get reverse", "// chronological order", "long", "encodedRunId", "=", "keyComponents", ".", "length", ">", "3", "?", "Bytes", ".", "toLong", "(", "keyComponents", "[", "3", "]", ")", ":", "Long", ".", "MAX_VALUE", ";", "JobId", "jobId", "=", "keyComponents", ".", "length", ">", "4", "?", "idConv", ".", "fromBytes", "(", "keyComponents", "[", "4", "]", ")", ":", "null", ";", "return", "new", "JobKey", "(", "Bytes", ".", "toString", "(", "keyComponents", "[", "0", "]", ")", ",", "(", "keyComponents", ".", "length", ">", "1", "?", "Bytes", ".", "toString", "(", "keyComponents", "[", "1", "]", ")", ":", "null", ")", ",", "(", "keyComponents", ".", "length", ">", "2", "?", "Bytes", ".", "toString", "(", "keyComponents", "[", "2", "]", ")", ":", "null", ")", ",", "Long", ".", "MAX_VALUE", "-", "encodedRunId", ",", "jobId", ")", ";", "}" ]
Constructs a JobKey instance from the individual byte encoded key components. @param keyComponents the key components, as split on the separator @return a JobKey instance containing the decoded components
[ "Constructs", "a", "JobKey", "instance", "from", "the", "individual", "byte", "encoded", "key", "components", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobKeyConverter.java#L72-L86
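toBytes and parseJobKey form an encode/decode pair around the row key layout cluster!user!appId!invertedRunId!jobId. A round-trip sketch; the five-argument JobKey constructor and the converter's fromBytes method are assumptions based on the fields read here, so verify them against the classes.

import com.twitter.hraven.JobKey;
import com.twitter.hraven.datasource.JobKeyConverter;

public class JobKeyRoundTripDemo {
  public static void main(String[] args) {
    JobKeyConverter conv = new JobKeyConverter();
    // Constructor shape assumed: (cluster, userName, appId, runId, jobId).
    JobKey key = new JobKey("cluster1", "alice", "wordcount",
        1339063492288L, "job_201206071202_0001");
    byte[] rowKey = conv.toBytes(key);
    // fromBytes is assumed to split on the separator and call parseJobKey.
    JobKey decoded = conv.fromBytes(rowKey);
    // The runId is stored as Long.MAX_VALUE - runId so newer runs sort
    // first; parseJobKey re-inverts it on the way out.
    System.out.println(decoded);
  }
}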
2,651
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/VersionInfo.java
VersionInfo.compareTo
@Override public int compareTo(VersionInfo other) { if (this.timestamp == other.timestamp) { return 0; } if (this.timestamp < other.timestamp) { return 1; } return -1; }
java
@Override public int compareTo(VersionInfo other) { if (this.timestamp == other.timestamp) { return 0; } if (this.timestamp < other.timestamp) { return 1; } return -1; }
[ "@", "Override", "public", "int", "compareTo", "(", "VersionInfo", "other", ")", "{", "if", "(", "this", ".", "timestamp", "==", "other", ".", "timestamp", ")", "{", "return", "0", ";", "}", "if", "(", "this", ".", "timestamp", "<", "other", ".", "timestamp", ")", "{", "return", "1", ";", "}", "return", "-", "1", ";", "}" ]
Compares two VersionInfo timestamps to order them in reverse chronological order @param other @return 0 if the timestamps are equal, 1 if this timestamp is less than the other timestamp, -1 if this timestamp is greater than the other timestamp
[ "Compares", "two", "VersionInfo", "timestamps", "to", "order", "them", "in", "reverse", "chronological", "order" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/VersionInfo.java#L55-L64
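The deliberately inverted comparison above means a plain sort yields newest-first order. A minimal sketch, assuming a (version, timestamp) constructor and a getTimestamp accessor; both are assumptions.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.twitter.hraven.datasource.VersionInfo;

public class VersionSortDemo {
  public static void main(String[] args) {
    // Constructor shape assumed: (version string, timestamp millis).
    List<VersionInfo> versions = new ArrayList<VersionInfo>();
    versions.add(new VersionInfo("v1", 1000L));
    versions.add(new VersionInfo("v2", 3000L));
    versions.add(new VersionInfo("v3", 2000L));
    Collections.sort(versions);
    // compareTo returns 1 when this.timestamp < other.timestamp, so the
    // list comes out newest first: 3000, 2000, 1000.
    for (VersionInfo v : versions) {
      System.out.println(v.getTimestamp());
    }
  }
}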
2,652
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/TaskKey.java
TaskKey.compareTo
@Override public int compareTo(Object other) { if (other == null) { return -1; } TaskKey otherKey = (TaskKey) other; return new CompareToBuilder().appendSuper(super.compareTo(otherKey)) .append(this.taskId, otherKey.getTaskId()) .toComparison(); }
java
@Override public int compareTo(Object other) { if (other == null) { return -1; } TaskKey otherKey = (TaskKey) other; return new CompareToBuilder().appendSuper(super.compareTo(otherKey)) .append(this.taskId, otherKey.getTaskId()) .toComparison(); }
[ "@", "Override", "public", "int", "compareTo", "(", "Object", "other", ")", "{", "if", "(", "other", "==", "null", ")", "{", "return", "-", "1", ";", "}", "TaskKey", "otherKey", "=", "(", "TaskKey", ")", "other", ";", "return", "new", "CompareToBuilder", "(", ")", ".", "appendSuper", "(", "super", ".", "compareTo", "(", "otherKey", ")", ")", ".", "append", "(", "this", ".", "taskId", ",", "otherKey", ".", "getTaskId", "(", ")", ")", ".", "toComparison", "(", ")", ";", "}" ]
Compares two TaskKey objects on the basis of their taskId @param other @return 0 if the taskIds are equal, 1 if this taskId is greater than the other taskId, -1 if this taskId is less than the other taskId
[ "Compares", "two", "TaskKey", "objects", "on", "the", "basis", "of", "their", "taskId" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/TaskKey.java#L61-L70
2,653
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/JobKey.java
JobKey.compareTo
@Override public int compareTo(Object other) { if (other == null) { return -1; } JobKey otherKey = (JobKey)other; return new CompareToBuilder().appendSuper(super.compareTo(otherKey)) .append(this.jobId, otherKey.getJobId()) .toComparison(); }
java
@Override public int compareTo(Object other) { if (other == null) { return -1; } JobKey otherKey = (JobKey)other; return new CompareToBuilder().appendSuper(super.compareTo(otherKey)) .append(this.jobId, otherKey.getJobId()) .toComparison(); }
[ "@", "Override", "public", "int", "compareTo", "(", "Object", "other", ")", "{", "if", "(", "other", "==", "null", ")", "{", "return", "-", "1", ";", "}", "JobKey", "otherKey", "=", "(", "JobKey", ")", "other", ";", "return", "new", "CompareToBuilder", "(", ")", ".", "appendSuper", "(", "super", ".", "compareTo", "(", "otherKey", ")", ")", ".", "append", "(", "this", ".", "jobId", ",", "otherKey", ".", "getJobId", "(", ")", ")", ".", "toComparison", "(", ")", ";", "}" ]
Compares two JobKey objects @param other @return 0 if the keys are equal, 1 if this JobKey is greater than the other JobKey, -1 if this JobKey is less than the other JobKey
[ "Compares", "two", "JobKey", "objects" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/JobKey.java#L119-L128
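The compareTo implementations in the last few records all lean on commons-lang's CompareToBuilder: appendSuper folds in the parent ordering, and each subsequent append is only decisive when everything before it tied. A self-contained sketch of the same pattern; the commons-lang3 coordinates are used here, while hraven itself may depend on the older commons-lang package.

import org.apache.commons.lang3.builder.CompareToBuilder;

public class HostKey implements Comparable<HostKey> {
  private final String cluster;
  private final String host;

  public HostKey(String cluster, String host) {
    this.cluster = cluster;
    this.host = host;
  }

  @Override
  public int compareTo(HostKey other) {
    if (other == null) {
      return -1;
    }
    // cluster is compared first; host only breaks ties.
    return new CompareToBuilder()
        .append(this.cluster, other.cluster)
        .append(this.host, other.host)
        .toComparison();
  }
}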
2,654
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserBase.java
JobHistoryFileParserBase.getHadoopVersionPut
public Put getHadoopVersionPut(HadoopVersion historyFileVersion, byte[] jobKeyBytes) { Put pVersion = new Put(jobKeyBytes); byte[] valueBytes = null; valueBytes = Bytes.toBytes(historyFileVersion.toString()); byte[] qualifier = Bytes.toBytes(JobHistoryKeys.hadoopversion.toString().toLowerCase()); pVersion.add(Constants.INFO_FAM_BYTES, qualifier, valueBytes); return pVersion; }
java
public Put getHadoopVersionPut(HadoopVersion historyFileVersion, byte[] jobKeyBytes) { Put pVersion = new Put(jobKeyBytes); byte[] valueBytes = null; valueBytes = Bytes.toBytes(historyFileVersion.toString()); byte[] qualifier = Bytes.toBytes(JobHistoryKeys.hadoopversion.toString().toLowerCase()); pVersion.add(Constants.INFO_FAM_BYTES, qualifier, valueBytes); return pVersion; }
[ "public", "Put", "getHadoopVersionPut", "(", "HadoopVersion", "historyFileVersion", ",", "byte", "[", "]", "jobKeyBytes", ")", "{", "Put", "pVersion", "=", "new", "Put", "(", "jobKeyBytes", ")", ";", "byte", "[", "]", "valueBytes", "=", "null", ";", "valueBytes", "=", "Bytes", ".", "toBytes", "(", "historyFileVersion", ".", "toString", "(", ")", ")", ";", "byte", "[", "]", "qualifier", "=", "Bytes", ".", "toBytes", "(", "JobHistoryKeys", ".", "hadoopversion", ".", "toString", "(", ")", ".", "toLowerCase", "(", ")", ")", ";", "pVersion", ".", "add", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "qualifier", ",", "valueBytes", ")", ";", "return", "pVersion", ";", "}" ]
generates a put that sets the hadoop version for a record @param historyFileVersion @param jobKeyBytes @return Put
[ "generates", "a", "put", "that", "sets", "the", "hadoop", "version", "for", "a", "record" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserBase.java#L57-L64
2,655
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserBase.java
JobHistoryFileParserBase.getSubmitTimeMillisFromJobHistory
public static long getSubmitTimeMillisFromJobHistory(byte[] jobHistoryRaw) { long submitTimeMillis = 0; if (null == jobHistoryRaw) { return submitTimeMillis; } HadoopVersion hv = JobHistoryFileParserFactory.getVersion(jobHistoryRaw); switch (hv) { case TWO: // look for the job submitted event, since that has the job submit time int startIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.JOB_SUBMIT_EVENT_BYTES, 0); if (startIndex != -1) { // now look for the submit time in this event int secondQuoteIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.SUBMIT_TIME_PREFIX_HADOOP2_BYTES, startIndex); if (secondQuoteIndex != -1) { // read the string that contains the unix timestamp String submitTimeMillisString = Bytes.toString(jobHistoryRaw, secondQuoteIndex + Constants.EPOCH_TIMESTAMP_STRING_LENGTH, Constants.EPOCH_TIMESTAMP_STRING_LENGTH); try { submitTimeMillis = Long.parseLong(submitTimeMillisString); } catch (NumberFormatException nfe) { LOG.error(" caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe.getMessage()); submitTimeMillis = 0; } } } break; case ONE: default: // The start of the history file looks like this: // Meta VERSION="1" . // Job JOBID="job_20120101120000_12345" JOBNAME="..." // USER="username" SUBMIT_TIME="1339063492288" JOBCONF=" // First we look for the first occurrence of SUBMIT_TIME=" // Then we find the place of the next close quote " // Then our value is in between those two if valid at all. startIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.SUBMIT_TIME_PREFIX_BYTES, 0); if (startIndex != -1) { int prefixEndIndex = startIndex + Constants.SUBMIT_TIME_PREFIX_BYTES.length; // Find close quote in the snippet, start looking where the prefix ends. int secondQuoteIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.QUOTE_BYTES, prefixEndIndex); if (secondQuoteIndex != -1) { int numberLength = secondQuoteIndex - prefixEndIndex; String submitTimeMillisString = Bytes.toString(jobHistoryRaw, prefixEndIndex, numberLength); try { submitTimeMillis = Long.parseLong(submitTimeMillisString); } catch (NumberFormatException nfe) { LOG.error(" caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe.getMessage()); submitTimeMillis = 0; } } } break; } return submitTimeMillis; }
java
public static long getSubmitTimeMillisFromJobHistory(byte[] jobHistoryRaw) { long submitTimeMillis = 0; if (null == jobHistoryRaw) { return submitTimeMillis; } HadoopVersion hv = JobHistoryFileParserFactory.getVersion(jobHistoryRaw); switch (hv) { case TWO: // look for the job submitted event, since that has the job submit time int startIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.JOB_SUBMIT_EVENT_BYTES, 0); if (startIndex != -1) { // now look for the submit time in this event int secondQuoteIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.SUBMIT_TIME_PREFIX_HADOOP2_BYTES, startIndex); if (secondQuoteIndex != -1) { // read the string that contains the unix timestamp String submitTimeMillisString = Bytes.toString(jobHistoryRaw, secondQuoteIndex + Constants.EPOCH_TIMESTAMP_STRING_LENGTH, Constants.EPOCH_TIMESTAMP_STRING_LENGTH); try { submitTimeMillis = Long.parseLong(submitTimeMillisString); } catch (NumberFormatException nfe) { LOG.error(" caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe.getMessage()); submitTimeMillis = 0; } } } break; case ONE: default: // The start of the history file looks like this: // Meta VERSION="1" . // Job JOBID="job_20120101120000_12345" JOBNAME="..." // USER="username" SUBMIT_TIME="1339063492288" JOBCONF=" // First we look for the first occurrence of SUBMIT_TIME=" // Then we find the place of the next close quote " // Then our value is in between those two if valid at all. startIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.SUBMIT_TIME_PREFIX_BYTES, 0); if (startIndex != -1) { int prefixEndIndex = startIndex + Constants.SUBMIT_TIME_PREFIX_BYTES.length; // Find close quote in the snippet, start looking where the prefix ends. int secondQuoteIndex = ByteUtil.indexOf(jobHistoryRaw, Constants.QUOTE_BYTES, prefixEndIndex); if (secondQuoteIndex != -1) { int numberLength = secondQuoteIndex - prefixEndIndex; String submitTimeMillisString = Bytes.toString(jobHistoryRaw, prefixEndIndex, numberLength); try { submitTimeMillis = Long.parseLong(submitTimeMillisString); } catch (NumberFormatException nfe) { LOG.error(" caught NFE during conversion of submit time " + submitTimeMillisString + " " + nfe.getMessage()); submitTimeMillis = 0; } } } break; } return submitTimeMillis; }
[ "public", "static", "long", "getSubmitTimeMillisFromJobHistory", "(", "byte", "[", "]", "jobHistoryRaw", ")", "{", "long", "submitTimeMillis", "=", "0", ";", "if", "(", "null", "==", "jobHistoryRaw", ")", "{", "return", "submitTimeMillis", ";", "}", "HadoopVersion", "hv", "=", "JobHistoryFileParserFactory", ".", "getVersion", "(", "jobHistoryRaw", ")", ";", "switch", "(", "hv", ")", "{", "case", "TWO", ":", "// look for the job submitted event, since that has the job submit time", "int", "startIndex", "=", "ByteUtil", ".", "indexOf", "(", "jobHistoryRaw", ",", "Constants", ".", "JOB_SUBMIT_EVENT_BYTES", ",", "0", ")", ";", "if", "(", "startIndex", "!=", "-", "1", ")", "{", "// now look for the submit time in this event", "int", "secondQuoteIndex", "=", "ByteUtil", ".", "indexOf", "(", "jobHistoryRaw", ",", "Constants", ".", "SUBMIT_TIME_PREFIX_HADOOP2_BYTES", ",", "startIndex", ")", ";", "if", "(", "secondQuoteIndex", "!=", "-", "1", ")", "{", "// read the string that contains the unix timestamp", "String", "submitTimeMillisString", "=", "Bytes", ".", "toString", "(", "jobHistoryRaw", ",", "secondQuoteIndex", "+", "Constants", ".", "EPOCH_TIMESTAMP_STRING_LENGTH", ",", "Constants", ".", "EPOCH_TIMESTAMP_STRING_LENGTH", ")", ";", "try", "{", "submitTimeMillis", "=", "Long", ".", "parseLong", "(", "submitTimeMillisString", ")", ";", "}", "catch", "(", "NumberFormatException", "nfe", ")", "{", "LOG", ".", "error", "(", "\" caught NFE during conversion of submit time \"", "+", "submitTimeMillisString", "+", "\" \"", "+", "nfe", ".", "getMessage", "(", ")", ")", ";", "submitTimeMillis", "=", "0", ";", "}", "}", "}", "break", ";", "case", "ONE", ":", "default", ":", "// The start of the history file looks like this:", "// Meta VERSION=\"1\" .", "// Job JOBID=\"job_20120101120000_12345\" JOBNAME=\"...\"", "// USER=\"username\" SUBMIT_TIME=\"1339063492288\" JOBCONF=\"", "// First we look for the first occurrence of SUBMIT_TIME=\"", "// Then we find the place of the next close quote \"", "// Then our value is in between those two if valid at all.", "startIndex", "=", "ByteUtil", ".", "indexOf", "(", "jobHistoryRaw", ",", "Constants", ".", "SUBMIT_TIME_PREFIX_BYTES", ",", "0", ")", ";", "if", "(", "startIndex", "!=", "-", "1", ")", "{", "int", "prefixEndIndex", "=", "startIndex", "+", "Constants", ".", "SUBMIT_TIME_PREFIX_BYTES", ".", "length", ";", "// Find close quote in the snippet, start looking where the prefix ends.", "int", "secondQuoteIndex", "=", "ByteUtil", ".", "indexOf", "(", "jobHistoryRaw", ",", "Constants", ".", "QUOTE_BYTES", ",", "prefixEndIndex", ")", ";", "if", "(", "secondQuoteIndex", "!=", "-", "1", ")", "{", "int", "numberLength", "=", "secondQuoteIndex", "-", "prefixEndIndex", ";", "String", "submitTimeMillisString", "=", "Bytes", ".", "toString", "(", "jobHistoryRaw", ",", "prefixEndIndex", ",", "numberLength", ")", ";", "try", "{", "submitTimeMillis", "=", "Long", ".", "parseLong", "(", "submitTimeMillisString", ")", ";", "}", "catch", "(", "NumberFormatException", "nfe", ")", "{", "LOG", ".", "error", "(", "\" caught NFE during conversion of submit time \"", "+", "submitTimeMillisString", "+", "\" \"", "+", "nfe", ".", "getMessage", "(", ")", ")", ";", "submitTimeMillis", "=", "0", ";", "}", "}", "}", "break", ";", "}", "return", "submitTimeMillis", ";", "}" ]
fetches the submit time from a raw job history byte representation @param jobHistoryRaw from which to pull the SUBMIT_TIME @return the job submit time in milliseconds since January 1, 1970 UTC; or 0 if no value can be found.
[ "fetches", "the", "submit", "time", "from", "a", "raw", "job", "history", "byte", "representation" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserBase.java#L162-L229
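A driving sketch for the submit-time extractor above. The file path is hypothetical; reading the whole file into a byte[] mirrors how hraven passes raw history contents around.

import java.nio.file.Files;
import java.nio.file.Paths;
import com.twitter.hraven.etl.JobHistoryFileParserBase;

public class SubmitTimeDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical local copy of a job history file.
    byte[] raw = Files.readAllBytes(Paths.get("/tmp/job_1234_0001.jhist"));
    long submitTime = JobHistoryFileParserBase.getSubmitTimeMillisFromJobHistory(raw);
    if (submitTime == 0L) {
      System.out.println("submit time not found or unparseable");
    } else {
      System.out.println("job submitted at " + submitTime + " ms since epoch");
    }
  }
}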
2,656
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/FlowEventService.java
FlowEventService.addEvent
public void addEvent(FlowEvent event) throws IOException { Put p = createPutForEvent(event); Table eventTable = null; try { eventTable = hbaseConnection .getTable(TableName.valueOf(Constants.FLOW_EVENT_TABLE)); eventTable.put(p); } finally { if (eventTable != null) { eventTable.close(); } } }
java
public void addEvent(FlowEvent event) throws IOException { Put p = createPutForEvent(event); Table eventTable = null; try { eventTable = hbaseConnection .getTable(TableName.valueOf(Constants.FLOW_EVENT_TABLE)); eventTable.put(p); } finally { if (eventTable != null) { eventTable.close(); } } }
[ "public", "void", "addEvent", "(", "FlowEvent", "event", ")", "throws", "IOException", "{", "Put", "p", "=", "createPutForEvent", "(", "event", ")", ";", "Table", "eventTable", "=", "null", ";", "try", "{", "eventTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "FLOW_EVENT_TABLE", ")", ")", ";", "eventTable", ".", "put", "(", "p", ")", ";", "}", "finally", "{", "if", "(", "eventTable", "!=", "null", ")", "{", "eventTable", ".", "close", "(", ")", ";", "}", "}", "}" ]
Stores a single flow event row @param event @throws IOException
[ "Stores", "a", "single", "flow", "event", "row" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/FlowEventService.java#L83-L95
2,657
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/FlowEventService.java
FlowEventService.addEvents
public void addEvents(List<FlowEvent> events) throws IOException { List<Put> puts = new ArrayList<Put>(events.size()); for (FlowEvent e : events) { puts.add(createPutForEvent(e)); } Table eventTable = null; try { eventTable = hbaseConnection .getTable(TableName.valueOf(Constants.FLOW_EVENT_TABLE)); eventTable.put(puts); } finally { if (eventTable != null) { eventTable.close(); } } }
java
public void addEvents(List<FlowEvent> events) throws IOException { List<Put> puts = new ArrayList<Put>(events.size()); for (FlowEvent e : events) { puts.add(createPutForEvent(e)); } Table eventTable = null; try { eventTable = hbaseConnection .getTable(TableName.valueOf(Constants.FLOW_EVENT_TABLE)); eventTable.put(puts); } finally { if (eventTable != null) { eventTable.close(); } } }
[ "public", "void", "addEvents", "(", "List", "<", "FlowEvent", ">", "events", ")", "throws", "IOException", "{", "List", "<", "Put", ">", "puts", "=", "new", "ArrayList", "<", "Put", ">", "(", "events", ".", "size", "(", ")", ")", ";", "for", "(", "FlowEvent", "e", ":", "events", ")", "{", "puts", ".", "add", "(", "createPutForEvent", "(", "e", ")", ")", ";", "}", "Table", "eventTable", "=", "null", ";", "try", "{", "eventTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "FLOW_EVENT_TABLE", ")", ")", ";", "eventTable", ".", "put", "(", "puts", ")", ";", "}", "finally", "{", "if", "(", "eventTable", "!=", "null", ")", "{", "eventTable", ".", "close", "(", ")", ";", "}", "}", "}" ]
Stores a batch of events @param events @throws IOException
[ "Stores", "a", "batch", "of", "events" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/FlowEventService.java#L102-L117
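The close-in-finally pattern in both methods above predates try-with-resources; since HBase's Table extends Closeable, the same lifecycle can be written more compactly. A sketch of the equivalent shape, with the table name passed in rather than taken from hraven's Constants:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class BatchPutSketch {
  // Equivalent of addEvents, using try-with-resources to close the Table.
  static void putAll(Connection conn, String tableName, List<Put> puts) throws IOException {
    try (Table table = conn.getTable(TableName.valueOf(tableName))) {
      table.put(puts); // sends the whole batch in one call
    }
  }
}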
2,658
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsKeyConverter.java
HdfsStatsKeyConverter.toBytes
@Override public byte[] toBytes(HdfsStatsKey hdfsStatsKey) { if (hdfsStatsKey == null || hdfsStatsKey.getQualifiedPathKey() == null) { return HdfsConstants.EMPTY_BYTES; } else { if (StringUtils.isBlank(hdfsStatsKey.getQualifiedPathKey().getNamespace())) { // hadoop1 or non federated namespace return ByteUtil.join(HdfsConstants.SEP_BYTES, Bytes.toBytes(Long.toString(hdfsStatsKey.getEncodedRunId())), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getCluster()), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getPath())); } else { // include federated namespace return ByteUtil.join(HdfsConstants.SEP_BYTES, Bytes.toBytes(Long.toString(hdfsStatsKey.getEncodedRunId())), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getCluster()), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getPath()), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getNamespace())); } } }
java
@Override public byte[] toBytes(HdfsStatsKey hdfsStatsKey) { if (hdfsStatsKey == null || hdfsStatsKey.getQualifiedPathKey() == null) { return HdfsConstants.EMPTY_BYTES; } else { if (StringUtils.isBlank(hdfsStatsKey.getQualifiedPathKey().getNamespace())) { // hadoop1 or non federated namespace return ByteUtil.join(HdfsConstants.SEP_BYTES, Bytes.toBytes(Long.toString(hdfsStatsKey.getEncodedRunId())), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getCluster()), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getPath())); } else { // include federated namespace return ByteUtil.join(HdfsConstants.SEP_BYTES, Bytes.toBytes(Long.toString(hdfsStatsKey.getEncodedRunId())), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getCluster()), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getPath()), Bytes.toBytes(hdfsStatsKey.getQualifiedPathKey().getNamespace())); } } }
[ "@", "Override", "public", "byte", "[", "]", "toBytes", "(", "HdfsStatsKey", "hdfsStatsKey", ")", "{", "if", "(", "hdfsStatsKey", "==", "null", "||", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", "==", "null", ")", "{", "return", "HdfsConstants", ".", "EMPTY_BYTES", ";", "}", "else", "{", "if", "(", "StringUtils", ".", "isBlank", "(", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", ".", "getNamespace", "(", ")", ")", ")", "{", "// hadoop1 or non federated namespace", "return", "ByteUtil", ".", "join", "(", "HdfsConstants", ".", "SEP_BYTES", ",", "Bytes", ".", "toBytes", "(", "Long", ".", "toString", "(", "hdfsStatsKey", ".", "getEncodedRunId", "(", ")", ")", ")", ",", "Bytes", ".", "toBytes", "(", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", ".", "getCluster", "(", ")", ")", ",", "Bytes", ".", "toBytes", "(", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", ".", "getPath", "(", ")", ")", ")", ";", "}", "else", "{", "// include federated namespace", "return", "ByteUtil", ".", "join", "(", "HdfsConstants", ".", "SEP_BYTES", ",", "Bytes", ".", "toBytes", "(", "Long", ".", "toString", "(", "hdfsStatsKey", ".", "getEncodedRunId", "(", ")", ")", ")", ",", "Bytes", ".", "toBytes", "(", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", ".", "getCluster", "(", ")", ")", ",", "Bytes", ".", "toBytes", "(", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", ".", "getPath", "(", ")", ")", ",", "Bytes", ".", "toBytes", "(", "hdfsStatsKey", ".", "getQualifiedPathKey", "(", ")", ".", "getNamespace", "(", ")", ")", ")", ";", "}", "}", "}" ]
Returns the byte encoded representation of a HdfsStatsKey @param hdfsStatsKey the HdfsStatsKey to serialize @return the byte encoded representation of the HdfsStatsKey
[ "Returns", "the", "byte", "encoded", "representation", "of", "a", "HdfsStatsKey" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsKeyConverter.java#L44-L64
2,659
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsKeyConverter.java
HdfsStatsKeyConverter.parseHdfsStatsKey
public static HdfsStatsKey parseHdfsStatsKey(byte[][] keyComponents) { return new HdfsStatsKey(new QualifiedPathKey( // cluster: (keyComponents.length > 1 ? Bytes.toString(keyComponents[1]) : null), // path: (keyComponents.length > 2 ? Bytes.toString(keyComponents[2]) : null), // namespace: (keyComponents.length > 3 ? Bytes.toString(keyComponents[3]) : null)), // encodedRunId: (keyComponents.length > 0 ? Long.parseLong(Bytes.toString(keyComponents[0])) : null)); }
java
public static HdfsStatsKey parseHdfsStatsKey(byte[][] keyComponents) { return new HdfsStatsKey(new QualifiedPathKey( // cluster: (keyComponents.length > 1 ? Bytes.toString(keyComponents[1]) : null), // path: (keyComponents.length > 2 ? Bytes.toString(keyComponents[2]) : null), // namespace: (keyComponents.length > 3 ? Bytes.toString(keyComponents[3]) : null)), // encodedRunId: (keyComponents.length > 0 ? Long.parseLong(Bytes.toString(keyComponents[0])) : null)); }
[ "public", "static", "HdfsStatsKey", "parseHdfsStatsKey", "(", "byte", "[", "]", "[", "]", "keyComponents", ")", "{", "return", "new", "HdfsStatsKey", "(", "new", "QualifiedPathKey", "(", "// cluster:", "(", "keyComponents", ".", "length", ">", "1", "?", "Bytes", ".", "toString", "(", "keyComponents", "[", "1", "]", ")", ":", "null", ")", ",", "// path:", "(", "keyComponents", ".", "length", ">", "2", "?", "Bytes", ".", "toString", "(", "keyComponents", "[", "2", "]", ")", ":", "null", ")", ",", "// namespace:", "(", "keyComponents", ".", "length", ">", "3", "?", "Bytes", ".", "toString", "(", "keyComponents", "[", "3", "]", ")", ":", "null", ")", ")", ",", "// encodedRunId:", "(", "keyComponents", ".", "length", ">", "0", "?", "Long", ".", "parseLong", "(", "Bytes", ".", "toString", "(", "keyComponents", "[", "0", "]", ")", ")", ":", "null", ")", ")", ";", "}" ]
Constructs a HdfsStatsKey instance from the individual byte encoded key components. @param keyComponents the key components, as split on the separator '!' @return a HdfsStatsKey instance containing the decoded components
[ "Constructs", "a", "HdfsStatsKey", "instance", "from", "the", "individual", "byte", "encoded", "key", "components", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsKeyConverter.java#L84-L94
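Together, toBytes and parseHdfsStatsKey define a row key of the form encodedRunId!cluster!path with an optional trailing !namespace for federated clusters. A standalone layout sketch using plain string joining; the '!' separator comes from the docstring above, the component values are made up, and the Long.MAX_VALUE inversion mirrors the JobKey convention rather than anything shown in this record.

public class HdfsStatsKeyLayoutDemo {
  public static void main(String[] args) {
    long runId = 1339063492L;
    long encodedRunId = Long.MAX_VALUE - runId; // assumed inversion, as for JobKey
    // Non-federated key: three components.
    String rowKey = encodedRunId + "!" + "cluster1" + "!" + "/user/alice/data";
    // Federated key appends the namespace as a fourth component.
    String federatedRowKey = rowKey + "!" + "ns1";
    System.out.println(rowKey);
    System.out.println(federatedRowKey);
  }
}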
2,660
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsKeyConverter.java
HdfsStatsKeyConverter.splitHdfsStatsKey
static byte[][] splitHdfsStatsKey(byte[] rawKey) { byte[][] splits = ByteUtil.split(rawKey, HdfsConstants.SEP_BYTES, HdfsConstants.NUM_HDFS_USAGE_ROWKEY_COMPONENTS); return splits; }
java
static byte[][] splitHdfsStatsKey(byte[] rawKey) { byte[][] splits = ByteUtil.split(rawKey, HdfsConstants.SEP_BYTES, HdfsConstants.NUM_HDFS_USAGE_ROWKEY_COMPONENTS); return splits; }
[ "static", "byte", "[", "]", "[", "]", "splitHdfsStatsKey", "(", "byte", "[", "]", "rawKey", ")", "{", "byte", "[", "]", "[", "]", "splits", "=", "ByteUtil", ".", "split", "(", "rawKey", ",", "HdfsConstants", ".", "SEP_BYTES", ",", "HdfsConstants", ".", "NUM_HDFS_USAGE_ROWKEY_COMPONENTS", ")", ";", "return", "splits", ";", "}" ]
Handles splitting the encoded hdfsStats key @param rawKey byte encoded representation of the path key @return the byte encoded key components, as split on the separator
[ "Handles", "splitting", "the", "encoded", "hdfsStats", "key" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/HdfsStatsKeyConverter.java#L102-L106
2,661
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/TaskDetails.java
TaskDetails.compareTo
@Override public int compareTo(TaskDetails otherTask) { if (otherTask == null) { return -1; } return new CompareToBuilder().append(this.taskKey, otherTask.getTaskKey()) .toComparison(); }
java
@Override public int compareTo(TaskDetails otherTask) { if (otherTask == null) { return -1; } return new CompareToBuilder().append(this.taskKey, otherTask.getTaskKey()) .toComparison(); }
[ "@", "Override", "public", "int", "compareTo", "(", "TaskDetails", "otherTask", ")", "{", "if", "(", "otherTask", "==", "null", ")", "{", "return", "-", "1", ";", "}", "return", "new", "CompareToBuilder", "(", ")", ".", "append", "(", "this", ".", "taskKey", ",", "otherTask", ".", "getTaskKey", "(", ")", ")", ".", "toComparison", "(", ")", ";", "}" ]
Compares two TaskDetails objects on the basis of their TaskKey @param other @return 0 if this TaskKey is equal to the other TaskKey, 1 if this TaskKey is greater than the other TaskKey, -1 if this TaskKey is less than the other TaskKey
[ "Compares", "two", "TaskDetails", "objects", "on", "the", "basis", "of", "their", "TaskKey" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/TaskDetails.java#L128-L136
2,662
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobFile.java
JobFile.parseFilename
private void parseFilename() { // Add additional filtering to discard empty files, or files ending in .crc if ((filename != null) && (filename.length() > 0) && (!filename.endsWith(".crc"))) { Matcher matcher = PATTERN.matcher(filename); if (matcher.matches()) { // jobTracker = ""; jobid = matcher.group(1); Matcher confMatcher = CONF_PATTERN.matcher(filename); if (confMatcher.matches()) { isJobConfFile = true; LOG.debug("Job Conf file " + filename + " with job id: " + jobid); } else { isJobHistoryFile = true; LOG.debug("Job History file " + filename + " with job id: " + jobid); } } else { LOG.info(" file does not match any format: " + filename); } } }
java
private void parseFilename() { // Add additional filtering to discard empty files, or files ending in .crc if ((filename != null) && (filename.length() > 0) && (!filename.endsWith(".crc"))) { Matcher matcher = PATTERN.matcher(filename); if (matcher.matches()) { // jobTracker = ""; jobid = matcher.group(1); Matcher confMatcher = CONF_PATTERN.matcher(filename); if (confMatcher.matches()) { isJobConfFile = true; LOG.debug("Job Conf file " + filename + " with job id: " + jobid); } else { isJobHistoryFile = true; LOG.debug("Job History file " + filename + " with job id: " + jobid); } } else { LOG.info(" file does not match any format: " + filename); } } }
[ "private", "void", "parseFilename", "(", ")", "{", "// Add additional filtering to discard empty files, or files ending in .crc", "if", "(", "(", "filename", "!=", "null", ")", "&&", "(", "filename", ".", "length", "(", ")", ">", "0", ")", "&&", "(", "!", "filename", ".", "endsWith", "(", "\".crc\"", ")", ")", ")", "{", "Matcher", "matcher", "=", "PATTERN", ".", "matcher", "(", "filename", ")", ";", "if", "(", "matcher", ".", "matches", "(", ")", ")", "{", "// jobTracker = \"\";", "jobid", "=", "matcher", ".", "group", "(", "1", ")", ";", "Matcher", "confMatcher", "=", "CONF_PATTERN", ".", "matcher", "(", "filename", ")", ";", "if", "(", "confMatcher", ".", "matches", "(", ")", ")", "{", "isJobConfFile", "=", "true", ";", "LOG", ".", "debug", "(", "\"Job Conf file \"", "+", "filename", "+", "\" with job id: \"", "+", "jobid", ")", ";", "}", "else", "{", "isJobHistoryFile", "=", "true", ";", "LOG", ".", "debug", "(", "\"Job History file \"", "+", "filename", "+", "\" with job id: \"", "+", "jobid", ")", ";", "}", "}", "else", "{", "LOG", ".", "info", "(", "\" file does not match any format: \"", "+", "filename", ")", ";", "}", "}", "}" ]
Parse the filename and pull the jobtracker and jobid out of it.
[ "Parse", "the", "filename", "and", "pull", "the", "jobtracker", "and", "jobid", "out", "of", "it", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobFile.java#L74-L98
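The PATTERN and CONF_PATTERN constants referenced above are not shown in this record. A sketch of the kind of matching parseFilename performs; the two regexes below are illustrative assumptions, not hraven's actual patterns.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class JobFileNameSketch {
  // Illustrative patterns only; the real PATTERN/CONF_PATTERN live in JobFile.
  private static final Pattern PATTERN =
      Pattern.compile(".*(job_[0-9]+_[0-9]+).*");
  private static final Pattern CONF_PATTERN =
      Pattern.compile(".*(job_[0-9]+_[0-9]+)_conf\\.xml.*");

  public static void main(String[] args) {
    String filename = "job_201206071202_0001_conf.xml";
    Matcher m = PATTERN.matcher(filename);
    if (m.matches()) {
      String jobId = m.group(1);
      boolean isConf = CONF_PATTERN.matcher(filename).matches();
      System.out.println(jobId + (isConf ? " (job conf file)" : " (job history file)"));
    }
  }
}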
2,663
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/BatchUtil.java
BatchUtil.shouldRetain
public static boolean shouldRetain(int i, int maxRetention, int length) { // Files with a zero-based index greater or equal than the retentionCutoff // should be retained. int retentionCutoff = length - maxRetention; boolean retain = (i >= retentionCutoff) ? true : false; return retain; }
java
public static boolean shouldRetain(int i, int maxRetention, int length) { // Files with a zero-based index greater or equal than the retentionCutoff // should be retained. int retentionCutoff = length - maxRetention; boolean retain = (i >= retentionCutoff) ? true : false; return retain; }
[ "public", "static", "boolean", "shouldRetain", "(", "int", "i", ",", "int", "maxRetention", ",", "int", "length", ")", "{", "// Files with a zero-based index greater or equal than the retentionCutoff", "// should be retained.", "int", "retentionCutoff", "=", "length", "-", "maxRetention", ";", "boolean", "retain", "=", "(", "i", ">=", "retentionCutoff", ")", "?", "true", ":", "false", ";", "return", "retain", ";", "}" ]
Method that can be used when iterating over an array and you want to retain only maxRetention items. @param i zero-based index of the element in the ordered array @param maxRetention total number of elements to retain. @param length length of the ordered array @return whether this element should be retained or not.
[ "Method", "that", "can", "be", "used", "when", "iterating", "over", "an", "array", "and", "you", "want", "to", "retain", "only", "maxRetention", "items", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/BatchUtil.java#L41-L47
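A worked example of the retention cutoff above: with ten ordered files and maxRetention of 3, the cutoff is 10 - 3 = 7, so only zero-based indices 7, 8 and 9 are retained. A runnable sketch against the documented API:

import com.twitter.hraven.util.BatchUtil;

public class RetentionDemo {
  public static void main(String[] args) {
    int length = 10;      // ten files, ordered oldest first
    int maxRetention = 3; // keep only the three newest
    for (int i = 0; i < length; i++) {
      // prints retain=false for 0..6 and retain=true for 7..9
      System.out.println("index " + i + " retain="
          + BatchUtil.shouldRetain(i, maxRetention, length));
    }
  }
}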
2,664
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.getJobStatusPut
private Put getJobStatusPut() { Put pStatus = new Put(jobKeyBytes); byte[] valueBytes = Bytes.toBytes(this.jobStatus); byte[] qualifier = Bytes.toBytes(JobHistoryKeys.JOB_STATUS.toString().toLowerCase()); pStatus.addColumn(Constants.INFO_FAM_BYTES, qualifier, valueBytes); return pStatus; }
java
private Put getJobStatusPut() { Put pStatus = new Put(jobKeyBytes); byte[] valueBytes = Bytes.toBytes(this.jobStatus); byte[] qualifier = Bytes.toBytes(JobHistoryKeys.JOB_STATUS.toString().toLowerCase()); pStatus.addColumn(Constants.INFO_FAM_BYTES, qualifier, valueBytes); return pStatus; }
[ "private", "Put", "getJobStatusPut", "(", ")", "{", "Put", "pStatus", "=", "new", "Put", "(", "jobKeyBytes", ")", ";", "byte", "[", "]", "valueBytes", "=", "Bytes", ".", "toBytes", "(", "this", ".", "jobStatus", ")", ";", "byte", "[", "]", "qualifier", "=", "Bytes", ".", "toBytes", "(", "JobHistoryKeys", ".", "JOB_STATUS", ".", "toString", "(", ")", ".", "toLowerCase", "(", ")", ")", ";", "pStatus", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "qualifier", ",", "valueBytes", ")", ";", "return", "pStatus", ";", "}" ]
generates a put for job status @return Put that contains Job Status
[ "generates", "a", "put", "for", "job", "status" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L321-L327
2,665
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.understandSchema
private void understandSchema(String schema) throws JSONException { JSONObject j1 = new JSONObject(schema); JSONArray fields = j1.getJSONArray(FIELDS); String fieldName; String fieldTypeValue; Object recName; for (int k = 0; k < fields.length(); k++) { if (fields.get(k) == null) { continue; } JSONObject allEvents = new JSONObject(fields.get(k).toString()); Object name = allEvents.get(NAME); if (name != null) { if (name.toString().equalsIgnoreCase(EVENT)) { JSONArray allTypeDetails = allEvents.getJSONArray(TYPE); for (int i = 0; i < allTypeDetails.length(); i++) { JSONObject actual = (JSONObject) allTypeDetails.get(i); JSONArray types = actual.getJSONArray(FIELDS); Map<String, String> typeDetails = new HashMap<String, String>(); for (int j = 0; j < types.length(); j++) { if (types.getJSONObject(j) == null ) { continue; } fieldName = types.getJSONObject(j).getString(NAME); fieldTypeValue = types.getJSONObject(j).getString(TYPE); if ((fieldName != null) && (fieldTypeValue != null)) { typeDetails.put(fieldName, fieldTypeValue); } } recName = actual.get(NAME); if (recName != null) { /* the next statement may throw an IllegalArgumentException if * it finds a new string that's not part of the Hadoop2RecordType enum * that way we know what types of events we are parsing */ fieldTypes.put(Hadoop2RecordType.valueOf(recName.toString()), typeDetails); } } } } } }
java
private void understandSchema(String schema) throws JSONException { JSONObject j1 = new JSONObject(schema); JSONArray fields = j1.getJSONArray(FIELDS); String fieldName; String fieldTypeValue; Object recName; for (int k = 0; k < fields.length(); k++) { if (fields.get(k) == null) { continue; } JSONObject allEvents = new JSONObject(fields.get(k).toString()); Object name = allEvents.get(NAME); if (name != null) { if (name.toString().equalsIgnoreCase(EVENT)) { JSONArray allTypeDetails = allEvents.getJSONArray(TYPE); for (int i = 0; i < allTypeDetails.length(); i++) { JSONObject actual = (JSONObject) allTypeDetails.get(i); JSONArray types = actual.getJSONArray(FIELDS); Map<String, String> typeDetails = new HashMap<String, String>(); for (int j = 0; j < types.length(); j++) { if (types.getJSONObject(j) == null ) { continue; } fieldName = types.getJSONObject(j).getString(NAME); fieldTypeValue = types.getJSONObject(j).getString(TYPE); if ((fieldName != null) && (fieldTypeValue != null)) { typeDetails.put(fieldName, fieldTypeValue); } } recName = actual.get(NAME); if (recName != null) { /* the next statement may throw an IllegalArgumentException if * it finds a new string that's not part of the Hadoop2RecordType enum * that way we know what types of events we are parsing */ fieldTypes.put(Hadoop2RecordType.valueOf(recName.toString()), typeDetails); } } } } } }
[ "private", "void", "understandSchema", "(", "String", "schema", ")", "throws", "JSONException", "{", "JSONObject", "j1", "=", "new", "JSONObject", "(", "schema", ")", ";", "JSONArray", "fields", "=", "j1", ".", "getJSONArray", "(", "FIELDS", ")", ";", "String", "fieldName", ";", "String", "fieldTypeValue", ";", "Object", "recName", ";", "for", "(", "int", "k", "=", "0", ";", "k", "<", "fields", ".", "length", "(", ")", ";", "k", "++", ")", "{", "if", "(", "fields", ".", "get", "(", "k", ")", "==", "null", ")", "{", "continue", ";", "}", "JSONObject", "allEvents", "=", "new", "JSONObject", "(", "fields", ".", "get", "(", "k", ")", ".", "toString", "(", ")", ")", ";", "Object", "name", "=", "allEvents", ".", "get", "(", "NAME", ")", ";", "if", "(", "name", "!=", "null", ")", "{", "if", "(", "name", ".", "toString", "(", ")", ".", "equalsIgnoreCase", "(", "EVENT", ")", ")", "{", "JSONArray", "allTypeDetails", "=", "allEvents", ".", "getJSONArray", "(", "TYPE", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "allTypeDetails", ".", "length", "(", ")", ";", "i", "++", ")", "{", "JSONObject", "actual", "=", "(", "JSONObject", ")", "allTypeDetails", ".", "get", "(", "i", ")", ";", "JSONArray", "types", "=", "actual", ".", "getJSONArray", "(", "FIELDS", ")", ";", "Map", "<", "String", ",", "String", ">", "typeDetails", "=", "new", "HashMap", "<", "String", ",", "String", ">", "(", ")", ";", "for", "(", "int", "j", "=", "0", ";", "j", "<", "types", ".", "length", "(", ")", ";", "j", "++", ")", "{", "if", "(", "types", ".", "getJSONObject", "(", "j", ")", "==", "null", ")", "{", "continue", ";", "}", "fieldName", "=", "types", ".", "getJSONObject", "(", "j", ")", ".", "getString", "(", "NAME", ")", ";", "fieldTypeValue", "=", "types", ".", "getJSONObject", "(", "j", ")", ".", "getString", "(", "TYPE", ")", ";", "if", "(", "(", "fieldName", "!=", "null", ")", "&&", "(", "fieldTypeValue", "!=", "null", ")", ")", "{", "typeDetails", ".", "put", "(", "fieldName", ",", "fieldTypeValue", ")", ";", "}", "}", "recName", "=", "actual", ".", "get", "(", "NAME", ")", ";", "if", "(", "recName", "!=", "null", ")", "{", "/* the next statement may throw an IllegalArgumentException if\n * it finds a new string that's not part of the Hadoop2RecordType enum\n * that way we know what types of events we are parsing\n */", "fieldTypes", ".", "put", "(", "Hadoop2RecordType", ".", "valueOf", "(", "recName", ".", "toString", "(", ")", ")", ",", "typeDetails", ")", ";", "}", "}", "}", "}", "}", "}" ]
understand the schema so that we can parse the rest of the file @throws JSONException
[ "understand", "the", "schema", "so", "that", "we", "can", "parse", "the", "rest", "of", "the", "file" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L333-L378
2,666
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.iterateAndPreparePuts
private void iterateAndPreparePuts(JSONObject eventDetails, Put p, Hadoop2RecordType recType) throws JSONException { Iterator<?> keys = eventDetails.keys(); while (keys.hasNext()) { String key = (String) keys.next(); processAllTypes(p, recType, eventDetails, key); } }
java
private void iterateAndPreparePuts(JSONObject eventDetails, Put p, Hadoop2RecordType recType) throws JSONException { Iterator<?> keys = eventDetails.keys(); while (keys.hasNext()) { String key = (String) keys.next(); processAllTypes(p, recType, eventDetails, key); } }
[ "private", "void", "iterateAndPreparePuts", "(", "JSONObject", "eventDetails", ",", "Put", "p", ",", "Hadoop2RecordType", "recType", ")", "throws", "JSONException", "{", "Iterator", "<", "?", ">", "keys", "=", "eventDetails", ".", "keys", "(", ")", ";", "while", "(", "keys", ".", "hasNext", "(", ")", ")", "{", "String", "key", "=", "(", "String", ")", "keys", ".", "next", "(", ")", ";", "processAllTypes", "(", "p", ",", "recType", ",", "eventDetails", ",", "key", ")", ";", "}", "}" ]
iterate over the event details and prepare puts @throws JSONException
[ "iterate", "over", "the", "event", "details", "and", "prepare", "puts" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L488-L495
2,667
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.getKey
private String getKey(String key) throws IllegalArgumentException { String checkKey = JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.containsKey(key) ? JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING .get(key) : key; return (JobHistoryKeys.valueOf(checkKey).toString()); }
java
private String getKey(String key) throws IllegalArgumentException { String checkKey = JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING.containsKey(key) ? JobHistoryKeys.HADOOP2_TO_HADOOP1_MAPPING .get(key) : key; return (JobHistoryKeys.valueOf(checkKey).toString()); }
[ "private", "String", "getKey", "(", "String", "key", ")", "throws", "IllegalArgumentException", "{", "String", "checkKey", "=", "JobHistoryKeys", ".", "HADOOP2_TO_HADOOP1_MAPPING", ".", "containsKey", "(", "key", ")", "?", "JobHistoryKeys", ".", "HADOOP2_TO_HADOOP1_MAPPING", ".", "get", "(", "key", ")", ":", "key", ";", "return", "(", "JobHistoryKeys", ".", "valueOf", "(", "checkKey", ")", ".", "toString", "(", ")", ")", ";", "}" ]
maintains compatibility between hadoop 1.0 keys and hadoop 2.0 keys. It also confirms that this key exists in the JobHistoryKeys enum @throws IllegalArgumentException if the key is not part of the JobHistoryKeys enum (a null key results in a NullPointerException)
[ "maintains", "compatibility", "between", "hadoop", "1", ".", "0", "keys", "and", "hadoop", "2", ".", "0", "keys", ".", "It", "also", "confirms", "that", "this", "key", "exists", "in", "JobHistoryKeys", "enum" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L597-L602
2,668
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.populatePut
private void populatePut(Put p, byte[] family, String key, long value) { byte[] valueBytes = null; valueBytes = (value != 0L) ? Bytes.toBytes(value) : Constants.ZERO_LONG_BYTES; byte[] qualifier = Bytes.toBytes(getKey(key).toLowerCase()); p.addColumn(family, qualifier, valueBytes); }
java
private void populatePut(Put p, byte[] family, String key, long value) { byte[] valueBytes = null; valueBytes = (value != 0L) ? Bytes.toBytes(value) : Constants.ZERO_LONG_BYTES; byte[] qualifier = Bytes.toBytes(getKey(key).toLowerCase()); p.addColumn(family, qualifier, valueBytes); }
[ "private", "void", "populatePut", "(", "Put", "p", ",", "byte", "[", "]", "family", ",", "String", "key", ",", "long", "value", ")", "{", "byte", "[", "]", "valueBytes", "=", "null", ";", "valueBytes", "=", "(", "value", "!=", "0L", ")", "?", "Bytes", ".", "toBytes", "(", "value", ")", ":", "Constants", ".", "ZERO_LONG_BYTES", ";", "byte", "[", "]", "qualifier", "=", "Bytes", ".", "toBytes", "(", "getKey", "(", "key", ")", ".", "toLowerCase", "(", ")", ")", ";", "p", ".", "addColumn", "(", "family", ",", "qualifier", ",", "valueBytes", ")", ";", "}" ]
populates a put for long values @param {@link Put} p @param {@link Constants} family @param String key @param long value
[ "populates", "a", "put", "for", "long", "values" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L611-L617
2,669
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.getValue
byte[] getValue(String key, int value) { byte[] valueBytes = null; Class<?> clazz = JobHistoryKeys.KEY_TYPES.get(JobHistoryKeys.valueOf(key)); if (clazz == null) { throw new IllegalArgumentException(" unknown key " + key + " encountered while parsing " + this.jobKey); } if (Long.class.equals(clazz)) { valueBytes = (value != 0L) ? Bytes.toBytes(new Long(value)) : Constants.ZERO_LONG_BYTES; } else { valueBytes = (value != 0) ? Bytes.toBytes(value) : Constants.ZERO_INT_BYTES; } return valueBytes; }
java
byte[] getValue(String key, int value) { byte[] valueBytes = null; Class<?> clazz = JobHistoryKeys.KEY_TYPES.get(JobHistoryKeys.valueOf(key)); if (clazz == null) { throw new IllegalArgumentException(" unknown key " + key + " encountered while parsing " + this.jobKey); } if (Long.class.equals(clazz)) { valueBytes = (value != 0L) ? Bytes.toBytes(new Long(value)) : Constants.ZERO_LONG_BYTES; } else { valueBytes = (value != 0) ? Bytes.toBytes(value) : Constants.ZERO_INT_BYTES; } return valueBytes; }
[ "byte", "[", "]", "getValue", "(", "String", "key", ",", "int", "value", ")", "{", "byte", "[", "]", "valueBytes", "=", "null", ";", "Class", "<", "?", ">", "clazz", "=", "JobHistoryKeys", ".", "KEY_TYPES", ".", "get", "(", "JobHistoryKeys", ".", "valueOf", "(", "key", ")", ")", ";", "if", "(", "clazz", "==", "null", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\" unknown key \"", "+", "key", "+", "\" encountered while parsing \"", "+", "this", ".", "jobKey", ")", ";", "}", "if", "(", "Long", ".", "class", ".", "equals", "(", "clazz", ")", ")", "{", "valueBytes", "=", "(", "value", "!=", "0L", ")", "?", "Bytes", ".", "toBytes", "(", "new", "Long", "(", "value", ")", ")", ":", "Constants", ".", "ZERO_LONG_BYTES", ";", "}", "else", "{", "valueBytes", "=", "(", "value", "!=", "0", ")", "?", "Bytes", ".", "toBytes", "(", "value", ")", ":", "Constants", ".", "ZERO_INT_BYTES", ";", "}", "return", "valueBytes", ";", "}" ]
gets the int values as ints or longs. Some keys in 2.0 are now int; they were longs in 1.0. This will maintain compatibility between 1.0 and 2.0 by casting those ints to long. Keeping this function package level visible (for unit testing). @throws IllegalArgumentException if a new key is encountered
[ "gets", "the", "int", "values", "as", "ints", "or", "longs", "some", "keys", "in", "2", ".", "0", "are", "now", "int", "they", "were", "longs", "in", "1", ".", "0", "this", "will", "maintain", "compatiblity", "between", "1", ".", "0", "and", "2", ".", "0", "by", "casting", "those", "ints", "to", "long" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L626-L639
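getValue above widens hadoop 2.0 int counters to longs when the hadoop 1.0 schema declared the column as long. A dependency-free sketch of that type-driven encoding, using java.nio.ByteBuffer in place of HBase's Bytes.toBytes:

import java.nio.ByteBuffer;

public class ValueBytes {
    // Widens an int to a long when the schema says the column is a long,
    // mirroring the Long.class.equals(clazz) branch above; ByteBuffer stands
    // in for HBase's Bytes.toBytes so the sketch has no HBase dependency.
    static byte[] encode(int value, Class<?> declaredType) {
        if (Long.class.equals(declaredType)) {
            return ByteBuffer.allocate(Long.BYTES).putLong(value).array();
        }
        return ByteBuffer.allocate(Integer.BYTES).putInt(value).array();
    }

    public static void main(String[] args) {
        System.out.println(encode(7, Long.class).length);    // 8
        System.out.println(encode(7, Integer.class).length); // 4
    }
}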
2,670
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.getTaskKey
public byte[] getTaskKey(String prefix, String jobNumber, String fullId) { String taskComponent = fullId; if (fullId == null) { taskComponent = ""; } else { String expectedPrefix = prefix + jobNumber + "_"; if ((fullId.startsWith(expectedPrefix)) && (fullId.length() > expectedPrefix.length())) { taskComponent = fullId.substring(expectedPrefix.length()); } } return taskKeyConv.toBytes(new TaskKey(this.jobKey, taskComponent)); }
java
public byte[] getTaskKey(String prefix, String jobNumber, String fullId) { String taskComponent = fullId; if (fullId == null) { taskComponent = ""; } else { String expectedPrefix = prefix + jobNumber + "_"; if ((fullId.startsWith(expectedPrefix)) && (fullId.length() > expectedPrefix.length())) { taskComponent = fullId.substring(expectedPrefix.length()); } } return taskKeyConv.toBytes(new TaskKey(this.jobKey, taskComponent)); }
[ "public", "byte", "[", "]", "getTaskKey", "(", "String", "prefix", ",", "String", "jobNumber", ",", "String", "fullId", ")", "{", "String", "taskComponent", "=", "fullId", ";", "if", "(", "fullId", "==", "null", ")", "{", "taskComponent", "=", "\"\"", ";", "}", "else", "{", "String", "expectedPrefix", "=", "prefix", "+", "jobNumber", "+", "\"_\"", ";", "if", "(", "(", "fullId", ".", "startsWith", "(", "expectedPrefix", ")", ")", "&&", "(", "fullId", ".", "length", "(", ")", ">", "expectedPrefix", ".", "length", "(", ")", ")", ")", "{", "taskComponent", "=", "fullId", ".", "substring", "(", "expectedPrefix", ".", "length", "(", ")", ")", ";", "}", "}", "return", "taskKeyConv", ".", "toBytes", "(", "new", "TaskKey", "(", "this", ".", "jobKey", ",", "taskComponent", ")", ")", ";", "}" ]
Returns the Task ID or Task Attempt ID, stripped of the leading job ID, appended to the job row key.
[ "Returns", "the", "Task", "ID", "or", "Task", "Attempt", "ID", "stripped", "of", "the", "leading", "job", "ID", "appended", "to", "the", "job", "row", "key", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L793-L804
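getTaskKey above strips the redundant job-id prefix from a task or attempt id before appending the remainder to the job row key. A sketch of the prefix-stripping step alone; the example ids are illustrative, not taken from the source:

public class TaskComponent {
    // Strips a leading "<prefix><jobNumber>_" from a full task or attempt id,
    // e.g. "task_201301010000_0001_m_000001" -> "m_000001"; falls back to the
    // full id when the expected prefix is absent, and to "" when it is null.
    static String strip(String prefix, String jobNumber, String fullId) {
        if (fullId == null) {
            return "";
        }
        String expectedPrefix = prefix + jobNumber + "_";
        if (fullId.startsWith(expectedPrefix)
                && fullId.length() > expectedPrefix.length()) {
            return fullId.substring(expectedPrefix.length());
        }
        return fullId;
    }

    public static void main(String[] args) {
        System.out.println(strip("task_", "201301010000_0001",
                "task_201301010000_0001_m_000001")); // m_000001
    }
}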
2,671
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.getAMKey
public byte[] getAMKey(String prefix, String fullId) { String taskComponent = prefix + fullId; return taskKeyConv.toBytes(new TaskKey(this.jobKey, taskComponent)); }
java
public byte[] getAMKey(String prefix, String fullId) { String taskComponent = prefix + fullId; return taskKeyConv.toBytes(new TaskKey(this.jobKey, taskComponent)); }
[ "public", "byte", "[", "]", "getAMKey", "(", "String", "prefix", ",", "String", "fullId", ")", "{", "String", "taskComponent", "=", "prefix", "+", "fullId", ";", "return", "taskKeyConv", ".", "toBytes", "(", "new", "TaskKey", "(", "this", ".", "jobKey", ",", "taskComponent", ")", ")", ";", "}" ]
Returns the AM Attempt id stripped of the leading job ID, appended to the job row key.
[ "Returns", "the", "AM", "Attempt", "id", "stripped", "of", "the", "leading", "job", "ID", "appended", "to", "the", "job", "row", "key", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L809-L813
2,672
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java
JobHistoryFileParserHadoop2.printAllPuts
public void printAllPuts(List<Put> p) { for (Put p1 : p) { Map<byte[], List<KeyValue>> d = p1.getFamilyMap(); for (byte[] k : d.keySet()) { System.out.println(" k " + Bytes.toString(k)); } for (List<KeyValue> lkv : d.values()) { for (KeyValue kv : lkv) { System.out.println("\n row: " + taskKeyConv.fromBytes(kv.getRow()) + "\n " + Bytes.toString(kv.getQualifier()) + ": " + Bytes.toString(kv.getValue())); } } } }
java
public void printAllPuts(List<Put> p) { for (Put p1 : p) { Map<byte[], List<KeyValue>> d = p1.getFamilyMap(); for (byte[] k : d.keySet()) { System.out.println(" k " + Bytes.toString(k)); } for (List<KeyValue> lkv : d.values()) { for (KeyValue kv : lkv) { System.out.println("\n row: " + taskKeyConv.fromBytes(kv.getRow()) + "\n " + Bytes.toString(kv.getQualifier()) + ": " + Bytes.toString(kv.getValue())); } } } }
[ "public", "void", "printAllPuts", "(", "List", "<", "Put", ">", "p", ")", "{", "for", "(", "Put", "p1", ":", "p", ")", "{", "Map", "<", "byte", "[", "]", ",", "List", "<", "KeyValue", ">", ">", "d", "=", "p1", ".", "getFamilyMap", "(", ")", ";", "for", "(", "byte", "[", "]", "k", ":", "d", ".", "keySet", "(", ")", ")", "{", "System", ".", "out", ".", "println", "(", "\" k \"", "+", "Bytes", ".", "toString", "(", "k", ")", ")", ";", "}", "for", "(", "List", "<", "KeyValue", ">", "lkv", ":", "d", ".", "values", "(", ")", ")", "{", "for", "(", "KeyValue", "kv", ":", "lkv", ")", "{", "System", ".", "out", ".", "println", "(", "\"\\n row: \"", "+", "taskKeyConv", ".", "fromBytes", "(", "kv", ".", "getRow", "(", ")", ")", "+", "\"\\n \"", "+", "Bytes", ".", "toString", "(", "kv", ".", "getQualifier", "(", ")", ")", "+", "\": \"", "+", "Bytes", ".", "toString", "(", "kv", ".", "getValue", "(", ")", ")", ")", ";", "}", "}", "}", "}" ]
utility function for printing all puts
[ "utitlity", "function", "for", "printing", "all", "puts" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobHistoryFileParserHadoop2.java#L835-L848
2,673
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/FlowQueueService.java
FlowQueueService.moveFlow
public void moveFlow(FlowQueueKey oldKey, FlowQueueKey newKey) throws DataException, IOException { byte[] oldRowKey = queueKeyConverter.toBytes(oldKey); Get get = new Get(oldRowKey); Table flowQueueTable = null; try { flowQueueTable = hbaseConnection .getTable(TableName.valueOf(Constants.FLOW_QUEUE_TABLE)); Result result = flowQueueTable.get(get); if (result == null || result.isEmpty()) { // no existing row throw new DataException( "No row for key " + Bytes.toStringBinary(oldRowKey)); } // copy the existing row to the new key Put p = new Put(queueKeyConverter.toBytes(newKey)); for (Cell c : result.rawCells()) { p.addColumn(CellUtil.cloneFamily(c), CellUtil.cloneQualifier(c), CellUtil.cloneValue(c)); } flowQueueTable.put(p); // delete the old row Delete d = new Delete(oldRowKey); flowQueueTable.delete(d); } finally { if (flowQueueTable != null) { flowQueueTable.close(); } } }
java
public void moveFlow(FlowQueueKey oldKey, FlowQueueKey newKey) throws DataException, IOException { byte[] oldRowKey = queueKeyConverter.toBytes(oldKey); Get get = new Get(oldRowKey); Table flowQueueTable = null; try { flowQueueTable = hbaseConnection .getTable(TableName.valueOf(Constants.FLOW_QUEUE_TABLE)); Result result = flowQueueTable.get(get); if (result == null || result.isEmpty()) { // no existing row throw new DataException( "No row for key " + Bytes.toStringBinary(oldRowKey)); } // copy the existing row to the new key Put p = new Put(queueKeyConverter.toBytes(newKey)); for (Cell c : result.rawCells()) { p.addColumn(CellUtil.cloneFamily(c), CellUtil.cloneQualifier(c), CellUtil.cloneValue(c)); } flowQueueTable.put(p); // delete the old row Delete d = new Delete(oldRowKey); flowQueueTable.delete(d); } finally { if (flowQueueTable != null) { flowQueueTable.close(); } } }
[ "public", "void", "moveFlow", "(", "FlowQueueKey", "oldKey", ",", "FlowQueueKey", "newKey", ")", "throws", "DataException", ",", "IOException", "{", "byte", "[", "]", "oldRowKey", "=", "queueKeyConverter", ".", "toBytes", "(", "oldKey", ")", ";", "Get", "get", "=", "new", "Get", "(", "oldRowKey", ")", ";", "Table", "flowQueueTable", "=", "null", ";", "try", "{", "flowQueueTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "FLOW_QUEUE_TABLE", ")", ")", ";", "Result", "result", "=", "flowQueueTable", ".", "get", "(", "get", ")", ";", "if", "(", "result", "==", "null", "||", "result", ".", "isEmpty", "(", ")", ")", "{", "// no existing row", "throw", "new", "DataException", "(", "\"No row for key \"", "+", "Bytes", ".", "toStringBinary", "(", "oldRowKey", ")", ")", ";", "}", "// copy the existing row to the new key", "Put", "p", "=", "new", "Put", "(", "queueKeyConverter", ".", "toBytes", "(", "newKey", ")", ")", ";", "for", "(", "Cell", "c", ":", "result", ".", "rawCells", "(", ")", ")", "{", "p", ".", "addColumn", "(", "CellUtil", ".", "cloneFamily", "(", "c", ")", ",", "CellUtil", ".", "cloneQualifier", "(", "c", ")", ",", "CellUtil", ".", "cloneValue", "(", "c", ")", ")", ";", "}", "flowQueueTable", ".", "put", "(", "p", ")", ";", "// delete the old row", "Delete", "d", "=", "new", "Delete", "(", "oldRowKey", ")", ";", "flowQueueTable", ".", "delete", "(", "d", ")", ";", "}", "finally", "{", "if", "(", "flowQueueTable", "!=", "null", ")", "{", "flowQueueTable", ".", "close", "(", ")", ";", "}", "}", "}" ]
Moves a flow_queue record from one row key to another. All Cells in the existing row will be written to the new row. This would primarily be used for transitioning a flow's data from one status to another. @param oldKey the existing row key to move @param newKey the new row key to move to @throws IOException
[ "Moves", "a", "flow_queue", "record", "from", "one", "row", "key", "to", "another", ".", "All", "Cells", "in", "the", "existing", "row", "will", "be", "written", "to", "the", "new", "row", ".", "This", "would", "primarily", "be", "used", "for", "transitioning", "a", "flow", "s", "data", "from", "one", "status", "to", "another", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/FlowQueueService.java#L96-L125
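moveFlow above "renames" an HBase row by reading it whole, rewriting every cell under the new key, and deleting the old key. A sketch of the same copy-then-delete pattern on an in-memory table; note that it is not atomic, so a crash between the put and the delete leaves both rows, which callers must tolerate or clean up:

import java.util.HashMap;
import java.util.Map;

public class RowMove {
    // Copy-then-delete "rename" of a row: read the old row whole, write it
    // under the new key, then remove the old key.
    static void move(Map<String, Map<String, String>> table,
                     String oldKey, String newKey) {
        Map<String, String> row = table.get(oldKey);
        if (row == null || row.isEmpty()) {
            throw new IllegalStateException("No row for key " + oldKey);
        }
        table.put(newKey, new HashMap<>(row)); // copy all cells
        table.remove(oldKey);                  // delete the old row
    }

    public static void main(String[] args) {
        Map<String, Map<String, String>> t = new HashMap<>();
        t.put("RUNNING!f1", new HashMap<>(Map.of("name", "flow1")));
        move(t, "RUNNING!f1", "SUCCEEDED!f1");
        System.out.println(t.keySet()); // [SUCCEEDED!f1]
    }
}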
2,674
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/FlowQueueService.java
FlowQueueService.getPaginatedFlowsForStatus
public PaginatedResult<Flow> getPaginatedFlowsForStatus(String cluster, Flow.Status status, int limit, String user, byte[] startRow) throws IOException { // retrieve one more flow than requested for pagination support List<Flow> flows = getFlowsForStatus(cluster, status, limit + 1, user, startRow); PaginatedResult<Flow> result = new PaginatedResult<Flow>(limit); if (flows.size() > limit) { result.setValues(flows.subList(0, limit)); Flow lastFlow = flows.get(limit); result.setNextStartRow(queueKeyConverter.toBytes(lastFlow.getQueueKey())); } else { result.setValues(flows); } return result; }
java
public PaginatedResult<Flow> getPaginatedFlowsForStatus(String cluster, Flow.Status status, int limit, String user, byte[] startRow) throws IOException { // retrieve one more flow than requested for pagination support List<Flow> flows = getFlowsForStatus(cluster, status, limit + 1, user, startRow); PaginatedResult<Flow> result = new PaginatedResult<Flow>(limit); if (flows.size() > limit) { result.setValues(flows.subList(0, limit)); Flow lastFlow = flows.get(limit); result.setNextStartRow(queueKeyConverter.toBytes(lastFlow.getQueueKey())); } else { result.setValues(flows); } return result; }
[ "public", "PaginatedResult", "<", "Flow", ">", "getPaginatedFlowsForStatus", "(", "String", "cluster", ",", "Flow", ".", "Status", "status", ",", "int", "limit", ",", "String", "user", ",", "byte", "[", "]", "startRow", ")", "throws", "IOException", "{", "// retrieve one more flow than requested for pagination support", "List", "<", "Flow", ">", "flows", "=", "getFlowsForStatus", "(", "cluster", ",", "status", ",", "limit", "+", "1", ",", "user", ",", "startRow", ")", ";", "PaginatedResult", "<", "Flow", ">", "result", "=", "new", "PaginatedResult", "<", "Flow", ">", "(", "limit", ")", ";", "if", "(", "flows", ".", "size", "(", ")", ">", "limit", ")", "{", "result", ".", "setValues", "(", "flows", ".", "subList", "(", "0", ",", "limit", ")", ")", ";", "Flow", "lastFlow", "=", "flows", ".", "get", "(", "limit", ")", ";", "result", ".", "setNextStartRow", "(", "queueKeyConverter", ".", "toBytes", "(", "lastFlow", ".", "getQueueKey", "(", ")", ")", ")", ";", "}", "else", "{", "result", ".", "setValues", "(", "flows", ")", ";", "}", "return", "result", ";", "}" ]
Returns a page of flows for the given cluster and status @param cluster The cluster for the flows' execution @param status The flows' status @param limit Maximum number of flows to retrieve @param user Filter results to this user, if present @param startRow Start pagination with this row (inclusive), if present @return A page of Flow instances @throws IOException In the case of an error retrieving results
[ "Returns", "a", "page", "of", "flows", "for", "the", "given", "cluster", "and", "status" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/FlowQueueService.java#L271-L286
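getPaginatedFlowsForStatus above fetches limit+1 rows and uses the extra row, when present, as the start key of the next page. A sketch of that pagination trick in isolation:

import java.util.List;

public class Page<T> {
    final List<T> values;
    final T nextStart; // null on the last page

    // Fetch limit+1 rows, return the first `limit`, and use the extra row
    // (if any) as the start of the next page -- the same trick the record
    // above applies via PaginatedResult.
    Page(List<T> fetched, int limit) {
        if (fetched.size() > limit) {
            values = fetched.subList(0, limit);
            nextStart = fetched.get(limit);
        } else {
            values = fetched;
            nextStart = null;
        }
    }

    public static void main(String[] args) {
        Page<String> p = new Page<>(List.of("a", "b", "c"), 2);
        System.out.println(p.values + " next=" + p.nextStart); // [a, b] next=c
    }
}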
2,675
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/MinMaxJobFileTracker.java
MinMaxJobFileTracker.track
public JobFile track(FileStatus jobFileStatus) { String jobfileName = jobFileStatus.getPath().getName(); JobFile jobFile = new JobFile(jobfileName); // Extra check, caller should already have taken care of this. if (jobFile.isJobConfFile() || jobFile.isJobHistoryFile()) { track(jobFile.getJobid()); long modificationTimeMillis = jobFileStatus.getModificationTime(); if (modificationTimeMillis < minModificationTimeMillis) { minModificationTimeMillis = modificationTimeMillis; } if (modificationTimeMillis > maxModificationTimeMillis) { maxModificationTimeMillis = modificationTimeMillis; } } return jobFile; }
java
public JobFile track(FileStatus jobFileStatus) { String jobfileName = jobFileStatus.getPath().getName(); JobFile jobFile = new JobFile(jobfileName); // Extra check, caller should already have taken care of this. if (jobFile.isJobConfFile() || jobFile.isJobHistoryFile()) { track(jobFile.getJobid()); long modificationTimeMillis = jobFileStatus.getModificationTime(); if (modificationTimeMillis < minModificationTimeMillis) { minModificationTimeMillis = modificationTimeMillis; } if (modificationTimeMillis > maxModificationTimeMillis) { maxModificationTimeMillis = modificationTimeMillis; } } return jobFile; }
[ "public", "JobFile", "track", "(", "FileStatus", "jobFileStatus", ")", "{", "String", "jobfileName", "=", "jobFileStatus", ".", "getPath", "(", ")", ".", "getName", "(", ")", ";", "JobFile", "jobFile", "=", "new", "JobFile", "(", "jobfileName", ")", ";", "// Extra check, caller should already have taken care of this.", "if", "(", "jobFile", ".", "isJobConfFile", "(", ")", "||", "jobFile", ".", "isJobHistoryFile", "(", ")", ")", "{", "track", "(", "jobFile", ".", "getJobid", "(", ")", ")", ";", "long", "modificationTimeMillis", "=", "jobFileStatus", ".", "getModificationTime", "(", ")", ";", "if", "(", "modificationTimeMillis", "<", "minModificationTimeMillis", ")", "{", "minModificationTimeMillis", "=", "modificationTimeMillis", ";", "}", "if", "(", "modificationTimeMillis", ">", "maxModificationTimeMillis", ")", "{", "maxModificationTimeMillis", "=", "modificationTimeMillis", ";", "}", "}", "return", "jobFile", ";", "}" ]
Converts a jobFileStatus to a JobFile and tracks the min and max modification times and JobIds. @param jobFileStatus of a jobfile, must be a proper JobFile. Cannot be null. @return a JobFile for the given jobFileStatus.
[ "Converts", "a", "jobFileStatus", "to", "a", "JobFile", "and", "tracks", "the", "min", "and", "max", "modification", "times", "and", "JobIds", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/MinMaxJobFileTracker.java#L70-L88
2,676
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobFileProcessor.java
JobFileProcessor.processRecords
boolean processRecords(Configuration conf, Connection hbaseConnection, String cluster, int batchSize, int threadCount, String processFileSubstring) throws IOException, InterruptedException, ClassNotFoundException, ExecutionException, RowKeyParseException { List<ProcessRecord> processRecords = getProcessRecords(conf, hbaseConnection, cluster, processFileSubstring); // Bail out early if needed if ((processRecords == null) || (processRecords.size() == 0)) { return true; } // Grab the min and the max jobId from all processing records. MinMaxJobFileTracker minMaxJobFileTracker = new MinMaxJobFileTracker(); for (ProcessRecord processRecord : processRecords) { minMaxJobFileTracker.track(processRecord.getMinJobId()); minMaxJobFileTracker.track(processRecord.getMaxJobId()); } List<JobRunner> jobRunners = getJobRunners(conf, hbaseConnection, cluster, false, batchSize, minMaxJobFileTracker.getMinJobId(), minMaxJobFileTracker.getMaxJobId()); boolean success = runJobs(threadCount, jobRunners); if (success) { updateProcessRecords(conf, hbaseConnection, processRecords); } return success; }
java
boolean processRecords(Configuration conf, Connection hbaseConnection, String cluster, int batchSize, int threadCount, String processFileSubstring) throws IOException, InterruptedException, ClassNotFoundException, ExecutionException, RowKeyParseException { List<ProcessRecord> processRecords = getProcessRecords(conf, hbaseConnection, cluster, processFileSubstring); // Bail out early if needed if ((processRecords == null) || (processRecords.size() == 0)) { return true; } // Grab the min and the max jobId from all processing records. MinMaxJobFileTracker minMaxJobFileTracker = new MinMaxJobFileTracker(); for (ProcessRecord processRecord : processRecords) { minMaxJobFileTracker.track(processRecord.getMinJobId()); minMaxJobFileTracker.track(processRecord.getMaxJobId()); } List<JobRunner> jobRunners = getJobRunners(conf, hbaseConnection, cluster, false, batchSize, minMaxJobFileTracker.getMinJobId(), minMaxJobFileTracker.getMaxJobId()); boolean success = runJobs(threadCount, jobRunners); if (success) { updateProcessRecords(conf, hbaseConnection, processRecords); } return success; }
[ "boolean", "processRecords", "(", "Configuration", "conf", ",", "Connection", "hbaseConnection", ",", "String", "cluster", ",", "int", "batchSize", ",", "int", "threadCount", ",", "String", "processFileSubstring", ")", "throws", "IOException", ",", "InterruptedException", ",", "ClassNotFoundException", ",", "ExecutionException", ",", "RowKeyParseException", "{", "List", "<", "ProcessRecord", ">", "processRecords", "=", "getProcessRecords", "(", "conf", ",", "hbaseConnection", ",", "cluster", ",", "processFileSubstring", ")", ";", "// Bail out early if needed", "if", "(", "(", "processRecords", "==", "null", ")", "||", "(", "processRecords", ".", "size", "(", ")", "==", "0", ")", ")", "{", "return", "true", ";", "}", "// Grab the min and the max jobId from all processing records.", "MinMaxJobFileTracker", "minMaxJobFileTracker", "=", "new", "MinMaxJobFileTracker", "(", ")", ";", "for", "(", "ProcessRecord", "processRecord", ":", "processRecords", ")", "{", "minMaxJobFileTracker", ".", "track", "(", "processRecord", ".", "getMinJobId", "(", ")", ")", ";", "minMaxJobFileTracker", ".", "track", "(", "processRecord", ".", "getMaxJobId", "(", ")", ")", ";", "}", "List", "<", "JobRunner", ">", "jobRunners", "=", "getJobRunners", "(", "conf", ",", "hbaseConnection", ",", "cluster", ",", "false", ",", "batchSize", ",", "minMaxJobFileTracker", ".", "getMinJobId", "(", ")", ",", "minMaxJobFileTracker", ".", "getMaxJobId", "(", ")", ")", ";", "boolean", "success", "=", "runJobs", "(", "threadCount", ",", "jobRunners", ")", ";", "if", "(", "success", ")", "{", "updateProcessRecords", "(", "conf", ",", "hbaseConnection", ",", "processRecords", ")", ";", "}", "return", "success", ";", "}" ]
Pick up the ranges of jobs to process from ProcessRecords. Skip raw rows that have already been processed. @param conf used to contact HBase and to run jobs against @param hbaseConnection @param cluster for which to process records. @param batchSize the total number of jobs to process in a batch (a MR job scanning these many records in the raw table). @param threadCount how many parallel threads should be used to run Hadoop jobs in parallel. @param processFileSubstring Use only process records where the process file path contains this string. If <code>null</code> or empty string, then no filtering is applied. @return whether all job files for all processRecords were properly processed. @throws IOException @throws ClassNotFoundException when problems occur setting up the job. @throws InterruptedException @throws ExecutionException when at least one of the jobs could not be scheduled. @throws RowKeyParseException
[ "Pick", "up", "the", "ranges", "of", "jobs", "to", "process", "from", "ProcessRecords", ".", "Skip", "raw", "rows", "that", "have", "already", "been", "processed", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobFileProcessor.java#L379-L409
2,677
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/etl/JobFileProcessor.java
JobFileProcessor.runJobs
private boolean runJobs(int threadCount, List<JobRunner> jobRunners) throws InterruptedException, ExecutionException { ExecutorService execSvc = Executors.newFixedThreadPool(threadCount); if ((jobRunners == null) || (jobRunners.size() == 0)) { return true; } boolean success = true; try { List<Future<Boolean>> jobFutures = new LinkedList<Future<Boolean>>(); for (JobRunner jobRunner : jobRunners) { Future<Boolean> jobFuture = execSvc.submit(jobRunner); jobFutures.add(jobFuture); } // Wait for all jobs to complete. for (Future<Boolean> jobFuture : jobFutures) { success = jobFuture.get(); if (!success) { // Stop the presses as soon as we see an error. Note that several // other jobs may have already been scheduled. Others will never be // scheduled. break; } } } finally { // Shut down the executor so that the JVM can exit. List<Runnable> neverRan = execSvc.shutdownNow(); if (neverRan != null && neverRan.size() > 0) { System.err.println( "Interrupted run. Currently running Hadoop jobs will continue unless cancelled. " + neverRan + " jobs never scheduled."); } } return success; }
java
private boolean runJobs(int threadCount, List<JobRunner> jobRunners) throws InterruptedException, ExecutionException { ExecutorService execSvc = Executors.newFixedThreadPool(threadCount); if ((jobRunners == null) || (jobRunners.size() == 0)) { return true; } boolean success = true; try { List<Future<Boolean>> jobFutures = new LinkedList<Future<Boolean>>(); for (JobRunner jobRunner : jobRunners) { Future<Boolean> jobFuture = execSvc.submit(jobRunner); jobFutures.add(jobFuture); } // Wait for all jobs to complete. for (Future<Boolean> jobFuture : jobFutures) { success = jobFuture.get(); if (!success) { // Stop the presses as soon as we see an error. Note that several // other jobs may have already been scheduled. Others will never be // scheduled. break; } } } finally { // Shut down the executor so that the JVM can exit. List<Runnable> neverRan = execSvc.shutdownNow(); if (neverRan != null && neverRan.size() > 0) { System.err.println( "Interrupted run. Currently running Hadoop jobs will continue unless cancelled. " + neverRan + " jobs never scheduled."); } } return success; }
[ "private", "boolean", "runJobs", "(", "int", "threadCount", ",", "List", "<", "JobRunner", ">", "jobRunners", ")", "throws", "InterruptedException", ",", "ExecutionException", "{", "ExecutorService", "execSvc", "=", "Executors", ".", "newFixedThreadPool", "(", "threadCount", ")", ";", "if", "(", "(", "jobRunners", "==", "null", ")", "||", "(", "jobRunners", ".", "size", "(", ")", "==", "0", ")", ")", "{", "return", "true", ";", "}", "boolean", "success", "=", "true", ";", "try", "{", "List", "<", "Future", "<", "Boolean", ">>", "jobFutures", "=", "new", "LinkedList", "<", "Future", "<", "Boolean", ">", ">", "(", ")", ";", "for", "(", "JobRunner", "jobRunner", ":", "jobRunners", ")", "{", "Future", "<", "Boolean", ">", "jobFuture", "=", "execSvc", ".", "submit", "(", "jobRunner", ")", ";", "jobFutures", ".", "add", "(", "jobFuture", ")", ";", "}", "// Wait for all jobs to complete.", "for", "(", "Future", "<", "Boolean", ">", "jobFuture", ":", "jobFutures", ")", "{", "success", "=", "jobFuture", ".", "get", "(", ")", ";", "if", "(", "!", "success", ")", "{", "// Stop the presses as soon as we see an error. Note that several", "// other jobs may have already been scheduled. Others will never be", "// scheduled.", "break", ";", "}", "}", "}", "finally", "{", "// Shut down the executor so that the JVM can exit.", "List", "<", "Runnable", ">", "neverRan", "=", "execSvc", ".", "shutdownNow", "(", ")", ";", "if", "(", "neverRan", "!=", "null", "&&", "neverRan", ".", "size", "(", ")", ">", "0", ")", "{", "System", ".", "err", ".", "println", "(", "\"Interrupted run. Currently running Hadoop jobs will continue unless cancelled. \"", "+", "neverRan", "+", "\" jobs never scheduled.\"", ")", ";", "}", "}", "return", "success", ";", "}" ]
Run the jobs and wait for all of them to complete. @param threadCount up to how many jobs to run in parallel @param jobRunners the list of jobs to run. @return whether all jobs completed successfully or not. @throws InterruptedException when interrupted while running jobs. @throws ExecutionException when at least one of the jobs could not be scheduled.
[ "Run", "the", "jobs", "and", "wait", "for", "all", "of", "them", "to", "complete", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/etl/JobFileProcessor.java#L450-L486
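runJobs above submits everything up front, then waits on the futures in submission order and bails at the first failure, with shutdownNow() in a finally block to cancel whatever never started. The same fail-fast shape, reduced to its essentials:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

public class FailFastRunner {
    // Submit all jobs, then check results in order; stop at the first
    // failure and cancel anything never started via shutdownNow().
    static boolean runAll(int threads, List<Callable<Boolean>> jobs)
            throws InterruptedException, ExecutionException {
        ExecutorService pool = Executors.newFixedThreadPool(threads);
        boolean success = true;
        try {
            List<Future<Boolean>> futures = new ArrayList<>();
            for (Callable<Boolean> job : jobs) {
                futures.add(pool.submit(job));
            }
            for (Future<Boolean> f : futures) {
                success = f.get();
                if (!success) {
                    break; // fail fast; later futures may never be checked
                }
            }
        } finally {
            pool.shutdownNow();
        }
        return success;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(runAll(2, List.of(() -> true, () -> false))); // false
    }
}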
2,678
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java
JobHistoryService.createFlowScan
private Scan createFlowScan(byte[] rowPrefix, int limit, String version) { Scan scan = new Scan(); scan.setStartRow(rowPrefix); // using a large scanner caching value with a small limit can mean we scan a // lot more data than necessary, so lower the caching for low limits scan.setCaching(Math.min(limit, defaultScannerCaching)); // require that all rows match the prefix we're looking for Filter prefixFilter = new WhileMatchFilter(new PrefixFilter(rowPrefix)); // if version is passed, restrict the rows returned to that version if (version != null && version.length() > 0) { FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL); filters.addFilter(prefixFilter); filters.addFilter(new SingleColumnValueFilter(Constants.INFO_FAM_BYTES, Constants.VERSION_COLUMN_BYTES, CompareFilter.CompareOp.EQUAL, Bytes.toBytes(version))); scan.setFilter(filters); } else { scan.setFilter(prefixFilter); } return scan; }
java
private Scan createFlowScan(byte[] rowPrefix, int limit, String version) { Scan scan = new Scan(); scan.setStartRow(rowPrefix); // using a large scanner caching value with a small limit can mean we scan a // lot more data than necessary, so lower the caching for low limits scan.setCaching(Math.min(limit, defaultScannerCaching)); // require that all rows match the prefix we're looking for Filter prefixFilter = new WhileMatchFilter(new PrefixFilter(rowPrefix)); // if version is passed, restrict the rows returned to that version if (version != null && version.length() > 0) { FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL); filters.addFilter(prefixFilter); filters.addFilter(new SingleColumnValueFilter(Constants.INFO_FAM_BYTES, Constants.VERSION_COLUMN_BYTES, CompareFilter.CompareOp.EQUAL, Bytes.toBytes(version))); scan.setFilter(filters); } else { scan.setFilter(prefixFilter); } return scan; }
[ "private", "Scan", "createFlowScan", "(", "byte", "[", "]", "rowPrefix", ",", "int", "limit", ",", "String", "version", ")", "{", "Scan", "scan", "=", "new", "Scan", "(", ")", ";", "scan", ".", "setStartRow", "(", "rowPrefix", ")", ";", "// using a large scanner caching value with a small limit can mean we scan a", "// lot more data than necessary, so lower the caching for low limits", "scan", ".", "setCaching", "(", "Math", ".", "min", "(", "limit", ",", "defaultScannerCaching", ")", ")", ";", "// require that all rows match the prefix we're looking for", "Filter", "prefixFilter", "=", "new", "WhileMatchFilter", "(", "new", "PrefixFilter", "(", "rowPrefix", ")", ")", ";", "// if version is passed, restrict the rows returned to that version", "if", "(", "version", "!=", "null", "&&", "version", ".", "length", "(", ")", ">", "0", ")", "{", "FilterList", "filters", "=", "new", "FilterList", "(", "FilterList", ".", "Operator", ".", "MUST_PASS_ALL", ")", ";", "filters", ".", "addFilter", "(", "prefixFilter", ")", ";", "filters", ".", "addFilter", "(", "new", "SingleColumnValueFilter", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "VERSION_COLUMN_BYTES", ",", "CompareFilter", ".", "CompareOp", ".", "EQUAL", ",", "Bytes", ".", "toBytes", "(", "version", ")", ")", ")", ";", "scan", ".", "setFilter", "(", "filters", ")", ";", "}", "else", "{", "scan", ".", "setFilter", "(", "prefixFilter", ")", ";", "}", "return", "scan", ";", "}" ]
creates a scan for flow data @param rowPrefix - start row prefix @param limit - limit on scanned results @param version - version to match @return Scan
[ "creates", "a", "scan", "for", "flow", "data" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java#L225-L246
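createFlowScan above combines a seek to a row-key prefix, a WhileMatchFilter(PrefixFilter) to stop at the first non-matching row, and an optional value filter on version. An in-memory analogue of that scan over a sorted map, under the assumption that row keys sort lexicographically as in HBase:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class PrefixScan {
    // Seek to the prefix in a sorted key space, stop at the first
    // non-matching row (WhileMatchFilter semantics), and optionally keep
    // only rows whose value matches a version.
    static List<String> scan(TreeMap<String, String> rows,
                             String prefix, String version) {
        List<String> out = new ArrayList<>();
        for (Map.Entry<String, String> e : rows.tailMap(prefix).entrySet()) {
            if (!e.getKey().startsWith(prefix)) {
                break; // rows are sorted, so no later key can match
            }
            if (version == null || version.equals(e.getValue())) {
                out.add(e.getKey());
            }
        }
        return out;
    }

    public static void main(String[] args) {
        TreeMap<String, String> rows = new TreeMap<>();
        rows.put("c1!u1!app1!r1", "v1");
        rows.put("c1!u1!app1!r2", "v2");
        rows.put("c1!u1!app2!r1", "v1");
        System.out.println(scan(rows, "c1!u1!app1", "v1")); // [c1!u1!app1!r1]
    }
}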
2,679
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java
JobHistoryService.getJobByJobID
public JobDetails getJobByJobID(String cluster, String jobId) throws IOException { return getJobByJobID(cluster, jobId, false); }
java
public JobDetails getJobByJobID(String cluster, String jobId) throws IOException { return getJobByJobID(cluster, jobId, false); }
[ "public", "JobDetails", "getJobByJobID", "(", "String", "cluster", ",", "String", "jobId", ")", "throws", "IOException", "{", "return", "getJobByJobID", "(", "cluster", ",", "jobId", ",", "false", ")", ";", "}" ]
Returns a specific job's data by job ID. This version does not populate the job's task data. @param cluster the cluster identifier @param jobId the job ID
[ "Returns", "a", "specific", "job", "s", "data", "by", "job", "ID", ".", "This", "version", "does", "not", "populate", "the", "job", "s", "task", "data", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java#L401-L404
2,680
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java
JobHistoryService.getTaskScan
private Scan getTaskScan(JobKey jobKey) { byte[] startKey = Bytes.add(jobKeyConv.toBytes(jobKey), Constants.SEP_BYTES); Scan scan = new Scan(); scan.setStartRow(startKey); // only return tasks for this job scan.setFilter(new WhileMatchFilter(new PrefixFilter(startKey))); // expect a lot of tasks on average scan.setCaching(500); return scan; }
java
private Scan getTaskScan(JobKey jobKey) { byte[] startKey = Bytes.add(jobKeyConv.toBytes(jobKey), Constants.SEP_BYTES); Scan scan = new Scan(); scan.setStartRow(startKey); // only return tasks for this job scan.setFilter(new WhileMatchFilter(new PrefixFilter(startKey))); // expect a lot of tasks on average scan.setCaching(500); return scan; }
[ "private", "Scan", "getTaskScan", "(", "JobKey", "jobKey", ")", "{", "byte", "[", "]", "startKey", "=", "Bytes", ".", "add", "(", "jobKeyConv", ".", "toBytes", "(", "jobKey", ")", ",", "Constants", ".", "SEP_BYTES", ")", ";", "Scan", "scan", "=", "new", "Scan", "(", ")", ";", "scan", ".", "setStartRow", "(", "startKey", ")", ";", "// only return tasks for this job", "scan", ".", "setFilter", "(", "new", "WhileMatchFilter", "(", "new", "PrefixFilter", "(", "startKey", ")", ")", ")", ";", "// expect a lot of tasks on average", "scan", ".", "setCaching", "(", "500", ")", ";", "return", "scan", ";", "}" ]
Returns a Scan instance to retrieve all the task rows for a given job from the job_history_task table. @param jobKey the job key to match for all task rows @return a {@code Scan} instance for the job_history_task table
[ "Returns", "a", "Scan", "instance", "to", "retrieve", "all", "the", "task", "rows", "for", "a", "given", "job", "from", "the", "job_history_task", "table", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java#L634-L644
2,681
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java
JobHistoryService.parseConfiguration
public static Configuration parseConfiguration( Map<byte[], byte[]> keyValues) { Configuration config = new Configuration(false); byte[] configPrefix = Bytes.add(Constants.JOB_CONF_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES); for (Map.Entry<byte[], byte[]> entry : keyValues.entrySet()) { byte[] key = entry.getKey(); if (Bytes.startsWith(key, configPrefix) && key.length > configPrefix.length) { byte[] name = Bytes.tail(key, key.length - configPrefix.length); config.set(Bytes.toString(name), Bytes.toString(entry.getValue())); } } return config; }
java
public static Configuration parseConfiguration( Map<byte[], byte[]> keyValues) { Configuration config = new Configuration(false); byte[] configPrefix = Bytes.add(Constants.JOB_CONF_COLUMN_PREFIX_BYTES, Constants.SEP_BYTES); for (Map.Entry<byte[], byte[]> entry : keyValues.entrySet()) { byte[] key = entry.getKey(); if (Bytes.startsWith(key, configPrefix) && key.length > configPrefix.length) { byte[] name = Bytes.tail(key, key.length - configPrefix.length); config.set(Bytes.toString(name), Bytes.toString(entry.getValue())); } } return config; }
[ "public", "static", "Configuration", "parseConfiguration", "(", "Map", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "keyValues", ")", "{", "Configuration", "config", "=", "new", "Configuration", "(", "false", ")", ";", "byte", "[", "]", "configPrefix", "=", "Bytes", ".", "add", "(", "Constants", ".", "JOB_CONF_COLUMN_PREFIX_BYTES", ",", "Constants", ".", "SEP_BYTES", ")", ";", "for", "(", "Map", ".", "Entry", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "entry", ":", "keyValues", ".", "entrySet", "(", ")", ")", "{", "byte", "[", "]", "key", "=", "entry", ".", "getKey", "(", ")", ";", "if", "(", "Bytes", ".", "startsWith", "(", "key", ",", "configPrefix", ")", "&&", "key", ".", "length", ">", "configPrefix", ".", "length", ")", "{", "byte", "[", "]", "name", "=", "Bytes", ".", "tail", "(", "key", ",", "key", ".", "length", "-", "configPrefix", ".", "length", ")", ";", "config", ".", "set", "(", "Bytes", ".", "toString", "(", "name", ")", ",", "Bytes", ".", "toString", "(", "entry", ".", "getValue", "(", ")", ")", ")", ";", "}", "}", "return", "config", ";", "}" ]
Converts serialized configuration properties back into a Configuration object. @param keyValues @return
[ "Converts", "serialized", "configuration", "properties", "back", "in", "to", "a", "Configuration", "object", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java#L653-L668
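parseConfiguration above recovers property names by stripping a fixed column prefix from each qualifier, skipping columns that are only the bare prefix or belong to another family. The same filtering on plain strings; the "c!" prefix is an assumed rendering of JOB_CONF_COLUMN_PREFIX_BYTES plus the separator:

import java.util.HashMap;
import java.util.Map;

public class ConfColumns {
    // Recovers property names by stripping an assumed "c!" column prefix;
    // columns that are the bare prefix or carry a different prefix are skipped.
    static Map<String, String> parse(Map<String, String> columns) {
        Map<String, String> conf = new HashMap<>();
        String configPrefix = "c!";
        for (Map.Entry<String, String> e : columns.entrySet()) {
            String key = e.getKey();
            if (key.startsWith(configPrefix) && key.length() > configPrefix.length()) {
                conf.put(key.substring(configPrefix.length()), e.getValue());
            }
        }
        return conf;
    }

    public static void main(String[] args) {
        Map<String, String> cols = new HashMap<>();
        cols.put("c!mapreduce.job.queuename", "default");
        cols.put("g!SomeGroup!SOME_COUNTER", "42");
        System.out.println(parse(cols)); // {mapreduce.job.queuename=default}
    }
}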
2,682
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java
JobHistoryService.parseCounters
public static CounterMap parseCounters(byte[] prefix, Map<byte[], byte[]> keyValues) { CounterMap counterValues = new CounterMap(); byte[] counterPrefix = Bytes.add(prefix, Constants.SEP_BYTES); for (Map.Entry<byte[], byte[]> entry : keyValues.entrySet()) { byte[] key = entry.getKey(); if (Bytes.startsWith(key, counterPrefix) && key.length > counterPrefix.length) { // qualifier should be in the format: g!countergroup!counterkey byte[][] qualifierFields = ByteUtil.split(Bytes.tail(key, key.length - counterPrefix.length), Constants.SEP_BYTES); if (qualifierFields.length != 2) { throw new IllegalArgumentException( "Malformed column qualifier for counter value: " + Bytes.toStringBinary(key)); } Counter c = new Counter(Bytes.toString(qualifierFields[0]), Bytes.toString(qualifierFields[1]), Bytes.toLong(entry.getValue())); counterValues.add(c); } } return counterValues; }
java
public static CounterMap parseCounters(byte[] prefix, Map<byte[], byte[]> keyValues) { CounterMap counterValues = new CounterMap(); byte[] counterPrefix = Bytes.add(prefix, Constants.SEP_BYTES); for (Map.Entry<byte[], byte[]> entry : keyValues.entrySet()) { byte[] key = entry.getKey(); if (Bytes.startsWith(key, counterPrefix) && key.length > counterPrefix.length) { // qualifier should be in the format: g!countergroup!counterkey byte[][] qualifierFields = ByteUtil.split(Bytes.tail(key, key.length - counterPrefix.length), Constants.SEP_BYTES); if (qualifierFields.length != 2) { throw new IllegalArgumentException( "Malformed column qualifier for counter value: " + Bytes.toStringBinary(key)); } Counter c = new Counter(Bytes.toString(qualifierFields[0]), Bytes.toString(qualifierFields[1]), Bytes.toLong(entry.getValue())); counterValues.add(c); } } return counterValues; }
[ "public", "static", "CounterMap", "parseCounters", "(", "byte", "[", "]", "prefix", ",", "Map", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "keyValues", ")", "{", "CounterMap", "counterValues", "=", "new", "CounterMap", "(", ")", ";", "byte", "[", "]", "counterPrefix", "=", "Bytes", ".", "add", "(", "prefix", ",", "Constants", ".", "SEP_BYTES", ")", ";", "for", "(", "Map", ".", "Entry", "<", "byte", "[", "]", ",", "byte", "[", "]", ">", "entry", ":", "keyValues", ".", "entrySet", "(", ")", ")", "{", "byte", "[", "]", "key", "=", "entry", ".", "getKey", "(", ")", ";", "if", "(", "Bytes", ".", "startsWith", "(", "key", ",", "counterPrefix", ")", "&&", "key", ".", "length", ">", "counterPrefix", ".", "length", ")", "{", "// qualifier should be in the format: g!countergroup!counterkey", "byte", "[", "]", "[", "]", "qualifierFields", "=", "ByteUtil", ".", "split", "(", "Bytes", ".", "tail", "(", "key", ",", "key", ".", "length", "-", "counterPrefix", ".", "length", ")", ",", "Constants", ".", "SEP_BYTES", ")", ";", "if", "(", "qualifierFields", ".", "length", "!=", "2", ")", "{", "throw", "new", "IllegalArgumentException", "(", "\"Malformed column qualifier for counter value: \"", "+", "Bytes", ".", "toStringBinary", "(", "key", ")", ")", ";", "}", "Counter", "c", "=", "new", "Counter", "(", "Bytes", ".", "toString", "(", "qualifierFields", "[", "0", "]", ")", ",", "Bytes", ".", "toString", "(", "qualifierFields", "[", "1", "]", ")", ",", "Bytes", ".", "toLong", "(", "entry", ".", "getValue", "(", ")", ")", ")", ";", "counterValues", ".", "add", "(", "c", ")", ";", "}", "}", "return", "counterValues", ";", "}" ]
Converts encoded key values back into counter objects. @param keyValues @return
[ "Converts", "encoded", "key", "values", "back", "into", "counter", "objects", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java#L676-L700
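parseCounters above expects each counter qualifier to split into exactly a group and a name around the separator, rejecting anything else as malformed. A sketch of that validation on strings, with '!' standing in for Constants.SEP_BYTES:

public class CounterQualifier {
    // Parses a "group!name" qualifier remainder (after the counter prefix
    // has been stripped), rejecting anything that does not split into exactly
    // two fields -- the same check parseCounters performs on raw bytes.
    static String[] parse(String qualifier) {
        String[] fields = qualifier.split("!", -1);
        if (fields.length != 2) {
            throw new IllegalArgumentException(
                    "Malformed counter qualifier: " + qualifier);
        }
        return fields; // [counterGroup, counterName]
    }

    public static void main(String[] args) {
        String[] f = parse("org.apache.hadoop.mapred.Task$Counter!MAP_INPUT_RECORDS");
        System.out.println(f[0] + " / " + f[1]);
    }
}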
2,683
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java
JobHistoryService.removeJob
public int removeJob(JobKey key) throws IOException { byte[] jobRow = jobKeyConv.toBytes(key); Table historyTable = hbaseConnection.getTable(TableName.valueOf(Constants.HISTORY_TABLE)); historyTable.delete(new Delete(jobRow)); historyTable.close(); int deleteCount = 1; // delete all task rows Scan taskScan = getTaskScan(key); // only need the row keys back to delete (all should have taskid) taskScan.addColumn(Constants.INFO_FAM_BYTES, JobHistoryKeys.KEYS_TO_BYTES.get(JobHistoryKeys.TASKID)); // no reason to cache rows we're deleting taskScan.setCacheBlocks(false); List<Delete> taskDeletes = new ArrayList<Delete>(); Table taskTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_TASK_TABLE)); ResultScanner scanner = taskTable.getScanner(taskScan); try { for (Result r : scanner) { if (r != null && !r.isEmpty()) { byte[] rowKey = r.getRow(); TaskKey taskKey = taskKeyConv.fromBytes(rowKey); if (!key.equals(taskKey)) { LOG.warn("Found task not in the current job " + Bytes.toStringBinary(rowKey)); break; } taskDeletes.add(new Delete(r.getRow())); } } // Hang on the count because delete will modify our list. deleteCount += taskDeletes.size(); if (taskDeletes.size() > 0) { LOG.info("Deleting " + taskDeletes.size() + " tasks for job " + key); taskTable.delete(taskDeletes); } } finally { scanner.close(); taskTable.close(); } return deleteCount; }
java
public int removeJob(JobKey key) throws IOException { byte[] jobRow = jobKeyConv.toBytes(key); Table historyTable = hbaseConnection.getTable(TableName.valueOf(Constants.HISTORY_TABLE)); historyTable.delete(new Delete(jobRow)); historyTable.close(); int deleteCount = 1; // delete all task rows Scan taskScan = getTaskScan(key); // only need the row keys back to delete (all should have taskid) taskScan.addColumn(Constants.INFO_FAM_BYTES, JobHistoryKeys.KEYS_TO_BYTES.get(JobHistoryKeys.TASKID)); // no reason to cache rows we're deleting taskScan.setCacheBlocks(false); List<Delete> taskDeletes = new ArrayList<Delete>(); Table taskTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_TASK_TABLE)); ResultScanner scanner = taskTable.getScanner(taskScan); try { for (Result r : scanner) { if (r != null && !r.isEmpty()) { byte[] rowKey = r.getRow(); TaskKey taskKey = taskKeyConv.fromBytes(rowKey); if (!key.equals(taskKey)) { LOG.warn("Found task not in the current job " + Bytes.toStringBinary(rowKey)); break; } taskDeletes.add(new Delete(r.getRow())); } } // Hang on the count because delete will modify our list. deleteCount += taskDeletes.size(); if (taskDeletes.size() > 0) { LOG.info("Deleting " + taskDeletes.size() + " tasks for job " + key); taskTable.delete(taskDeletes); } } finally { scanner.close(); taskTable.close(); } return deleteCount; }
[ "public", "int", "removeJob", "(", "JobKey", "key", ")", "throws", "IOException", "{", "byte", "[", "]", "jobRow", "=", "jobKeyConv", ".", "toBytes", "(", "key", ")", ";", "Table", "historyTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_TABLE", ")", ")", ";", "historyTable", ".", "delete", "(", "new", "Delete", "(", "jobRow", ")", ")", ";", "historyTable", ".", "close", "(", ")", ";", "int", "deleteCount", "=", "1", ";", "// delete all task rows", "Scan", "taskScan", "=", "getTaskScan", "(", "key", ")", ";", "// only need the row keys back to delete (all should have taskid)", "taskScan", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "JobHistoryKeys", ".", "KEYS_TO_BYTES", ".", "get", "(", "JobHistoryKeys", ".", "TASKID", ")", ")", ";", "// no reason to cache rows we're deleting", "taskScan", ".", "setCacheBlocks", "(", "false", ")", ";", "List", "<", "Delete", ">", "taskDeletes", "=", "new", "ArrayList", "<", "Delete", ">", "(", ")", ";", "Table", "taskTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_TASK_TABLE", ")", ")", ";", "ResultScanner", "scanner", "=", "taskTable", ".", "getScanner", "(", "taskScan", ")", ";", "try", "{", "for", "(", "Result", "r", ":", "scanner", ")", "{", "if", "(", "r", "!=", "null", "&&", "!", "r", ".", "isEmpty", "(", ")", ")", "{", "byte", "[", "]", "rowKey", "=", "r", ".", "getRow", "(", ")", ";", "TaskKey", "taskKey", "=", "taskKeyConv", ".", "fromBytes", "(", "rowKey", ")", ";", "if", "(", "!", "key", ".", "equals", "(", "taskKey", ")", ")", "{", "LOG", ".", "warn", "(", "\"Found task not in the current job \"", "+", "Bytes", ".", "toStringBinary", "(", "rowKey", ")", ")", ";", "break", ";", "}", "taskDeletes", ".", "add", "(", "new", "Delete", "(", "r", ".", "getRow", "(", ")", ")", ")", ";", "}", "}", "// Hang on the count because delete will modify our list.", "deleteCount", "+=", "taskDeletes", ".", "size", "(", ")", ";", "if", "(", "taskDeletes", ".", "size", "(", ")", ">", "0", ")", "{", "LOG", ".", "info", "(", "\"Deleting \"", "+", "taskDeletes", ".", "size", "(", ")", "+", "\" tasks for job \"", "+", "key", ")", ";", "taskTable", ".", "delete", "(", "taskDeletes", ")", ";", "}", "}", "finally", "{", "scanner", ".", "close", "(", ")", ";", "taskTable", ".", "close", "(", ")", ";", "}", "return", "deleteCount", ";", "}" ]
Removes the job's row from the job_history table, and all related task rows from the job_history_task table. @param key the job to be removed @return the number of rows deleted. @throws IOException
[ "Removes", "the", "job", "s", "row", "from", "the", "job_history", "table", "and", "all", "related", "task", "rows", "from", "the", "job_history_task", "table", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/JobHistoryService.java#L783-L828
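removeJob above deletes the job row, then scans for child task rows, collects their deletes, and applies them as one batch. A sketch of that cascading-delete shape on an in-memory sorted table, assuming child keys share the parent key plus a separator:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class CascadingDelete {
    // Delete a parent row plus every child row sharing its key prefix,
    // collecting child deletes first and applying them in one batch.
    static int remove(TreeMap<String, String> table, String jobKey) {
        table.remove(jobKey);
        int deleted = 1;
        String childPrefix = jobKey + "!";
        List<String> toDelete = new ArrayList<>();
        for (String k : table.tailMap(childPrefix).keySet()) {
            if (!k.startsWith(childPrefix)) break; // past this job's children
            toDelete.add(k);
        }
        deleted += toDelete.size();
        table.keySet().removeAll(toDelete); // batched delete
        return deleted;
    }

    public static void main(String[] args) {
        TreeMap<String, String> t = new TreeMap<>();
        t.put("job1", "job row");
        t.put("job1!m_000001", "task row");
        t.put("job2", "other job");
        System.out.println(remove(t, "job1")); // 2
        System.out.println(t.keySet());        // [job2]
    }
}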
2,684
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/HadoopConfUtil.java
HadoopConfUtil.getUserNameInConf
public static String getUserNameInConf(Configuration jobConf) throws IllegalArgumentException { String userName = jobConf.get(Constants.USER_CONF_KEY_HADOOP2); if (StringUtils.isBlank(userName)) { userName = jobConf.get(Constants.USER_CONF_KEY); if (StringUtils.isBlank(userName)) { // neither user.name nor hadoop.mapreduce.job.user.name found throw new IllegalArgumentException(" Found neither " + Constants.USER_CONF_KEY + " nor " + Constants.USER_CONF_KEY_HADOOP2); } } return userName; }
java
public static String getUserNameInConf(Configuration jobConf) throws IllegalArgumentException { String userName = jobConf.get(Constants.USER_CONF_KEY_HADOOP2); if (StringUtils.isBlank(userName)) { userName = jobConf.get(Constants.USER_CONF_KEY); if (StringUtils.isBlank(userName)) { // neither user.name nor hadoop.mapreduce.job.user.name found throw new IllegalArgumentException(" Found neither " + Constants.USER_CONF_KEY + " nor " + Constants.USER_CONF_KEY_HADOOP2); } } return userName; }
[ "public", "static", "String", "getUserNameInConf", "(", "Configuration", "jobConf", ")", "throws", "IllegalArgumentException", "{", "String", "userName", "=", "jobConf", ".", "get", "(", "Constants", ".", "USER_CONF_KEY_HADOOP2", ")", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "userName", ")", ")", "{", "userName", "=", "jobConf", ".", "get", "(", "Constants", ".", "USER_CONF_KEY", ")", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "userName", ")", ")", "{", "// neither user.name nor hadoop.mapreduce.job.user.name found", "throw", "new", "IllegalArgumentException", "(", "\" Found neither \"", "+", "Constants", ".", "USER_CONF_KEY", "+", "\" nor \"", "+", "Constants", ".", "USER_CONF_KEY_HADOOP2", ")", ";", "}", "}", "return", "userName", ";", "}" ]
Gets the user name from the job conf; checks for the hadoop2 config param first, then hadoop1. @param jobConf @return userName @throws IllegalArgumentException
[ "Get", "the", "user", "name", "from", "the", "job", "conf", "check", "for", "hadoop2", "config", "param", "then", "hadoop1" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/HadoopConfUtil.java#L42-L55
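getUserNameInConf above reads the hadoop2 key first and falls back to the hadoop1 key, throwing when both are blank. The same fallback chain over a plain map; the key strings here are assumptions, since the real values live in hraven's Constants:

import java.util.Map;

public class UserNameLookup {
    static final String USER_KEY_HADOOP2 = "mapreduce.job.user.name"; // assumed value
    static final String USER_KEY_HADOOP1 = "user.name";               // assumed value

    // Check the hadoop2 key first and fall back to the hadoop1 key,
    // failing loudly when neither is present.
    static String userName(Map<String, String> conf) {
        String user = conf.get(USER_KEY_HADOOP2);
        if (user == null || user.trim().isEmpty()) {
            user = conf.get(USER_KEY_HADOOP1);
            if (user == null || user.trim().isEmpty()) {
                throw new IllegalArgumentException("Found neither "
                        + USER_KEY_HADOOP1 + " nor " + USER_KEY_HADOOP2);
            }
        }
        return user;
    }

    public static void main(String[] args) {
        System.out.println(userName(Map.of("user.name", "alice"))); // alice
    }
}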
2,685
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/HadoopConfUtil.java
HadoopConfUtil.contains
public static boolean contains(Configuration jobConf, String name) { if (StringUtils.isNotBlank(jobConf.get(name))) { return true; } else { return false; } }
java
public static boolean contains(Configuration jobConf, String name) { if (StringUtils.isNotBlank(jobConf.get(name))) { return true; } else { return false; } }
[ "public", "static", "boolean", "contains", "(", "Configuration", "jobConf", ",", "String", "name", ")", "{", "if", "(", "StringUtils", ".", "isNotBlank", "(", "jobConf", ".", "get", "(", "name", ")", ")", ")", "{", "return", "true", ";", "}", "else", "{", "return", "false", ";", "}", "}" ]
checks if the jobConf contains a certain parameter @param jobConf @param name @return true if the job conf contains that parameter, false if it does not
[ "checks", "if", "the", "jobConf", "contains", "a", "certain", "parameter" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/HadoopConfUtil.java#L65-L71
2,686
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/util/HadoopConfUtil.java
HadoopConfUtil.getQueueName
public static String getQueueName(Configuration jobConf) { // look for the hadoop2 queuename first String hRavenQueueName = jobConf.get(Constants.QUEUENAME_HADOOP2); if (StringUtils.isBlank(hRavenQueueName)) { // presumably a hadoop1 conf, check for fair scheduler pool name hRavenQueueName = jobConf .get(Constants.FAIR_SCHEDULER_POOLNAME_HADOOP1); if (StringUtils.isBlank(hRavenQueueName)) { // check for capacity scheduler queue name hRavenQueueName = jobConf .get(Constants.CAPACITY_SCHEDULER_QUEUENAME_HADOOP1); if (StringUtils.isBlank(hRavenQueueName)) { // neither pool (hadoop1) nor queuename (hadoop2) found // presumably FIFO scheduler, hence set to "DEFAULT_QUEUE" hRavenQueueName = Constants.DEFAULT_QUEUENAME; LOG.info(" Found neither " + Constants.FAIR_SCHEDULER_POOLNAME_HADOOP1 + " nor " + Constants.QUEUENAME_HADOOP2 + " nor " + Constants.CAPACITY_SCHEDULER_QUEUENAME_HADOOP1 + " hence presuming FIFO scheduler " + " and setting the queuename to " + Constants.DEFAULT_QUEUENAME); } } } return hRavenQueueName; }
java
public static String getQueueName(Configuration jobConf) { // look for the hadoop2 queuename first String hRavenQueueName = jobConf.get(Constants.QUEUENAME_HADOOP2); if (StringUtils.isBlank(hRavenQueueName)) { // presumably a hadoop1 conf, check for fair scheduler pool name hRavenQueueName = jobConf .get(Constants.FAIR_SCHEDULER_POOLNAME_HADOOP1); if (StringUtils.isBlank(hRavenQueueName)) { // check for capacity scheduler queue name hRavenQueueName = jobConf .get(Constants.CAPACITY_SCHEDULER_QUEUENAME_HADOOP1); if (StringUtils.isBlank(hRavenQueueName)) { // neither pool (hadoop1) nor queuename (hadoop2) found // presumably FIFO scheduler, hence set to "DEFAULT_QUEUE" hRavenQueueName = Constants.DEFAULT_QUEUENAME; LOG.info(" Found neither " + Constants.FAIR_SCHEDULER_POOLNAME_HADOOP1 + " nor " + Constants.QUEUENAME_HADOOP2 + " nor " + Constants.CAPACITY_SCHEDULER_QUEUENAME_HADOOP1 + " hence presuming FIFO scheduler " + " and setting the queuename to " + Constants.DEFAULT_QUEUENAME); } } } return hRavenQueueName; }
[ "public", "static", "String", "getQueueName", "(", "Configuration", "jobConf", ")", "{", "// look for the hadoop2 queuename first", "String", "hRavenQueueName", "=", "jobConf", ".", "get", "(", "Constants", ".", "QUEUENAME_HADOOP2", ")", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "hRavenQueueName", ")", ")", "{", "// presumably a hadoop1 conf, check for fair scheduler pool name", "hRavenQueueName", "=", "jobConf", ".", "get", "(", "Constants", ".", "FAIR_SCHEDULER_POOLNAME_HADOOP1", ")", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "hRavenQueueName", ")", ")", "{", "// check for capacity scheduler queue name", "hRavenQueueName", "=", "jobConf", ".", "get", "(", "Constants", ".", "CAPACITY_SCHEDULER_QUEUENAME_HADOOP1", ")", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "hRavenQueueName", ")", ")", "{", "// neither pool (hadoop1) nor queuename (hadoop2) found", "// presumably FIFO scheduler, hence set to \"DEFAULT_QUEUE\"", "hRavenQueueName", "=", "Constants", ".", "DEFAULT_QUEUENAME", ";", "LOG", ".", "info", "(", "\" Found neither \"", "+", "Constants", ".", "FAIR_SCHEDULER_POOLNAME_HADOOP1", "+", "\" nor \"", "+", "Constants", ".", "QUEUENAME_HADOOP2", "+", "\" nor \"", "+", "Constants", ".", "CAPACITY_SCHEDULER_QUEUENAME_HADOOP1", "+", "\" hence presuming FIFO scheduler \"", "+", "\" and setting the queuename to \"", "+", "Constants", ".", "DEFAULT_QUEUENAME", ")", ";", "}", "}", "}", "return", "hRavenQueueName", ";", "}" ]
retrieves the queue name from a hadoop conf; looks for hadoop2 and hadoop1 settings @param jobConf @return queuename
[ "Retrieves", "the", "queue", "name", "from", "a", "Hadoop", "conf", ";", "looks", "for", "hadoop2", "and", "hadoop1", "settings", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/util/HadoopConfUtil.java#L80-L106
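A minimal usage sketch for the queue-name lookup above; the key string and queue value are assumptions chosen to illustrate the hadoop2-first fallback order, and imports (org.apache.hadoop.conf.Configuration, com.twitter.hraven.util.HadoopConfUtil) are omitted to match the style of the rows.

// Hypothetical illustration: set what is assumed to be the hadoop2 key
// behind Constants.QUEUENAME_HADOOP2, then watch the FIFO fallback.
Configuration conf = new Configuration();
conf.set("mapreduce.job.queuename", "analytics"); // assumed hadoop2 key
String queue = HadoopConfUtil.getQueueName(conf); // "analytics"
// An empty conf has none of the three keys, so the FIFO default is returned.
String fifo = HadoopConfUtil.getQueueName(new Configuration(false)); // Constants.DEFAULT_QUEUENAME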
2,687
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/AppVersionService.java
AppVersionService.getDistinctVersions
public List<VersionInfo> getDistinctVersions(String cluster, String user, String appId) throws IOException { Get get = new Get(getRowKey(cluster, user, appId)); List<VersionInfo> versions = Lists.newArrayList(); Long ts = 0L; Table versionsTable = null; try { versionsTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_APP_VERSION_TABLE)); Result r = versionsTable.get(get); if (r != null && !r.isEmpty()) { for (Cell c : r.listCells()) { ts = 0L; try { ts = Bytes.toLong(CellUtil.cloneValue(c)); versions.add(new VersionInfo( Bytes.toString(CellUtil.cloneQualifier(c)), ts)); } catch (IllegalArgumentException e1) { // Bytes.toLong may throw IllegalArgumentException, although // unlikely. LOG.error( "Caught conversion error while converting timestamp to long value " + e1.getMessage()); // rethrow the exception in order to propagate it throw e1; } } } if (versions.size() > 0) { Collections.sort(versions); } } finally { if (versionsTable != null) { versionsTable.close(); } } return versions; }
java
public List<VersionInfo> getDistinctVersions(String cluster, String user, String appId) throws IOException { Get get = new Get(getRowKey(cluster, user, appId)); List<VersionInfo> versions = Lists.newArrayList(); Long ts = 0L; Table versionsTable = null; try { versionsTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_APP_VERSION_TABLE)); Result r = versionsTable.get(get); if (r != null && !r.isEmpty()) { for (Cell c : r.listCells()) { ts = 0L; try { ts = Bytes.toLong(CellUtil.cloneValue(c)); versions.add(new VersionInfo( Bytes.toString(CellUtil.cloneQualifier(c)), ts)); } catch (IllegalArgumentException e1) { // Bytes.toLong may throw IllegalArgumentException, although // unlikely. LOG.error( "Caught conversion error while converting timestamp to long value " + e1.getMessage()); // rethrow the exception in order to propagate it throw e1; } } } if (versions.size() > 0) { Collections.sort(versions); } } finally { if (versionsTable != null) { versionsTable.close(); } } return versions; }
[ "public", "List", "<", "VersionInfo", ">", "getDistinctVersions", "(", "String", "cluster", ",", "String", "user", ",", "String", "appId", ")", "throws", "IOException", "{", "Get", "get", "=", "new", "Get", "(", "getRowKey", "(", "cluster", ",", "user", ",", "appId", ")", ")", ";", "List", "<", "VersionInfo", ">", "versions", "=", "Lists", ".", "newArrayList", "(", ")", ";", "Long", "ts", "=", "0L", ";", "Table", "versionsTable", "=", "null", ";", "try", "{", "versionsTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_APP_VERSION_TABLE", ")", ")", ";", "Result", "r", "=", "versionsTable", ".", "get", "(", "get", ")", ";", "if", "(", "r", "!=", "null", "&&", "!", "r", ".", "isEmpty", "(", ")", ")", "{", "for", "(", "Cell", "c", ":", "r", ".", "listCells", "(", ")", ")", "{", "ts", "=", "0L", ";", "try", "{", "ts", "=", "Bytes", ".", "toLong", "(", "CellUtil", ".", "cloneValue", "(", "c", ")", ")", ";", "versions", ".", "add", "(", "new", "VersionInfo", "(", "Bytes", ".", "toString", "(", "CellUtil", ".", "cloneQualifier", "(", "c", ")", ")", ",", "ts", ")", ")", ";", "}", "catch", "(", "IllegalArgumentException", "e1", ")", "{", "// Bytes.toLong may throw IllegalArgumentException, although", "// unlikely.", "LOG", ".", "error", "(", "\"Caught conversion error while converting timestamp to long value \"", "+", "e1", ".", "getMessage", "(", ")", ")", ";", "// rethrow the exception in order to propagate it", "throw", "e1", ";", "}", "}", "}", "if", "(", "versions", ".", "size", "(", ")", ">", "0", ")", "{", "Collections", ".", "sort", "(", "versions", ")", ";", "}", "}", "finally", "{", "if", "(", "versionsTable", "!=", "null", ")", "{", "versionsTable", ".", "close", "(", ")", ";", "}", "}", "return", "versions", ";", "}" ]
Returns the list of distinct versions for the given application sorted in reverse chronological order @param cluster @param user @param appId @return the list of versions sorted in reverse chronological order (the list will be empty if no versions are found) @throws IOException
[ "Returns", "the", "list", "of", "distinct", "versions", "for", "the", "given", "application", "sorted", "in", "reverse", "chronological", "order" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/AppVersionService.java#L110-L149
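A hedged usage sketch for the version lookup above; the cluster, user, and app identifiers are placeholders, and the single-argument constructor taking an open HBase Connection is an assumption.

// hbaseConnection: an open org.apache.hadoop.hbase.client.Connection (assumed)
AppVersionService service = new AppVersionService(hbaseConnection);
List<VersionInfo> versions =
    service.getDistinctVersions("cluster1@dc1", "edgar", "wordcount");
for (VersionInfo v : versions) {
  System.out.println(v); // entries come back sorted per VersionInfo's ordering
}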
2,688
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/datasource/AppVersionService.java
AppVersionService.addVersion
public boolean addVersion(String cluster, String user, String appId, String version, long timestamp) throws IOException { boolean updated = false; // check if the version already exists byte[] rowKey = getRowKey(cluster, user, appId); byte[] versionCol = Bytes.toBytes(version); int attempts = 0; // retry up to this many times for checkAndPut failures int maxAttempts = 3; boolean checkForUpdate = true; while (checkForUpdate && attempts < maxAttempts) { attempts++; // values for conditional update Put p = null; byte[] expectedValue = null; Get get = new Get(rowKey); get.addColumn(Constants.INFO_FAM_BYTES, versionCol); Table versionsTable = null; try { versionsTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_APP_VERSION_TABLE)); Result r = versionsTable.get(get); if (r != null && !r.isEmpty()) { byte[] storedValue = r.getValue(Constants.INFO_FAM_BYTES, versionCol); long storedTS = Bytes.toLong(storedValue); if (timestamp < storedTS) { // update the stored timestamp to our earlier value p = new Put(rowKey); p.addColumn(Constants.INFO_FAM_BYTES, versionCol, Bytes.toBytes(timestamp)); expectedValue = storedValue; } else { // version exists and exceeds our value, no update necessary checkForUpdate = false; } } else { // no stored value p = new Put(rowKey); p.addColumn(Constants.INFO_FAM_BYTES, versionCol, Bytes.toBytes(timestamp)); } if (p != null) { // we have an updated value to add updated = versionsTable.checkAndPut(rowKey, Constants.INFO_FAM_BYTES, versionCol, expectedValue, p); checkForUpdate = !updated; if (!updated) { LOG.warn("Update of cluster=" + cluster + ", user=" + user + ", app=" + appId + ", version=" + version + " to timestamp " + timestamp + " failed because currently set value changed!" + " (attempt " + attempts + " of " + maxAttempts + ")"); } } } finally { if (versionsTable != null) { versionsTable.close(); } } } return updated; }
java
public boolean addVersion(String cluster, String user, String appId, String version, long timestamp) throws IOException { boolean updated = false; // check if the version already exists byte[] rowKey = getRowKey(cluster, user, appId); byte[] versionCol = Bytes.toBytes(version); int attempts = 0; // retry up to this many times for checkAndPut failures int maxAttempts = 3; boolean checkForUpdate = true; while (checkForUpdate && attempts < maxAttempts) { attempts++; // values for conditional update Put p = null; byte[] expectedValue = null; Get get = new Get(rowKey); get.addColumn(Constants.INFO_FAM_BYTES, versionCol); Table versionsTable = null; try { versionsTable = hbaseConnection .getTable(TableName.valueOf(Constants.HISTORY_APP_VERSION_TABLE)); Result r = versionsTable.get(get); if (r != null && !r.isEmpty()) { byte[] storedValue = r.getValue(Constants.INFO_FAM_BYTES, versionCol); long storedTS = Bytes.toLong(storedValue); if (timestamp < storedTS) { // update the stored timestamp to our earlier value p = new Put(rowKey); p.addColumn(Constants.INFO_FAM_BYTES, versionCol, Bytes.toBytes(timestamp)); expectedValue = storedValue; } else { // version exists and exceeds our value, no update necessary checkForUpdate = false; } } else { // no stored value p = new Put(rowKey); p.addColumn(Constants.INFO_FAM_BYTES, versionCol, Bytes.toBytes(timestamp)); } if (p != null) { // we have an updated value to add updated = versionsTable.checkAndPut(rowKey, Constants.INFO_FAM_BYTES, versionCol, expectedValue, p); checkForUpdate = !updated; if (!updated) { LOG.warn("Update of cluster=" + cluster + ", user=" + user + ", app=" + appId + ", version=" + version + " to timestamp " + timestamp + " failed because currently set value changed!" + " (attempt " + attempts + " of " + maxAttempts + ")"); } } } finally { if (versionsTable != null) { versionsTable.close(); } } } return updated; }
[ "public", "boolean", "addVersion", "(", "String", "cluster", ",", "String", "user", ",", "String", "appId", ",", "String", "version", ",", "long", "timestamp", ")", "throws", "IOException", "{", "boolean", "updated", "=", "false", ";", "// check if the version already exists", "byte", "[", "]", "rowKey", "=", "getRowKey", "(", "cluster", ",", "user", ",", "appId", ")", ";", "byte", "[", "]", "versionCol", "=", "Bytes", ".", "toBytes", "(", "version", ")", ";", "int", "attempts", "=", "0", ";", "// retry up to this many times for checkAndPut failures", "int", "maxAttempts", "=", "3", ";", "boolean", "checkForUpdate", "=", "true", ";", "while", "(", "checkForUpdate", "&&", "attempts", "<", "maxAttempts", ")", "{", "attempts", "++", ";", "// values for conditional update", "Put", "p", "=", "null", ";", "byte", "[", "]", "expectedValue", "=", "null", ";", "Get", "get", "=", "new", "Get", "(", "rowKey", ")", ";", "get", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "versionCol", ")", ";", "Table", "versionsTable", "=", "null", ";", "try", "{", "versionsTable", "=", "hbaseConnection", ".", "getTable", "(", "TableName", ".", "valueOf", "(", "Constants", ".", "HISTORY_APP_VERSION_TABLE", ")", ")", ";", "Result", "r", "=", "versionsTable", ".", "get", "(", "get", ")", ";", "if", "(", "r", "!=", "null", "&&", "!", "r", ".", "isEmpty", "(", ")", ")", "{", "byte", "[", "]", "storedValue", "=", "r", ".", "getValue", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "versionCol", ")", ";", "long", "storedTS", "=", "Bytes", ".", "toLong", "(", "storedValue", ")", ";", "if", "(", "timestamp", "<", "storedTS", ")", "{", "// update the stored timestamp to our earlier value", "p", "=", "new", "Put", "(", "rowKey", ")", ";", "p", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "versionCol", ",", "Bytes", ".", "toBytes", "(", "timestamp", ")", ")", ";", "expectedValue", "=", "storedValue", ";", "}", "else", "{", "// version exists and exceeds our value, no update necessary", "checkForUpdate", "=", "false", ";", "}", "}", "else", "{", "// no stored value", "p", "=", "new", "Put", "(", "rowKey", ")", ";", "p", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "versionCol", ",", "Bytes", ".", "toBytes", "(", "timestamp", ")", ")", ";", "}", "if", "(", "p", "!=", "null", ")", "{", "// we have an updated value to add", "updated", "=", "versionsTable", ".", "checkAndPut", "(", "rowKey", ",", "Constants", ".", "INFO_FAM_BYTES", ",", "versionCol", ",", "expectedValue", ",", "p", ")", ";", "checkForUpdate", "=", "!", "updated", ";", "if", "(", "!", "updated", ")", "{", "LOG", ".", "warn", "(", "\"Update of cluster=\"", "+", "cluster", "+", "\", user=\"", "+", "user", "+", "\", app=\"", "+", "appId", "+", "\", version=\"", "+", "version", "+", "\" to timestamp \"", "+", "timestamp", "+", "\" failed because currently set value changed!\"", "+", "\" (attempt \"", "+", "attempts", "+", "\" of \"", "+", "maxAttempts", "+", "\")\"", ")", ";", "}", "}", "}", "finally", "{", "if", "(", "versionsTable", "!=", "null", ")", "{", "versionsTable", ".", "close", "(", ")", ";", "}", "}", "}", "return", "updated", ";", "}" ]
Adds an entry for the given version, if it does not already exist. If the given timestamp is earlier than the currently stored timestamp for the version, it will be updated. @param cluster cluster identifier (cluster@identifier) @param user user name @param appId application identifier @param version version identifier @param timestamp timestamp to store with this version (only the earliest timestamp is stored) @return {@code true} if a new version entry was added, {@code false} if the version already existed
[ "Adds", "an", "entry", "for", "the", "given", "version", "if", "it", "does", "not", "already", "exist", ".", "If", "the", "given", "timestamp", "is", "earlier", "than", "the", "currently", "stored", "timestamp", "for", "the", "version", "it", "will", "be", "updated", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/datasource/AppVersionService.java#L165-L231
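A sketch of the earliest-timestamp semantics described above, reusing the hypothetical service and identifiers from the previous sketch:

service.addVersion("cluster1@dc1", "edgar", "wordcount", "v2", 2000L); // true: new cell created
service.addVersion("cluster1@dc1", "edgar", "wordcount", "v2", 1000L); // true: earlier timestamp replaces 2000
service.addVersion("cluster1@dc1", "edgar", "wordcount", "v2", 3000L); // false: stored timestamp already earlier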
2,689
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java
HRavenRestClient.fetchFlowsWithConfig
public List<Flow> fetchFlowsWithConfig(String cluster, String username, String batchDesc, String signature, int limit, String... configProps) throws IOException { LOG.info(String.format( "Fetching last %d matching jobs for cluster=%s, user.name=%s, " + "batch.desc=%s, pig.logical.plan.signature=%s", limit, cluster, username, batchDesc, signature)); String configParam = ""; if (configProps != null && configProps.length > 0) { configParam = StringUtil.buildParam("includeConf", configProps); } String urlString = signature == null ? String.format("http://%s/api/v1/flow/%s/%s/%s?limit=%d&%s", apiHostname, cluster, username, StringUtil.cleanseToken(batchDesc), limit, configParam) : String.format("http://%s/api/v1/flow/%s/%s/%s/%s?limit=%d&%s", apiHostname, cluster, username, StringUtil.cleanseToken(batchDesc), signature, limit, configParam); return retrieveFlowsFromURL(urlString); }
java
public List<Flow> fetchFlowsWithConfig(String cluster, String username, String batchDesc, String signature, int limit, String... configProps) throws IOException { LOG.info(String.format( "Fetching last %d matching jobs for cluster=%s, user.name=%s, " + "batch.desc=%s, pig.logical.plan.signature=%s", limit, cluster, username, batchDesc, signature)); String configParam = ""; if (configProps != null && configProps.length > 0) { configParam = StringUtil.buildParam("includeConf", configProps); } String urlString = signature == null ? String.format("http://%s/api/v1/flow/%s/%s/%s?limit=%d&%s", apiHostname, cluster, username, StringUtil.cleanseToken(batchDesc), limit, configParam) : String.format("http://%s/api/v1/flow/%s/%s/%s/%s?limit=%d&%s", apiHostname, cluster, username, StringUtil.cleanseToken(batchDesc), signature, limit, configParam); return retrieveFlowsFromURL(urlString); }
[ "public", "List", "<", "Flow", ">", "fetchFlowsWithConfig", "(", "String", "cluster", ",", "String", "username", ",", "String", "batchDesc", ",", "String", "signature", ",", "int", "limit", ",", "String", "...", "configProps", ")", "throws", "IOException", "{", "LOG", ".", "info", "(", "String", ".", "format", "(", "\"Fetching last %d matching jobs for cluster=%s, user.name=%s, \"", "+", "\"batch.desc=%s, pig.logical.plan.signature=%s\"", ",", "limit", ",", "cluster", ",", "username", ",", "batchDesc", ",", "signature", ")", ")", ";", "String", "configParam", "=", "\"\"", ";", "if", "(", "configProps", "!=", "null", "&&", "configProps", ".", "length", ">", "0", ")", "{", "configParam", "=", "StringUtil", ".", "buildParam", "(", "\"includeConf\"", ",", "configProps", ")", ";", "}", "String", "urlString", "=", "signature", "==", "null", "?", "String", ".", "format", "(", "\"http://%s/api/v1/flow/%s/%s/%s?limit=%d&%s\"", ",", "apiHostname", ",", "cluster", ",", "username", ",", "StringUtil", ".", "cleanseToken", "(", "batchDesc", ")", ",", "limit", ",", "configParam", ")", ":", "String", ".", "format", "(", "\"http://%s/api/v1/flow/%s/%s/%s/%s?limit=%d&%s\"", ",", "apiHostname", ",", "cluster", ",", "username", ",", "StringUtil", ".", "cleanseToken", "(", "batchDesc", ")", ",", "signature", ",", "limit", ",", "configParam", ")", ";", "return", "retrieveFlowsFromURL", "(", "urlString", ")", ";", "}" ]
Fetches a list of flows containing jobs that include the specified configuration properties @param cluster @param username @param batchDesc @param signature @param limit @param configProps @return list of flows @throws IOException
[ "Fetches", "a", "list", "of", "flows", "containing", "jobs", "that", "include", "the", "specified", "configuration", "properties" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java#L156-L177
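A usage sketch for the REST fetch above; the hostname, identifiers, and the single-argument constructor taking the API host are assumptions for illustration.

// Assumes an HRavenRestClient constructed against the REST API host.
HRavenRestClient client = new HRavenRestClient("hraven.example.com:8080");
List<Flow> flows = client.fetchFlowsWithConfig("cluster1@dc1", "edgar",
    "daily rollup", null, 5, "mapreduce.job.queuename");
// signature == null selects the shorter /flow/{cluster}/{user}/{batchDesc} URL form.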
2,690
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java
HRavenRestClient.fetchFlowsWithConfig
public List<Flow> fetchFlowsWithConfig(String cluster, String username, String batchDesc, String signature, int limit, List<String> flowResponseFilters, List<String> jobResponseFilters, List<String> configPropertyFields) throws IOException { LOG.info(String.format( "Fetching last %d matching jobs for cluster=%s, user.name=%s, " + "batch.desc=%s, pig.logical.plan.signature=%s", limit, cluster, username, batchDesc, signature)); StringBuilder urlStringBuilder = buildFlowURL(cluster, username, batchDesc, signature, limit, flowResponseFilters, jobResponseFilters); if ((configPropertyFields != null) && (configPropertyFields.size() > 0)) { urlStringBuilder.append(AND); urlStringBuilder .append(StringUtil.buildParam("includeConf", configPropertyFields)); } return retrieveFlowsFromURL(urlStringBuilder.toString()); }
java
public List<Flow> fetchFlowsWithConfig(String cluster, String username, String batchDesc, String signature, int limit, List<String> flowResponseFilters, List<String> jobResponseFilters, List<String> configPropertyFields) throws IOException { LOG.info(String.format( "Fetching last %d matching jobs for cluster=%s, user.name=%s, " + "batch.desc=%s, pig.logical.plan.signature=%s", limit, cluster, username, batchDesc, signature)); StringBuilder urlStringBuilder = buildFlowURL(cluster, username, batchDesc, signature, limit, flowResponseFilters, jobResponseFilters); if ((configPropertyFields != null) && (configPropertyFields.size() > 0)) { urlStringBuilder.append(AND); urlStringBuilder .append(StringUtil.buildParam("includeConf", configPropertyFields)); } return retrieveFlowsFromURL(urlStringBuilder.toString()); }
[ "public", "List", "<", "Flow", ">", "fetchFlowsWithConfig", "(", "String", "cluster", ",", "String", "username", ",", "String", "batchDesc", ",", "String", "signature", ",", "int", "limit", ",", "List", "<", "String", ">", "flowResponseFilters", ",", "List", "<", "String", ">", "jobResponseFilters", ",", "List", "<", "String", ">", "configPropertyFields", ")", "throws", "IOException", "{", "LOG", ".", "info", "(", "String", ".", "format", "(", "\"Fetching last %d matching jobs for cluster=%s, user.name=%s, \"", "+", "\"batch.desc=%s, pig.logical.plan.signature=%s\"", ",", "limit", ",", "cluster", ",", "username", ",", "batchDesc", ",", "signature", ")", ")", ";", "StringBuilder", "urlStringBuilder", "=", "buildFlowURL", "(", "cluster", ",", "username", ",", "batchDesc", ",", "signature", ",", "limit", ",", "flowResponseFilters", ",", "jobResponseFilters", ")", ";", "if", "(", "(", "configPropertyFields", "!=", "null", ")", "&&", "(", "configPropertyFields", ".", "size", "(", ")", ">", "0", ")", ")", "{", "urlStringBuilder", ".", "append", "(", "AND", ")", ";", "urlStringBuilder", ".", "append", "(", "StringUtil", ".", "buildParam", "(", "\"includeConf\"", ",", "configPropertyFields", ")", ")", ";", "}", "return", "retrieveFlowsFromURL", "(", "urlStringBuilder", ".", "toString", "(", ")", ")", ";", "}" ]
Fetches a list of flows, restricting the response to the specified flow fields, job fields, and configuration properties @param cluster @param username @param batchDesc @param signature @param limit @param flowResponseFilters @param jobResponseFilters @param configPropertyFields @return list of flows @throws IOException
[ "Fetches", "a", "list", "of", "flows", ",", "restricting", "the", "response", "to", "the", "specified", "flow", "fields", ",", "job", "fields", ",", "and", "configuration", "properties" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java#L193-L210
2,691
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java
HRavenRestClient.buildFlowURL
private StringBuilder buildFlowURL(String cluster, String username, String batchDesc, String signature, int limit, List<String> flowResponseFilters, List<String> jobResponseFilters) throws IOException { StringBuilder urlStringBuilder = new StringBuilder(); urlStringBuilder.append("http://"); urlStringBuilder.append(apiHostname); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(URL_PORTION_API_V1); urlStringBuilder.append(FLOW_API); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(cluster); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(username); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(StringUtil.cleanseToken(batchDesc)); if (StringUtils.isNotEmpty(signature)) { urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(signature); } urlStringBuilder.append(QUESTION_MARK); urlStringBuilder.append(LIMIT); urlStringBuilder.append(EQUAL_TO); urlStringBuilder.append(limit); if ((flowResponseFilters != null) && (flowResponseFilters.size() > 0)) { urlStringBuilder.append(AND); urlStringBuilder .append(StringUtil.buildParam("include", flowResponseFilters)); } if ((jobResponseFilters != null) && (jobResponseFilters.size() > 0)) { urlStringBuilder.append(AND); urlStringBuilder .append(StringUtil.buildParam("includeJobField", jobResponseFilters)); } return urlStringBuilder; }
java
private StringBuilder buildFlowURL(String cluster, String username, String batchDesc, String signature, int limit, List<String> flowResponseFilters, List<String> jobResponseFilters) throws IOException { StringBuilder urlStringBuilder = new StringBuilder(); urlStringBuilder.append("http://"); urlStringBuilder.append(apiHostname); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(URL_PORTION_API_V1); urlStringBuilder.append(FLOW_API); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(cluster); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(username); urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(StringUtil.cleanseToken(batchDesc)); if (StringUtils.isNotEmpty(signature)) { urlStringBuilder.append(RestJSONResource.SLASH); urlStringBuilder.append(signature); } urlStringBuilder.append(QUESTION_MARK); urlStringBuilder.append(LIMIT); urlStringBuilder.append(EQUAL_TO); urlStringBuilder.append(limit); if ((flowResponseFilters != null) && (flowResponseFilters.size() > 0)) { urlStringBuilder.append(AND); urlStringBuilder .append(StringUtil.buildParam("include", flowResponseFilters)); } if ((jobResponseFilters != null) && (jobResponseFilters.size() > 0)) { urlStringBuilder.append(AND); urlStringBuilder .append(StringUtil.buildParam("includeJobField", jobResponseFilters)); } return urlStringBuilder; }
[ "private", "StringBuilder", "buildFlowURL", "(", "String", "cluster", ",", "String", "username", ",", "String", "batchDesc", ",", "String", "signature", ",", "int", "limit", ",", "List", "<", "String", ">", "flowResponseFilters", ",", "List", "<", "String", ">", "jobResponseFilters", ")", "throws", "IOException", "{", "StringBuilder", "urlStringBuilder", "=", "new", "StringBuilder", "(", ")", ";", "urlStringBuilder", ".", "append", "(", "\"http://\"", ")", ";", "urlStringBuilder", ".", "append", "(", "apiHostname", ")", ";", "urlStringBuilder", ".", "append", "(", "RestJSONResource", ".", "SLASH", ")", ";", "urlStringBuilder", ".", "append", "(", "URL_PORTION_API_V1", ")", ";", "urlStringBuilder", ".", "append", "(", "FLOW_API", ")", ";", "urlStringBuilder", ".", "append", "(", "RestJSONResource", ".", "SLASH", ")", ";", "urlStringBuilder", ".", "append", "(", "cluster", ")", ";", "urlStringBuilder", ".", "append", "(", "RestJSONResource", ".", "SLASH", ")", ";", "urlStringBuilder", ".", "append", "(", "username", ")", ";", "urlStringBuilder", ".", "append", "(", "RestJSONResource", ".", "SLASH", ")", ";", "urlStringBuilder", ".", "append", "(", "StringUtil", ".", "cleanseToken", "(", "batchDesc", ")", ")", ";", "if", "(", "StringUtils", ".", "isNotEmpty", "(", "signature", ")", ")", "{", "urlStringBuilder", ".", "append", "(", "RestJSONResource", ".", "SLASH", ")", ";", "urlStringBuilder", ".", "append", "(", "signature", ")", ";", "}", "urlStringBuilder", ".", "append", "(", "QUESTION_MARK", ")", ";", "urlStringBuilder", ".", "append", "(", "LIMIT", ")", ";", "urlStringBuilder", ".", "append", "(", "EQUAL_TO", ")", ";", "urlStringBuilder", ".", "append", "(", "limit", ")", ";", "if", "(", "(", "flowResponseFilters", "!=", "null", ")", "&&", "(", "flowResponseFilters", ".", "size", "(", ")", ">", "0", ")", ")", "{", "urlStringBuilder", ".", "append", "(", "AND", ")", ";", "urlStringBuilder", ".", "append", "(", "StringUtil", ".", "buildParam", "(", "\"include\"", ",", "flowResponseFilters", ")", ")", ";", "}", "if", "(", "(", "jobResponseFilters", "!=", "null", ")", "&&", "(", "jobResponseFilters", ".", "size", "(", ")", ">", "0", ")", ")", "{", "urlStringBuilder", ".", "append", "(", "AND", ")", ";", "urlStringBuilder", ".", "append", "(", "StringUtil", ".", "buildParam", "(", "\"includeJobField\"", ",", "jobResponseFilters", ")", ")", ";", "}", "return", "urlStringBuilder", ";", "}" ]
builds up a StringBuilder with the parameters for the FLOW API @param cluster @param username @param batchDesc @param signature @param limit @param flowResponseFilters @param jobResponseFilters @return @throws IOException
[ "builds", "up", "a", "StringBuilder", "with", "the", "parameters", "for", "the", "FLOW", "API" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java#L279-L316
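For reference, the builder above produces URLs roughly of this shape (the host, tokens, and exact multi-value parameter encoding produced by StringUtil.buildParam are illustrative assumptions):

http://hraven.example.com/api/v1/flow/cluster1@dc1/edgar/daily-rollup/sig123?limit=5&include=jobCount&includeJobField=configuration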
2,692
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java
HRavenRestClient.fetchTaskDetails
public List<TaskDetails> fetchTaskDetails(String cluster, String jobId) throws IOException { String urlString = String.format("http://%s/api/v1/tasks/%s/%s", apiHostname, cluster, jobId); return retrieveTaskDetailsFromUrl(urlString); }
java
public List<TaskDetails> fetchTaskDetails(String cluster, String jobId) throws IOException { String urlString = String.format("http://%s/api/v1/tasks/%s/%s", apiHostname, cluster, jobId); return retrieveTaskDetailsFromUrl(urlString); }
[ "public", "List", "<", "TaskDetails", ">", "fetchTaskDetails", "(", "String", "cluster", ",", "String", "jobId", ")", "throws", "IOException", "{", "String", "urlString", "=", "String", ".", "format", "(", "\"http://%s/api/v1/tasks/%s/%s\"", ",", "apiHostname", ",", "cluster", ",", "jobId", ")", ";", "return", "retrieveTaskDetailsFromUrl", "(", "urlString", ")", ";", "}" ]
Fetch the task details of a given job. @param cluster @param jobId @return list of task details
[ "Fetch", "the", "task", "details", "of", "a", "given", "job", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java#L334-L339
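Continuing the hypothetical client from the earlier sketch, the call maps one-to-one onto the tasks endpoint built by the String.format above:

List<TaskDetails> tasks =
    client.fetchTaskDetails("cluster1@dc1", "job_201306201100_1234");
// issues GET http://hraven.example.com:8080/api/v1/tasks/cluster1@dc1/job_201306201100_1234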
2,693
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java
HRavenRestClient.fetchTaskDetails
public List<TaskDetails> fetchTaskDetails(String cluster, String jobId, List<String> taskResponseFilters) throws IOException { String taskFilters = StringUtil.buildParam("include", taskResponseFilters); String urlString = String.format("http://%s/api/v1/tasks/%s/%s?%s", apiHostname, cluster, jobId, taskFilters); return retrieveTaskDetailsFromUrl(urlString); }
java
public List<TaskDetails> fetchTaskDetails(String cluster, String jobId, List<String> taskResponseFilters) throws IOException { String taskFilters = StringUtil.buildParam("include", taskResponseFilters); String urlString = String.format("http://%s/api/v1/tasks/%s/%s?%s", apiHostname, cluster, jobId, taskFilters); return retrieveTaskDetailsFromUrl(urlString); }
[ "public", "List", "<", "TaskDetails", ">", "fetchTaskDetails", "(", "String", "cluster", ",", "String", "jobId", ",", "List", "<", "String", ">", "taskResponseFilters", ")", "throws", "IOException", "{", "String", "taskFilters", "=", "StringUtil", ".", "buildParam", "(", "\"include\"", ",", "taskResponseFilters", ")", ";", "String", "urlString", "=", "String", ".", "format", "(", "\"http://%s/api/v1/tasks/%s/%s?%s\"", ",", "apiHostname", ",", "cluster", ",", "jobId", ",", "taskFilters", ")", ";", "return", "retrieveTaskDetailsFromUrl", "(", "urlString", ")", ";", "}" ]
Fetch the task details of a given job, restricted to the specified fields. @param cluster @param jobId @param taskResponseFilters @return list of task details
[ "Fetch", "the", "task", "details", "of", "a", "given", "job", ",", "restricted", "to", "the", "specified", "fields", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/rest/client/HRavenRestClient.java#L348-L354
2,694
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java
JobFileTableMapper.aggreagteJobStats
private void aggreagteJobStats(JobDetails jobDetails, byte[] rowKey, Context context, AggregationConstants.AGGREGATION_TYPE aggType) throws IOException, InterruptedException { byte[] aggStatusCol = null; switch (aggType) { case DAILY: aggStatusCol = AggregationConstants.JOB_DAILY_AGGREGATION_STATUS_COL_BYTES; break; case WEEKLY: aggStatusCol = AggregationConstants.JOB_WEEKLY_AGGREGATION_STATUS_COL_BYTES; break; default: LOG.error("Unknown aggregation type " + aggType); return; } boolean aggStatus = appSummaryService.aggregateJobDetails(jobDetails, aggType); context.progress(); LOG.debug("Status of aggreagting stats for " + aggType + "=" + aggStatus); if (aggStatus) { // update raw table for this history file with aggregation status // Indicate that we processed the agg for this RAW successfully // so that we can skip it on the next scan (or not). Put aggStatusPut = rawService.getAggregatedStatusPut(rowKey, aggStatusCol, aggStatus); // TODO // In the unlikely event of multiple mappers running against one RAW // row, with one succeeding and one failing, // there could be a race where the // raw does not properly indicate the true status // (which is questionable in // any case with multiple simultaneous runs with different outcome). context.write(RAW_TABLE, aggStatusPut); } }
java
private void aggreagteJobStats(JobDetails jobDetails, byte[] rowKey, Context context, AggregationConstants.AGGREGATION_TYPE aggType) throws IOException, InterruptedException { byte[] aggStatusCol = null; switch (aggType) { case DAILY: aggStatusCol = AggregationConstants.JOB_DAILY_AGGREGATION_STATUS_COL_BYTES; break; case WEEKLY: aggStatusCol = AggregationConstants.JOB_WEEKLY_AGGREGATION_STATUS_COL_BYTES; break; default: LOG.error("Unknown aggregation type " + aggType); return; } boolean aggStatus = appSummaryService.aggregateJobDetails(jobDetails, aggType); context.progress(); LOG.debug("Status of aggreagting stats for " + aggType + "=" + aggStatus); if (aggStatus) { // update raw table for this history file with aggregation status // Indicate that we processed the agg for this RAW successfully // so that we can skip it on the next scan (or not). Put aggStatusPut = rawService.getAggregatedStatusPut(rowKey, aggStatusCol, aggStatus); // TODO // In the unlikely event of multiple mappers running against one RAW // row, with one succeeding and one failing, // there could be a race where the // raw does not properly indicate the true status // (which is questionable in // any case with multiple simultaneous runs with different outcome). context.write(RAW_TABLE, aggStatusPut); } }
[ "private", "void", "aggreagteJobStats", "(", "JobDetails", "jobDetails", ",", "byte", "[", "]", "rowKey", ",", "Context", "context", ",", "AggregationConstants", ".", "AGGREGATION_TYPE", "aggType", ")", "throws", "IOException", ",", "InterruptedException", "{", "byte", "[", "]", "aggStatusCol", "=", "null", ";", "switch", "(", "aggType", ")", "{", "case", "DAILY", ":", "aggStatusCol", "=", "AggregationConstants", ".", "JOB_DAILY_AGGREGATION_STATUS_COL_BYTES", ";", "break", ";", "case", "WEEKLY", ":", "aggStatusCol", "=", "AggregationConstants", ".", "JOB_WEEKLY_AGGREGATION_STATUS_COL_BYTES", ";", "break", ";", "default", ":", "LOG", ".", "error", "(", "\"Unknown aggregation type \"", "+", "aggType", ")", ";", "return", ";", "}", "boolean", "aggStatus", "=", "appSummaryService", ".", "aggregateJobDetails", "(", "jobDetails", ",", "aggType", ")", ";", "context", ".", "progress", "(", ")", ";", "LOG", ".", "debug", "(", "\"Status of aggreagting stats for \"", "+", "aggType", "+", "\"=\"", "+", "aggStatus", ")", ";", "if", "(", "aggStatus", ")", "{", "// update raw table for this history file with aggregation status", "// Indicate that we processed the agg for this RAW successfully", "// so that we can skip it on the next scan (or not).", "Put", "aggStatusPut", "=", "rawService", ".", "getAggregatedStatusPut", "(", "rowKey", ",", "aggStatusCol", ",", "aggStatus", ")", ";", "// TODO", "// In the unlikely event of multiple mappers running against one RAW", "// row, with one succeeding and one failing,", "// there could be a race where the", "// raw does not properly indicate the true status", "// (which is questionable in", "// any case with multiple simultaneous runs with different outcome).", "context", ".", "write", "(", "RAW_TABLE", ",", "aggStatusPut", ")", ";", "}", "}" ]
Aggregate this job's stats only if re-aggregation is turned on, OR aggregation is on AND the job has not already been aggregated; if the job has already been aggregated, we don't want to mistakenly aggregate it again.
[ "Aggregate", "this", "job", "s", "stats", "only", "if", "re", "-", "aggregation", "is", "turned", "on", ",", "OR", "aggregation", "is", "on", "AND", "the", "job", "has", "not", "already", "been", "aggregated" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java#L385-L423
2,695
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java
JobFileTableMapper.getMegaByteMillisPut
private Put getMegaByteMillisPut(Long mbMillis, JobKey jobKey) { Put pMb = new Put(jobKeyConv.toBytes(jobKey)); pMb.addColumn(Constants.INFO_FAM_BYTES, Constants.MEGABYTEMILLIS_BYTES, Bytes.toBytes(mbMillis)); return pMb; }
java
private Put getMegaByteMillisPut(Long mbMillis, JobKey jobKey) { Put pMb = new Put(jobKeyConv.toBytes(jobKey)); pMb.addColumn(Constants.INFO_FAM_BYTES, Constants.MEGABYTEMILLIS_BYTES, Bytes.toBytes(mbMillis)); return pMb; }
[ "private", "Put", "getMegaByteMillisPut", "(", "Long", "mbMillis", ",", "JobKey", "jobKey", ")", "{", "Put", "pMb", "=", "new", "Put", "(", "jobKeyConv", ".", "toBytes", "(", "jobKey", ")", ")", ";", "pMb", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "MEGABYTEMILLIS_BYTES", ",", "Bytes", ".", "toBytes", "(", "mbMillis", ")", ")", ";", "return", "pMb", ";", "}" ]
generates a put for the megabytemillis @param mbMillis @param jobKey @return the put with megabytemillis
[ "generates", "a", "put", "for", "the", "megabytemillis" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java#L431-L436
2,696
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java
JobFileTableMapper.loadCostProperties
Properties loadCostProperties(Path cachePath, String machineType) { Properties prop = new Properties(); InputStream inp = null; try { inp = new FileInputStream(cachePath.toString()); prop.load(inp); return prop; } catch (FileNotFoundException fnf) { LOG.error("cost properties does not exist, using default values"); return null; } catch (IOException e) { LOG.error("error loading properties, using default values"); return null; } finally { if (inp != null) { try { inp.close(); } catch (IOException ignore) { // do nothing } } } }
java
Properties loadCostProperties(Path cachePath, String machineType) { Properties prop = new Properties(); InputStream inp = null; try { inp = new FileInputStream(cachePath.toString()); prop.load(inp); return prop; } catch (FileNotFoundException fnf) { LOG.error("cost properties does not exist, using default values"); return null; } catch (IOException e) { LOG.error("error loading properties, using default values"); return null; } finally { if (inp != null) { try { inp.close(); } catch (IOException ignore) { // do nothing } } } }
[ "Properties", "loadCostProperties", "(", "Path", "cachePath", ",", "String", "machineType", ")", "{", "Properties", "prop", "=", "new", "Properties", "(", ")", ";", "InputStream", "inp", "=", "null", ";", "try", "{", "inp", "=", "new", "FileInputStream", "(", "cachePath", ".", "toString", "(", ")", ")", ";", "prop", ".", "load", "(", "inp", ")", ";", "return", "prop", ";", "}", "catch", "(", "FileNotFoundException", "fnf", ")", "{", "LOG", ".", "error", "(", "\"cost properties does not exist, using default values\"", ")", ";", "return", "null", ";", "}", "catch", "(", "IOException", "e", ")", "{", "LOG", ".", "error", "(", "\"error loading properties, using default values\"", ")", ";", "return", "null", ";", "}", "finally", "{", "if", "(", "inp", "!=", "null", ")", "{", "try", "{", "inp", ".", "close", "(", ")", ";", "}", "catch", "(", "IOException", "ignore", ")", "{", "// do nothing", "}", "}", "}", "}" ]
Looks for the cost file in the distributed cache. @param cachePath path of the cost properties file @param machineType machine type of the node the job ran on @return the loaded Properties, or null if the file is missing or unreadable
[ "Looks", "for", "the", "cost", "file", "in", "the", "distributed", "cache", "." ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java#L444-L466
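An illustrative cost properties file of the shape this loader consumes; the machine types and numbers are invented, and the key suffixes mirror the lookups performed in getJobCost below.

# costProperties file shipped via the distributed cache (illustrative values)
default.computecost=1.5
default.machinememory=49152
m2xlarge.computecost=2.4
m2xlarge.machinememory=73728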
2,697
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java
JobFileTableMapper.getJobCost
private Double getJobCost(Long mbMillis, Configuration currentConf) { Double computeTco = 0.0; Long machineMemory = 0L; Properties prop = null; String machineType = currentConf.get(Constants.HRAVEN_MACHINE_TYPE, "default"); LOG.debug(" machine type " + machineType); try { Path[] cacheFiles = DistributedCache.getLocalCacheFiles(currentConf); if (null != cacheFiles && cacheFiles.length > 0) { for (Path cachePath : cacheFiles) { LOG.debug(" distributed cache path " + cachePath); if (cachePath.getName().equals(Constants.COST_PROPERTIES_FILENAME)) { prop = loadCostProperties(cachePath, machineType); break; } } } else { LOG.error( "Unable to find anything (" + Constants.COST_PROPERTIES_FILENAME + ") in distributed cache, continuing with defaults"); } } catch (IOException ioe) { LOG.error("IOException reading from distributed cache for " + Constants.COST_PROPERTIES_HDFS_DIR + ", continuing with defaults" + ioe.toString()); } if (prop != null) { String computeTcoStr = prop.getProperty(machineType + ".computecost"); try { computeTco = Double.parseDouble(computeTcoStr); } catch (NumberFormatException nfe) { LOG.error("error in conversion to long for compute tco " + computeTcoStr + " using default value of 0"); } String machineMemStr = prop.getProperty(machineType + ".machinememory"); try { machineMemory = Long.parseLong(machineMemStr); } catch (NumberFormatException nfe) { LOG.error("error in conversion to long for machine memory " + machineMemStr + " using default value of 0"); } } else { LOG.error("Could not load properties file, using defaults"); } Double jobCost = JobHistoryFileParserBase.calculateJobCost(mbMillis, computeTco, machineMemory); LOG.info("from cost properties file, jobCost is " + jobCost + " based on compute tco: " + computeTco + " machine memory: " + machineMemory + " for machine type " + machineType); return jobCost; }
java
private Double getJobCost(Long mbMillis, Configuration currentConf) { Double computeTco = 0.0; Long machineMemory = 0L; Properties prop = null; String machineType = currentConf.get(Constants.HRAVEN_MACHINE_TYPE, "default"); LOG.debug(" machine type " + machineType); try { Path[] cacheFiles = DistributedCache.getLocalCacheFiles(currentConf); if (null != cacheFiles && cacheFiles.length > 0) { for (Path cachePath : cacheFiles) { LOG.debug(" distributed cache path " + cachePath); if (cachePath.getName().equals(Constants.COST_PROPERTIES_FILENAME)) { prop = loadCostProperties(cachePath, machineType); break; } } } else { LOG.error( "Unable to find anything (" + Constants.COST_PROPERTIES_FILENAME + ") in distributed cache, continuing with defaults"); } } catch (IOException ioe) { LOG.error("IOException reading from distributed cache for " + Constants.COST_PROPERTIES_HDFS_DIR + ", continuing with defaults" + ioe.toString()); } if (prop != null) { String computeTcoStr = prop.getProperty(machineType + ".computecost"); try { computeTco = Double.parseDouble(computeTcoStr); } catch (NumberFormatException nfe) { LOG.error("error in conversion to long for compute tco " + computeTcoStr + " using default value of 0"); } String machineMemStr = prop.getProperty(machineType + ".machinememory"); try { machineMemory = Long.parseLong(machineMemStr); } catch (NumberFormatException nfe) { LOG.error("error in conversion to long for machine memory " + machineMemStr + " using default value of 0"); } } else { LOG.error("Could not load properties file, using defaults"); } Double jobCost = JobHistoryFileParserBase.calculateJobCost(mbMillis, computeTco, machineMemory); LOG.info("from cost properties file, jobCost is " + jobCost + " based on compute tco: " + computeTco + " machine memory: " + machineMemory + " for machine type " + machineType); return jobCost; }
[ "private", "Double", "getJobCost", "(", "Long", "mbMillis", ",", "Configuration", "currentConf", ")", "{", "Double", "computeTco", "=", "0.0", ";", "Long", "machineMemory", "=", "0L", ";", "Properties", "prop", "=", "null", ";", "String", "machineType", "=", "currentConf", ".", "get", "(", "Constants", ".", "HRAVEN_MACHINE_TYPE", ",", "\"default\"", ")", ";", "LOG", ".", "debug", "(", "\" machine type \"", "+", "machineType", ")", ";", "try", "{", "Path", "[", "]", "cacheFiles", "=", "DistributedCache", ".", "getLocalCacheFiles", "(", "currentConf", ")", ";", "if", "(", "null", "!=", "cacheFiles", "&&", "cacheFiles", ".", "length", ">", "0", ")", "{", "for", "(", "Path", "cachePath", ":", "cacheFiles", ")", "{", "LOG", ".", "debug", "(", "\" distributed cache path \"", "+", "cachePath", ")", ";", "if", "(", "cachePath", ".", "getName", "(", ")", ".", "equals", "(", "Constants", ".", "COST_PROPERTIES_FILENAME", ")", ")", "{", "prop", "=", "loadCostProperties", "(", "cachePath", ",", "machineType", ")", ";", "break", ";", "}", "}", "}", "else", "{", "LOG", ".", "error", "(", "\"Unable to find anything (\"", "+", "Constants", ".", "COST_PROPERTIES_FILENAME", "+", "\") in distributed cache, continuing with defaults\"", ")", ";", "}", "}", "catch", "(", "IOException", "ioe", ")", "{", "LOG", ".", "error", "(", "\"IOException reading from distributed cache for \"", "+", "Constants", ".", "COST_PROPERTIES_HDFS_DIR", "+", "\", continuing with defaults\"", "+", "ioe", ".", "toString", "(", ")", ")", ";", "}", "if", "(", "prop", "!=", "null", ")", "{", "String", "computeTcoStr", "=", "prop", ".", "getProperty", "(", "machineType", "+", "\".computecost\"", ")", ";", "try", "{", "computeTco", "=", "Double", ".", "parseDouble", "(", "computeTcoStr", ")", ";", "}", "catch", "(", "NumberFormatException", "nfe", ")", "{", "LOG", ".", "error", "(", "\"error in conversion to long for compute tco \"", "+", "computeTcoStr", "+", "\" using default value of 0\"", ")", ";", "}", "String", "machineMemStr", "=", "prop", ".", "getProperty", "(", "machineType", "+", "\".machinememory\"", ")", ";", "try", "{", "machineMemory", "=", "Long", ".", "parseLong", "(", "machineMemStr", ")", ";", "}", "catch", "(", "NumberFormatException", "nfe", ")", "{", "LOG", ".", "error", "(", "\"error in conversion to long for machine memory \"", "+", "machineMemStr", "+", "\" using default value of 0\"", ")", ";", "}", "}", "else", "{", "LOG", ".", "error", "(", "\"Could not load properties file, using defaults\"", ")", ";", "}", "Double", "jobCost", "=", "JobHistoryFileParserBase", ".", "calculateJobCost", "(", "mbMillis", ",", "computeTco", ",", "machineMemory", ")", ";", "LOG", ".", "info", "(", "\"from cost properties file, jobCost is \"", "+", "jobCost", "+", "\" based on compute tco: \"", "+", "computeTco", "+", "\" machine memory: \"", "+", "machineMemory", "+", "\" for machine type \"", "+", "machineType", ")", ";", "return", "jobCost", ";", "}" ]
calculates the cost of this job based on mbMillis, machineType and cost details from the properties file @param mbMillis @param currentConf @return cost of the job
[ "calculates", "the", "cost", "of", "this", "job", "based", "on", "mbMillis", "machineType", "and", "cost", "details", "from", "the", "properties", "file" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java#L475-L528
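A small sketch of wiring the machine type that drives the property lookup above; with no cost file in the distributed cache, the method falls back to a compute cost of 0.0 and machine memory of 0 before calling calculateJobCost. The machine type value is a placeholder.

Configuration conf = new Configuration();
conf.set(Constants.HRAVEN_MACHINE_TYPE, "m2xlarge"); // "default" when unset
// getJobCost(mbMillis, conf) will then read m2xlarge.computecost and
// m2xlarge.machinememory from the cached properties file, if present.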
2,698
twitter/hraven
hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java
JobFileTableMapper.getJobCostPut
private Put getJobCostPut(Double jobCost, JobKey jobKey) { Put pJobCost = new Put(jobKeyConv.toBytes(jobKey)); pJobCost.addColumn(Constants.INFO_FAM_BYTES, Constants.JOBCOST_BYTES, Bytes.toBytes(jobCost)); return pJobCost; }
java
private Put getJobCostPut(Double jobCost, JobKey jobKey) { Put pJobCost = new Put(jobKeyConv.toBytes(jobKey)); pJobCost.addColumn(Constants.INFO_FAM_BYTES, Constants.JOBCOST_BYTES, Bytes.toBytes(jobCost)); return pJobCost; }
[ "private", "Put", "getJobCostPut", "(", "Double", "jobCost", ",", "JobKey", "jobKey", ")", "{", "Put", "pJobCost", "=", "new", "Put", "(", "jobKeyConv", ".", "toBytes", "(", "jobKey", ")", ")", ";", "pJobCost", ".", "addColumn", "(", "Constants", ".", "INFO_FAM_BYTES", ",", "Constants", ".", "JOBCOST_BYTES", ",", "Bytes", ".", "toBytes", "(", "jobCost", ")", ")", ";", "return", "pJobCost", ";", "}" ]
generates a put for the job cost @param jobCost @param jobKey @return the put with job cost
[ "generates", "a", "put", "for", "the", "job", "cost" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobFileTableMapper.java#L536-L541
2,699
twitter/hraven
hraven-core/src/main/java/com/twitter/hraven/JobDetails.java
JobDetails.compareTo
@Override public int compareTo(JobDetails otherJob) { if (otherJob == null) { return -1; } return new CompareToBuilder().append(this.jobKey, otherJob.getJobKey()) .toComparison(); }
java
@Override public int compareTo(JobDetails otherJob) { if (otherJob == null) { return -1; } return new CompareToBuilder().append(this.jobKey, otherJob.getJobKey()) .toComparison(); }
[ "@", "Override", "public", "int", "compareTo", "(", "JobDetails", "otherJob", ")", "{", "if", "(", "otherJob", "==", "null", ")", "{", "return", "-", "1", ";", "}", "return", "new", "CompareToBuilder", "(", ")", ".", "append", "(", "this", ".", "jobKey", ",", "otherJob", ".", "getJobKey", "(", ")", ")", ".", "toComparison", "(", ")", ";", "}" ]
Compares two JobDetails objects on the basis of their JobKey @param other @return 0 if this JobKey is equal to the other JobKey, 1 if this JobKey is greater than the other JobKey, -1 if this JobKey is less than the other JobKey
[ "Compares", "two", "JobDetails", "objects", "on", "the", "basis", "of", "their", "JobKey" ]
e35996b6e2f016bcd18db0bad320be7c93d95208
https://github.com/twitter/hraven/blob/e35996b6e2f016bcd18db0bad320be7c93d95208/hraven-core/src/main/java/com/twitter/hraven/JobDetails.java#L113-L120
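Because the comparison above delegates to the JobKey, collections of JobDetails sort by key with the standard utilities; a minimal sketch assuming a list populated elsewhere (fetchedJobs is hypothetical):

// Assumes jobs were fetched elsewhere, e.g. from the REST client above.
List<JobDetails> jobs = new ArrayList<JobDetails>(fetchedJobs);
Collections.sort(jobs); // orders entries by their JobKey via compareTo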