id (string, 7–14 chars) | text (string, 1–106k chars)
---|---
663418_0
|
public String get(String key) {
    Map<String, String> namedConfig = getNamedConfig();
    return namedConfig.get(key);
}
|
663418_1
|
public String get(String key) {
    Map<String, String> namedConfig = getNamedConfig();
    return namedConfig.get(key);
}
|
663418_2
|
public String get(String key) {
    Map<String, String> namedConfig = getNamedConfig();
    return namedConfig.get(key);
}
|
663418_3
|
String getPictureName() {
    return pictureName;
}
|
663418_4
|
@Override
public void doCells(Parse parse) {
    cells = parse;
    processArguments();
    boolean state = validateState();
    notifyInvalidState(state);
    configFixture();
    restClient = buildRestClient();
    configRestClient();
    try {
        Method method1 = getClass().getMethod(parse.text());
        method1.invoke(this);
    } catch (Exception exception) {
        exception(parse, exception);
    }
}
|
663418_5
|
@Override
public void doCells(Parse parse) {
    cells = parse;
    processArguments();
    boolean state = validateState();
    notifyInvalidState(state);
    configFixture();
    restClient = buildRestClient();
    configRestClient();
    try {
        Method method1 = getClass().getMethod(parse.text());
        method1.invoke(this);
    } catch (Exception exception) {
        exception(parse, exception);
    }
}
|
663418_6
|
@Override
public void doCells(Parse parse) {
    cells = parse;
    processArguments();
    boolean state = validateState();
    notifyInvalidState(state);
    configFixture();
    restClient = buildRestClient();
    configRestClient();
    try {
        Method method1 = getClass().getMethod(parse.text());
        method1.invoke(this);
    } catch (Exception exception) {
        exception(parse, exception);
    }
}
|
663418_7
|
@Override
public void doCells(Parse parse) {
    cells = parse;
    processArguments();
    boolean state = validateState();
    notifyInvalidState(state);
    configFixture();
    restClient = buildRestClient();
    configRestClient();
    try {
        Method method1 = getClass().getMethod(parse.text());
        method1.invoke(this);
    } catch (Exception exception) {
        exception(parse, exception);
    }
}
|
663418_8
|
@Override
public void doCells(Parse parse) {
    cells = parse;
    processArguments();
    boolean state = validateState();
    notifyInvalidState(state);
    configFixture();
    restClient = buildRestClient();
    configRestClient();
    try {
        Method method1 = getClass().getMethod(parse.text());
        method1.invoke(this);
    } catch (Exception exception) {
        exception(parse, exception);
    }
}
|
663418_9
|
@Override
public void doCells(Parse parse) {
    cells = parse;
    processArguments();
    boolean state = validateState();
    notifyInvalidState(state);
    configFixture();
    restClient = buildRestClient();
    configRestClient();
    try {
        Method method1 = getClass().getMethod(parse.text());
        method1.invoke(this);
    } catch (Exception exception) {
        exception(parse, exception);
    }
}
|
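The six doCells copies above all follow the same FIT-style pattern: stash the row, run setup hooks, then reflectively invoke the zero-argument fixture method named by the cell text. A minimal, self-contained sketch of that reflective dispatch (class and method names here are made up, not the FIT Parse/Fixture API):

import java.lang.reflect.Method;

public class ReflectiveDispatchSketch {
    public void ping() {
        System.out.println("ping invoked");
    }

    // Looks up a public zero-argument method by name and invokes it,
    // mirroring getClass().getMethod(parse.text()).invoke(this) above.
    void dispatch(String methodName) {
        try {
            Method m = getClass().getMethod(methodName);
            m.invoke(this);
        } catch (Exception e) {
            System.err.println("dispatch failed: " + e);
        }
    }

    public static void main(String[] args) {
        new ReflectiveDispatchSketch().dispatch("ping");
    }
}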
666662_0
|
public void append(char c) {
    if (isFull()) {
        removeFirst();
    }
    insertLast(c);
}
|
666662_1
|
public void append(char c) {
    if (isFull()) {
        removeFirst();
    }
    insertLast(c);
}
|
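append keeps the buffer bounded by evicting the oldest character before inserting. A runnable sketch of the implied fixed-capacity structure, assuming isFull/removeFirst/insertLast behave as their names suggest:

import java.util.ArrayDeque;
import java.util.Deque;

public class BoundedCharBuffer {
    private final int capacity;
    private final Deque<Character> chars = new ArrayDeque<>();

    public BoundedCharBuffer(int capacity) {
        this.capacity = capacity;
    }

    private boolean isFull() { return chars.size() == capacity; }
    private void removeFirst() { chars.removeFirst(); }
    private void insertLast(char c) { chars.addLast(c); }

    // Same shape as the snippet: evict the oldest char when full.
    public void append(char c) {
        if (isFull()) {
            removeFirst();
        }
        insertLast(c);
    }

    public static void main(String[] args) {
        BoundedCharBuffer b = new BoundedCharBuffer(3);
        for (char c : "abcd".toCharArray()) b.append(c);
        System.out.println(b.chars); // [b, c, d]
    }
}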
667236_0
|
protected AndroidSdk getAndroidSdk() throws MojoExecutionException {
    File chosenSdkPath;
    String chosenSdkPlatform;
    if (sdk != null) {
        // An <sdk> tag exists in the pom.
        if (sdk.getPath() != null) {
            // An <sdk><path> tag is set in the pom.
            chosenSdkPath = sdk.getPath();
        } else {
            // There is no <sdk><path> tag in the pom.
            if (sdkPath != null) {
                // -Dandroid.sdk.path is set on command line, or via <properties><sdk.path>...
                chosenSdkPath = sdkPath;
            } else {
                // No -Dandroid.sdk.path is set on command line, or via <properties><sdk.path>...
                chosenSdkPath = new File(getAndroidHomeOrThrow());
            }
        }
        // Use <sdk><platform> from pom if it's there, otherwise try -Dandroid.sdk.platform from command line or <properties><sdk.platform>...
        if (!isBlank(sdk.getPlatform())) {
            chosenSdkPlatform = sdk.getPlatform();
        } else {
            chosenSdkPlatform = sdkPlatform;
        }
    } else {
        // There is no <sdk> tag in the pom.
        if (sdkPath != null) {
            // -Dandroid.sdk.path is set on command line, or via <properties><sdk.path>...
            chosenSdkPath = sdkPath;
        } else {
            // No -Dandroid.sdk.path is set on command line, or via <properties><sdk.path>...
            chosenSdkPath = new File(getAndroidHomeOrThrow());
        }
        // Use any -Dandroid.sdk.platform from command line or <properties><sdk.platform>...
        chosenSdkPlatform = sdkPlatform;
    }
    return new AndroidSdk(chosenSdkPath, chosenSdkPlatform);
}
|
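getAndroidSdk resolves the SDK path with a fixed precedence: <sdk><path> from the pom wins, then -Dandroid.sdk.path (or <properties><sdk.path>), then the ANDROID_HOME environment variable; the platform falls back from <sdk><platform> to sdkPlatform. A compact sketch of that precedence chain (helper and parameter names are stand-ins):

import java.io.File;

public class SdkPathPrecedenceSketch {
    // First non-null candidate wins, mirroring the nested if/else above.
    static File resolveSdkPath(File pomSdkPath, File commandLineSdkPath, String androidHome) {
        if (pomSdkPath != null) {
            return pomSdkPath; // <sdk><path> in the pom
        }
        if (commandLineSdkPath != null) {
            return commandLineSdkPath; // -Dandroid.sdk.path / <properties><sdk.path>
        }
        return new File(androidHome); // fallback: ANDROID_HOME
    }

    public static void main(String[] args) {
        System.out.println(resolveSdkPath(null, null, "/opt/android-sdk"));
    }
}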
667236_1
|
protected String extractPackageNameFromAndroidManifest(File androidManifestFile) throws MojoExecutionException {
    final URL xmlURL;
    try {
        xmlURL = androidManifestFile.toURI().toURL();
    } catch (MalformedURLException e) {
        throw new MojoExecutionException("Error while trying to figure out package name from inside AndroidManifest.xml file " + androidManifestFile, e);
    }
    final DocumentContainer documentContainer = new DocumentContainer(xmlURL);
    final Object packageName = JXPathContext.newContext(documentContainer).getValue("manifest/@package", String.class);
    return (String) packageName;
}
|
667236_2
|
protected String extractInstrumentationRunnerFromAndroidManifest(File androidManifestFile) throws MojoExecutionException {
    final URL xmlURL;
    try {
        xmlURL = androidManifestFile.toURI().toURL();
    } catch (MalformedURLException e) {
        throw new MojoExecutionException("Error while trying to figure out instrumentation runner from inside AndroidManifest.xml file " + androidManifestFile, e);
    }
    final DocumentContainer documentContainer = new DocumentContainer(xmlURL);
    final Object instrumentationRunner;
    try {
        instrumentationRunner = JXPathContext.newContext(documentContainer).getValue("manifest//instrumentation/@android:name", String.class);
    } catch (JXPathNotFoundException e) {
        return null;
    }
    return (String) instrumentationRunner;
}
|
667236_3
|
protected String extractInstrumentationRunnerFromAndroidManifest(File androidManifestFile) throws MojoExecutionException {
    final URL xmlURL;
    try {
        xmlURL = androidManifestFile.toURI().toURL();
    } catch (MalformedURLException e) {
        throw new MojoExecutionException("Error while trying to figure out instrumentation runner from inside AndroidManifest.xml file " + androidManifestFile, e);
    }
    final DocumentContainer documentContainer = new DocumentContainer(xmlURL);
    final Object instrumentationRunner;
    try {
        instrumentationRunner = JXPathContext.newContext(documentContainer).getValue("manifest//instrumentation/@android:name", String.class);
    } catch (JXPathNotFoundException e) {
        return null;
    }
    return (String) instrumentationRunner;
}
|
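Both manifest extractors load AndroidManifest.xml into a JXPath DocumentContainer and read one attribute; a missing node surfaces as JXPathNotFoundException, which the runner variant maps to null. A standalone sketch of the same lookup against an in-memory manifest, using a parsed DOM document as the JXPath context (the sample XML is fabricated, and using a DOM root instead of DocumentContainer is an assumption):

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.commons.jxpath.JXPathContext;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;

public class ManifestXPathSketch {
    public static void main(String[] args) throws Exception {
        String manifest = "<manifest package=\"com.example.app\"/>"; // fabricated
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new InputSource(new StringReader(manifest)));
        // Same expression shape as extractPackageNameFromAndroidManifest above.
        Object pkg = JXPathContext.newContext(doc).getValue("manifest/@package", String.class);
        System.out.println(pkg); // com.example.app
    }
}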
667236_4
|
protected String extractPackageNameFromAndroidManifestXmlTree(String aaptDumpXmlTree) {
    final Scanner scanner = new Scanner(aaptDumpXmlTree);
    // Finds the root element named "manifest".
    scanner.findWithinHorizon("^E: manifest", 0);
    // Finds the manifest element's attribute named "package".
    scanner.findWithinHorizon(" A: package=\"", 0);
    // Extracts the package value including the trailing double quote.
    String packageName = scanner.next(".*?\"");
    // Removes the double quote.
    packageName = packageName.substring(0, packageName.length() - 1);
    return packageName;
}
|
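extractPackageNameFromAndroidManifestXmlTree parses aapt's plain-text dump, not XML: it seeks the manifest element, then the package attribute, and trims the trailing quote from the matched token. A runnable sketch against a fabricated fragment of aapt dump xmltree output:

import java.util.Scanner;

public class AaptDumpScanSketch {
    public static void main(String[] args) {
        // Fabricated excerpt in the shape aapt prints; real output has
        // more attributes and indentation.
        String dump = "N: android=http://schemas.android.com/apk/res/android\n"
                + "E: manifest (line=2)\n"
                + "  A: package=\"com.example.app\" (Raw: \"com.example.app\")\n";
        Scanner scanner = new Scanner(dump);
        scanner.findWithinHorizon("E: manifest", 0);
        scanner.findWithinHorizon(" A: package=\"", 0);
        // The next whitespace-delimited token ends with the closing quote.
        String packageName = scanner.next(".*?\"");
        packageName = packageName.substring(0, packageName.length() - 1);
        System.out.println(packageName); // com.example.app
    }
}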
667236_5
|
protected String extractPackageNameFromApk(File apkFile) throws MojoExecutionException {
    CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
    executor.setLogger(this.getLog());
    List<String> commands = new ArrayList<String>();
    commands.add("dump");
    commands.add("xmltree");
    commands.add(apkFile.getAbsolutePath());
    commands.add("AndroidManifest.xml");
    getLog().info(getAndroidSdk().getPathForTool("aapt") + " " + commands.toString());
    try {
        executor.executeCommand(getAndroidSdk().getPathForTool("aapt"), commands, true);
        final String xmlTree = executor.getStandardOut();
        return extractPackageNameFromAndroidManifestXmlTree(xmlTree);
    } catch (ExecutionException e) {
        throw new MojoExecutionException("Error while trying to figure out package name from inside apk file " + apkFile);
    } finally {
        getLog().error(executor.getStandardError());
    }
}
|
667236_6
|
protected String extractPackageNameFromApk(File apkFile) throws MojoExecutionException {
    CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
    executor.setLogger(this.getLog());
    List<String> commands = new ArrayList<String>();
    commands.add("dump");
    commands.add("xmltree");
    commands.add(apkFile.getAbsolutePath());
    commands.add("AndroidManifest.xml");
    getLog().info(getAndroidSdk().getPathForTool("aapt") + " " + commands.toString());
    try {
        executor.executeCommand(getAndroidSdk().getPathForTool("aapt"), commands, true);
        final String xmlTree = executor.getStandardOut();
        return extractPackageNameFromAndroidManifestXmlTree(xmlTree);
    } catch (ExecutionException e) {
        throw new MojoExecutionException("Error while trying to figure out package name from inside apk file " + apkFile);
    } finally {
        getLog().error(executor.getStandardError());
    }
}
|
667236_7
|
public String getPathForTool(String tool) {
    if (getLayout() == Layout.LAYOUT_1_1) {
        return sdkPath + "/tools/" + tool;
    }
    if (getLayout() == Layout.LAYOUT_1_5) {
        if (commonToolsIn11And15.contains(tool)) {
            return sdkPath + "/tools/" + tool;
        } else {
            return getPlatform() + "/tools/" + tool;
        }
    }
    throw new InvalidSdkException("Unsupported layout \"" + getLayout() + "\"! " + PARAMETER_MESSAGE);
}
|
667236_8
|
public String getPathForTool(String tool) {
    if (getLayout() == Layout.LAYOUT_1_1) {
        return sdkPath + "/tools/" + tool;
    }
    if (getLayout() == Layout.LAYOUT_1_5) {
        if (commonToolsIn11And15.contains(tool)) {
            return sdkPath + "/tools/" + tool;
        } else {
            return getPlatform() + "/tools/" + tool;
        }
    }
    throw new InvalidSdkException("Unsupported layout \"" + getLayout() + "\"! " + PARAMETER_MESSAGE);
}
|
667236_9
|
public String getPathForTool(String tool) {
    if (getLayout() == Layout.LAYOUT_1_1) {
        return sdkPath + "/tools/" + tool;
    }
    if (getLayout() == Layout.LAYOUT_1_5) {
        if (commonToolsIn11And15.contains(tool)) {
            return sdkPath + "/tools/" + tool;
        } else {
            return getPlatform() + "/tools/" + tool;
        }
    }
    throw new InvalidSdkException("Unsupported layout \"" + getLayout() + "\"! " + PARAMETER_MESSAGE);
}
|
678267_0
|
@Override
public String getBaseUrl() {
    return fedoraBaseUrl;
}
|
678267_1
|
@Override
public Date getDate() {
    return date;
}
|
678267_2
|
@Override
public String getMethodName() {
    return methodName;
}
|
678267_3
|
@Override
public String getPID() {
    return pid;
}
|
678267_4
|
public String getAuthor() {
    return author;
}
|
678267_5
|
@Override
public String getFormat() {
    return format;
}
|
678267_6
|
@Override
public String getServerVersion() {
    return serverVersion;
}
|
678267_7
|
@Override
public String getReturnVal() {
    return returnVal;
}
|
678267_8
|
@Override
public String getMethodParamVal(String paramName) {
    return getCategoryTerm(String.format(parameterScheme, paramName));
}
|
678267_9
|
public static DateTime parseXSDDateTime(String input) {
    Matcher m = XSD_DATETIME.matcher(input);
    if (!m.find()) {
        throw new IllegalArgumentException(input +
                " is not a valid XML Schema 1.1 dateTime.");
    }
    int year = Integer.parseInt(m.group(1));
    int month = Integer.parseInt(m.group(3));
    int day = Integer.parseInt(m.group(4));
    int hour = 0, minute = 0, second = 0, millis = 0;
    boolean hasEndOfDayFrag = m.group(11) != null;
    if (!hasEndOfDayFrag) {
        hour = Integer.parseInt(m.group(6));
        minute = Integer.parseInt(m.group(7));
        second = Integer.parseInt(m.group(8));
        // Parse fractional seconds
        // m.group(9), if not null/empty should be Strings such as ".5" or
        // ".050" which convert to 500 and 50, respectively.
        if (m.group(9) != null && !m.group(9).isEmpty()) {
            // parse as Double as a quick hack to drop trailing 0s.
            // e.g. ".0500" becomes 0.05
            double d = Double.parseDouble(m.group(9));
            // Something like the following would allow for int-sized
            // precision, but joda-time 1.6 only supports millis (i.e. <= 999).
            // see: org.joda.time.field.FieldUtils.verifyValueBounds
            // int digits = String.valueOf(d).length() - 2;
            // fractionalSeconds = (int) (d * Math.pow(10, digits));
            millis = (int) (d * 1000);
        }
    }
    DateTimeZone zone = null;
    if (m.group(13) != null) {
        String tmp = m.group(13);
        if (tmp.equals("Z")) {
            tmp = "+00:00";
        }
        zone = DateTimeZone.forID(tmp);
    }
    DateTime dt =
            new DateTime(year, month, day, hour, minute, second, millis,
                    zone);
    if (hasEndOfDayFrag) {
        return dt.plusDays(1);
    }
    return dt;
}
|
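parseXSDDateTime depends on a regex (XSD_DATETIME, defined elsewhere) whose groups carry the date, time, fractional seconds, an optional 24:00:00 end-of-day fragment (group 11), and a zone (group 13). A hedged usage sketch with joda-time, assuming the method lives on a hypothetical DateUtility class and the groups bind as the code reads them:

import org.joda.time.DateTime;

public class XsdDateTimeUsage {
    public static void main(String[] args) {
        // DateUtility is a hypothetical home for the static method above.
        DateTime a = DateUtility.parseXSDDateTime("2010-01-31T14:30:00.5Z");
        System.out.println(a); // 14:30:00.500 UTC: ".5" becomes 500 millis
        // A 24:00:00 end-of-day fragment parses as midnight of the next day.
        DateTime b = DateUtility.parseXSDDateTime("2010-01-31T24:00:00Z");
        System.out.println(b); // 2010-02-01T00:00:00.000Z
    }
}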
684382_0
|
public byte[] encodeHeader() {
    byte[] prevTagSize = encodePreviousTagSize(0);
    byte[] header = new byte[flvHeader.length + prevTagSize.length];
    System.arraycopy(flvHeader, 0, header, 0, flvHeader.length);
    System.arraycopy(prevTagSize, 0, header, flvHeader.length, prevTagSize.length);
    return header;
}
|
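encodeHeader prefixes the stream with the FLV file header followed by the 4-byte PreviousTagSize0 field, which is always zero. A sketch with the standard 9-byte header spelled out (these byte values are the usual FLV constants, assumed rather than read from flvHeader):

import java.util.Arrays;

public class FlvHeaderSketch {
    public static void main(String[] args) {
        // "FLV", version 1, flags 0x05 (audio + video), data offset 9.
        byte[] flvHeader = {'F', 'L', 'V', 0x01, 0x05, 0x00, 0x00, 0x00, 0x09};
        byte[] prevTagSize = {0x00, 0x00, 0x00, 0x00}; // PreviousTagSize0
        byte[] header = new byte[flvHeader.length + prevTagSize.length];
        System.arraycopy(flvHeader, 0, header, 0, flvHeader.length);
        System.arraycopy(prevTagSize, 0, header, flvHeader.length, prevTagSize.length);
        System.out.println(Arrays.toString(header)); // 13 bytes total
    }
}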
684382_1
|
public static boolean validateMinJREVersion(String runtimeVersion, String minVersion) {
    String[] requestedVersioning = minVersion.split("\\.");
    String[] clientVersioning = runtimeVersion.split("\\.");
    if (requestedVersioning.length < 3 || clientVersioning.length < 3)
        return false;
    // First major update
    if (Integer.parseInt(clientVersioning[0]) > Integer.parseInt(requestedVersioning[0]))
        return true;
    else {
        // Checking Java version
        if (Integer.parseInt(clientVersioning[1]) > Integer.parseInt(requestedVersioning[1]))
            return true;
        // Checking update
        else if (Integer.parseInt(clientVersioning[1]) == Integer.parseInt(requestedVersioning[1])) {
            // non-GA or non-FCS release won't be supported
            if (clientVersioning[2].indexOf("-") != -1)
                return false;
            int rUpdatePart1 = 0;
            int rUpdatePart2 = 0;
            int underbar = requestedVersioning[2].indexOf("_");
            if (underbar == -1) {
                rUpdatePart1 = Integer.parseInt(requestedVersioning[2]);
            } else {
                rUpdatePart1 = Integer.parseInt(requestedVersioning[2].substring(0, underbar));
                rUpdatePart2 = Integer.parseInt(requestedVersioning[2].substring(underbar + 1, requestedVersioning[2].length()));
            }
            int cUpdatePart1 = 0;
            int cUpdatePart2 = 0;
            underbar = clientVersioning[2].indexOf("_");
            if (underbar == -1) {
                cUpdatePart1 = Integer.parseInt(clientVersioning[2]);
            } else {
                cUpdatePart1 = Integer.parseInt(clientVersioning[2].substring(0, underbar));
                cUpdatePart2 = Integer.parseInt(clientVersioning[2].substring(underbar + 1, clientVersioning[2].length()));
            }
            if (cUpdatePart1 > rUpdatePart1)
                return true;
            else if (cUpdatePart1 == rUpdatePart1) {
                if (cUpdatePart2 > rUpdatePart2 || cUpdatePart2 == rUpdatePart2)
                    return true;
                else
                    return false;
            } else
                return false;
        } else
            return false;
    }
}
|
684382_2
|
public static boolean validateMinJREVersion(String runtimeVersion, String minVersion) {
    String[] requestedVersioning = minVersion.split("\\.");
    String[] clientVersioning = runtimeVersion.split("\\.");
    if (requestedVersioning.length < 3 || clientVersioning.length < 3)
        return false;
    // First major update
    if (Integer.parseInt(clientVersioning[0]) > Integer.parseInt(requestedVersioning[0]))
        return true;
    else {
        // Checking Java version
        if (Integer.parseInt(clientVersioning[1]) > Integer.parseInt(requestedVersioning[1]))
            return true;
        // Checking update
        else if (Integer.parseInt(clientVersioning[1]) == Integer.parseInt(requestedVersioning[1])) {
            // non-GA or non-FCS release won't be supported
            if (clientVersioning[2].indexOf("-") != -1)
                return false;
            int rUpdatePart1 = 0;
            int rUpdatePart2 = 0;
            int underbar = requestedVersioning[2].indexOf("_");
            if (underbar == -1) {
                rUpdatePart1 = Integer.parseInt(requestedVersioning[2]);
            } else {
                rUpdatePart1 = Integer.parseInt(requestedVersioning[2].substring(0, underbar));
                rUpdatePart2 = Integer.parseInt(requestedVersioning[2].substring(underbar + 1, requestedVersioning[2].length()));
            }
            int cUpdatePart1 = 0;
            int cUpdatePart2 = 0;
            underbar = clientVersioning[2].indexOf("_");
            if (underbar == -1) {
                cUpdatePart1 = Integer.parseInt(clientVersioning[2]);
            } else {
                cUpdatePart1 = Integer.parseInt(clientVersioning[2].substring(0, underbar));
                cUpdatePart2 = Integer.parseInt(clientVersioning[2].substring(underbar + 1, clientVersioning[2].length()));
            }
            if (cUpdatePart1 > rUpdatePart1)
                return true;
            else if (cUpdatePart1 == rUpdatePart1) {
                if (cUpdatePart2 > rUpdatePart2 || cUpdatePart2 == rUpdatePart2)
                    return true;
                else
                    return false;
            } else
                return false;
        } else
            return false;
    }
}
|
684382_3
|
public static boolean validateMinJREVersion(String runtimeVersion, String minVersion) {
    String[] requestedVersioning = minVersion.split("\\.");
    String[] clientVersioning = runtimeVersion.split("\\.");
    if (requestedVersioning.length < 3 || clientVersioning.length < 3)
        return false;
    // First major update
    if (Integer.parseInt(clientVersioning[0]) > Integer.parseInt(requestedVersioning[0]))
        return true;
    else {
        // Checking Java version
        if (Integer.parseInt(clientVersioning[1]) > Integer.parseInt(requestedVersioning[1]))
            return true;
        // Checking update
        else if (Integer.parseInt(clientVersioning[1]) == Integer.parseInt(requestedVersioning[1])) {
            // non-GA or non-FCS release won't be supported
            if (clientVersioning[2].indexOf("-") != -1)
                return false;
            int rUpdatePart1 = 0;
            int rUpdatePart2 = 0;
            int underbar = requestedVersioning[2].indexOf("_");
            if (underbar == -1) {
                rUpdatePart1 = Integer.parseInt(requestedVersioning[2]);
            } else {
                rUpdatePart1 = Integer.parseInt(requestedVersioning[2].substring(0, underbar));
                rUpdatePart2 = Integer.parseInt(requestedVersioning[2].substring(underbar + 1, requestedVersioning[2].length()));
            }
            int cUpdatePart1 = 0;
            int cUpdatePart2 = 0;
            underbar = clientVersioning[2].indexOf("_");
            if (underbar == -1) {
                cUpdatePart1 = Integer.parseInt(clientVersioning[2]);
            } else {
                cUpdatePart1 = Integer.parseInt(clientVersioning[2].substring(0, underbar));
                cUpdatePart2 = Integer.parseInt(clientVersioning[2].substring(underbar + 1, clientVersioning[2].length()));
            }
            if (cUpdatePart1 > rUpdatePart1)
                return true;
            else if (cUpdatePart1 == rUpdatePart1) {
                if (cUpdatePart2 > rUpdatePart2 || cUpdatePart2 == rUpdatePart2)
                    return true;
                else
                    return false;
            } else
                return false;
        } else
            return false;
    }
}
|
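The three identical validateMinJREVersion copies compare major, then minor, then an update revision split on '_', rejecting any client version containing '-' (non-GA/FCS builds). A small demonstration harness showing the expected outcomes (JnlpUtils is a hypothetical home for the static method):

public class JreVersionCheckDemo {
    public static void main(String[] args) {
        System.out.println(JnlpUtils.validateMinJREVersion("1.6.0_21", "1.6.0_10")); // true: newer update
        System.out.println(JnlpUtils.validateMinJREVersion("1.6.0_10", "1.6.0_21")); // false: older update
        System.out.println(JnlpUtils.validateMinJREVersion("1.7.0", "1.6.0_21"));    // true: newer minor version
        System.out.println(JnlpUtils.validateMinJREVersion("1.6.0-beta", "1.6.0"));  // false: non-GA build
    }
}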
684382_4
|
public boolean hasPadding() {
    if (packetLength >= HEADER_LENGTH)
        /* 0x20 = 0010 0000 */
        return ((packet[0] & 0x20) >> 5) == 1;
    else return false; // broken packet
}
|
684382_5
|
public boolean hasExtension() {
    if (packetLength >= HEADER_LENGTH)
        /* 0x10 = 0001 0000 */
        return ((packet[0] & 0x10) >> 4) == 1;
    else return false; // broken packet
}
|
684382_6
|
public boolean hasMarker() {
    if (packetLength >= HEADER_LENGTH) {
        return ((packet[1] & 0x80) >> 7) == 1;
    } else return false; // broken packet
}
|
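These three predicates read individual RTP header bits: padding is bit 5 of byte 0 (mask 0x20), extension bit 4 (0x10), and marker the top bit of byte 1 (0x80), each guarded by a minimum packet length. A standalone sketch over a fabricated 12-byte header:

public class RtpFlagSketch {
    static final int HEADER_LENGTH = 12; // fixed RTP header size

    public static void main(String[] args) {
        byte[] packet = new byte[HEADER_LENGTH];
        packet[0] = (byte) 0xB0; // 1011 0000: version=2, padding=1, extension=1
        packet[1] = (byte) 0x80; // marker bit set
        System.out.println(((packet[0] & 0x20) >> 5) == 1); // padding: true
        System.out.println(((packet[0] & 0x10) >> 4) == 1); // extension: true
        System.out.println(((packet[1] & 0x80) >> 7) == 1); // marker: true
    }
}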
688360_0
|
public static SimpleImmutableEntry<String, Object> validateStrictConfigFilePermissions(Path path, LinkOption... options)
        throws IOException {
    if ((path == null) || (!Files.exists(path, options))) {
        return null;
    }
    Collection<PosixFilePermission> perms = IoUtils.getPermissions(path, options);
    if (GenericUtils.isEmpty(perms)) {
        return null;
    }
    if (OsUtils.isUNIX()) {
        PosixFilePermission p = IoUtils.validateExcludedPermissions(perms, STRICTLY_PROHIBITED_FILE_PERMISSION);
        if (p != null) {
            return new SimpleImmutableEntry<>(String.format("Permissions violation (%s)", p), p);
        }
    }
    String owner = IoUtils.getFileOwner(path, options);
    if (GenericUtils.isEmpty(owner)) {
        // we cannot get owner
        // general issue: jvm does not support permissions
        // security issue: specific filesystem does not support permissions
        return null;
    }
    String current = OsUtils.getCurrentUser();
    Set<String> expected = new HashSet<>();
    expected.add(current);
    if (OsUtils.isUNIX()) {
        // Windows "Administrator" was considered however in Windows most likely a group is used.
        expected.add(OsUtils.ROOT_USER);
    }
    if (!expected.contains(owner)) {
        return new SimpleImmutableEntry<>(String.format("Owner violation (%s)", owner), owner);
    }
    return null;
}
|
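validateStrictConfigFilePermissions returns a (message, offending value) pair when the file is group/world-accessible or owned by someone other than the current user (or root on UNIX), and null when the file is acceptable or ownership cannot be determined. A hedged usage sketch, assuming the method sits on a SecurityUtilsSketch helper (name hypothetical):

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.AbstractMap.SimpleImmutableEntry;

public class StrictPermissionCheckUsage {
    public static void main(String[] args) throws Exception {
        Path key = Paths.get(System.getProperty("user.home"), ".ssh", "id_rsa");
        // SecurityUtilsSketch is a hypothetical home for the static method above.
        SimpleImmutableEntry<String, Object> violation =
                SecurityUtilsSketch.validateStrictConfigFilePermissions(key);
        if (violation != null) {
            System.err.println("Refusing to use " + key + ": " + violation.getKey());
        }
    }
}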
688360_1
|
public Collection<Path> scan() throws IOException, IllegalStateException {
    return scan(LinkedList::new);
}
|
688360_2
|
public Collection<Path> scan() throws IOException, IllegalStateException {
    return scan(LinkedList::new);
}
|
688360_3
|
@Override
public void close() throws IOException {
    // ignored
}
|
688360_4
|
@Override
public void close() throws IOException {
    // ignored
}
|
688360_5
|
public EmptyInputStream() {
    super();
}
|
688360_6
|
@Override
public void close() throws IOException {
    // ignored
}
|
688360_7
|
@Override
public int read() throws IOException {
    if (!isOpen()) {
        throw new EOFException("Stream is closed for reading one value");
    }
    return -1;
}
|
688360_8
|
@Override
public int read() throws IOException {
    if (!isOpen()) {
        throw new EOFException("Stream is closed for reading one value");
    }
    return -1;
}
|
688360_9
|
@Override
public int read() throws IOException {
    if (!isOpen()) {
        throw new EOFException("Stream is closed for reading one value");
    }
    return -1;
}
|
690050_0
|
@Override
@Transactional
public void restoreFromHistory(OtherPageHistorized historized) {
    otherPageHistorizer.restore(historized);
}
|
690050_1
|
@Override
@Transactional
public void save(OtherPage entity) {
    Action action = entity.isTransient() ? Action.CREATED : Action.MODIFIED;
    otherPageRepository.save(entity);
    otherPageHistorizer.historize(entity, action);
}
|
690050_2
|
@Override
@Transactional
public void delete(OtherPage entity) {
    otherPageHistorizer.deleteHistory(entity);
    otherPageRepository.delete(entity);
    mountService.delete(entity.getId().toString(), OtherPageConstants.HANDLER_KEY);
}
|
690050_3
|
@Override
@Transactional(readOnly = true)
public OtherPage findById(Integer id) {
    return otherPageRepository.findById(id);
}
|
690050_4
|
@Override
@Transactional(readOnly = true)
public OtherPage newOtherPageEntity() {
    OtherPage otherPage = new OtherPage();
    otherPage.setAllRights(otherPageRepository.findLastSelectedRights());
    return otherPage;
}
|
690050_5
|
@Override
public void historize(OtherPage otherPage, Action action) {
    historize(otherPage, action, null);
}
|
690050_6
|
@Override
public OtherPage restore(OtherPageHistorized historized) {
    OtherPage otherPage = historized.getOtherPage();
    otherPage.copyFrom(historized);
    otherPage.setAllRights(rightService.findWhitespaceSeparatedRights(historized.getRights()));
    otherPage.setUpdateModificationData(false);
    historize(otherPage, Action.RESTORED, historized.getVersionNumber());
    otherPageRepository.save(otherPage);
    return otherPage;
}
|
690050_7
|
@Override
public void deleteHistory(OtherPage otherPage) {
    otherPageHistorizedRepository.deleteHistoryForOtherPage(otherPage);
}
|
690050_8
|
@Override
@Transactional(readOnly = true)
public IRequestTarget getRequestTarget(String requestedUrl, MountPoint mountPoint) {
    String relatedContentId = mountPoint.getRelatedContentId();
    PageParameters pageParameters = new PageParameters("0=" + relatedContentId);
    return new BookmarkablePageRequestTarget(OtherPageViewPage.class, pageParameters);
}
|
690050_9
|
@Override
@Transactional(readOnly = true)
public boolean canHandlePageClass(Class<? extends Page> pageClazz, PageParameters pageParameters) {
    if (OtherPageViewPage.class.equals(pageClazz)) {
        String relatedContentId = pageParameters.getString("0");
        if (StringUtils.isNumeric(relatedContentId)) {
            return mountService.existsMountPoint(relatedContentId, getHandlerKey());
        }
    }
    return false;
}
|
692484_5
|
@Override
public void reduce(Gram ngram,
                   Iterator<Gram> values,
                   OutputCollector<Text, DoubleWritable> output,
                   Reporter reporter) throws IOException {
    int[] gramFreq = new int[2];
    gramFreq[0] = gramFreq[1] = -1;
    if (ngram.getType() == Gram.Type.UNIGRAM && emitUnigrams) {
        DoubleWritable dd = new DoubleWritable(ngram.getFrequency());
        Text t = new Text(ngram.getString());
        output.collect(t, dd);
        return;
    }
    // FIXME: better way to handle errors? Wouldn't an exception thrown here
    // cause hadoop to re-try the job?
    String[] gram = new String[2];
    while (values.hasNext()) {
        Gram value = values.next();
        int pos = value.getType() == Gram.Type.HEAD ? 0 : 1;
        if (gramFreq[pos] != -1) {
            log.warn("Extra {} for {}, skipping", value.getType(), ngram);
            if (value.getType() == Gram.Type.HEAD) {
                reporter.incrCounter(Skipped.EXTRA_HEAD, 1);
            } else {
                reporter.incrCounter(Skipped.EXTRA_TAIL, 1);
            }
            return;
        }
        gram[pos] = value.getString();
        gramFreq[pos] = value.getFrequency();
    }
    if (gramFreq[0] == -1) {
        log.warn("Missing head for {}, skipping.", ngram);
        reporter.incrCounter(Skipped.MISSING_HEAD, 1);
        return;
    } else if (gramFreq[1] == -1) {
        log.warn("Missing tail for {}, skipping", ngram);
        reporter.incrCounter(Skipped.MISSING_TAIL, 1);
        return;
    }
    int k11 = ngram.getFrequency(); /* a&b */
    int k12 = gramFreq[0] - ngram.getFrequency(); /* a&!b */
    int k21 = gramFreq[1] - ngram.getFrequency(); /* !b&a */
    int k22 = (int) (ngramTotal - (gramFreq[0] + gramFreq[1] - ngram.getFrequency())); /* !a&!b */
    try {
        double llr = ll.logLikelihoodRatio(k11, k12, k21, k22);
        if (llr < minLLRValue) {
            reporter.incrCounter(Skipped.LESS_THAN_MIN_LLR, 1);
            return;
        }
        DoubleWritable dd = new DoubleWritable(llr);
        Text t = new Text(ngram.getString());
        output.collect(t, dd);
    } catch (IllegalArgumentException ex) {
        reporter.incrCounter(Skipped.LLR_CALCULATION_ERROR, 1);
        log.error("Problem calculating LLR ratio: " + ex.getMessage());
        log.error("NGram: " + ngram);
        log.error("HEAD: " + gram[0] + ':' + gramFreq[0]);
        log.error("TAIL: " + gram[1] + ':' + gramFreq[1]);
        log.error("k11: " + k11 + " k12: " + k12 + " k21: " + k21 + " k22: " + k22);
    }
}
|
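The reducer assembles the 2x2 contingency table fed to the log-likelihood ratio test: k11 is the bigram count, k12 and k21 the head and tail occurring without the other, k22 everything else. A worked example with made-up frequencies:

public class LlrTableSketch {
    public static void main(String[] args) {
        int ngramFreq = 20, headFreq = 100, tailFreq = 80;
        long ngramTotal = 10_000;
        int k11 = ngramFreq;            // head and tail together      = 20
        int k12 = headFreq - ngramFreq; // head without tail           = 80
        int k21 = tailFreq - ngramFreq; // tail without head           = 60
        int k22 = (int) (ngramTotal - (headFreq + tailFreq - ngramFreq)); // neither = 9840
        System.out.printf("k11=%d k12=%d k21=%d k22=%d%n", k11, k12, k21, k22);
    }
}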
692484_7
|
@Override
public int getPartition(GramKey key, Gram value, int numPartitions) {
    // see: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java?revision=816664&view=markup
    int length = key.getLength() - 1;
    int right = (offset + length) % length;
    int hash = WritableComparator.hashBytes(key.getBytes(), right);
    return (hash & Integer.MAX_VALUE) % numPartitions;
}
|
697210_0
|
public static XStream initXStreamForDeserialization(final String allowTypes) {
    XStream xstream = new XStream();
    XStream.setupDefaultSecurity(xstream); // to be removed with XStream 1.5 and later
    xstream.allowTypesByWildcard(new String[]{allowTypes});
    return xstream;
}
|
697210_1
|
public static XStream initXStreamForDeserialization(final String allowTypes) {
    XStream xstream = new XStream();
    XStream.setupDefaultSecurity(xstream); // to be removed with XStream 1.5 and later
    xstream.allowTypesByWildcard(new String[]{allowTypes});
    return xstream;
}
|
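initXStreamForDeserialization applies the XStream 1.4.x hardening recipe: reset to the minimal built-in allow-list, then explicitly allow the application's own types by wildcard. A usage sketch (the package wildcard is illustrative):

import com.thoughtworks.xstream.XStream;

public class XStreamInitUsage {
    public static void main(String[] args) {
        XStream xstream = new XStream();
        // Reset to the minimal built-in allow-list (XStream 1.4.10+).
        XStream.setupDefaultSecurity(xstream);
        // Only matching types may be deserialized; the wildcard is illustrative.
        xstream.allowTypesByWildcard(new String[]{"com.example.model.**"});
        // Core types such as String remain permitted by the default list.
        String xml = xstream.toXML("hello");
        System.out.println(xstream.fromXML(xml));
    }
}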
697210_2
|
public String[] readNext() throws IOException {
    while (recordsQueue.isEmpty() && !eof) {
        char[] data = new char[CHUNK_SIZE];
        int size = r.read(data);
        if (size == -1) {
            break;
        }
        processChunk(data, size);
    }
    if (recordsQueue.isEmpty()) {
        if (wasEscapeOrNotOpeningQuote) {
            handlePreviousEscapeOrQuote(null);
        }
        if (quotedField) {
            throw new IllegalStateException("Missing quote character to close the quote char at ["
                    + quotedFieldStartRow + "," + quotedFieldStartCol + "]");
        }
        if (openRecord.isEmpty()) {
            return null;
        } else {
            if (openField.length() > 0) {
                openRecord.add(openField.toString());
                openField.delete(0, openField.length());
            }
            String[] result = openRecord.toArray(new String[]{});
            openRecord.clear();
            return result;
        }
    }
    return recordsQueue.removeFirst();
}
|
697210_3
|
public String[] readNext() throws IOException {
    while (recordsQueue.isEmpty() && !eof) {
        char[] data = new char[CHUNK_SIZE];
        int size = r.read(data);
        if (size == -1) {
            break;
        }
        processChunk(data, size);
    }
    if (recordsQueue.isEmpty()) {
        if (wasEscapeOrNotOpeningQuote) {
            handlePreviousEscapeOrQuote(null);
        }
        if (quotedField) {
            throw new IllegalStateException("Missing quote character to close the quote char at ["
                    + quotedFieldStartRow + "," + quotedFieldStartCol + "]");
        }
        if (openRecord.isEmpty()) {
            return null;
        } else {
            if (openField.length() > 0) {
                openRecord.add(openField.toString());
                openField.delete(0, openField.length());
            }
            String[] result = openRecord.toArray(new String[]{});
            openRecord.clear();
            return result;
        }
    }
    return recordsQueue.removeFirst();
}
|
697210_4
|
public String[] readNext() throws IOException {
    while (recordsQueue.isEmpty() && !eof) {
        char[] data = new char[CHUNK_SIZE];
        int size = r.read(data);
        if (size == -1) {
            break;
        }
        processChunk(data, size);
    }
    if (recordsQueue.isEmpty()) {
        if (wasEscapeOrNotOpeningQuote) {
            handlePreviousEscapeOrQuote(null);
        }
        if (quotedField) {
            throw new IllegalStateException("Missing quote character to close the quote char at ["
                    + quotedFieldStartRow + "," + quotedFieldStartCol + "]");
        }
        if (openRecord.isEmpty()) {
            return null;
        } else {
            if (openField.length() > 0) {
                openRecord.add(openField.toString());
                openField.delete(0, openField.length());
            }
            String[] result = openRecord.toArray(new String[]{});
            openRecord.clear();
            return result;
        }
    }
    return recordsQueue.removeFirst();
}
|
697210_5
|
public String[] readNext() throws IOException {
    while (recordsQueue.isEmpty() && !eof) {
        char[] data = new char[CHUNK_SIZE];
        int size = r.read(data);
        if (size == -1) {
            break;
        }
        processChunk(data, size);
    }
    if (recordsQueue.isEmpty()) {
        if (wasEscapeOrNotOpeningQuote) {
            handlePreviousEscapeOrQuote(null);
        }
        if (quotedField) {
            throw new IllegalStateException("Missing quote character to close the quote char at ["
                    + quotedFieldStartRow + "," + quotedFieldStartCol + "]");
        }
        if (openRecord.isEmpty()) {
            return null;
        } else {
            if (openField.length() > 0) {
                openRecord.add(openField.toString());
                openField.delete(0, openField.length());
            }
            String[] result = openRecord.toArray(new String[]{});
            openRecord.clear();
            return result;
        }
    }
    return recordsQueue.removeFirst();
}
|
697210_6
|
public String[] readNext() throws IOException {
    while (recordsQueue.isEmpty() && !eof) {
        char[] data = new char[CHUNK_SIZE];
        int size = r.read(data);
        if (size == -1) {
            break;
        }
        processChunk(data, size);
    }
    if (recordsQueue.isEmpty()) {
        if (wasEscapeOrNotOpeningQuote) {
            handlePreviousEscapeOrQuote(null);
        }
        if (quotedField) {
            throw new IllegalStateException("Missing quote character to close the quote char at ["
                    + quotedFieldStartRow + "," + quotedFieldStartCol + "]");
        }
        if (openRecord.isEmpty()) {
            return null;
        } else {
            if (openField.length() > 0) {
                openRecord.add(openField.toString());
                openField.delete(0, openField.length());
            }
            String[] result = openRecord.toArray(new String[]{});
            openRecord.clear();
            return result;
        }
    }
    return recordsQueue.removeFirst();
}
|
697210_7
|
public String[] readNext() throws IOException {
    while (recordsQueue.isEmpty() && !eof) {
        char[] data = new char[CHUNK_SIZE];
        int size = r.read(data);
        if (size == -1) {
            break;
        }
        processChunk(data, size);
    }
    if (recordsQueue.isEmpty()) {
        if (wasEscapeOrNotOpeningQuote) {
            handlePreviousEscapeOrQuote(null);
        }
        if (quotedField) {
            throw new IllegalStateException("Missing quote character to close the quote char at ["
                    + quotedFieldStartRow + "," + quotedFieldStartCol + "]");
        }
        if (openRecord.isEmpty()) {
            return null;
        } else {
            if (openField.length() > 0) {
                openRecord.add(openField.toString());
                openField.delete(0, openField.length());
            }
            String[] result = openRecord.toArray(new String[]{});
            openRecord.clear();
            return result;
        }
    }
    return recordsQueue.removeFirst();
}
|
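readNext pulls chunks through processChunk until a full record is queued, returns one String[] per call, and signals end of input with null; an unterminated quote raises IllegalStateException. A typical read loop, assuming a reader class with this method and a Reader-taking constructor (names hypothetical):

import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;

public class CsvReadLoopSketch {
    public static void main(String[] args) throws IOException {
        // CsvReaderSketch stands in for the class declaring readNext above.
        CsvReaderSketch reader =
                new CsvReaderSketch(new StringReader("a,b,\"c,d\"\n1,2,3\n"));
        String[] record;
        while ((record = reader.readNext()) != null) { // null marks end of input
            System.out.println(Arrays.toString(record));
        }
        // Expected, assuming default quoting rules:
        // [a, b, c,d]
        // [1, 2, 3]
    }
}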
697210_8
|
@Override
public String toString() {
    return "CsvConfiguration [hasHeader=" + hasHeader + ", separator=" + separator + ", quotechar=" + quotechar + ", escape=" + escape + ", skipSpaces=" + skipSpaces + "]";
}
|
697210_9
|
public String generateMaqlCreate() {
    String script = "# This is MAQL script that generates project logical model.\n# See the MAQL documentation " +
            "at http://developer.gooddata.com/reference/maql/maql-ddl for more details.\n\n";
    script += "# Create dataset. Dataset groups all following logical model elements together.\n";
    script += "CREATE DATASET {" + schema.getDatasetName() + "} VISUAL(TITLE \"" + lsn + "\");\n\n";
    script += generateFoldersMaqlDdl(schema.getColumns());
    script += generateMaqlAdd(schema.getColumns(), new ArrayList<SourceColumn>(), true);
    return script;
}
|
702991_0
|
public synchronized ValidationResult validate(DataSource source)
        throws ValidationException {
    DocumentHandler handler = createDocumentHandler(source);
    try {
        // syntax (javacc) validation
        try {
            PDFParser parser = new PDFParser(source.getInputStream());
            parser.PDF();
            handler.setParser(parser);
        } catch (IOException e) {
            throw new ValidationException("Failed to parse datasource due to : "
                    + e.getMessage(), e);
        } catch (ParseException e) {
            return createErrorResult(e);
        }
        // if here is reached, validate with helpers
        // init PDF Box document
        PDDocument document = null;
        try {
            document = PDDocument.load(handler.getSource().getInputStream());
            handler.setDocument(document);
        } catch (IOException e) {
            throw new ValidationException("PDFBox failed to parse datasource", e);
        }
        // init PDF Extractor
        try {
            SimpleCharStream scs = new SimpleCharStream(source.getInputStream());
            ExtractorTokenManager extractor = new ExtractorTokenManager(scs);
            extractor.parse();
            handler.setPdfExtractor(extractor);
        } catch (IOException e) {
            throw new ValidationException(
                    "PDF ExtractorTokenMng failed to parse datasource", e);
        }
        // call all helpers
        ArrayList<ValidationError> allErrors = new ArrayList<ValidationError>();
        // Execute priority helpers.
        for (AbstractValidationHelper helper : priorHelpers) {
            runValidation(handler, helper, allErrors);
        }
        // Execute other helpers.
        for (AbstractValidationHelper helper : standHelpers) {
            runValidation(handler, helper, allErrors);
        }
        // check result
        ValidationResult valRes = null;
        if (allErrors.size() == 0) {
            valRes = new ValidationResult(true);
        } else {
            // there are some errors
            valRes = new ValidationResult(allErrors);
        }
        // addition of some objects to avoid a second file parsing
        valRes.setPdf(document);
        valRes.setXmpMetaData(handler.getMetadata());
        return valRes;
    } catch (ValidationException e) {
        // ---- Close all open resources if an error occurs.
        handler.close();
        throw e;
    }
}
|
702991_1
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_2
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_3
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_4
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_5
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_6
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_7
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_8
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
702991_9
|
public List<ValidationError> validateMetadataSynchronization(PDDocument document, XMPMetadata metadata)
        throws ValidationException {
    List<ValidationError> ve = new ArrayList<ValidationError>();
    if (document == null) {
        throw new ValidationException("Document provided is null");
    } else {
        PDDocumentInformation dico = document.getDocumentInformation();
        if (metadata == null) {
            throw new ValidationException("Metadata provided are null");
        } else {
            DublinCoreSchema dc = metadata.getDublinCoreSchema();
            // TITLE
            analyzeTitleProperty(dico, dc, ve);
            // AUTHOR
            analyzeAuthorProperty(dico, dc, ve);
            // SUBJECT
            analyzeSubjectProperty(dico, dc, ve);
            AdobePDFSchema pdf = metadata.getAdobePDFSchema();
            // KEYWORDS
            analyzeKeywordsProperty(dico, pdf, ve);
            // PRODUCER
            analyzeProducerProperty(dico, pdf, ve);
            XMPBasicSchema xmp = metadata.getXMPBasicSchema();
            // CREATOR TOOL
            analyzeCreatorToolProperty(dico, xmp, ve);
            // CREATION DATE
            analyzeCreationDateProperty(dico, xmp, ve);
            // MODIFY DATE
            analyzeModifyDateProperty(dico, xmp, ve);
        }
    }
    return ve;
}
|
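Each analyze*Property call in the nine copies above compares one Info-dictionary entry with its XMP counterpart (dc:title, dc:creator, pdf:Keywords, xmp:CreatorTool, the dates) and appends a ValidationError on mismatch, since PDF/A requires the two to agree. A minimal sketch of one such comparison (types and helpers are stand-ins, not the PDFBox/XMPBox originals):

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public class InfoXmpSyncSketch {
    // Stand-in for one analyze*Property step: Info /Title vs dc:title.
    static void analyzeTitle(String infoTitle, String xmpTitle, List<String> errors) {
        if (!Objects.equals(infoTitle, xmpTitle)) {
            errors.add("Title mismatch: Info=\"" + infoTitle + "\" XMP=\"" + xmpTitle + "\"");
        }
    }

    public static void main(String[] args) {
        List<String> errors = new ArrayList<>();
        analyzeTitle("Annual Report", "Annual report", errors);
        System.out.println(errors); // one mismatch reported
    }
}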
710733_0
|
@Override
public Iterable<Row<T, K>> scan(K startRow, K stopRow) throws AvroBaseException {
    BasicDBObject b = b();
    if (startRow != null) {
        if (stopRow == null) {
            b.put("id", b("$gte", startRow));
        } else {
            BasicDBObject query = b("$gte", startRow);
            query.put("$lt", stopRow);
            b = b("id", query);
        }
    } else if (stopRow != null) b.put("id", b("$lt", stopRow));
    final DBCursor dbCursor = rows.find(b);
    dbCursor.sort(b("id", 1));
    return new Iterable<Row<T, K>>() {
        @Override
        public Iterator<Row<T, K>> iterator() {
            final Iterator<DBObject> iterator = dbCursor.iterator();
            return new Iterator<Row<T, K>>() {
                @Override
                public boolean hasNext() {
                    return iterator.hasNext();
                }

                @Override
                public Row<T, K> next() {
                    DBObject next = iterator.next();
                    return newrow((K) next.get("id"), next);
                }

                @Override
                public void remove() {
                }
            };
        }
    };
}
|
710733_1
|
@Override
public byte[] create(T value) throws AvroBaseException {
    switch (createType) {
        case CUSTOM: {
            // loop until we don't get an ID collision
            byte[] row;
            do {
                row = keygen.get();
            } while (!put(row, value, 0));
            return row;
        }
        case RANDOM: {
            // loop until we don't get a random ID collision
            byte[] row;
            do {
                row = Bytes.toBytes(random.nextLong());
            } while (!put(row, value, 0));
            return row;
        }
        case SEQUENTIAL: {
            HTableInterface table = getTable();
            try {
                byte[] row;
                do {
                    row = getNextRow(table, family);
                } while (!put(row, value, 0));
                return row;
            } catch (IOException e) {
                throw new AvroBaseException("Failed to increment column", e);
            } finally {
                pool.putTable(table);
            }
        }
        case TIMESTAMP:
        case REVERSE_TIMESTAMP: {
            HTableInterface table = getTable();
            try {
                byte[] row;
                do {
                    long l = createType == CreateType.TIMESTAMP ?
                            TIMESTAMP_GENERATOR.getTimestamp() :
                            TIMESTAMP_GENERATOR.getInvertedTimestamp();
                    row = Bytes.toBytes(l);
                } while (!put(row, value, 0));
                return row;
            } finally {
                pool.putTable(table);
            }
        }
    }
    return null;
}
|
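Every branch of the byte[]-keyed create above, and the String-keyed create further down (710733_7), shares one idiom: generate a candidate key and retry a conditional put at version 0 until no collision. A generic sketch of that retry loop (the store and types are stand-ins):

import java.util.HashSet;
import java.util.Set;
import java.util.function.Supplier;

public class CreateRetrySketch {
    private final Set<String> store = new HashSet<>();

    // Stand-in for put(row, value, 0): succeeds only if the key is new.
    boolean putIfAbsent(String row) {
        return store.add(row);
    }

    // Generate candidate keys until a conditional put succeeds.
    String create(Supplier<String> keygen) {
        String row;
        do {
            row = keygen.get();
        } while (!putIfAbsent(row));
        return row;
    }

    public static void main(String[] args) {
        CreateRetrySketch base = new CreateRetrySketch();
        System.out.println(base.create(() -> "row-" + System.nanoTime()));
    }
}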
710733_2
|
@Override
public Iterable<Row<T, byte[]>> scan(byte[] startRow, byte[] stopRow) throws AvroBaseException {
    Scan scan = new Scan();
    scan.addFamily(family);
    if (startRow != null) {
        scan.setStartRow(startRow);
    }
    if (stopRow != null) {
        scan.setStopRow(stopRow);
    }
    HTableInterface table = pool.getTable(tableName);
    try {
        ResultScanner scanner = table.getScanner(scan);
        final Iterator<Result> results = scanner.iterator();
        return new Iterable<Row<T, byte[]>>() {
            @Override
            public Iterator<Row<T, byte[]>> iterator() {
                return new Iterator<Row<T, byte[]>>() {
                    Row<T, byte[]> r;

                    @Override
                    public boolean hasNext() {
                        if (r != null) return true;
                        while (results.hasNext()) {
                            Result result = results.next();
                            r = getRowResult(result, result.getRow());
                            // Skip empty rows and the increment row
                            if (r == null || r.row.length == 0) {
                                continue;
                            }
                            return true;
                        }
                        return false;
                    }

                    @Override
                    public Row<T, byte[]> next() {
                        if (hasNext()) {
                            try {
                                return r;
                            } finally {
                                r = null;
                            }
                        }
                        throw new NoSuchElementException();
                    }

                    @Override
                    public void remove() {
                        throw new NotImplementedException();
                    }
                };
            }
        };
    } catch (IOException e) {
        throw new AvroBaseException(e);
    } finally {
        // FIXME: Is this safe?
        pool.putTable(table);
    }
}
|
710733_3
|
@Override
public Row<T, byte[]> get(byte[] row) throws AvroBaseException {
    HTableInterface table = getTable();
    try {
        Result result = getHBaseRow(table, row, family);
        // TODO: This is working around a bug in HBASE 0.89
        if (row.length == 0 && !Bytes.equals(row, result.getRow())) {
            return null;
        }
        return getRowResult(result, row);
    } catch (IOException e) {
        throw new AvroBaseException(e);
    } finally {
        pool.putTable(table);
    }
}
|
710733_4
|
public static <T extends SpecificRecord, K, Q> AvroBase<T, K> createAvroBase(Module module, Class<? extends AvroBase> clazz, final AvroFormat format) throws AvroBaseException {
    Injector injector = Guice.createInjector(module);
    return injector.createChildInjector(new Module() {
        @Override
        public void configure(Binder binder) {
            binder.bind(AvroFormat.class).toInstance(format);
        }
    }).getInstance(clazz);
}
|
710733_5
|
public Row<T, K> clone() {
    Schema schema = value.getSchema();
    T newvalue;
    try {
        newvalue = (T) Class.forName(schema.getFullName()).newInstance();
    } catch (Exception e) {
        throw new AvroBaseException("Could not clone row", e);
    }
    for (Schema.Field field : schema.getFields()) {
        int pos = field.pos();
        newvalue.put(pos, value.get(pos));
    }
    return new Row<T, K>(newvalue, row, version);
}
|
710733_6
|
@Override
public void delete(final String row) throws AvroBaseException {
    try {
        boolean returned = false;
        Jedis j = pool.getResource();
        try {
            j.select(db);
            List<Object> results;
            do {
                results = j.multi(new TransactionBlock() {
                    @Override
                    public void execute() throws JedisException {
                        del(row + v); // Delete the version first and it is deleted
                        del(row + d);
                        del(row + s);
                    }
                });
            } while (results == null);
        } catch (Exception e) {
            pool.returnBrokenResource(j);
            returned = true;
            throw new AvroBaseException(e);
        } finally {
            if (!returned) pool.returnResource(j);
        }
    } catch (TimeoutException e) {
        throw new AvroBaseException("Timed out", e);
    }
}
|
710733_7
|
@Override
public String create(T value) throws AvroBaseException {
    String row;
    do {
        row = kg.get();
    } while (!put(row, value, 0));
    return row;
}
|
710733_8
|
@Override
public Row<T, K> get(byte[] row) throws AvroBaseException {
    try {
        ResultSet rs = session.find(new String[]{new String(row)});
        if (rs.next()) {
            int schema_id = rs.getInt(1);
            long version = rs.getLong(2);
            AvroFormat format = AvroFormat.values()[rs.getByte(3)];
            byte[] avro = rs.getBytes(4);
            Schema schema = getSchema(schema_id);
            if (schema != null) {
                return new Row<T, K>(readValue(avro, schema, format), keytx.fromBytes(row), version);
            } else {
                throw new AvroBaseException("Failed to find schema: " + schema_id);
            }
        } else {
            return null;
        }
    } catch (Exception e) {
        throw new AvroBaseException("Failed to retrieve row", e);
    }
}
|
710733_9
|
@Override
public Row<T, K> get(byte[] row) throws AvroBaseException {
    try {
        ResultSet rs = session.find(new String[]{new String(row)});
        if (rs.next()) {
            int schema_id = rs.getInt(1);
            long version = rs.getLong(2);
            AvroFormat format = AvroFormat.values()[rs.getByte(3)];
            byte[] avro = rs.getBytes(4);
            Schema schema = getSchema(schema_id);
            if (schema != null) {
                return new Row<T, K>(readValue(avro, schema, format), keytx.fromBytes(row), version);
            } else {
                throw new AvroBaseException("Failed to find schema: " + schema_id);
            }
        } else {
            return null;
        }
    } catch (Exception e) {
        throw new AvroBaseException("Failed to retrieve row", e);
    }
}
|
711931_0
|
public void fetchAlbums(StatusUpdate su, boolean async) {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_1
|
public void fetchAlbums(StatusUpdate su, boolean async) {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_2
|
public boolean checkAuth() {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_3
|
public boolean checkAuth() {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_4
|
public boolean checkAuth() {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_5
|
public void uploadFiles(StatusUpdate su, boolean async) {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_6
|
public void uploadFiles(StatusUpdate su, boolean async) {
    throw new RuntimeException("This method is not available on this protocol");
}
|
711931_7
|
public void newAlbum(StatusUpdate su, Album album, boolean async) {
    throw new RuntimeException("This method is not available on this protocol");
}
|
726694_0
|
public Boolean valid()
{
    return max.getX() >= min.getX() && max.getY() >= min.getY();
}
|