output
stringlengths
64
73.2k
input
stringlengths
208
73.3k
instruction
stringclasses
1 value
#fixed code public static void displayDifferences(PrintStream out, Context context, String actionStr, List<Difference> differences, Consumer<Difference> displayOneDifference) { int truncateOutput = context.getTruncateOutput(); if (truncateOutput < 1) { return; } int quarter = truncateOutput / 4; int differencesSize = differences.size(); for (int index = 0; index < differencesSize; index++) { Difference difference = differences.get(index); if (index >= truncateOutput && (differencesSize - index) > quarter) { out.println(" [Too many lines. Truncating the output] ..."); int moreFiles = differencesSize - index; out.printf("%s%d %s more%n", actionStr, moreFiles, plural("file", moreFiles)); break; } if (displayOneDifference != null) { displayOneDifference.accept(difference); } } }
#vulnerable code public static void displayDifferences(PrintStream out, Context context, String actionStr, List<Difference> differences, Consumer<Difference> displayOneDifference) { int truncateOutput = context.getTruncateOutput(); if (truncateOutput < 1) { return; } int quarter = truncateOutput / 4; int differencesSize = differences.size(); for (int index = 0; index < differencesSize; index++) { Difference difference = differences.get(index); if (index >= truncateOutput && (differencesSize - index) > quarter) { out.println(" [Too many lines. Truncating the output] ..."); int moreFiles = differencesSize - index; out.printf(actionStr + "%d %s more%n", moreFiles, plural("file", moreFiles)); break; } if (displayOneDifference != null) { displayOneDifference.accept(difference); } } } #location 16 #vulnerability type CHECKERS_PRINTF_ARGS
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public State loadState(int stateNumber) throws IOException { File stateFile = getStateFile(stateNumber); if (!stateFile.exists()) { throw new IllegalStateException(String.format("Unable to load State file %d from directory %s", stateNumber, stateDir)); } State state = State.loadFromGZipFile(stateFile); adjustAccordingToHashMode(state); return state; }
#vulnerable code public State loadState(int stateNumber) throws IOException { File stateFile = getStateFile(stateNumber); if (!stateFile.exists()) { throw new IllegalStateException(String.format("Unable to load State file %d from directory %s", stateNumber, stateDir)); } State state = new State(); state.loadFromGZipFile(stateFile); // Replace by 'no_hash' accurately to be able to compare the FileState entry switch (parameters.getHashMode()) { case DONT_HASH_FILES: for (FileState fileState : state.getFileStates()) { fileState.getFileHash().setFirstFourKiloHash(FileState.NO_HASH); fileState.getFileHash().setFirstMegaHash(FileState.NO_HASH); fileState.getFileHash().setFullHash(FileState.NO_HASH); } break; case HASH_ONLY_FIRST_FOUR_KILO: for (FileState fileState : state.getFileStates()) { fileState.getFileHash().setFirstMegaHash(FileState.NO_HASH); fileState.getFileHash().setFullHash(FileState.NO_HASH); } break; case HASH_ONLY_FIRST_MEGA: for (FileState fileState : state.getFileStates()) { fileState.getFileHash().setFirstFourKiloHash(FileState.NO_HASH); fileState.getFileHash().setFullHash(FileState.NO_HASH); } break; case COMPUTE_ALL_HASH: // Nothing to do break; } return state; } #location 16 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public CompareResult displayChanges() { if (lastState != null) { System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp())); if (lastState.getComment().length() > 0) { System.out.println("Comment: " + lastState.getComment()); } Console.newLine(); } if (!context.isVerbose()) { displayCounts(); return this; } String stateFormat = "%-17s "; final String addedStr = String.format(stateFormat, "Added:"); displayDifferences(addedStr, added, diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName())); final String copiedStr = String.format(stateFormat, "Copied:"); displayDifferences(copiedStr, copied, diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName())); final String duplicatedStr = String.format(stateFormat, "Duplicated:"); displayDifferences(duplicatedStr, duplicated, diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true))); final String dateModifiedStr = String.format(stateFormat, "Date modified:"); displayDifferences(dateModifiedStr, dateModified, diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); final String contentModifiedStr = String.format(stateFormat, "Content modified:"); displayDifferences(contentModifiedStr, contentModified, diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); final String attrsModifiedStr = String.format(stateFormat, "Attrs. 
modified:"); displayDifferences(attrsModifiedStr, attributesModified, diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); final String renamedStr = String.format(stateFormat, "Renamed:"); displayDifferences(renamedStr, renamed, diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true))); final String deletedStr = String.format(stateFormat, "Deleted:"); displayDifferences(deletedStr, deleted, diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName())); final String corruptedStr = String.format(stateFormat, "Corrupted?:"); displayDifferences(corruptedStr, corrupted, diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); if (somethingModified()) { Console.newLine(); } displayCounts(); return this; }
#vulnerable code public CompareResult displayChanges() { if (lastState != null) { System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp())); if (lastState.getComment().length() > 0) { System.out.println("Comment: " + lastState.getComment()); } Console.newLine(); } if (!context.isVerbose()) { displayCounts(); return this; } String stateFormat = "%-17s "; for (Difference diff : added) { System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName()); } for (Difference diff : copied) { System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()); } for (Difference diff : duplicated) { System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)); } for (Difference diff : dateModified) { System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } for (Difference diff : contentModified) { System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } for (Difference diff : attributesModified) { System.out.printf(stateFormat + "%s \t%s%n", "Attrs. 
modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } for (Difference diff : renamed) { System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)); } for (Difference diff : deleted) { System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName()); } for (Difference diff : corrupted) { System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } if (somethingModified()) { Console.newLine(); } displayCounts(); return this; } #location 23 #vulnerability type CHECKERS_PRINTF_ARGS
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public State generateState(String comment, File fimRepositoryRootDir) throws IOException, NoSuchAlgorithmException { Logger.info(String.format("Scanning recursively local files, %s, using %d thread", hashModeToString(), parameters.getThreadCount())); System.out.printf(" (Hash progress legend for files grouped %d by %d: %s)%n", PROGRESS_DISPLAY_FILE_COUNT, PROGRESS_DISPLAY_FILE_COUNT, hashProgressLegend()); State state = new State(); state.setComment(comment); long start = System.currentTimeMillis(); progressOutputInit(); BlockingDeque<File> filesToHash = new LinkedBlockingDeque<>(1000); List<FileHasher> hashers = new ArrayList<>(); executorService = Executors.newFixedThreadPool(parameters.getThreadCount()); for (int index = 0; index < parameters.getThreadCount(); index++) { FileHasher hasher = new FileHasher(this, filesToHash, fimRepositoryRootDir.toString()); executorService.submit(hasher); hashers.add(hasher); } scanFileTree(filesToHash, fimRepositoryRootDir); waitAllFileHashed(); for (FileHasher hasher : hashers) { state.getFileStates().addAll(hasher.getFileStates()); totalFileContentLength += hasher.getTotalFileContentLength(); totalBytesHashed += hasher.getTotalBytesHashed(); } Collections.sort(state.getFileStates(), fileNameComparator); progressOutputStop(); displayStatistics(start, state); return state; }
#vulnerable code public State generateState(String comment, File fimRepositoryRootDir) throws IOException, NoSuchAlgorithmException { Logger.info(String.format("Scanning recursively local files, %s, using %d thread", hashModeToString(), parameters.getThreadCount())); System.out.printf(" (Hash progress legend: " + hashProgressLegend() + ")%n"); State state = new State(); state.setComment(comment); long start = System.currentTimeMillis(); progressOutputInit(); BlockingDeque<File> filesToHash = new LinkedBlockingDeque<>(1000); List<FileHasher> hashers = new ArrayList<>(); executorService = Executors.newFixedThreadPool(parameters.getThreadCount()); for (int index = 0; index < parameters.getThreadCount(); index++) { FileHasher hasher = new FileHasher(this, filesToHash, fimRepositoryRootDir.toString()); executorService.submit(hasher); hashers.add(hasher); } scanFileTree(filesToHash, fimRepositoryRootDir); waitAllFileHashed(); for (FileHasher hasher : hashers) { state.getFileStates().addAll(hasher.getFileStates()); totalFileContentLength += hasher.getTotalFileContentLength(); totalBytesHashed += hasher.getTotalBytesHashed(); } Collections.sort(state.getFileStates(), fileNameComparator); progressOutputStop(); displayStatistics(start, state); return state; } #location 4 #vulnerability type CHECKERS_PRINTF_ARGS
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public CompareResult displayChanges() { if (lastState != null) { System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp())); if (lastState.getComment().length() > 0) { System.out.println("Comment: " + lastState.getComment()); } Console.newLine(); } if (!context.isVerbose()) { displayCounts(); return this; } String stateFormat = "%-17s "; final String addedStr = String.format(stateFormat, "Added:"); displayDifferences(addedStr, added, diff -> System.out.printf(addedStr + "%s%n", diff.getFileState().getFileName())); final String copiedStr = String.format(stateFormat, "Copied:"); displayDifferences(copiedStr, copied, diff -> System.out.printf(copiedStr + "%s \t(was %s)%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName())); final String duplicatedStr = String.format(stateFormat, "Duplicated:"); displayDifferences(duplicatedStr, duplicated, diff -> System.out.printf(duplicatedStr + "%s = %s%s%n", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true))); final String dateModifiedStr = String.format(stateFormat, "Date modified:"); displayDifferences(dateModifiedStr, dateModified, diff -> System.out.printf(dateModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); final String contentModifiedStr = String.format(stateFormat, "Content modified:"); displayDifferences(contentModifiedStr, contentModified, diff -> System.out.printf(contentModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); final String attrsModifiedStr = String.format(stateFormat, "Attrs. 
modified:"); displayDifferences(attrsModifiedStr, attributesModified, diff -> System.out.printf(attrsModifiedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); final String renamedStr = String.format(stateFormat, "Renamed:"); displayDifferences(renamedStr, renamed, diff -> System.out.printf(renamedStr + "%s -> %s%s%n", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true))); final String deletedStr = String.format(stateFormat, "Deleted:"); displayDifferences(deletedStr, deleted, diff -> System.out.printf(deletedStr + "%s%n", diff.getFileState().getFileName())); final String corruptedStr = String.format(stateFormat, "Corrupted?:"); displayDifferences(corruptedStr, corrupted, diff -> System.out.printf(corruptedStr + "%s \t%s%n", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false))); if (somethingModified()) { Console.newLine(); } displayCounts(); return this; }
#vulnerable code public CompareResult displayChanges() { if (lastState != null) { System.out.printf("Comparing with the last committed state from %s%n", formatDate(lastState.getTimestamp())); if (lastState.getComment().length() > 0) { System.out.println("Comment: " + lastState.getComment()); } Console.newLine(); } if (!context.isVerbose()) { displayCounts(); return this; } String stateFormat = "%-17s "; for (Difference diff : added) { System.out.printf(stateFormat + "%s%n", "Added:", diff.getFileState().getFileName()); } for (Difference diff : copied) { System.out.printf(stateFormat + "%s \t(was %s)%n", "Copied:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName()); } for (Difference diff : duplicated) { System.out.printf(stateFormat + "%s = %s%s%n", "Duplicated:", diff.getFileState().getFileName(), diff.getPreviousFileState().getFileName(), formatModifiedAttributes(diff, true)); } for (Difference diff : dateModified) { System.out.printf(stateFormat + "%s \t%s%n", "Date modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } for (Difference diff : contentModified) { System.out.printf(stateFormat + "%s \t%s%n", "Content modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } for (Difference diff : attributesModified) { System.out.printf(stateFormat + "%s \t%s%n", "Attrs. 
modified:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } for (Difference diff : renamed) { System.out.printf(stateFormat + "%s -> %s%s%n", "Renamed:", diff.getPreviousFileState().getFileName(), diff.getFileState().getFileName(), formatModifiedAttributes(diff, true)); } for (Difference diff : deleted) { System.out.printf(stateFormat + "%s%n", "Deleted:", diff.getFileState().getFileName()); } for (Difference diff : corrupted) { System.out.printf(stateFormat + "%s \t%s%n", "Corrupted?:", diff.getFileState().getFileName(), formatModifiedAttributes(diff, false)); } if (somethingModified()) { Console.newLine(); } displayCounts(); return this; } #location 43 #vulnerability type CHECKERS_PRINTF_ARGS
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public void outputInit() { progressLock.lock(); try { summedFileLength = 0; fileCount = 0; } finally { progressLock.unlock(); } }
#vulnerable code public void outputInit() { summedFileLength = 0; fileCount = 0; } #location 2 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static void main(String[] args) throws IOException { String[] filteredArgs = filterEmptyArgs(args); if (filteredArgs.length < 1) { youMustSpecifyACommandToRun(); } Command command = Command.fromName(filteredArgs[0]); if (command == null) { youMustSpecifyACommandToRun(); } CommandLineParser cmdLineGnuParser = new GnuParser(); Options options = constructOptions(); CommandLine commandLine; boolean verbose = true; CompareMode compareMode = CompareMode.FULL; String message = ""; boolean useLastState = false; int threadCount = Runtime.getRuntime().availableProcessors(); try { String[] actionArgs = Arrays.copyOfRange(filteredArgs, 1, filteredArgs.length); commandLine = cmdLineGnuParser.parse(options, actionArgs); if (commandLine.hasOption("h")) { printUsage(); System.exit(0); } else { verbose = !commandLine.hasOption('q'); compareMode = commandLine.hasOption('f') ? CompareMode.FAST : CompareMode.FULL; message = commandLine.getOptionValue('m', message); threadCount = Integer.parseInt(commandLine.getOptionValue('t', "" + threadCount)); useLastState = commandLine.hasOption('l'); } } catch (Exception ex) { printUsage(); System.exit(-1); } if (compareMode == CompareMode.FAST) { threadCount = 1; System.out.println("Using fast compare mode. Thread count forced to 1"); } if (threadCount < 1) { System.out.println("Thread count must be at least one"); System.exit(0); } File baseDirectory = new File("."); File stateDir = new File(StateGenerator.FIC_DIR, "states"); if (command == Command.INIT) { if (stateDir.exists()) { System.out.println("fim repository already exist"); System.exit(0); } } else { if (!stateDir.exists()) { System.out.println("fim repository does not exist. 
Please run 'fim init' before."); System.exit(-1); } } State lastState; State currentState; StateGenerator generator = new StateGenerator(threadCount, compareMode); StateManager manager = new StateManager(stateDir, compareMode); StateComparator comparator = new StateComparator(compareMode); DuplicateFinder finder = new DuplicateFinder(); switch (command) { case INIT: fastCompareNotSupported(compareMode); stateDir.mkdirs(); currentState = generator.generateState("Initial State", baseDirectory); comparator.compare(null, currentState).displayChanges(verbose); manager.createNewState(currentState); break; case COMMIT: fastCompareNotSupported(compareMode); lastState = manager.loadLastState(); currentState = generator.generateState(message, baseDirectory); CompareResult result = comparator.compare(lastState, currentState).displayChanges(verbose); if (result.somethingModified()) { System.out.println(""); if (confirmCommand("commit")) { manager.createNewState(currentState); } else { System.out.println("Nothing committed"); } } break; case DIFF: lastState = manager.loadLastState(); currentState = generator.generateState(message, baseDirectory); comparator.compare(lastState, currentState).displayChanges(verbose); break; case FIND_DUPLICATES: fastCompareNotSupported(compareMode); System.out.println("Searching for duplicated files" + (useLastState ? " from the last committed State" : "")); System.out.println(""); State state; if (useLastState) { state = manager.loadLastState(); } else { state = generator.generateState(message, baseDirectory); } finder.findDuplicates(state).displayDuplicates(verbose); break; case RESET_DATES: fastCompareNotSupported(compareMode); lastState = manager.loadLastState(); manager.resetDates(lastState); break; case LOG: manager.displayStatesLog(); break; } }
#vulnerable code public static void main(String[] args) throws IOException { String[] filteredArgs = filterEmptyArgs(args); if (filteredArgs.length < 1) { youMustSpecifyACommandToRun(); } Command command = Command.fromName(filteredArgs[0]); if (command == null) { youMustSpecifyACommandToRun(); } CommandLineParser cmdLineGnuParser = new GnuParser(); Options options = constructOptions(); CommandLine commandLine; boolean verbose = true; CompareMode compareMode = CompareMode.FULL; String message = ""; boolean useLastState = false; int threadCount = Runtime.getRuntime().availableProcessors(); try { String[] actionArgs = Arrays.copyOfRange(filteredArgs, 1, filteredArgs.length); commandLine = cmdLineGnuParser.parse(options, actionArgs); if (commandLine.hasOption("h")) { printUsage(); System.exit(0); } else { verbose = !commandLine.hasOption('q'); compareMode = commandLine.hasOption('f') ? CompareMode.FAST : CompareMode.FULL; message = commandLine.getOptionValue('m', message); threadCount = Integer.parseInt(commandLine.getOptionValue('t', "" + threadCount)); useLastState = commandLine.hasOption('l'); } } catch (Exception ex) { printUsage(); System.exit(-1); } if (compareMode == CompareMode.FAST) { threadCount = 1; System.out.println("Using fast compare mode. Thread count forced to 1"); } if (threadCount < 1) { System.out.println("Thread count must be at least one"); System.exit(0); } File baseDirectory = new File("."); File stateDir = new File(StateGenerator.FIC_DIR, "states"); if (command == Command.INIT) { if (stateDir.exists()) { System.out.println("fim repository already exist"); System.exit(0); } } else { if (!stateDir.exists()) { System.out.println("fim repository does not exist. 
Please run 'fim init' before."); System.exit(-1); } } State previousState; State currentState; StateGenerator generator = new StateGenerator(threadCount, compareMode); StateManager manager = new StateManager(stateDir, compareMode); StateComparator comparator = new StateComparator(compareMode); DuplicateFinder finder = new DuplicateFinder(); switch (command) { case INIT: fastCompareNotSupported(compareMode); stateDir.mkdirs(); currentState = generator.generateState("Initial State", baseDirectory); comparator.compare(null, currentState).displayChanges(verbose); manager.createNewState(currentState); break; case COMMIT: fastCompareNotSupported(compareMode); previousState = manager.loadPreviousState(); currentState = generator.generateState(message, baseDirectory); CompareResult result = comparator.compare(previousState, currentState).displayChanges(verbose); if (result.somethingModified()) { System.out.println(""); if (confirmCommand("commit")) { manager.createNewState(currentState); } else { System.out.println("Nothing committed"); } } break; case DIFF: previousState = manager.loadPreviousState(); currentState = generator.generateState(message, baseDirectory); comparator.compare(previousState, currentState).displayChanges(verbose); break; case FIND_DUPLICATES: fastCompareNotSupported(compareMode); System.out.println("Searching for duplicated files" + (useLastState ? " from the last committed State" : "")); System.out.println(""); State state; if (useLastState) { state = manager.loadPreviousState(); } else { state = generator.generateState(message, baseDirectory); } finder.findDuplicates(state).displayDuplicates(verbose); break; case RESET_DATES: fastCompareNotSupported(compareMode); previousState = manager.loadPreviousState(); manager.resetDates(previousState); break; case LOG: manager.displayStatesLog(); break; } } #location 124 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public int getNumberOfFeatures(){ List<? extends HasArray> coefs = getCoefs(); return NeuralNetworkUtil.getNumberOfFeatures(coefs); }
#vulnerable code @Override public int getNumberOfFeatures(){ List<?> coefs = getCoefs(); NDArray input = (NDArray)coefs.get(0); int[] shape = NDArrayUtil.getShape(input); return shape[0]; } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public Expression encode(int index, FieldName name){ Expression expression = new FieldRef(name); if(getWithMean()){ Number mean = Iterables.get(getMean(), index); if(Double.compare(mean.doubleValue(), 0d) != 0){ expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(mean)); } } // End if if(gwtWithStd()){ Number std = Iterables.get(getStd(), index); if(Double.compare(std.doubleValue(), 1d) != 0){ expression = PMMLUtil.createApply("/", expression, PMMLUtil.createConstant(std)); } } // "($name - mean) / std" return expression; }
#vulnerable code @Override public Expression encode(int index, FieldName name){ Expression expression = new FieldRef(name); if(withMean()){ Number mean = Iterables.get(getMean(), index); if(Double.compare(mean.doubleValue(), 0d) != 0){ expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(mean)); } } // End if if(withStd()){ Number std = Iterables.get(getStd(), index); if(Double.compare(std.doubleValue(), 1d) != 0){ expression = PMMLUtil.createApply("/", expression, PMMLUtil.createConstant(std)); } } // "($name - mean) / std" return expression; } #location 13 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private List<? extends Number> getNodeAttribute(String key){ List<? extends Number> nodeAttributes = (List<? extends Number>)ClassDictUtil.getArray(this, "nodes", key); return nodeAttributes; }
#vulnerable code private List<? extends Number> getNodeAttribute(String key){ NDArrayWrapper nodes = (NDArrayWrapper)get("nodes"); Map<String, ?> content = (Map<String, ?>)nodes.getContent(); return (List<? extends Number>)content.get(key); } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code static public List<?> getArray(ClassDict dict, String name, String key){ Object object = dict.get(name); if(object instanceof NDArrayWrapper){ NDArrayWrapper arrayWrapper = (NDArrayWrapper)object; NDArray array = arrayWrapper.getContent(); return NDArrayUtil.getContent(array, key); } throw new IllegalArgumentException(); }
#vulnerable code static public List<?> getArray(ClassDict dict, String name, String key){ NDArrayWrapper arrayWrapper = (NDArrayWrapper)dict.get(name); NDArray array = arrayWrapper.getContent(); return NDArrayUtil.getData(array, key); } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public Expression encode(int index, FieldName name){ Expression expression = new FieldRef(name); if(getWithMean()){ Number mean = Iterables.get(getMean(), index); if(Double.compare(mean.doubleValue(), 0d) != 0){ expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(mean)); } } // End if if(gwtWithStd()){ Number std = Iterables.get(getStd(), index); if(Double.compare(std.doubleValue(), 1d) != 0){ expression = PMMLUtil.createApply("/", expression, PMMLUtil.createConstant(std)); } } // "($name - mean) / std" return expression; }
#vulnerable code @Override public Expression encode(int index, FieldName name){ Expression expression = new FieldRef(name); if(withMean()){ Number mean = Iterables.get(getMean(), index); if(Double.compare(mean.doubleValue(), 0d) != 0){ expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(mean)); } } // End if if(withStd()){ Number std = Iterables.get(getStd(), index); if(Double.compare(std.doubleValue(), 1d) != 0){ expression = PMMLUtil.createApply("/", expression, PMMLUtil.createConstant(std)); } } // "($name - mean) / std" return expression; } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code protected Object[] getEstimatorStep(){ List<Object[]> steps = getSteps(); if(steps == null || steps.size() < 1){ throw new IllegalArgumentException("Missing estimator step"); } return steps.get(steps.size() - 1); }
#vulnerable code protected Object[] getEstimatorStep(){ List<Object[]> steps = getSteps(); if(steps.size() < 1){ throw new IllegalArgumentException("Missing estimator step"); } return steps.get(steps.size() - 1); } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public Object getFill(){ return getScalar("fill_"); }
#vulnerable code public Object getFill(){ return asJavaObject(get("fill_")); } #location 2 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public PMML encodePMML(){ List<DataField> dataFields = new ArrayList<>(); dataFields.add(encodeTargetField()); int features = getNumberOfFeatures(); for(int i = 0; i < features; i++){ dataFields.add(encodeActiveField(i)); } DataDictionary dataDictionary = new DataDictionary(dataFields); PMML pmml = new PMML("4.2", PMMLUtil.createHeader("JPMML-SkLearn"), dataDictionary); Model model = encodeModel(dataFields); pmml.addModels(model); return pmml; }
#vulnerable code public PMML encodePMML(){ List<DataField> dataFields = new ArrayList<>(); DataField targetDataField = encodeTarget(); dataFields.add(targetDataField); Integer features = getFeatures(); for(int i = 0; i < features.intValue(); i++){ DataField dataField = new DataField(FieldName.create("x" + String.valueOf(i + 1)), OpType.CONTINUOUS, DataType.DOUBLE); dataFields.add(dataField); } DataDictionary dataDictionary = new DataDictionary(dataFields); PMML pmml = new PMML("4.2", PMMLUtil.createHeader("JPMML-SkLearn"), dataDictionary); Model model = encodeModel(dataFields); pmml.addModels(model); return pmml; } #location 8 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public int getNumberOfFeatures(){ int[] shape = getCoefShape(); if(shape.length != 1){ throw new IllegalArgumentException(); } return shape[0]; }
#vulnerable code @Override public int getNumberOfFeatures(){ return (Integer)get("rank_"); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code static public int[] getShape(NDArray array){ Object[] shape = array.getShape(); List<? extends Number> values = (List)Arrays.asList(shape); return Ints.toArray(ValueUtil.asIntegers(values)); }
#vulnerable code static public int[] getShape(NDArray array){ Object[] shape = array.getShape(); int[] result = new int[shape.length]; for(int i = 0; i < shape.length; i++){ result[i] = ValueUtil.asInteger((Number)shape[i]); } return result; } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public void encodeFeatures(SkLearnEncoder encoder){ Object _default = getDefault(); List<Object[]> rows = getFeatures(); if(!(Boolean.FALSE).equals(_default)){ throw new IllegalArgumentException(); } for(Object[] row : rows){ List<String> ids = new ArrayList<>(); List<Feature> features = new ArrayList<>(); List<String> columns = getColumnList(row); for(String column : columns){ FieldName name = FieldName.create(column); ids.add(name.getValue()); DataField dataField = encoder.getDataField(name); if(dataField == null){ dataField = encoder.createDataField(name); } Feature feature = new WildcardFeature(encoder, dataField); features.add(feature); } List<Transformer> transformers = getTransformerList(row); for(Transformer transformer : transformers){ for(Feature feature : features){ encoder.updateType(feature.getName(), transformer.getOpType(), transformer.getDataType()); } features = transformer.encodeFeatures(ids, features, encoder); } encoder.addRow(ids, features); } }
#vulnerable code public void encodeFeatures(SkLearnEncoder encoder){ List<Object[]> steps = getFeatures(); for(int row = 0; row < steps.size(); row++){ Object[] step = steps.get(row); List<String> ids = new ArrayList<>(); List<Feature> features = new ArrayList<>(); List<String> names = getNameList(step); for(String name : names){ ids.add(name); DataField dataField = encoder.createDataField(FieldName.create(name)); Feature feature = new WildcardFeature(encoder, dataField); features.add(feature); } List<Transformer> transformers = getTransformerList(step); for(int column = 0; column < transformers.size(); column++){ Transformer transformer = transformers.get(column); for(Feature feature : features){ encoder.updateType(feature.getName(), transformer.getOpType(), transformer.getDataType()); } features = transformer.encodeFeatures(ids, features, encoder); } encoder.addRow(ids, features); } } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public Expression encode(int index, FieldName name){ List<?> classes = getClasses(); Object value = classes.get(index); Number posLabel = getPosLabel(); Number negLabel = getNegLabel(); if(ValueUtil.isOne(posLabel) && ValueUtil.isZero(negLabel)){ NormDiscrete normDiscrete = new NormDiscrete(name, String.valueOf(value)); return normDiscrete; } // "($name == value) ? pos_label : neg_label" return PMMLUtil.createApply("if", PMMLUtil.createApply("equal", new FieldRef(name), PMMLUtil.createConstant(value)), PMMLUtil.createConstant(posLabel), PMMLUtil.createConstant(negLabel)); }
#vulnerable code @Override public Expression encode(int index, FieldName name){ List<?> classes = getClasses(); Object value = classes.get(index); Number posLabel = getPosLabel(); Number negLabel = getNegLabel(); if(Double.compare(posLabel.doubleValue(), 1d) == 0 && Double.compare(negLabel.doubleValue(), 0d) == 0){ NormDiscrete normDiscrete = new NormDiscrete(name, String.valueOf(value)); return normDiscrete; } // "($name == value) ? pos_label : neg_label" return PMMLUtil.createApply("if", PMMLUtil.createApply("equal", new FieldRef(name), PMMLUtil.createConstant(value)), PMMLUtil.createConstant(posLabel), PMMLUtil.createConstant(negLabel)); } #location 10 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public Object getMissingValues(){ return getScalar("missing_values"); }
#vulnerable code public Object getMissingValues(){ return asJavaObject(get("missing_values")); } #location 2 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public int getNumberOfFeatures(){ return ValueUtil.asInteger((Number)get("n_features")); }
#vulnerable code @Override public int getNumberOfFeatures(){ return (Integer)get("n_features"); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code private Object loadContent(){ Object[] shape = getShape(); Object descr = getDescr(); byte[] data = (byte[])getData(); if(descr instanceof DType){ DType dType = (DType)descr; descr = dType.toDescr(); } try { InputStream is = new ByteArrayInputStream(data); try { return NDArrayUtil.parseData(is, descr, shape); } finally { is.close(); } } catch(IOException ioe){ throw new RuntimeException(ioe); } }
#vulnerable code private Object loadContent(){ Object[] shape = getShape(); Object descr = getDescr(); byte[] data = (byte[])getData(); try { InputStream is = new ByteArrayInputStream(data); try { return NDArrayUtil.parseData(is, descr, shape); } finally { is.close(); } } catch(IOException ioe){ throw new RuntimeException(ioe); } } #location 10 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public Number getWeight(int index){ CSRMatrix idfDiag = get("_idf_diag", CSRMatrix.class); List<?> data = idfDiag.getData(); return (Number)data.get(index); }
#vulnerable code public Number getWeight(int index){ CSRMatrix idfDiag = (CSRMatrix)get("_idf_diag"); List<?> data = idfDiag.getData(); return (Number)data.get(index); } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public int getNumberOfFeatures(){ List<? extends HasArray> coefs = getCoefs(); return NeuralNetworkUtil.getNumberOfFeatures(coefs); }
#vulnerable code @Override public int getNumberOfFeatures(){ List<?> coefs = getCoefs(); NDArray input = (NDArray)coefs.get(0); int[] shape = NDArrayUtil.getShape(input); return shape[0]; } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public Expression encode(int index, FieldName name){ List<?> classes = getClasses(); Object value = classes.get(index); Number posLabel = getPosLabel(); Number negLabel = getNegLabel(); if(ValueUtil.isOne(posLabel) && ValueUtil.isZero(negLabel)){ NormDiscrete normDiscrete = new NormDiscrete(name, String.valueOf(value)); return normDiscrete; } // "($name == value) ? pos_label : neg_label" return PMMLUtil.createApply("if", PMMLUtil.createApply("equal", new FieldRef(name), PMMLUtil.createConstant(value)), PMMLUtil.createConstant(posLabel), PMMLUtil.createConstant(negLabel)); }
#vulnerable code @Override public Expression encode(int index, FieldName name){ List<?> classes = getClasses(); Object value = classes.get(index); Number posLabel = getPosLabel(); Number negLabel = getNegLabel(); if(Double.compare(posLabel.doubleValue(), 1d) == 0 && Double.compare(negLabel.doubleValue(), 0d) == 0){ NormDiscrete normDiscrete = new NormDiscrete(name, String.valueOf(value)); return normDiscrete; } // "($name == value) ? pos_label : neg_label" return PMMLUtil.createApply("if", PMMLUtil.createApply("equal", new FieldRef(name), PMMLUtil.createConstant(value)), PMMLUtil.createConstant(posLabel), PMMLUtil.createConstant(negLabel)); } #location 10 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public DefineFunction encodeDefineFunction(){ String analyzer = getAnalyzer(); Boolean binary = getBinary(); Object preprocessor = getPreprocessor(); String stripAccents = getStripAccents(); Splitter tokenizer = getTokenizer(); switch(analyzer){ case "word": break; default: throw new IllegalArgumentException(analyzer); } if(preprocessor != null){ throw new IllegalArgumentException(); } // End if if(stripAccents != null){ throw new IllegalArgumentException(stripAccents); } ParameterField documentField = new ParameterField(FieldName.create("text")); ParameterField termField = new ParameterField(FieldName.create("term")); TextIndex textIndex = new TextIndex(documentField.getName()) .setTokenize(Boolean.TRUE) .setWordSeparatorCharacterRE(tokenizer.getSeparatorRE()) .setLocalTermWeights(binary ? TextIndex.LocalTermWeights.BINARY : null) .setExpression(new FieldRef(termField.getName())); DefineFunction defineFunction = new DefineFunction("tf", OpType.CONTINUOUS, null) .setDataType(DataType.DOUBLE) .addParameterFields(documentField, termField) .setExpression(textIndex); return defineFunction; }
#vulnerable code public DefineFunction encodeDefineFunction(){ String analyzer = getAnalyzer(); Boolean binary = getBinary(); String stripAccents = getStripAccents(); String tokenPattern = getTokenPattern(); switch(analyzer){ case "word": break; default: throw new IllegalArgumentException(analyzer); } if(stripAccents != null){ throw new IllegalArgumentException(stripAccents); } // End if if(tokenPattern != null && !("(?u)\\b\\w\\w+\\b").equals(tokenPattern)){ throw new IllegalArgumentException(tokenPattern); } ParameterField documentField = new ParameterField(FieldName.create("document")); ParameterField termField = new ParameterField(FieldName.create("term")); TextIndex textIndex = new TextIndex(documentField.getName()) .setTokenize(Boolean.TRUE) .setLocalTermWeights(binary ? TextIndex.LocalTermWeights.BINARY : null) .setExpression(new FieldRef(termField.getName())); DefineFunction defineFunction = new DefineFunction("tf", OpType.CONTINUOUS, null) .setDataType(DataType.DOUBLE) .addParameterFields(documentField, termField) .setExpression(textIndex); return defineFunction; } #location 7 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public ContinuousOutputFeature toContinuousFeature(){ PMMLEncoder encoder = ensureEncoder(); Output output = getOutput(); OutputField outputField = getField(); DataType dataType = outputField.getDataType(); switch(dataType){ case INTEGER: case FLOAT: case DOUBLE: break; default: throw new IllegalArgumentException(); } outputField.setOpType(OpType.CONTINUOUS); return new ContinuousOutputFeature(encoder, output, outputField); }
#vulnerable code @Override public ContinuousOutputFeature toContinuousFeature(){ PMMLEncoder encoder = ensureEncoder(); Output output = getOutput(); OutputField outputField = OutputUtil.getOutputField(output, getName()); DataType dataType = outputField.getDataType(); switch(dataType){ case INTEGER: case FLOAT: case DOUBLE: break; default: throw new IllegalArgumentException(); } outputField.setOpType(OpType.CONTINUOUS); return new ContinuousOutputFeature(encoder, output, outputField); } #location 9 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public int getNumberOfFeatures(){ return ValueUtil.asInteger((Number)get("n_features")); }
#vulnerable code @Override public int getNumberOfFeatures(){ return (Integer)get("n_features"); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code private List<?> loadContent(){ DType dtype = getDType(); byte[] obj = getObj(); try { InputStream is = new ByteArrayInputStream(obj); try { return (List<?>)NDArrayUtil.parseData(is, dtype, new Object[0]); } finally { is.close(); } } catch(IOException ioe){ throw new RuntimeException(ioe); } }
#vulnerable code private List<?> loadContent(){ DType dtype = getDType(); byte[] obj = getObj(); try { InputStream is = new ByteArrayInputStream(obj); try { return (List<?>)NDArrayUtil.parseData(is, dtype.toDescr(), new Object[0]); } finally { is.close(); } } catch(IOException ioe){ throw new RuntimeException(ioe); } } #location 9 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){ List<? extends Number> dataMin = getDataMin(); List<? extends Number> dataMax = getDataMax(); ClassDictUtil.checkSize(ids, features, dataMin, dataMax); List<Feature> result = new ArrayList<>(); for(int i = 0; i < features.size(); i++){ WildcardFeature wildcardFeature = (WildcardFeature)features.get(i); ContinuousFeature continuousFeature = wildcardFeature.toContinuousFeature(); Interval interval = new Interval(Interval.Closure.CLOSED_CLOSED) .setLeftMargin(ValueUtil.asDouble(dataMin.get(i))) .setRightMargin(ValueUtil.asDouble(dataMax.get(i))); ValidValueDecorator validValueDecorator = new ValidValueDecorator() .addIntervals(interval); encoder.addDecorator(continuousFeature.getName(), validValueDecorator); result.add(continuousFeature); } return super.encodeFeatures(ids, result, encoder); }
#vulnerable code @Override public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){ List<? extends Number> dataMin = getDataMin(); List<? extends Number> dataMax = getDataMax(); ClassDictUtil.checkSize(ids, features, dataMin, dataMax); InvalidValueTreatmentMethod invalidValueTreatment = DomainUtil.parseInvalidValueTreatment(getInvalidValueTreatment()); InvalidValueDecorator invalidValueDecorator = new InvalidValueDecorator() .setInvalidValueTreatment(invalidValueTreatment); List<Feature> result = new ArrayList<>(); for(int i = 0; i < features.size(); i++){ WildcardFeature wildcardFeature = (WildcardFeature)features.get(i); ContinuousFeature continuousFeature = wildcardFeature.toContinuousFeature(); Interval interval = new Interval(Interval.Closure.CLOSED_CLOSED) .setLeftMargin(ValueUtil.asDouble(dataMin.get(i))) .setRightMargin(ValueUtil.asDouble(dataMax.get(i))); ValidValueDecorator validValueDecorator = new ValidValueDecorator() .addIntervals(interval); encoder.addDecorator(continuousFeature.getName(), validValueDecorator); encoder.addDecorator(continuousFeature.getName(), invalidValueDecorator); result.add(continuousFeature); } return result; } #location 8 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){ int[] shape = getComponentsShape(); int numberOfComponents = shape[0]; int numberOfFeatures = shape[1]; List<? extends Number> components = getComponents(); List<? extends Number> mean = getMean(); ClassDictUtil.checkSize(numberOfFeatures, ids, features, mean); Boolean whiten = getWhiten(); List<? extends Number> explainedVariance = (whiten ? getExplainedVariance() : null); ClassDictUtil.checkSize(numberOfComponents, explainedVariance); String id = String.valueOf(PCA.SEQUENCE.getAndIncrement()); ids.clear(); List<Feature> result = new ArrayList<>(); for(int i = 0; i < numberOfComponents; i++){ List<? extends Number> component = MatrixUtil.getRow(components, numberOfComponents, numberOfFeatures, i); Apply apply = new Apply("sum"); for(int j = 0; j < numberOfFeatures; j++){ Feature feature = features.get(j); // "($name[i] - mean[i]) * component[i]" Expression expression = (feature.toContinuousFeature()).ref(); Number meanValue = mean.get(j); if(!ValueUtil.isZero(meanValue)){ expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(meanValue)); } Number componentValue = component.get(j); if(!ValueUtil.isOne(componentValue)){ expression = PMMLUtil.createApply("*", expression, PMMLUtil.createConstant(componentValue)); } apply.addExpressions(expression); } if(whiten){ Number explainedVarianceValue = explainedVariance.get(i); if(!ValueUtil.isOne(explainedVarianceValue)){ apply = PMMLUtil.createApply("/", apply, PMMLUtil.createConstant(Math.sqrt(ValueUtil.asDouble(explainedVarianceValue)))); } } DerivedField derivedField = encoder.createDerivedField(createName(id, i), apply); ids.add((derivedField.getName()).getValue()); result.add(new ContinuousFeature(encoder, derivedField)); } return result; }
#vulnerable code @Override public List<Feature> encodeFeatures(List<String> ids, List<Feature> features, SkLearnEncoder encoder){ int[] shape = getComponentsShape(); int numberOfComponents = shape[0]; int numberOfFeatures = shape[1]; if(ids.size() != numberOfFeatures || features.size() != numberOfFeatures){ throw new IllegalArgumentException(); } String id = String.valueOf(PCA.SEQUENCE.getAndIncrement()); List<? extends Number> components = getComponents(); List<? extends Number> mean = getMean(); Boolean whiten = getWhiten(); List<? extends Number> explainedVariance = (whiten ? getExplainedVariance() : null); ids.clear(); List<Feature> result = new ArrayList<>(); for(int i = 0; i < numberOfComponents; i++){ List<? extends Number> component = MatrixUtil.getRow(components, numberOfComponents, numberOfFeatures, i); Apply apply = new Apply("sum"); for(int j = 0; j < numberOfFeatures; j++){ Feature feature = features.get(j); // "($name[i] - mean[i]) * component[i]" Expression expression = (feature.toContinuousFeature()).ref(); Number meanValue = mean.get(j); if(!ValueUtil.isZero(meanValue)){ expression = PMMLUtil.createApply("-", expression, PMMLUtil.createConstant(meanValue)); } Number componentValue = component.get(j); if(!ValueUtil.isOne(componentValue)){ expression = PMMLUtil.createApply("*", expression, PMMLUtil.createConstant(componentValue)); } apply.addExpressions(expression); } if(whiten){ Number explainedVarianceValue = explainedVariance.get(i); if(!ValueUtil.isOne(explainedVarianceValue)){ apply = PMMLUtil.createApply("/", apply, PMMLUtil.createConstant(Math.sqrt(ValueUtil.asDouble(explainedVarianceValue)))); } } DerivedField derivedField = encoder.createDerivedField(createName(id, i), apply); ids.add((derivedField.getName()).getValue()); result.add(new ContinuousFeature(encoder, derivedField)); } return result; } #location 19 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){ String function = getFunction(); if(features.size() <= 1){ return features; } Apply apply = new Apply(translateFunction(function)); for(Feature feature : features){ apply.addExpressions(feature.ref()); } FieldName name = FieldName.create(function + "(" + FeatureUtil.formatFeatureList(features) + ")"); // XXX DerivedField derivedField = encoder.createDerivedField(name, OpType.CONTINUOUS, DataType.DOUBLE, apply); return Collections.<Feature>singletonList(new ContinuousFeature(encoder, derivedField)); }
#vulnerable code @Override public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){ String function = translateFunction(getFunction()); if(features.size() <= 1){ return features; } FieldName name = FieldName.create(function + "(" + FeatureUtil.formatFeatureList(features) + ")"); Apply apply = new Apply(function); for(Feature feature : features){ apply.addExpressions(feature.ref()); } DerivedField derivedField = encoder.createDerivedField(name, OpType.CONTINUOUS, DataType.DOUBLE, apply); return Collections.<Feature>singletonList(new ContinuousFeature(encoder, derivedField)); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code private int[] getCoefShape(){ return ClassDictUtil.getShape(this, "coef_"); }
#vulnerable code private int[] getCoefShape(){ NDArrayWrapper arrayWrapper = (NDArrayWrapper)get("coef_"); NDArray array = arrayWrapper.getContent(); return NDArrayUtil.getShape(array); } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code static public List<?> getArray(ClassDict dict, String name, String key){ Object object = dict.get(name); if(object instanceof NDArrayWrapper){ NDArrayWrapper arrayWrapper = (NDArrayWrapper)object; object = arrayWrapper.getContent(); } // End if if(object instanceof NDArray){ NDArray array = (NDArray)object; return NDArrayUtil.getContent(array, key); } throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type"); }
#vulnerable code static public List<?> getArray(ClassDict dict, String name, String key){ Object object = unwrap(dict.get(name)); if(object instanceof NDArray){ NDArray array = (NDArray)object; return NDArrayUtil.getContent(array, key); } throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type"); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public int getNumberOfFeatures(){ return ValueUtil.asInteger((Number)get("n_features_")); }
#vulnerable code public int getNumberOfFeatures(){ return (Integer)get("n_features_"); } #location 2 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code static public List<?> getContent(NDArray array, String key){ Map<String, ?> content = (Map<String, ?>)array.getContent(); return asJavaList(array, (List<?>)content.get(key)); }
#vulnerable code static public List<?> getContent(NDArray array, String key){ Map<String, ?> data = (Map<String, ?>)array.getContent(); return asJavaList(array, (List<?>)data.get(key)); } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public List<Transformer> getTransformers(){ List<Object[]> steps = getSteps(); return TransformerUtil.asTransformerList(TupleUtil.extractElementList(steps, 1)); }
#vulnerable code public List<Transformer> getTransformers(){ List<Object[]> steps = getSteps(); boolean flexible = isFlexible(); if(flexible && steps.size() > 0){ Estimator estimator = getEstimator(); if(estimator != null){ steps = steps.subList(0, steps.size() - 1); } } return TransformerUtil.asTransformerList(TupleUtil.extractElementList(steps, 1)); } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public List<Feature> initializeFeatures(SkLearnEncoder encoder){ List<? extends String> featureNames = getFeatureNames(); String separator = getSeparator(); Map<String, Integer> vocabulary = getVocabulary(); Feature[] featureArray = new Feature[featureNames.size()]; for(String featureName : featureNames){ String key = featureName; String value = null; int index = featureName.indexOf(separator); if(index > -1){ key = featureName.substring(0, index); value = featureName.substring(index + separator.length()); } FieldName name = FieldName.create(key); DataField dataField = encoder.getDataField(name); if(dataField == null){ if(value != null){ dataField = encoder.createDataField(name, OpType.CATEGORICAL, DataType.STRING); } else { dataField = encoder.createDataField(name, OpType.CONTINUOUS, DataType.DOUBLE); } } Feature feature; if(value != null){ PMMLUtil.addValues(dataField, Collections.singletonList(value)); feature = new BinaryFeature(encoder, dataField, value); } else { feature = new ContinuousFeature(encoder, dataField); } featureArray[vocabulary.get(featureName)] = feature; } List<Feature> result = new ArrayList<>(); result.addAll(Arrays.asList(featureArray)); return result; }
#vulnerable code @Override public List<Feature> initializeFeatures(SkLearnEncoder encoder){ List<String> featureNames = getFeatureNames(); String separator = getSeparator(); Map<String, Integer> vocabulary = getVocabulary(); Feature[] featureArray = new Feature[featureNames.size()]; for(String featureName : featureNames){ String key = featureName; String value = null; int index = featureName.indexOf(separator); if(index > -1){ key = featureName.substring(0, index); value = featureName.substring(index + separator.length()); } FieldName name = FieldName.create(key); DataField dataField = encoder.getDataField(name); if(dataField == null){ if(value != null){ dataField = encoder.createDataField(name, OpType.CATEGORICAL, DataType.STRING); } else { dataField = encoder.createDataField(name, OpType.CONTINUOUS, DataType.DOUBLE); } } Feature feature; if(value != null){ PMMLUtil.addValues(dataField, Collections.singletonList(value)); feature = new BinaryFeature(encoder, dataField, value); } else { feature = new ContinuousFeature(encoder, dataField); } featureArray[vocabulary.get(featureName)] = feature; } List<Feature> result = new ArrayList<>(); result.addAll(Arrays.asList(featureArray)); return result; } #location 16 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code static public List<?> getArray(ClassDict dict, String name){ Object object = dict.get(name); if(object instanceof HasArray){ HasArray hasArray = (HasArray)object; return hasArray.getArrayContent(); } // End if if(object instanceof Number){ return Collections.singletonList(object); } throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type"); }
#vulnerable code static public List<?> getArray(ClassDict dict, String name){ Object object = unwrap(dict.get(name)); if(object instanceof NDArray){ NDArray array = (NDArray)object; return NDArrayUtil.getContent(array); } else if(object instanceof CSRMatrix){ CSRMatrix matrix = (CSRMatrix)object; return CSRMatrixUtil.getContent(matrix); } else if(object instanceof Scalar){ Scalar scalar = (Scalar)object; return scalar.getContent(); } // End if if(object instanceof Number){ return Collections.singletonList(object); } throw new IllegalArgumentException("The value of the " + ClassDictUtil.formatMember(dict, name) + " attribute (" + ClassDictUtil.formatClass(object) + ") is not a supported array type"); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public List<?> getClasses(){ LabelEncoder labelEncoder = getLabelEncoder(); return labelEncoder.getClasses(); }
#vulnerable code @Override public List<?> getClasses(){ List<Object> result = new ArrayList<>(); List<?> values = (List<?>)get("classes_"); for(Object value : values){ if(value instanceof HasArray){ HasArray hasArray = (HasArray)value; result.addAll(hasArray.getArrayContent()); } else { result.add(value); } } return result; } #location 6 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public double[] getValues(){ List<? extends Number> values = (List<? extends Number>)ClassDictUtil.getArray(this, "values"); return Doubles.toArray(values); }
#vulnerable code public double[] getValues(){ NDArrayWrapper values = (NDArrayWrapper)get("values"); return Doubles.toArray((List<? extends Number>)values.getContent()); } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code static public MiningModel encodeBooster(HasBooster hasBooster, Schema schema){ Booster booster = hasBooster.getBooster(); Learner learner = booster.getLearner(); Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema); // XXX List<Feature> features = xgbSchema.getFeatures(); for(Feature feature : features){ if(feature instanceof ContinuousFeature){ SkLearnEncoder encoder = (SkLearnEncoder)feature.getEncoder(); TypeDefinitionField field = encoder.getField(feature.getName()); if(!(OpType.CONTINUOUS).equals(field.getOpType())){ field.setOpType(OpType.CONTINUOUS); } } } MiningModel miningModel = learner.encodeMiningModel(xgbSchema); return miningModel; }
#vulnerable code static public MiningModel encodeBooster(HasBooster hasBooster, Schema schema){ Booster booster = hasBooster.getBooster(); Learner learner = booster.getLearner(); Schema xgbSchema = XGBoostUtil.toXGBoostSchema(schema); MiningModel miningModel = learner.encodeMiningModel(xgbSchema); return miningModel; } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){ Object func = getFunc(); if(func == null){ return features; } UFunc ufunc; try { ufunc = (UFunc)func; } catch(ClassCastException cce){ throw new IllegalArgumentException("The function object (" + ClassDictUtil.formatClass(func) + ") is not a Numpy universal function", cce); } List<Feature> result = new ArrayList<>(); for(int i = 0; i < features.size(); i++){ ContinuousFeature continuousFeature = (features.get(i)).toContinuousFeature(); FieldName name = FeatureUtil.createName(ufunc.getName(), continuousFeature); DerivedField derivedField = encoder.getDerivedField(name); if(derivedField == null){ Expression expression = encodeUFunc(ufunc, continuousFeature.ref()); derivedField = encoder.createDerivedField(name, expression); } result.add(new ContinuousFeature(encoder, derivedField)); } return result; }
#vulnerable code @Override public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){ Object func = getFunc(); UFunc ufunc; try { ufunc = (UFunc)func; } catch(ClassCastException cce){ throw new IllegalArgumentException("The function object (" + ClassDictUtil.formatClass(func) + ") is not a Numpy universal function", cce); } List<Feature> result = new ArrayList<>(); for(int i = 0; i < features.size(); i++){ ContinuousFeature continuousFeature = (features.get(i)).toContinuousFeature(); FieldName name = FeatureUtil.createName(ufunc.getName(), continuousFeature); DerivedField derivedField = encoder.getDerivedField(name); if(derivedField == null){ Expression expression = encodeUFunc(ufunc, continuousFeature.ref()); derivedField = encoder.createDerivedField(name, expression); } result.add(new ContinuousFeature(encoder, derivedField)); } return result; } #location 18 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Test public void test_dispatcher_local_greeting_request_completes_before_timeout() { Microservices gateway = Microservices.builder() .discoveryPort(port.incrementAndGet()) .services(new GreetingServiceImpl()) .build(); Call service = gateway.call(); Publisher<ServiceMessage> result = service.requestOne(GREETING_REQUEST_REQ, GreetingResponse.class); GreetingResponse greetings = Mono.from(result).timeout(Duration.ofSeconds(TIMEOUT)).block().data(); System.out.println("1. greeting_request_completes_before_timeout : " + greetings.getResult()); assertTrue(greetings.getResult().equals(" hello to: joe")); gateway.shutdown().block(); }
#vulnerable code @Test public void test_dispatcher_local_greeting_request_completes_before_timeout() { Microservices gateway = Microservices.builder() .services(new GreetingServiceImpl()) .build(); Call service = gateway.call(); Publisher<ServiceMessage> result = service.requestOne(GREETING_REQUEST_REQ, GreetingResponse.class); GreetingResponse greetings = Mono.from(result).timeout(Duration.ofSeconds(TIMEOUT)).block().data(); System.out.println("1. greeting_request_completes_before_timeout : " + greetings.getResult()); assertTrue(greetings.getResult().equals(" hello to: joe")); gateway.shutdown().block(); } #location 8 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public ServiceMessage decodeMessage(Payload payload) { Builder builder = ServiceMessage.builder(); if (payload.getData().hasRemaining()) { try { builder.data(payload.sliceData()); } catch (Throwable ex) { LOGGER.error("Failed to deserialize data", ex); } } if (payload.hasMetadata()) { try (ByteBufInputStream inputStream = new ByteBufInputStream(payload.sliceMetadata(), true)) { builder.headers(readFrom(inputStream, mapType)); } catch (Throwable ex) { LOGGER.error("Failed to deserialize data", ex); } } payload.release(); return builder.build(); }
#vulnerable code @Override public ServiceMessage decodeMessage(Payload payload) { Builder builder = ServiceMessage.builder(); if (payload.getData().hasRemaining()) { try { builder.data(payload.sliceData()); } catch (Throwable ex) { LOGGER.error("Failed to deserialize data", ex); } } if (payload.hasMetadata()) { ByteBuf headers = payload.sliceMetadata(); ByteBufInputStream inputStream = new ByteBufInputStream(headers); try { builder.headers((Map<String, String>) (readFrom(inputStream, mapType))); } catch (Throwable ex) { LOGGER.error("Failed to deserialize data", ex); } } return builder.build(); } #location 17 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Test public void test_local_quotes_service() throws InterruptedException { Microservices node = Microservices.builder() .discoveryPort(port.incrementAndGet()) .services(new SimpleQuoteService()).build(); QuoteService service = node.call().api(QuoteService.class); CountDownLatch latch = new CountDownLatch(3); Flux<String> obs = service.quotes(); Disposable sub = obs.subscribe(onNext -> latch.countDown()); latch.await(4, TimeUnit.SECONDS); sub.dispose(); assertTrue(latch.getCount() <= 0); node.shutdown(); }
#vulnerable code @Test public void test_local_quotes_service() throws InterruptedException { Microservices node = Microservices.builder().services(new SimpleQuoteService()).build(); QuoteService service = node.call().api(QuoteService.class); CountDownLatch latch = new CountDownLatch(3); Flux<String> obs = service.quotes(); Disposable sub = obs.subscribe(onNext -> latch.countDown()); latch.await(4, TimeUnit.SECONDS); sub.dispose(); assertTrue(latch.getCount() <= 0); node.shutdown(); } #location 5 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Override public ServiceMessage decodeData(ServiceMessage message, Class type) { if (message.data() != null && message.data() instanceof ByteBuf) { try (ByteBufInputStream inputStream = new ByteBufInputStream(message.data(), true)) { return ServiceMessage.from(message).data(readFrom(inputStream, type)).build(); } catch (Throwable ex) { LOGGER.error("Failed to deserialize data", ex); } } return message; }
#vulnerable code @Override public ServiceMessage decodeData(ServiceMessage message, Class type) { if (message.data() != null && message.data() instanceof ByteBuf) { ByteBufInputStream inputStream = new ByteBufInputStream(message.data()); try { return ServiceMessage.from(message).data(readFrom(inputStream, type)).build(); } catch (Throwable ex) { LOGGER.error("Failed to deserialize data", ex); } } return message; } #location 6 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code private void importNodeIndexes(File file, String indexName, String indexType) throws IOException { BatchInserterIndex index; if (indexType.equals("fulltext")) { index = lucene.nodeIndex( indexName, FULLTEXT_CONFIG ); } else { index = lucene.nodeIndex( indexName, EXACT_CONFIG ); } BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 1); Object[] node = new Object[1]; String line; report.reset(); while ((line = bf.readLine()) != null) { final Map<String, Object> properties = data.update(line, node); index.add(id(node[0]), properties); report.dots(); } report.finishImport("Nodes into " + indexName + " Index"); }
#vulnerable code private void importNodeIndexes(File file, String indexName, String indexType) throws IOException { BatchInserterIndex index; if (indexType.equals("fulltext")) { index = lucene.nodeIndex( indexName, stringMap( "type", "fulltext" ) ); } else { index = lucene.nodeIndex( indexName, EXACT_CONFIG ); } BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 1); Object[] node = new Object[1]; String line; report.reset(); while ((line = bf.readLine()) != null) { final Map<String, Object> properties = map(data.update(line, node)); index.add(id(node[0]), properties); report.dots(); } report.finishImport("Nodes into " + indexName + " Index"); } #location 7 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code private void importNodes(File file) throws IOException { BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 0); String line; report.reset(); while ((line = bf.readLine()) != null) { db.createNode(data.update(line)); report.dots(); } report.finishImport("Nodes"); }
#vulnerable code private void importNodes(File file) throws IOException { BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 0); String line; report.reset(); while ((line = bf.readLine()) != null) { db.createNode(map(data.update(line))); report.dots(); } report.finishImport("Nodes"); } #location 9 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code private void importRelationshipIndexes(File file, String indexName, String indexType) throws IOException { BatchInserterIndex index; if (indexType.equals("fulltext")) { index = lucene.relationshipIndex( indexName, FULLTEXT_CONFIG ); } else { index = lucene.relationshipIndex( indexName, EXACT_CONFIG ); } BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 1); Object[] rel = new Object[1]; String line; report.reset(); while ((line = bf.readLine()) != null) { final Map<String, Object> properties = data.update(line, rel); index.add(id(rel[0]), properties); report.dots(); } report.finishImport("Relationships into " + indexName + " Index"); }
#vulnerable code private void importRelationshipIndexes(File file, String indexName, String indexType) throws IOException { BatchInserterIndex index; if (indexType.equals("fulltext")) { index = lucene.relationshipIndex( indexName, stringMap( "type", "fulltext" ) ); } else { index = lucene.relationshipIndex( indexName, EXACT_CONFIG ); } BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 1); Object[] rel = new Object[1]; String line; report.reset(); while ((line = bf.readLine()) != null) { final Map<String, Object> properties = map(data.update(line, rel)); index.add(id(rel[0]), properties); report.dots(); } report.finishImport("Relationships into " + indexName + " Index"); } #location 7 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code private void importRelationships(File file) throws IOException { BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 3); Object[] rel = new Object[3]; final RelType relType = new RelType(); String line; report.reset(); while ((line = bf.readLine()) != null) { final Map<String, Object> properties = data.update(line, rel); db.createRelationship(id(rel[0]), id(rel[1]), relType.update(rel[2]), properties); report.dots(); } report.finishImport("Relationships"); }
#vulnerable code private void importRelationships(File file) throws IOException { BufferedReader bf = new BufferedReader(new FileReader(file)); final Data data = new Data(bf.readLine(), "\t", 3); Object[] rel = new Object[3]; final Type type = new Type(); String line; report.reset(); while ((line = bf.readLine()) != null) { final Map<String, Object> properties = map(data.update(line, rel)); db.createRelationship(id(rel[0]), id(rel[1]), type.update(rel[2]), properties); report.dots(); } report.finishImport("Relationships"); } #location 12 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Test public void testCache() { long start1 = System.currentTimeMillis(); int size = 10000; for (int i = 0; i < size; i++) { userDetailsService.loadUserByUsername("admin"); } long end1 = System.currentTimeMillis(); //关闭缓存 userDetailsService.setEnableCache(false); long start2 = System.currentTimeMillis(); for (int i = 0; i < size; i++) { userDetailsService.loadUserByUsername("admin"); } long end2 = System.currentTimeMillis(); System.out.print("使用缓存:" + (end1 - start1) + "毫秒\n 不使用缓存:" + (end2 - start2) + "毫秒"); }
#vulnerable code @Test public void testCache() { long start1 = System.currentTimeMillis(); for (int i = 0; i < size; i++) { userDetailsService.loadUserByUsername("admin"); } long end1 = System.currentTimeMillis(); //关闭缓存 userDetailsService.setEnableCache(false); long start2 = System.currentTimeMillis(); for (int i = 0; i < size; i++) { userDetailsService.loadUserByUsername("admin"); } long end2 = System.currentTimeMillis(); System.out.printf("使用缓存:" + (end1 - start1) + "毫秒\n 不使用缓存:" + (end2 - start2) + "毫秒"); } #location 15 #vulnerability type CHECKERS_PRINTF_ARGS
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code private static List<String> readSqlList(File sqlFile) throws Exception { List<String> sqlList = Lists.newArrayList(); StringBuilder sb = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new InputStreamReader( new FileInputStream(sqlFile), StandardCharsets.UTF_8))) { String tmp; while ((tmp = reader.readLine()) != null) { log.info("line:{}", tmp); if (tmp.endsWith(";")) { sb.append(tmp); sqlList.add(sb.toString()); sb.delete(0, sb.length()); } else { sb.append(tmp); } } if (!"".endsWith(sb.toString().trim())) { sqlList.add(sb.toString()); } } return sqlList; }
#vulnerable code private static List<String> readSqlList(File sqlFile) throws Exception { List<String> sqlList = Lists.newArrayList(); StringBuilder sb = new StringBuilder(); BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader( new FileInputStream(sqlFile), "UTF-8")); String tmp = null; while ((tmp = reader.readLine()) != null) { log.info("line:{}", tmp); if (tmp.endsWith(";")) { sb.append(tmp); sqlList.add(sb.toString()); sb.delete(0, sb.length()); } else { sb.append(tmp); } } if (!"".endsWith(sb.toString().trim())) { sqlList.add(sb.toString()); } } finally { try { reader.close(); } catch (IOException e1) { } } return sqlList; } #location 24 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Test public void test1() { long l = System.currentTimeMillis() / 1000; LocalDateTime localDateTime = DateUtil.fromTimeStamp(l); System.out.print(DateUtil.localDateTimeFormatyMdHms(localDateTime)); }
#vulnerable code @Test public void test1() { long l = System.currentTimeMillis() / 1000; LocalDateTime localDateTime = DateUtil.fromTimeStamp(l); System.out.printf(DateUtil.localDateTimeFormatyMdHms(localDateTime)); } #location 5 #vulnerability type CHECKERS_PRINTF_ARGS
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code public static void upZipFile(File zipFile, String folderPath) throws ZipException, IOException { File desDir = new File(folderPath); if (!desDir.exists()) { if (!desDir.mkdirs()) { System.out.println("was not successful."); } } ZipFile zf = new ZipFile(zipFile); for (Enumeration<?> entries = zf.entries(); entries.hasMoreElements(); ) { ZipEntry entry = ((ZipEntry) entries.nextElement()); InputStream in = zf.getInputStream(entry); String str = folderPath; File desFile = new File(str, java.net.URLEncoder.encode(entry.getName(), "UTF-8")); if (!desFile.exists()) { File fileParentDir = desFile.getParentFile(); if (!fileParentDir.exists()) { if (!fileParentDir.mkdirs()) { System.out.println("was not successful."); } } } OutputStream out = new FileOutputStream(desFile); byte[] buffer = new byte[1024 * 1024]; int realLength = in.read(buffer); while (realLength != -1) { out.write(buffer, 0, realLength); realLength = in.read(buffer); } out.close(); in.close(); } }
#vulnerable code public static void upZipFile(File zipFile, String folderPath) throws ZipException, IOException { File desDir = new File(folderPath); if (!desDir.exists()) { desDir.mkdirs(); } ZipFile zf = new ZipFile(zipFile); for (Enumeration<?> entries = zf.entries(); entries.hasMoreElements(); ) { ZipEntry entry = ((ZipEntry) entries.nextElement()); InputStream in = zf.getInputStream(entry); String str = folderPath; File desFile = new File(str, java.net.URLEncoder.encode(entry.getName(), "UTF-8")); if (!desFile.exists()) { File fileParentDir = desFile.getParentFile(); if (!fileParentDir.exists()) { fileParentDir.mkdirs(); } } OutputStream out = new FileOutputStream(desFile); byte[] buffer = new byte[1024 * 1024]; int realLength = in.read(buffer); while (realLength != -1) { out.write(buffer, 0, realLength); realLength = in.read(buffer); } out.close(); in.close(); } } #location 5 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Test() public void testSizeControl() throws IOException, InterruptedException, ExecutionException { runSizeControl("scaling-avl.tsv", new AvlDigestFactory()); }
#vulnerable code @Test() public void testSizeControl() throws IOException, InterruptedException, ExecutionException { // very slow running data generator. Don't want to run this normally. To run slow tests use // mvn test -DrunSlowTests=true assumeTrue(Boolean.parseBoolean(System.getProperty("runSlowTests"))); final Random gen0 = RandomUtils.getRandom(); final PrintWriter out = new PrintWriter(new FileOutputStream("scaling.tsv")); out.printf("k\tsamples\tcompression\tsize1\tsize2\n"); List<Callable<String>> tasks = Lists.newArrayList(); for (int k = 0; k < 20; k++) { for (final int size : new int[]{10, 100, 1000, 10000}) { final int currentK = k; tasks.add(new Callable<String>() { Random gen = new Random(gen0.nextLong()); @Override public String call() throws Exception { System.out.printf("Starting %d,%d\n", currentK, size); StringWriter s = new StringWriter(); PrintWriter out = new PrintWriter(s); for (double compression : new double[]{2, 5, 10, 20, 50, 100, 200, 500, 1000}) { AVLTreeDigest dist = new AVLTreeDigest(compression); for (int i = 0; i < size * 1000; i++) { dist.add(gen.nextDouble()); } out.printf("%d\t%d\t%.0f\t%d\t%d\n", currentK, size, compression, dist.smallByteSize(), dist.byteSize()); out.flush(); } out.close(); return s.toString(); } }); } } for (Future<String> result : Executors.newFixedThreadPool(20).invokeAll(tasks)) { out.write(result.get()); } out.close(); } #location 27 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Test() public void testSizeControl() throws IOException, InterruptedException, ExecutionException { // very slow running data generator. Don't want to run this normally. To run slow tests use // mvn test -DrunSlowTests=true // assumeTrue(Boolean.parseBoolean(System.getProperty("runSlowTests"))); final Random gen0 = getRandom(); try (final PrintWriter out = new PrintWriter(new FileOutputStream(String.format("scaling-%s.tsv", digestName)))) { out.printf("k\tsamples\tcompression\tsize1\tsize2\n"); List<Callable<String>> tasks = Lists.newArrayList(); for (int k = 0; k < 20; k++) { for (final int size : new int[]{10, 100, 1000, 10000}) { final int currentK = k; tasks.add(new Callable<String>() { final Random gen = new Random(gen0.nextLong()); @Override public String call() throws Exception { System.out.printf("Starting %d,%d\n", currentK, size); StringWriter s = new StringWriter(); PrintWriter out = new PrintWriter(s); for (double compression : new double[]{20, 50, 100, 200, 500, 1000}) { TDigest dist = factory(compression).create(); for (int i = 0; i < size * 1000; i++) { dist.add(gen.nextDouble()); } out.printf("%d\t%d\t%.0f\t%d\t%d\n", currentK, size, compression, dist.smallByteSize(), dist.byteSize()); out.flush(); } out.close(); return s.toString(); } }); } } ExecutorService executor = Executors.newFixedThreadPool(20); for (Future<String> result : executor.invokeAll(tasks)) { out.write(result.get()); } executor.shutdownNow(); assertTrue("Dangling executor thread", executor.awaitTermination(5, TimeUnit.SECONDS)); } }
#vulnerable code @Test() public void testSizeControl() throws IOException, InterruptedException, ExecutionException { // very slow running data generator. Don't want to run this normally. To run slow tests use // mvn test -DrunSlowTests=true assumeTrue(Boolean.parseBoolean(System.getProperty("runSlowTests"))); final Random gen0 = getRandom(); final PrintWriter out = new PrintWriter(new FileOutputStream("scaling.tsv")); out.printf("k\tsamples\tcompression\tsize1\tsize2\n"); List<Callable<String>> tasks = Lists.newArrayList(); for (int k = 0; k < 20; k++) { for (final int size : new int[]{10, 100, 1000, 10000}) { final int currentK = k; tasks.add(new Callable<String>() { final Random gen = new Random(gen0.nextLong()); @Override public String call() throws Exception { System.out.printf("Starting %d,%d\n", currentK, size); StringWriter s = new StringWriter(); PrintWriter out = new PrintWriter(s); for (double compression : new double[]{2, 5, 10, 20, 50, 100, 200, 500, 1000}) { TDigest dist = factory(compression).create(); for (int i = 0; i < size * 1000; i++) { dist.add(gen.nextDouble()); } out.printf("%d\t%d\t%.0f\t%d\t%d\n", currentK, size, compression, dist.smallByteSize(), dist.byteSize()); out.flush(); } out.close(); return s.toString(); } }); } } ExecutorService executor = Executors.newFixedThreadPool(20); for (Future<String> result : executor.invokeAll(tasks)) { out.write(result.get()); } executor.shutdown(); executor.awaitTermination(5, TimeUnit.SECONDS); out.close(); } #location 27 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code static void whitelistVerify( final String remoteHost, final WhitelistItem whitelistItem, final Map<String, List<String>> headers, final String postContent) throws WhitelistException { WhitelistHost whitelistHost = new WhitelistHost(whitelistItem.getHost()); if (HostVerifier.whitelistVerified(new WhitelistHost(remoteHost), whitelistHost)) { if (whitelistItem.isHmacEnabled()) { final Optional<StringCredentials> hmacKeyOpt = CredentialsHelper.findCredentials(whitelistItem.getHmacCredentialId()); if (!hmacKeyOpt.isPresent()) { throw new WhitelistException( "Was unable to find secret text credential " + whitelistItem.getHmacCredentialId()); } final String hmacHeader = whitelistItem.getHmacHeader(); final String hmacKey = hmacKeyOpt.get().getSecret().getPlainText(); final String hmacAlgorithm = whitelistItem.getHmacAlgorithm(); hmacVerify(headers, postContent, hmacHeader, hmacKey, hmacAlgorithm); return; } return; } throw new WhitelistException( "Sending host \"" + remoteHost + "\" was not matched by whitelist."); }
#vulnerable code static void whitelistVerify( final String remoteHost, final WhitelistItem whitelistItem, final Map<String, List<String>> headers, final String postContent) throws WhitelistException { String whitelistHost = whitelistItem.getHost(); if (HostVerifier.whitelistContains(remoteHost, whitelistHost)) { if (whitelistItem.isHmacEnabled()) { final Optional<StringCredentials> hmacKeyOpt = CredentialsHelper.findCredentials(whitelistItem.getHmacCredentialId()); if (!hmacKeyOpt.isPresent()) { throw new WhitelistException( "Was unable to find secret text credential " + whitelistItem.getHmacCredentialId()); } final String hmacHeader = whitelistItem.getHmacHeader(); final String hmacKey = hmacKeyOpt.get().getSecret().getPlainText(); final String hmacAlgorithm = whitelistItem.getHmacAlgorithm(); hmacVerify(headers, postContent, hmacHeader, hmacKey, hmacAlgorithm); return; } return; } throw new WhitelistException( "Sending host \"" + remoteHost + "\" was not matched by whitelist."); } #location 10 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { return false; } final DslJson<Object> dslJson = new DslJson<>(Settings.withRuntime().includeServiceLoader()); Set<Type> knownEncoders = dslJson.getRegisteredEncoders(); Set<Type> knownDecoders = dslJson.getRegisteredDecoders(); Set<String> allTypes = new HashSet<>(); for (Type t : knownEncoders) { if (knownDecoders.contains(t)) { allTypes.add(t.getTypeName()); } } final Analysis analysis = new Analysis( processingEnv, annotationUsage, logLevel, allTypes, rawClass -> { try { Class<?> raw = Class.forName(rawClass); return dslJson.canSerialize(raw) && dslJson.canDeserialize(raw); } catch (Exception ignore) { return false; } }, JsonIgnore, NonNullable, PropertyAlias, JsonRequired, Constructors, Indexes, unknownTypes, false, true, true, true); Set<? extends Element> compiledJsons = roundEnv.getElementsAnnotatedWith(analysis.compiledJsonElement); Set<? extends Element> jacksonCreators = withJackson && jacksonCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jacksonCreatorElement) : new HashSet<>(); Set<? extends Element> jsonbCreators = withJsonb && jsonbCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jsonbCreatorElement) : new HashSet<>(); if (!compiledJsons.isEmpty() || !jacksonCreators.isEmpty() || !jsonbCreators.isEmpty()) { Set<? 
extends Element> jsonConverters = roundEnv.getElementsAnnotatedWith(analysis.converterElement); List<String> configurations = analysis.processConverters(jsonConverters); analysis.processAnnotation(analysis.compiledJsonType, compiledJsons); if (!jacksonCreators.isEmpty() && jacksonCreatorType != null) { analysis.processAnnotation(jacksonCreatorType, jacksonCreators); } if (!jsonbCreators.isEmpty() && jsonbCreatorType != null) { analysis.processAnnotation(jsonbCreatorType, jsonbCreators); } Map<String, StructInfo> structs = analysis.analyze(); if (analysis.hasError()) { return false; } try { String className = "dsl_json_Annotation_Processor_External_Serialization"; Writer writer = processingEnv.getFiler().createSourceFile(className).openWriter(); buildCode(writer, structs, allowInline, allTypes); writer.close(); writer = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", CONFIG).openWriter(); writer.write(className); for (String conf : configurations) { writer.write('\n'); writer.write(conf); } writer.close(); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files"); } } return false; }
#vulnerable code @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { return false; } Set<? extends Element> compiledJsons = roundEnv.getElementsAnnotatedWith(analysis.compiledJsonElement); Set<? extends Element> jacksonCreators = withJackson && jacksonCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jacksonCreatorElement) : new HashSet<>(); Set<? extends Element> jsonbCreators = withJsonb && jsonbCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jsonbCreatorElement) : new HashSet<>(); if (!compiledJsons.isEmpty() || !jacksonCreators.isEmpty() || !jsonbCreators.isEmpty()) { Set<? extends Element> jsonConverters = roundEnv.getElementsAnnotatedWith(analysis.converterElement); List<String> configurations = analysis.processConverters(jsonConverters); analysis.processAnnotation(analysis.compiledJsonType, compiledJsons); if (!jacksonCreators.isEmpty() && jacksonCreatorType != null) { analysis.processAnnotation(jacksonCreatorType, jacksonCreators); } if (!jsonbCreators.isEmpty() && jsonbCreatorType != null) { analysis.processAnnotation(jsonbCreatorType, jsonbCreators); } Map<String, StructInfo> structs = analysis.analyze(); if (analysis.hasError()) { return false; } try { String className = "dsl_json_Annotation_Processor_External_Serialization"; Writer writer = processingEnv.getFiler().createSourceFile(className).openWriter(); buildCode(writer, structs, allowInline); writer.close(); writer = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", CONFIG).openWriter(); writer.write(className); for (String conf : configurations) { writer.write('\n'); writer.write(conf); } writer.close(); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files"); } } return false; } #location 20 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code; please generate the patch based on the following information.
#fixed code @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver() || annotations.isEmpty()) { return false; } final DslJson<Object> dslJson = new DslJson<>(Settings.withRuntime().includeServiceLoader(getClass().getClassLoader())); Set<Type> knownEncoders = dslJson.getRegisteredEncoders(); Set<Type> knownDecoders = dslJson.getRegisteredDecoders(); Set<String> allTypes = new HashSet<>(); for (Type t : knownEncoders) { if (knownDecoders.contains(t)) { allTypes.add(t.getTypeName()); } } final Analysis analysis = new Analysis( processingEnv, annotationUsage, logLevel, allTypes, rawClass -> { try { Class<?> raw = Class.forName(rawClass); return dslJson.canSerialize(raw) && dslJson.canDeserialize(raw); } catch (Exception ignore) { return false; } }, JsonIgnore, NonNullable, PropertyAlias, JsonRequired, Constructors, Indexes, unknownTypes, false, true, true, true); Set<? extends Element> compiledJsons = roundEnv.getElementsAnnotatedWith(analysis.compiledJsonElement); Set<? extends Element> jacksonCreators = withJackson && jacksonCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jacksonCreatorElement) : new HashSet<>(); Set<? extends Element> jsonbCreators = withJsonb && jsonbCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jsonbCreatorElement) : new HashSet<>(); if (!compiledJsons.isEmpty() || !jacksonCreators.isEmpty() || !jsonbCreators.isEmpty()) { Set<? 
extends Element> jsonConverters = roundEnv.getElementsAnnotatedWith(analysis.converterElement); List<String> configurations = analysis.processConverters(jsonConverters); analysis.processAnnotation(analysis.compiledJsonType, compiledJsons); if (!jacksonCreators.isEmpty() && jacksonCreatorType != null) { analysis.processAnnotation(jacksonCreatorType, jacksonCreators); } if (!jsonbCreators.isEmpty() && jsonbCreatorType != null) { analysis.processAnnotation(jsonbCreatorType, jsonbCreators); } Map<String, StructInfo> structs = analysis.analyze(); if (analysis.hasError()) { return false; } final List<String> generatedFiles = new ArrayList<>(); final List<Element> originatingElements = new ArrayList<>(); for (Map.Entry<String, StructInfo> entry : structs.entrySet()) { StructInfo structInfo = entry.getValue(); if (structInfo.type == ObjectType.CLASS && structInfo.attributes.isEmpty()) { continue; } String classNamePath = findConverterName(entry.getValue()); try { JavaFileObject converterFile = processingEnv.getFiler().createSourceFile(classNamePath, structInfo.element); try (Writer writer = converterFile.openWriter()) { buildCode(writer, entry.getKey(), structInfo, structs, allTypes); generatedFiles.add(classNamePath); originatingElements.add(structInfo.element); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization file " + classNamePath); } } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed creating compiled json serialization file " + classNamePath); } } if (configurationFileName != null) { final List<String> allConfigurations = new ArrayList<>(configurations); try { FileObject configFile = processingEnv.getFiler() .createSourceFile(configurationFileName, originatingElements.toArray(new Element[0])); try (Writer writer = configFile.openWriter()) { buildRootConfiguration(writer, configurationFileName, generatedFiles); 
allConfigurations.add(configurationFileName); } catch (Exception e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving configuration file " + configurationFileName); } } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed creating configuration file " + configurationFileName); } saveToServiceConfigFile(allConfigurations); } } return false; }
#vulnerable code @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver() || annotations.isEmpty()) { return false; } final DslJson<Object> dslJson = new DslJson<>(Settings.withRuntime().includeServiceLoader(getClass().getClassLoader())); Set<Type> knownEncoders = dslJson.getRegisteredEncoders(); Set<Type> knownDecoders = dslJson.getRegisteredDecoders(); Set<String> allTypes = new HashSet<>(); for (Type t : knownEncoders) { if (knownDecoders.contains(t)) { allTypes.add(t.getTypeName()); } } final Analysis analysis = new Analysis( processingEnv, annotationUsage, logLevel, allTypes, rawClass -> { try { Class<?> raw = Class.forName(rawClass); return dslJson.canSerialize(raw) && dslJson.canDeserialize(raw); } catch (Exception ignore) { return false; } }, JsonIgnore, NonNullable, PropertyAlias, JsonRequired, Constructors, Indexes, unknownTypes, false, true, true, true); Set<? extends Element> compiledJsons = roundEnv.getElementsAnnotatedWith(analysis.compiledJsonElement); Set<? extends Element> jacksonCreators = withJackson && jacksonCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jacksonCreatorElement) : new HashSet<>(); Set<? extends Element> jsonbCreators = withJsonb && jsonbCreatorElement != null ? roundEnv.getElementsAnnotatedWith(jsonbCreatorElement) : new HashSet<>(); if (!compiledJsons.isEmpty() || !jacksonCreators.isEmpty() || !jsonbCreators.isEmpty()) { Set<? 
extends Element> jsonConverters = roundEnv.getElementsAnnotatedWith(analysis.converterElement); List<String> configurations = analysis.processConverters(jsonConverters); analysis.processAnnotation(analysis.compiledJsonType, compiledJsons); if (!jacksonCreators.isEmpty() && jacksonCreatorType != null) { analysis.processAnnotation(jacksonCreatorType, jacksonCreators); } if (!jsonbCreators.isEmpty() && jsonbCreatorType != null) { analysis.processAnnotation(jsonbCreatorType, jsonbCreators); } Map<String, StructInfo> structs = analysis.analyze(); if (analysis.hasError()) { return false; } final List<String> generatedFiles = new ArrayList<>(); final List<Element> originatingElements = new ArrayList<>(); for (Map.Entry<String, StructInfo> entry : structs.entrySet()) { StructInfo structInfo = entry.getValue(); if (structInfo.type == ObjectType.CLASS && structInfo.attributes.isEmpty()) { continue; } String classNamePath = findConverterName(entry.getValue()); try { JavaFileObject converterFile = processingEnv.getFiler().createSourceFile(classNamePath, structInfo.element); try (Writer writer = converterFile.openWriter()) { buildCode(writer, entry.getKey(), structInfo, structs, allowInline, allTypes); generatedFiles.add(classNamePath); originatingElements.add(structInfo.element); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization file " + classNamePath); } } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed creating compiled json serialization file " + classNamePath); } } if (configurationFileName != null) { final List<String> allConfigurations = new ArrayList<>(configurations); try { FileObject configFile = processingEnv.getFiler() .createSourceFile(configurationFileName, originatingElements.toArray(new Element[0])); try (Writer writer = configFile.openWriter()) { buildRootConfiguration(writer, configurationFileName, generatedFiles); 
allConfigurations.add(configurationFileName); } catch (Exception e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving configuration file " + configurationFileName); } } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed creating configuration file " + configurationFileName); } saveToServiceConfigFile(allConfigurations); } } return false; } #location 70 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { return false; } Set<? extends Element> jsonAnnotated = roundEnv.getElementsAnnotatedWith(jsonTypeElement); if (!jsonAnnotated.isEmpty()) { Map<String, StructInfo> structs = new HashMap<String, StructInfo>(); CompileOptions options = new CompileOptions(); for (Element el : jsonAnnotated) { findStructs(structs, options, el, "CompiledJson requires public no argument constructor"); } findRelatedReferences(structs, options); String dsl = buildDsl(structs, options); if (options.hasError) { return false; } processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, dsl); String fileContent; try { fileContent = AnnotationCompiler.buildExternalJson(dsl, options.toOptions(namespace)); } catch (Exception e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "DSL compilation error\n" + e.getMessage()); return false; } try { String className = namespace + ".json.ExternalSerialization"; Writer writer = processingEnv.getFiler().createSourceFile(className).openWriter(); writer.write(fileContent); writer.close(); writer = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", CONFIG).openWriter(); writer.write(className); writer.close(); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files"); } } return false; }
#vulnerable code @Override public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { return false; } Set<? extends Element> jsonAnnotated = roundEnv.getElementsAnnotatedWith(jsonTypeElement); if (!jsonAnnotated.isEmpty()) { Map<String, StructInfo> structs = new HashMap<String, StructInfo>(); CompileOptions options = new CompileOptions(); for (Element el : jsonAnnotated) { findStructs(structs, options, el, "CompiledJson requires public no argument constructor"); } findRelatedReferences(structs, options); String dsl = buildDsl(structs, options); if (options.hasError) { return false; } processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, dsl); String fileContent; try { fileContent = AnnotationCompiler.buildExternalJson(dsl, options.toOptions(namespace)); } catch (Exception e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "DSL compilation error\n" + e.getMessage()); return false; } try { JavaFileObject jfo = processingEnv.getFiler().createSourceFile("ExternalSerialization"); BufferedWriter bw = new BufferedWriter(jfo.openWriter()); bw.write(fileContent); bw.close(); FileObject rfo = processingEnv.getFiler().createResource(StandardLocation.CLASS_OUTPUT, "", "META-INF/services/com.dslplatform.json.Configuration"); bw = new BufferedWriter(rfo.openWriter()); bw.write(namespace + ".json.ExternalSerialization"); bw.close(); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, "Failed saving compiled json serialization files"); } } return false; } #location 38 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @ApiMethod(name = "processSignResponse") public List<String> processSignResponse( @Named("responseData") String responseData, User user) throws OAuthRequestException, ResponseException { if (user == null) { throw new OAuthRequestException("User is not authenticated"); } Gson gson = new Gson(); JsonElement element = gson.fromJson(responseData, JsonElement.class); JsonObject object = element.getAsJsonObject(); String clientDataJSON = object.get("clientDataJSON").getAsString(); String authenticatorData = object.get("authenticatorData").getAsString(); String credentialId = object.get("credentialId").getAsString(); String signature = object.get("signature").getAsString(); AuthenticatorAssertionResponse assertion = new AuthenticatorAssertionResponse(clientDataJSON, authenticatorData, signature); // TODO String type = null; String session = null; PublicKeyCredential cred = new PublicKeyCredential(credentialId, type, BaseEncoding.base64Url().decode(credentialId), assertion); try { U2fServer.verifyAssertion(cred, user.getEmail(), session); } catch (ServletException e) { // TODO } Credential credential = new Credential(cred); credential.save(user.getEmail()); List<String> resultList = new ArrayList<String>(); resultList.add(credential.toJson()); return resultList; }
#vulnerable code @ApiMethod(name = "processSignResponse") public List<String> processSignResponse( @Named("responseData") String responseData, User user) throws OAuthRequestException, ResponseException { if (user == null) { throw new OAuthRequestException("User is not authenticated"); } Gson gson = new Gson(); JsonElement element = gson.fromJson(responseData, JsonElement.class); JsonObject object = element.getAsJsonObject(); String clientDataJSON = object.get("clientDataJSON").getAsString(); String authenticatorData = object.get("authenticatorData").getAsString(); String signature = object.get("signature").getAsString(); AuthenticatorAssertionResponse assertion = new AuthenticatorAssertionResponse(clientDataJSON, authenticatorData, signature); // TODO String credentialId = BaseEncoding.base64Url().encode( assertion.getAuthenticatorData().getAttData().getCredentialId()); String type = null; String session = null; PublicKeyCredential cred = new PublicKeyCredential(credentialId, type, BaseEncoding.base64Url().decode(credentialId), assertion); try { U2fServer.verifyAssertion(cred, user.getEmail(), session); } catch (ServletException e) { // TODO } Credential credential = new Credential(cred); credential.save(user.getEmail()); List<String> resultList = new ArrayList<String>(); resultList.add(credential.toJson()); return resultList; } #location 21 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @ApiMethod(name = "processRegistrationResponse") public List<String> processRegistrationResponse( @Named("responseData") String responseData, User user) throws OAuthRequestException, ResponseException { if (user == null) { throw new OAuthRequestException("User is not authenticated"); } Gson gson = new Gson(); JsonElement element = gson.fromJson(responseData, JsonElement.class); JsonObject object = element.getAsJsonObject(); String clientDataJSON = object.get("clientDataJSON").getAsString(); String attestationObject = object.get("attestationObject").getAsString(); AuthenticatorAttestationResponse attestation = new AuthenticatorAttestationResponse(clientDataJSON, attestationObject); // TODO String credentialId = BaseEncoding.base64Url().encode( attestation.getAttestationObject().getAuthenticatorData().getAttData().getCredentialId()); String type = null; String session = null; PublicKeyCredential cred = new PublicKeyCredential(credentialId, type, BaseEncoding.base64Url().decode(credentialId), attestation); try { switch (cred.getAttestationType()) { case FIDOU2F: U2fServer.registerCredential(cred, user.getEmail(), session, Constants.APP_ID); break; case ANDROIDSAFETYNET: AndroidSafetyNetServer.registerCredential( cred, user.getEmail(), session, Constants.APP_ID); break; default: // This should never happen. } } catch (ServletException e) { // TODO } Credential credential = new Credential(cred); credential.save(user.getEmail()); List<String> resultList = new ArrayList<String>(); resultList.add(credential.toJson()); return resultList; }
#vulnerable code @ApiMethod(name = "processRegistrationResponse") public List<String> processRegistrationResponse( @Named("responseData") String responseData, User user) throws OAuthRequestException, ResponseException { if (user == null) { throw new OAuthRequestException("User is not authenticated"); } Gson gson = new Gson(); JsonElement element = gson.fromJson(responseData, JsonElement.class); AuthenticatorAttestationResponse attestation = new AuthenticatorAttestationResponse(element); // TODO String credentialId = BaseEncoding.base64Url().encode( attestation.getAttestationObject().getAuthenticatorData().getAttData().getCredentialId()); String type = null; String session = null; PublicKeyCredential cred = new PublicKeyCredential(credentialId, type, BaseEncoding.base64Url().decode(credentialId), attestation); try { switch (cred.getAttestationType()) { case FIDOU2F: U2fServer.registerCredential(cred, user.getEmail(), session, Constants.APP_ID); break; case ANDROIDSAFETYNET: AndroidSafetyNetServer.registerCredential( cred, user.getEmail(), session, Constants.APP_ID); break; default: // This should never happen. } } catch (ServletException e) { // TODO } Credential credential = new Credential(cred); credential.save(user.getEmail()); List<String> resultList = new ArrayList<String>(); resultList.add(credential.toJson()); return resultList; } #location 16 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @ApiMethod(name = "processSignResponse") public List<String> processSignResponse( @Named("responseData") String responseData, User user) throws OAuthRequestException, ResponseException { if (user == null) { throw new OAuthRequestException("User is not authenticated"); } Gson gson = new Gson(); JsonElement element = gson.fromJson(responseData, JsonElement.class); JsonObject object = element.getAsJsonObject(); String clientDataJSON = object.get("clientDataJSON").getAsString(); String authenticatorData = object.get("authenticatorData").getAsString(); String signature = object.get("signature").getAsString(); AuthenticatorAssertionResponse assertion = new AuthenticatorAssertionResponse(clientDataJSON, authenticatorData, signature); // TODO String credentialId = BaseEncoding.base64Url().encode( assertion.getAuthenticatorData().getAttData().getCredentialId()); String type = null; String session = null; PublicKeyCredential cred = new PublicKeyCredential(credentialId, type, BaseEncoding.base64Url().decode(credentialId), assertion); try { U2fServer.verifyAssertion(cred, user.getEmail(), session); } catch (ServletException e) { // TODO } Credential credential = new Credential(cred); credential.save(user.getEmail()); List<String> resultList = new ArrayList<String>(); resultList.add(credential.toJson()); return resultList; }
#vulnerable code @ApiMethod(name = "processSignResponse") public List<String> processSignResponse( @Named("responseData") String responseData, User user) throws OAuthRequestException, ResponseException { if (user == null) { throw new OAuthRequestException("User is not authenticated"); } AuthenticatorAssertionResponse assertion = new AuthenticatorAssertionResponse(responseData); // TODO String credentialId = BaseEncoding.base64Url().encode( assertion.getAuthenticatorData().getAttData().getCredentialId()); String type = null; String session = null; PublicKeyCredential cred = new PublicKeyCredential(credentialId, type, BaseEncoding.base64Url().decode(credentialId), assertion); try { U2fServer.verifyAssertion(cred, user.getEmail(), session); } catch (ServletException e) { // TODO } Credential credential = new Credential(cred); credential.save(user.getEmail()); List<String> resultList = new ArrayList<String>(); resultList.add(credential.toJson()); return resultList; } #location 14 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testRoot() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); assertEquals("Root Entry", root.getName()); assertTrue(root.isRoot()); assertFalse(root.isFile()); assertFalse(root.isDirectory()); assertEquals(0, root.length()); assertNull(root.getInputStream()); } }
#vulnerable code @Test public void testRoot() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); assertEquals("Root Entry", root.getName()); assertTrue(root.isRoot()); assertFalse(root.isFile()); assertFalse(root.isDirectory()); assertEquals(0, root.length()); assertNull(root.getInputStream()); } #location 13 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public int getHeight() throws IOException { if (compression == 1) { // 1 = no compression Entry height = ifd.getEntryById(TIFF.TAG_IMAGE_HEIGHT); if (height == null) { throw new IIOException("Missing dimensions for unknown EXIF thumbnail"); } return ((Number) height.getValue()).intValue(); } else if (compression == 6) { // 6 = JPEG compression return readJPEGCached(false).getHeight(); } else { throw new IIOException("Unsupported EXIF thumbnail compression (expected 1 or 6): " + compression); } }
#vulnerable code @Override public int getHeight() throws IOException { if (compression == 1) { // 1 = no compression Entry height = ifd.getEntryById(TIFF.TAG_IMAGE_HEIGHT); if (height == null) { throw new IIOException("Missing dimensions for RAW EXIF thumbnail"); } return ((Number) height.getValue()).intValue(); } else if (compression == 6) { // 6 = JPEG compression return readJPEGCached(false).getHeight(); } else { throw new IIOException("Unsupported EXIF thumbnail compression: " + compression); } } #location 13 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) { /* 36 MB test data: No compression: Write time: 450 ms output.length: 36000226 PackBits: Write time: 688 ms output.length: 30322187 Deflate, BEST_SPEED (1): Write time: 1276 ms output.length: 14128866 Deflate, 2: Write time: 1297 ms output.length: 13848735 Deflate, 3: Write time: 1594 ms output.length: 13103224 Deflate, 4: Write time: 1663 ms output.length: 13380899 (!!) 5 Write time: 1941 ms output.length: 13171244 6 Write time: 2311 ms output.length: 12845101 7: Write time: 2853 ms output.length: 12759426 8: Write time: 4429 ms output.length: 12624517 Deflate: DEFAULT_COMPRESSION (6?): Write time: 2357 ms output.length: 12845101 Deflate, BEST_COMPRESSION (9): Write time: 4998 ms output.length: 12600399 */ int samplesPerPixel = (Integer) entries.get(TIFF.TAG_SAMPLES_PER_PIXEL).getValue(); int bitPerSample = ((short[]) entries.get(TIFF.TAG_BITS_PER_SAMPLE).getValue())[0]; // Use predictor by default for LZW and ZLib/Deflate // TODO: Unless explicitly disabled in TIFFImageWriteParam int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue(); OutputStream stream; switch (compression) { case TIFFBaseline.COMPRESSION_NONE: return imageOutput; case TIFFBaseline.COMPRESSION_PACKBITS: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new PackBitsEncoder(), true); // NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default // (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step) return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_ZLIB: case TIFFExtension.COMPRESSION_DEFLATE: // NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct. 
// API Docs says: // A compression quality setting of 0.0 is most generically interpreted as "high compression is important," // while a setting of 1.0 is most generically interpreted as "high image quality is important." // However, the JAI TIFFImageWriter uses: // if (param & compression etc...) { // float quality = param.getCompressionQuality(); // deflateLevel = (int)(1 + 8*quality); // } else { // deflateLevel = Deflater.DEFAULT_COMPRESSION; // } // (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION) // PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression... if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality()); } stream = IIOUtil.createStreamAdapter(imageOutput); stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_LZW: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * samplesPerPixel * bitPerSample + 7) / 8)); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE: case 
TIFFExtension.COMPRESSION_CCITT_T4: case TIFFExtension.COMPRESSION_CCITT_T6: long option = 0L; if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) { option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue(); } Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER); int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT); stream = IIOUtil.createStreamAdapter(imageOutput); stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option); return new DataOutputStream(stream); } throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression)); }
#vulnerable code private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) { /* 36 MB test data: No compression: Write time: 450 ms output.length: 36000226 PackBits: Write time: 688 ms output.length: 30322187 Deflate, BEST_SPEED (1): Write time: 1276 ms output.length: 14128866 Deflate, 2: Write time: 1297 ms output.length: 13848735 Deflate, 3: Write time: 1594 ms output.length: 13103224 Deflate, 4: Write time: 1663 ms output.length: 13380899 (!!) 5 Write time: 1941 ms output.length: 13171244 6 Write time: 2311 ms output.length: 12845101 7: Write time: 2853 ms output.length: 12759426 8: Write time: 4429 ms output.length: 12624517 Deflate: DEFAULT_COMPRESSION (6?): Write time: 2357 ms output.length: 12845101 Deflate, BEST_COMPRESSION (9): Write time: 4998 ms output.length: 12600399 */ // Use predictor by default for LZW and ZLib/Deflate // TODO: Unless explicitly disabled in TIFFImageWriteParam int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue(); OutputStream stream; switch (compression) { case TIFFBaseline.COMPRESSION_NONE: return imageOutput; case TIFFBaseline.COMPRESSION_PACKBITS: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new PackBitsEncoder(), true); // NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default // (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step) return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_ZLIB: case TIFFExtension.COMPRESSION_DEFLATE: // NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct. // API Docs says: // A compression quality setting of 0.0 is most generically interpreted as "high compression is important," // while a setting of 1.0 is most generically interpreted as "high image quality is important." 
// However, the JAI TIFFImageWriter uses: // if (param & compression etc...) { // float quality = param.getCompressionQuality(); // deflateLevel = (int)(1 + 8*quality); // } else { // deflateLevel = Deflater.DEFAULT_COMPRESSION; // } // (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION) // PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression... if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality()); } stream = IIOUtil.createStreamAdapter(imageOutput); stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_LZW: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * image.getColorModel().getPixelSize() + 7) / 8)); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE: case TIFFExtension.COMPRESSION_CCITT_T4: case TIFFExtension.COMPRESSION_CCITT_T6: long option = 0L; if (compression != 
TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) { option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue(); } Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER); int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT); stream = IIOUtil.createStreamAdapter(imageOutput); stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option); return new DataOutputStream(stream); } throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression)); } #location 89 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void writeBody(ByteArrayOutputStream pImageData) throws IOException { imageOutput.writeInt(IFF.CHUNK_BODY); imageOutput.writeInt(pImageData.size()); // NOTE: This is much faster than imageOutput.write(pImageData.toByteArray()) // as the data array is not duplicated OutputStream adapter = IIOUtil.createStreamAdapter(imageOutput); try { pImageData.writeTo(adapter); } finally { adapter.close(); } if (pImageData.size() % 2 == 0) { imageOutput.writeByte(0); // PAD } imageOutput.flush(); }
#vulnerable code private void writeBody(ByteArrayOutputStream pImageData) throws IOException { imageOutput.writeInt(IFF.CHUNK_BODY); imageOutput.writeInt(pImageData.size()); // NOTE: This is much faster than mOutput.write(pImageData.toByteArray()) // as the data array is not duplicated pImageData.writeTo(IIOUtil.createStreamAdapter(imageOutput)); if (pImageData.size() % 2 == 0) { imageOutput.writeByte(0); // PAD } imageOutput.flush(); } #location 7 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testRoot() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); assertEquals("Root Entry", root.getName()); assertTrue(root.isRoot()); assertFalse(root.isFile()); assertFalse(root.isDirectory()); assertEquals(0, root.length()); assertNull(root.getInputStream()); } }
#vulnerable code @Test public void testRoot() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); assertEquals("Root Entry", root.getName()); assertTrue(root.isRoot()); assertFalse(root.isFile()); assertFalse(root.isDirectory()); assertEquals(0, root.length()); assertNull(root.getInputStream()); } #location 5 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override protected IIOMetadataNode getStandardChromaNode() { IIOMetadataNode chroma = new IIOMetadataNode("Chroma"); // Handle ColorSpaceType (RGB/CMYK/YCbCr etc)... Entry photometricTag = ifd.getEntryById(TIFF.TAG_PHOTOMETRIC_INTERPRETATION); int photometricValue = getValueAsInt(photometricTag); // No default for this tag! int numChannelsValue = getSamplesPerPixelWithFallback(); IIOMetadataNode colorSpaceType = new IIOMetadataNode("ColorSpaceType"); chroma.appendChild(colorSpaceType); switch (photometricValue) { case TIFFBaseline.PHOTOMETRIC_WHITE_IS_ZERO: case TIFFBaseline.PHOTOMETRIC_BLACK_IS_ZERO: case TIFFBaseline.PHOTOMETRIC_MASK: // It's really a transparency mask/alpha channel, but... colorSpaceType.setAttribute("value", "GRAY"); break; case TIFFBaseline.PHOTOMETRIC_RGB: case TIFFBaseline.PHOTOMETRIC_PALETTE: colorSpaceType.setAttribute("value", "RGB"); break; case TIFFExtension.PHOTOMETRIC_YCBCR: colorSpaceType.setAttribute("value", "YCbCr"); break; case TIFFExtension.PHOTOMETRIC_CIELAB: case TIFFExtension.PHOTOMETRIC_ICCLAB: case TIFFExtension.PHOTOMETRIC_ITULAB: colorSpaceType.setAttribute("value", "Lab"); break; case TIFFExtension.PHOTOMETRIC_SEPARATED: // TODO: May be CMYK, or something else... Consult InkSet and NumberOfInks! if (numChannelsValue == 3) { colorSpaceType.setAttribute("value", "CMY"); } else { colorSpaceType.setAttribute("value", "CMYK"); } break; case TIFFCustom.PHOTOMETRIC_LOGL: // ..? case TIFFCustom.PHOTOMETRIC_LOGLUV: colorSpaceType.setAttribute("value", "Luv"); break; case TIFFCustom.PHOTOMETRIC_CFA: case TIFFCustom.PHOTOMETRIC_LINEAR_RAW: // ...or is this RGB? 
colorSpaceType.setAttribute("value", "3CLR"); break; default: colorSpaceType.setAttribute("value", Integer.toHexString(numChannelsValue) + "CLR"); break; } // NumChannels IIOMetadataNode numChannels = new IIOMetadataNode("NumChannels"); chroma.appendChild(numChannels); if (photometricValue == TIFFBaseline.PHOTOMETRIC_PALETTE) { numChannels.setAttribute("value", "3"); } else { numChannels.setAttribute("value", Integer.toString(numChannelsValue)); } // BlackIsZero (defaults to TRUE) IIOMetadataNode blackIsZero = new IIOMetadataNode("BlackIsZero"); chroma.appendChild(blackIsZero); switch (photometricValue) { case TIFFBaseline.PHOTOMETRIC_WHITE_IS_ZERO: blackIsZero.setAttribute("value", "FALSE"); break; default: break; } Entry colorMapTag = ifd.getEntryById(TIFF.TAG_COLOR_MAP); if (colorMapTag != null) { int[] colorMapValues = (int[]) colorMapTag.getValue(); IIOMetadataNode palette = new IIOMetadataNode("Palette"); chroma.appendChild(palette); int count = colorMapValues.length / 3; for (int i = 0; i < count; i++) { IIOMetadataNode paletteEntry = new IIOMetadataNode("PaletteEntry"); paletteEntry.setAttribute("index", Integer.toString(i)); // TODO: See TIFFImageReader createIndexColorModel, to detect 8 bit colorMap paletteEntry.setAttribute("red", Integer.toString((colorMapValues[i] >> 8) & 0xff)); paletteEntry.setAttribute("green", Integer.toString((colorMapValues[i + count] >> 8) & 0xff)); paletteEntry.setAttribute("blue", Integer.toString((colorMapValues[i + count * 2] >> 8) & 0xff)); palette.appendChild(paletteEntry); } } return chroma; }
#vulnerable code @Override protected IIOMetadataNode getStandardChromaNode() { IIOMetadataNode chroma = new IIOMetadataNode("Chroma"); // Handle ColorSpaceType (RGB/CMYK/YCbCr etc)... Entry photometricTag = ifd.getEntryById(TIFF.TAG_PHOTOMETRIC_INTERPRETATION); int photometricValue = getValueAsInt(photometricTag); // No default for this tag! Entry samplesPerPixelTag = ifd.getEntryById(TIFF.TAG_SAMPLES_PER_PIXEL); Entry bitsPerSampleTag = ifd.getEntryById(TIFF.TAG_BITS_PER_SAMPLE); int numChannelsValue = samplesPerPixelTag != null ? getValueAsInt(samplesPerPixelTag) : bitsPerSampleTag.valueCount(); IIOMetadataNode colorSpaceType = new IIOMetadataNode("ColorSpaceType"); chroma.appendChild(colorSpaceType); switch (photometricValue) { case TIFFBaseline.PHOTOMETRIC_WHITE_IS_ZERO: case TIFFBaseline.PHOTOMETRIC_BLACK_IS_ZERO: case TIFFBaseline.PHOTOMETRIC_MASK: // It's really a transparency mask/alpha channel, but... colorSpaceType.setAttribute("value", "GRAY"); break; case TIFFBaseline.PHOTOMETRIC_RGB: case TIFFBaseline.PHOTOMETRIC_PALETTE: colorSpaceType.setAttribute("value", "RGB"); break; case TIFFExtension.PHOTOMETRIC_YCBCR: colorSpaceType.setAttribute("value", "YCbCr"); break; case TIFFExtension.PHOTOMETRIC_CIELAB: case TIFFExtension.PHOTOMETRIC_ICCLAB: case TIFFExtension.PHOTOMETRIC_ITULAB: colorSpaceType.setAttribute("value", "Lab"); break; case TIFFExtension.PHOTOMETRIC_SEPARATED: // TODO: May be CMYK, or something else... Consult InkSet and NumberOfInks! if (numChannelsValue == 3) { colorSpaceType.setAttribute("value", "CMY"); } else { colorSpaceType.setAttribute("value", "CMYK"); } break; case TIFFCustom.PHOTOMETRIC_LOGL: // ..? case TIFFCustom.PHOTOMETRIC_LOGLUV: colorSpaceType.setAttribute("value", "Luv"); break; case TIFFCustom.PHOTOMETRIC_CFA: case TIFFCustom.PHOTOMETRIC_LINEAR_RAW: // ...or is this RGB? 
colorSpaceType.setAttribute("value", "3CLR"); break; default: colorSpaceType.setAttribute("value", Integer.toHexString(numChannelsValue) + "CLR"); break; } // NumChannels IIOMetadataNode numChannels = new IIOMetadataNode("NumChannels"); chroma.appendChild(numChannels); if (photometricValue == TIFFBaseline.PHOTOMETRIC_PALETTE) { numChannels.setAttribute("value", "3"); } else { numChannels.setAttribute("value", Integer.toString(numChannelsValue)); } // BlackIsZero (defaults to TRUE) IIOMetadataNode blackIsZero = new IIOMetadataNode("BlackIsZero"); chroma.appendChild(blackIsZero); switch (photometricValue) { case TIFFBaseline.PHOTOMETRIC_WHITE_IS_ZERO: blackIsZero.setAttribute("value", "FALSE"); break; default: break; } Entry colorMapTag = ifd.getEntryById(TIFF.TAG_COLOR_MAP); if (colorMapTag != null) { int[] colorMapValues = (int[]) colorMapTag.getValue(); IIOMetadataNode palette = new IIOMetadataNode("Palette"); chroma.appendChild(palette); int count = colorMapValues.length / 3; for (int i = 0; i < count; i++) { IIOMetadataNode paletteEntry = new IIOMetadataNode("PaletteEntry"); paletteEntry.setAttribute("index", Integer.toString(i)); // TODO: See TIFFImageReader createIndexColorModel, to detect 8 bit colorMap paletteEntry.setAttribute("red", Integer.toString((colorMapValues[i] >> 8) & 0xff)); paletteEntry.setAttribute("green", Integer.toString((colorMapValues[i + count] >> 8) & 0xff)); paletteEntry.setAttribute("blue", Integer.toString((colorMapValues[i + count * 2] >> 8) & 0xff)); palette.appendChild(paletteEntry); } } return chroma; } #location 13 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testEOFExceptionInSegmentParsingShouldNotCreateBadState() throws IOException { ImageInputStream iis = new JPEGSegmentImageInputStream(ImageIO.createImageInputStream(getClassLoaderResource("/broken-jpeg/broken-no-sof-ascii-transfer-mode.jpg"))); byte[] buffer = new byte[4096]; // NOTE: This is a simulation of how the native parts of com.sun...JPEGImageReader would read the image... assertEquals(2, iis.read(buffer, 0, buffer.length)); assertEquals(2, iis.getStreamPosition()); iis.seek(0x2012); // bad segment length, should have been 0x0012, not 0x2012 assertEquals(0x2012, iis.getStreamPosition()); // So far, so good (but stream position is now really beyond EOF)... // This however, will blow up with an EOFException internally (but we'll return -1 to be good) assertEquals(-1, iis.read(buffer, 0, buffer.length)); assertEquals(-1, iis.read()); assertEquals(0x2012, iis.getStreamPosition()); // Again, should just continue returning -1 for ever assertEquals(-1, iis.read(buffer, 0, buffer.length)); assertEquals(-1, iis.read()); assertEquals(0x2012, iis.getStreamPosition()); }
#vulnerable code @Test public void testEOFExceptionInSegmentParsingShouldNotCreateBadState() throws IOException { ImageInputStream iis = new JPEGSegmentImageInputStream(ImageIO.createImageInputStream(getClassLoaderResource("/broken-jpeg/broken-no-sof-ascii-transfer-mode.jpg"))); byte[] buffer = new byte[4096]; // NOTE: This is a simulation of how the native parts of com.sun...JPEGImageReader would read the image... assertEquals(2, iis.read(buffer, 0, buffer.length)); assertEquals(2, iis.getStreamPosition()); iis.seek(0x2012); // bad segment length, should have been 0x0012, not 0x2012 assertEquals(0x2012, iis.getStreamPosition()); // So far, so good (but stream position is now really beyond EOF)... // This however, will blow up with an EOFException internally (but we'll return -1 to be good) assertEquals(-1, iis.read(buffer, 0, buffer.length)); assertEquals(0x2012, iis.getStreamPosition()); // Again, should just continue returning -1 for ever assertEquals(-1, iis.read(buffer, 0, buffer.length)); assertEquals(0x2012, iis.getStreamPosition()); } #location 12 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testReadThumbsCatalogFile() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); assertEquals(25, root.getChildEntries().size()); Entry catalog = root.getChildEntry("Catalog"); assertNotNull(catalog); assertNotNull("Input stream may not be null", catalog.getInputStream()); } }
#vulnerable code @Test public void testReadThumbsCatalogFile() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); assertEquals(25, root.getChildEntries().size()); Entry catalog = root.getChildEntry("Catalog"); assertNotNull(catalog); assertNotNull("Input stream may not be null", catalog.getInputStream()); } #location 10 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testContents() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries()); assertEquals(25, children.size()); // Weirdness in the file format, name is *written backwards* 1-24 + Catalog for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) { assertEquals(name, children.first().getName()); children.remove(children.first()); } } }
#vulnerable code @Test public void testContents() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries()); assertEquals(25, children.size()); // Weirdness in the file format, name is *written backwards* 1-24 + Catalog for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) { assertEquals(name, children.first().getName()); children.remove(children.first()); } } #location 5 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test(expected = UnsupportedOperationException.class) public void testChildEntriesUnmodifiable() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = root.getChildEntries(); // Should not be allowed, as it modifies the internal structure children.remove(children.first()); } }
#vulnerable code @Test(expected = UnsupportedOperationException.class) public void testChildEntriesUnmodifiable() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = root.getChildEntries(); // Should not be allowed, as it modifies the internal structure children.remove(children.first()); } #location 9 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) { /* 36 MB test data: No compression: Write time: 450 ms output.length: 36000226 PackBits: Write time: 688 ms output.length: 30322187 Deflate, BEST_SPEED (1): Write time: 1276 ms output.length: 14128866 Deflate, 2: Write time: 1297 ms output.length: 13848735 Deflate, 3: Write time: 1594 ms output.length: 13103224 Deflate, 4: Write time: 1663 ms output.length: 13380899 (!!) 5 Write time: 1941 ms output.length: 13171244 6 Write time: 2311 ms output.length: 12845101 7: Write time: 2853 ms output.length: 12759426 8: Write time: 4429 ms output.length: 12624517 Deflate: DEFAULT_COMPRESSION (6?): Write time: 2357 ms output.length: 12845101 Deflate, BEST_COMPRESSION (9): Write time: 4998 ms output.length: 12600399 */ int samplesPerPixel = (Integer) entries.get(TIFF.TAG_SAMPLES_PER_PIXEL).getValue(); int bitPerSample = ((short[]) entries.get(TIFF.TAG_BITS_PER_SAMPLE).getValue())[0]; // Use predictor by default for LZW and ZLib/Deflate // TODO: Unless explicitly disabled in TIFFImageWriteParam int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue(); OutputStream stream; switch (compression) { case TIFFBaseline.COMPRESSION_NONE: return imageOutput; case TIFFBaseline.COMPRESSION_PACKBITS: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new PackBitsEncoder(), true); // NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default // (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step) return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_ZLIB: case TIFFExtension.COMPRESSION_DEFLATE: // NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct. 
// API Docs says: // A compression quality setting of 0.0 is most generically interpreted as "high compression is important," // while a setting of 1.0 is most generically interpreted as "high image quality is important." // However, the JAI TIFFImageWriter uses: // if (param & compression etc...) { // float quality = param.getCompressionQuality(); // deflateLevel = (int)(1 + 8*quality); // } else { // deflateLevel = Deflater.DEFAULT_COMPRESSION; // } // (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION) // PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression... if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality()); } stream = IIOUtil.createStreamAdapter(imageOutput); stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_LZW: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * samplesPerPixel * bitPerSample + 7) / 8)); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE: case 
TIFFExtension.COMPRESSION_CCITT_T4: case TIFFExtension.COMPRESSION_CCITT_T6: long option = 0L; if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) { option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue(); } Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER); int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT); stream = IIOUtil.createStreamAdapter(imageOutput); stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option); return new DataOutputStream(stream); } throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression)); }
#vulnerable code private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) { /* 36 MB test data: No compression: Write time: 450 ms output.length: 36000226 PackBits: Write time: 688 ms output.length: 30322187 Deflate, BEST_SPEED (1): Write time: 1276 ms output.length: 14128866 Deflate, 2: Write time: 1297 ms output.length: 13848735 Deflate, 3: Write time: 1594 ms output.length: 13103224 Deflate, 4: Write time: 1663 ms output.length: 13380899 (!!) 5 Write time: 1941 ms output.length: 13171244 6 Write time: 2311 ms output.length: 12845101 7: Write time: 2853 ms output.length: 12759426 8: Write time: 4429 ms output.length: 12624517 Deflate: DEFAULT_COMPRESSION (6?): Write time: 2357 ms output.length: 12845101 Deflate, BEST_COMPRESSION (9): Write time: 4998 ms output.length: 12600399 */ // Use predictor by default for LZW and ZLib/Deflate // TODO: Unless explicitly disabled in TIFFImageWriteParam int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue(); OutputStream stream; switch (compression) { case TIFFBaseline.COMPRESSION_NONE: return imageOutput; case TIFFBaseline.COMPRESSION_PACKBITS: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new PackBitsEncoder(), true); // NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default // (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step) return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_ZLIB: case TIFFExtension.COMPRESSION_DEFLATE: // NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct. // API Docs says: // A compression quality setting of 0.0 is most generically interpreted as "high compression is important," // while a setting of 1.0 is most generically interpreted as "high image quality is important." 
// However, the JAI TIFFImageWriter uses: // if (param & compression etc...) { // float quality = param.getCompressionQuality(); // deflateLevel = (int)(1 + 8*quality); // } else { // deflateLevel = Deflater.DEFAULT_COMPRESSION; // } // (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION) // PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression... if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality()); } stream = IIOUtil.createStreamAdapter(imageOutput); stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_LZW: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * image.getColorModel().getPixelSize() + 7) / 8)); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE: case TIFFExtension.COMPRESSION_CCITT_T4: case TIFFExtension.COMPRESSION_CCITT_T6: long option = 0L; if (compression != 
TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) { option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue(); } Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER); int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT); stream = IIOUtil.createStreamAdapter(imageOutput); stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option); return new DataOutputStream(stream); } throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression)); } #location 91 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testContents() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries()); assertEquals(25, children.size()); // Weirdness in the file format, name is *written backwards* 1-24 + Catalog for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) { assertEquals(name, children.first().getName()); children.remove(children.first()); } } }
#vulnerable code @Test public void testContents() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = new TreeSet<Entry>(root.getChildEntries()); assertEquals(25, children.size()); // Weirdness in the file format, name is *written backwards* 1-24 + Catalog for (String name : "1,2,3,4,5,6,7,8,9,01,02,11,12,21,22,31,32,41,42,51,61,71,81,91,Catalog".split(",")) { assertEquals(name, children.first().getName()); children.remove(children.first()); } } #location 9 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static ColorSpace getColorSpace(int colorSpace) { ICC_Profile profile; switch (colorSpace) { case CS_ADOBE_RGB_1998: synchronized (ColorSpaces.class) { profile = adobeRGB1998.get(); if (profile == null) { // Try to get system default or user-defined profile profile = readProfileFromPath(Profiles.getPath("ADOBE_RGB_1998")); if (profile == null) { // Fall back to the bundled ClayRGB1998 public domain Adobe RGB 1998 compatible profile, // which is identical for all practical purposes profile = readProfileFromClasspathResource("/profiles/ClayRGB1998.icc"); if (profile == null) { // Should never happen given we now bundle fallback profile... throw new IllegalStateException("Could not read AdobeRGB1998 profile"); } } if (profile.getColorSpaceType() != ColorSpace.TYPE_RGB) { throw new IllegalStateException("Configured AdobeRGB1998 profile is not TYPE_RGB"); } adobeRGB1998 = new WeakReference<>(profile); } } return createColorSpace(profile); case CS_GENERIC_CMYK: synchronized (ColorSpaces.class) { profile = genericCMYK.get(); if (profile == null) { // Try to get system default or user-defined profile profile = readProfileFromPath(Profiles.getPath("GENERIC_CMYK")); if (profile == null) { if (DEBUG) { System.out.println("Using fallback profile"); } // Fall back to generic CMYK ColorSpace, which is *insanely slow* using ColorConvertOp... :-P return CMYKColorSpace.getInstance(); } if (profile.getColorSpaceType() != ColorSpace.TYPE_CMYK) { throw new IllegalStateException("Configured Generic CMYK profile is not TYPE_CMYK"); } genericCMYK = new WeakReference<>(profile); } } return createColorSpace(profile); default: // Default cases for convenience return ColorSpace.getInstance(colorSpace); } }
#vulnerable code public static ColorSpace getColorSpace(int colorSpace) { ICC_Profile profile; switch (colorSpace) { case CS_ADOBE_RGB_1998: synchronized (ColorSpaces.class) { profile = adobeRGB1998.get(); if (profile == null) { // Try to get system default or user-defined profile profile = readProfileFromPath(Profiles.getPath("ADOBE_RGB_1998")); if (profile == null) { // Fall back to the bundled ClayRGB1998 public domain Adobe RGB 1998 compatible profile, // which is identical for all practical purposes profile = readProfileFromClasspathResource("/profiles/ClayRGB1998.icc"); if (profile == null) { // Should never happen given we now bundle fallback profile... throw new IllegalStateException("Could not read AdobeRGB1998 profile"); } } adobeRGB1998 = new WeakReference<>(profile); } } return createColorSpace(profile); case CS_GENERIC_CMYK: synchronized (ColorSpaces.class) { profile = genericCMYK.get(); if (profile == null) { // Try to get system default or user-defined profile profile = readProfileFromPath(Profiles.getPath("GENERIC_CMYK")); if (profile == null) { if (DEBUG) { System.out.println("Using fallback profile"); } // Fall back to generic CMYK ColorSpace, which is *insanely slow* using ColorConvertOp... :-P return CMYKColorSpace.getInstance(); } genericCMYK = new WeakReference<>(profile); } } return createColorSpace(profile); default: // Default cases for convenience return ColorSpace.getInstance(colorSpace); } } #location 28 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) { /* 36 MB test data: No compression: Write time: 450 ms output.length: 36000226 PackBits: Write time: 688 ms output.length: 30322187 Deflate, BEST_SPEED (1): Write time: 1276 ms output.length: 14128866 Deflate, 2: Write time: 1297 ms output.length: 13848735 Deflate, 3: Write time: 1594 ms output.length: 13103224 Deflate, 4: Write time: 1663 ms output.length: 13380899 (!!) 5 Write time: 1941 ms output.length: 13171244 6 Write time: 2311 ms output.length: 12845101 7: Write time: 2853 ms output.length: 12759426 8: Write time: 4429 ms output.length: 12624517 Deflate: DEFAULT_COMPRESSION (6?): Write time: 2357 ms output.length: 12845101 Deflate, BEST_COMPRESSION (9): Write time: 4998 ms output.length: 12600399 */ int samplesPerPixel = (Integer) entries.get(TIFF.TAG_SAMPLES_PER_PIXEL).getValue(); int bitPerSample = ((short[]) entries.get(TIFF.TAG_BITS_PER_SAMPLE).getValue())[0]; // Use predictor by default for LZW and ZLib/Deflate // TODO: Unless explicitly disabled in TIFFImageWriteParam int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue(); OutputStream stream; switch (compression) { case TIFFBaseline.COMPRESSION_NONE: return imageOutput; case TIFFBaseline.COMPRESSION_PACKBITS: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new PackBitsEncoder(), true); // NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default // (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step) return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_ZLIB: case TIFFExtension.COMPRESSION_DEFLATE: // NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct. 
// API Docs says: // A compression quality setting of 0.0 is most generically interpreted as "high compression is important," // while a setting of 1.0 is most generically interpreted as "high image quality is important." // However, the JAI TIFFImageWriter uses: // if (param & compression etc...) { // float quality = param.getCompressionQuality(); // deflateLevel = (int)(1 + 8*quality); // } else { // deflateLevel = Deflater.DEFAULT_COMPRESSION; // } // (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION) // PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression... if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality()); } stream = IIOUtil.createStreamAdapter(imageOutput); stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_LZW: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * samplesPerPixel * bitPerSample + 7) / 8)); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), samplesPerPixel, bitPerSample, imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE: case 
TIFFExtension.COMPRESSION_CCITT_T4: case TIFFExtension.COMPRESSION_CCITT_T6: long option = 0L; if (compression != TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) { option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue(); } Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER); int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT); stream = IIOUtil.createStreamAdapter(imageOutput); stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option); return new DataOutputStream(stream); } throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression)); }
#vulnerable code private DataOutput createCompressorStream(final RenderedImage image, final ImageWriteParam param, final Map<Integer, Entry> entries) { /* 36 MB test data: No compression: Write time: 450 ms output.length: 36000226 PackBits: Write time: 688 ms output.length: 30322187 Deflate, BEST_SPEED (1): Write time: 1276 ms output.length: 14128866 Deflate, 2: Write time: 1297 ms output.length: 13848735 Deflate, 3: Write time: 1594 ms output.length: 13103224 Deflate, 4: Write time: 1663 ms output.length: 13380899 (!!) 5 Write time: 1941 ms output.length: 13171244 6 Write time: 2311 ms output.length: 12845101 7: Write time: 2853 ms output.length: 12759426 8: Write time: 4429 ms output.length: 12624517 Deflate: DEFAULT_COMPRESSION (6?): Write time: 2357 ms output.length: 12845101 Deflate, BEST_COMPRESSION (9): Write time: 4998 ms output.length: 12600399 */ // Use predictor by default for LZW and ZLib/Deflate // TODO: Unless explicitly disabled in TIFFImageWriteParam int compression = (int) entries.get(TIFF.TAG_COMPRESSION).getValue(); OutputStream stream; switch (compression) { case TIFFBaseline.COMPRESSION_NONE: return imageOutput; case TIFFBaseline.COMPRESSION_PACKBITS: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new PackBitsEncoder(), true); // NOTE: PackBits + Predictor is possible, but not generally supported, disable it by default // (and probably not even allow it, see http://stackoverflow.com/questions/20337400/tiff-packbits-compression-with-predictor-step) return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_ZLIB: case TIFFExtension.COMPRESSION_DEFLATE: // NOTE: This interpretation does the opposite of the JAI TIFFImageWriter, but seems more correct. // API Docs says: // A compression quality setting of 0.0 is most generically interpreted as "high compression is important," // while a setting of 1.0 is most generically interpreted as "high image quality is important." 
// However, the JAI TIFFImageWriter uses: // if (param & compression etc...) { // float quality = param.getCompressionQuality(); // deflateLevel = (int)(1 + 8*quality); // } else { // deflateLevel = Deflater.DEFAULT_COMPRESSION; // } // (in other words, 0.0 means 1 == BEST_SPEED, 1.0 means 9 == BEST_COMPRESSION) // PS: PNGImageWriter just uses hardcoded BEST_COMPRESSION... :-P int deflateSetting = Deflater.BEST_SPEED; // This is consistent with default compression quality being 1.0 and 0 meaning max compression... if (param.getCompressionMode() == ImageWriteParam.MODE_EXPLICIT) { deflateSetting = Deflater.BEST_COMPRESSION - Math.round((Deflater.BEST_COMPRESSION - 1) * param.getCompressionQuality()); } stream = IIOUtil.createStreamAdapter(imageOutput); stream = new DeflaterOutputStream(stream, new Deflater(deflateSetting), 1024); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFExtension.COMPRESSION_LZW: stream = IIOUtil.createStreamAdapter(imageOutput); stream = new EncoderStream(stream, new LZWEncoder((image.getTileWidth() * image.getTileHeight() * image.getColorModel().getPixelSize() + 7) / 8)); if (entries.containsKey(TIFF.TAG_PREDICTOR) && entries.get(TIFF.TAG_PREDICTOR).getValue().equals(TIFFExtension.PREDICTOR_HORIZONTAL_DIFFERENCING)) { stream = new HorizontalDifferencingStream(stream, image.getTileWidth(), image.getTile(0, 0).getNumBands(), image.getColorModel().getComponentSize(0), imageOutput.getByteOrder()); } return new DataOutputStream(stream); case TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE: case TIFFExtension.COMPRESSION_CCITT_T4: case TIFFExtension.COMPRESSION_CCITT_T6: long option = 0L; if (compression != 
TIFFBaseline.COMPRESSION_CCITT_MODIFIED_HUFFMAN_RLE) { option = (long) entries.get(compression == TIFFExtension.COMPRESSION_CCITT_T4 ? TIFF.TAG_GROUP3OPTIONS : TIFF.TAG_GROUP4OPTIONS).getValue(); } Entry fillOrderEntry = entries.get(TIFF.TAG_FILL_ORDER); int fillOrder = (int) (fillOrderEntry != null ? fillOrderEntry.getValue() : TIFFBaseline.FILL_LEFT_TO_RIGHT); stream = IIOUtil.createStreamAdapter(imageOutput); stream = new CCITTFaxEncoderStream(stream, image.getTileWidth(), image.getTileHeight(), compression, fillOrder, option); return new DataOutputStream(stream); } throw new IllegalArgumentException(String.format("Unsupported TIFF compression: %d", compression)); } #location 91 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private CompoundDirectory getExif() throws IOException { List<Application> exifSegments = getAppSegments(JPEG.APP1, "Exif"); if (!exifSegments.isEmpty()) { Application exif = exifSegments.get(0); int offset = exif.identifier.length() + 2; // Incl. pad if (exif.data.length <= offset) { processWarningOccurred("Exif chunk has no data."); } else { // TODO: Consider returning ByteArrayImageInputStream from Segment.data() try (ImageInputStream stream = new ByteArrayImageInputStream(exif.data, offset, exif.data.length - offset)) { return (CompoundDirectory) new TIFFReader().read(stream); } } } return null; }
#vulnerable code private CompoundDirectory getExif() throws IOException { List<Application> exifSegments = getAppSegments(JPEG.APP1, "Exif"); if (!exifSegments.isEmpty()) { Application exif = exifSegments.get(0); InputStream data = exif.data(); if (data.read() == -1) { // Read pad processWarningOccurred("Exif chunk has no data."); } else { ImageInputStream stream = new MemoryCacheImageInputStream(data); return (CompoundDirectory) new TIFFReader().read(stream); // TODO: Directory offset of thumbnail is wrong/relative to container stream, causing trouble for the TIFFReader... } } return null; } #location 13 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test(expected = UnsupportedOperationException.class) public void testChildEntriesUnmodifiable() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = root.getChildEntries(); // Should not be allowed, as it modifies the internal structure children.remove(children.first()); } }
#vulnerable code @Test(expected = UnsupportedOperationException.class) public void testChildEntriesUnmodifiable() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); SortedSet<Entry> children = root.getChildEntries(); // Should not be allowed, as it modifies the internal structure children.remove(children.first()); } #location 5 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public static ICC_ColorSpace createColorSpace(final ICC_Profile profile) { Validate.notNull(profile, "profile"); // Fix profile before lookup/create profileCleaner.fixProfile(profile); byte[] profileHeader = getProfileHeaderWithProfileId(profile); ICC_ColorSpace cs = getInternalCS(profile.getColorSpaceType(), profileHeader); if (cs != null) { return cs; } return getCachedOrCreateCS(profile, profileHeader); }
#vulnerable code public static ICC_ColorSpace createColorSpace(final ICC_Profile profile) { Validate.notNull(profile, "profile"); byte[] profileHeader = profile.getData(ICC_Profile.icSigHead); ICC_ColorSpace cs = getInternalCS(profile.getColorSpaceType(), profileHeader); if (cs != null) { return cs; } // Special case for color profiles with rendering intent != 0, see isOffendingColorProfile method // NOTE: Rendering intent is really a 4 byte value, but legal values are 0-3 (ICC1v42_2006_05_1.pdf, 7.2.15, p. 19) if (profileHeader[ICC_Profile.icHdrRenderingIntent] != 0) { profileHeader[ICC_Profile.icHdrRenderingIntent] = 0; // Test again if this is an internal CS cs = getInternalCS(profile.getColorSpaceType(), profileHeader); if (cs != null) { return cs; } // Fix profile before lookup/create profileCleaner.fixProfile(profile, profileHeader); } else { profileCleaner.fixProfile(profile, null); } return getCachedOrCreateCS(profile, profileHeader); } #location 14 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testReadThumbsCatalogFile() throws IOException { try (CompoundDocument document = createTestDocument()) { Entry root = document.getRootEntry(); assertNotNull(root); assertEquals(25, root.getChildEntries().size()); Entry catalog = root.getChildEntry("Catalog"); assertNotNull(catalog); assertNotNull("Input stream may not be null", catalog.getInputStream()); } }
#vulnerable code @Test public void testReadThumbsCatalogFile() throws IOException { CompoundDocument document = createTestDocument(); Entry root = document.getRootEntry(); assertNotNull(root); assertEquals(25, root.getChildEntries().size()); Entry catalog = root.getChildEntry("Catalog"); assertNotNull(catalog); assertNotNull("Input stream may not be null", catalog.getInputStream()); } #location 14 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public int getWidth() throws IOException { if (compression == 1) { // 1 = no compression Entry width = ifd.getEntryById(TIFF.TAG_IMAGE_WIDTH); if (width == null) { throw new IIOException("Missing dimensions for unknown EXIF thumbnail"); } return ((Number) width.getValue()).intValue(); } else if (compression == 6) { // 6 = JPEG compression return readJPEGCached(false).getWidth(); } else { throw new IIOException("Unsupported EXIF thumbnail compression (expected 1 or 6): " + compression); } }
#vulnerable code @Override public int getWidth() throws IOException { if (compression == 1) { // 1 = no compression Entry width = ifd.getEntryById(TIFF.TAG_IMAGE_WIDTH); if (width == null) { throw new IIOException("Missing dimensions for RAW EXIF thumbnail"); } return ((Number) width.getValue()).intValue(); } else if (compression == 6) { // 6 = JPEG compression return readJPEGCached(false).getWidth(); } else { throw new IIOException("Unsupported EXIF thumbnail compression: " + compression); } } #location 13 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @DELETE @Path("/reports/{name}") public void deleteReport(@PathParam("name") String name) { try { ItemCollection itemCol = reportService.findReport(name); entityService.remove(itemCol); } catch (Exception e) { e.printStackTrace(); } }
#vulnerable code @DELETE @Path("/reports/{name}") public void deleteReport(@PathParam("name") String name) { try { ItemCollection itemCol = reportService.getReport(name); entityService.remove(itemCol); } catch (Exception e) { e.printStackTrace(); } } #location 6 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code private void printVersionTable(OutputStream out) { try { StringBuffer buffer = new StringBuffer(); List<String> modelVersionList = modelService.getAllModelVersions(); buffer.append("<table>"); buffer.append("<tr><th>Version</th><th>Workflow Group</th><th>Uploaded</th></tr>"); for (String modelVersion : modelVersionList) { Model model=modelService.getModel(modelVersion); ItemCollection modelEntity=modelService.loadModelEntity(modelVersion); // now check groups... List<String> groupList = model.getGroups(); for (String group : groupList) { buffer.append("<tr>"); buffer.append("<td>" + modelVersion + "</td>"); buffer.append("<td><a href=\"./model/" + modelVersion + "/groups/" + group + "\">" + group + "</a></td>"); // print upload date... if (modelEntity!=null) { Date dat = modelEntity.getItemValueDate("$Modified"); SimpleDateFormat formater = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss"); buffer.append("<td>" + formater.format(dat) + "</td>"); } else { buffer.append("<td> - </td>"); } buffer.append("</tr>"); } } buffer.append("</table>"); out.write(buffer.toString().getBytes()); } catch (Exception e) { // no opp! try { out.write("No model definition found.".getBytes()); } catch (IOException e1) { e1.printStackTrace(); } } }
#vulnerable code private void printVersionTable(OutputStream out) { try { StringBuffer buffer = new StringBuffer(); List<String> col = modelService.getAllModelVersions(); buffer.append("<table>"); buffer.append("<tr><th>Version</th><th>Workflow Group</th><th>Updated</th></tr>"); for (String aversion : col) { // now check groups... List<String> groupList = modelService.getAllWorkflowGroups(aversion); for (String group : groupList) { buffer.append("<tr>"); buffer.append("<td>" + aversion + "</td>"); buffer.append("<td><a href=\"./model/" + aversion + "/groups/" + group + "\">" + group + "</a></td>"); // get update date... List<ItemCollection> processList = null; logger.severe("NOT IMPLEMENTED"); //modelService.getAllModelVersions() // .getAllProcessEntitiesByGroup(group, // aversion); if (processList.size() > 0) { ItemCollection process = processList.get(0); Date dat = process.getItemValueDate("$Modified"); SimpleDateFormat formater = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss"); buffer.append("<td>" + formater.format(dat) + "</td>"); } buffer.append("</tr>"); } } buffer.append("</table>"); out.write(buffer.toString().getBytes()); } catch (Exception e) { // no opp! try { out.write("No model definition found.".getBytes()); } catch (IOException e1) { e1.printStackTrace(); } } } #location 25 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testAddWorkdaysFromSaturday() { Calendar startDate = Calendar.getInstance(); // adjust to SATURDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY); System.out.println("Startdate=" + startDate.getTime()); // adjust -1 Workdays -> TUESDAY Assert.assertEquals(Calendar.TUESDAY, workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); }
#vulnerable code @Test public void testAddWorkdaysFromSaturday() { Calendar startDate = Calendar.getInstance(); // adjust to SATURDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY); System.out.println("Startdate=" + startDate.getTime()); // adjust -1 Workdays -> TUESDAY Assert.assertEquals(Calendar.TUESDAY, WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); } #location 14 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testComplexWorkitem() throws ParseException { InputStream inputStream = getClass() .getResourceAsStream("/json/workitem.json"); ItemCollection itemCol = null; try { itemCol = JSONParser.parseWorkitem(inputStream,"UTF-8"); } catch (UnsupportedEncodingException e) { e.printStackTrace(); Assert.fail(); } Assert.assertNotNull(itemCol); Assert.assertEquals("worklist", itemCol.getItemValueString("txtworkflowresultmessage")); Assert.assertEquals("14194929161-1003e42a", itemCol.getItemValueString("$UniqueID")); List<?> list=itemCol.getItemValue("txtworkflowpluginlog"); Assert.assertEquals(7, list.size()); }
#vulnerable code @Test public void testComplexWorkitem() throws ParseException { InputStream inputStream = getClass() .getResourceAsStream("/json/workitem.json"); ItemCollection itemCol = JSONParser.parseWorkitem(inputStream); Assert.assertNotNull(itemCol); Assert.assertEquals("worklist", itemCol.getItemValueString("txtworkflowresultmessage")); Assert.assertEquals("14194929161-1003e42a", itemCol.getItemValueString("$UniqueID")); List<?> list=itemCol.getItemValue("txtworkflowpluginlog"); Assert.assertEquals(7, list.size()); } #location 11 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testComplexPluginException() throws ScriptException { ItemCollection adocumentContext = new ItemCollection(); ItemCollection adocumentActivity = new ItemCollection(); // 1) invalid returning one messsage String script = "var a=1;var b=2;var isValid = (a>b);" + " var errorCode='MY_ERROR';" + " var errorMessage='Somehing go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { // test excption Assert.assertEquals("MY_ERROR", e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(1, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); } // 2) invalid returning 2 messages in an array script = "var a=1;var b=2;var isValid = (a>b);" + " var errorMessage = new Array();" + " errorMessage[0]='Somehing go wrong!';" + " errorMessage[1]='Somehingelse go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { // e.printStackTrace(); // test exception Assert.assertEquals(RulePlugin.VALIDATION_ERROR, e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(2, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); Assert.assertEquals("Somehingelse go wrong!", params[1].toString()); } }
#vulnerable code @Test public void testComplexPluginException() throws ScriptException { ItemCollection adocumentContext = new ItemCollection(); ItemCollection adocumentActivity = new ItemCollection(); // 1) invalid returning one messsage String script = "var a=1;var b=2;var isValid = (a>b);" + " var errorCode='MY_ERROR';" + " var errorMessage='Somehing go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { // test excption Assert.assertEquals("MY_ERROR", e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(1, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); } // 2) invalid returning 2 messages in an array script = "var a=1;var b=2;var isValid = (a>b);" + " var errorMessage = new Array();" + " errorMessage[0]='Somehing go wrong!';" + " errorMessage[1]='Somehingelse go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { //e.printStackTrace(); // test exception Assert.assertEquals(RulePlugin.VALIDATION_ERROR, e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(2, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); Assert.assertEquals("Somehingelse go wrong!", params[1].toString()); } } #location 41 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testParseResult() { List<ItemCollection> result=null; String testString = "{\n" + " \"responseHeader\":{\n" + " \"status\":0,\n" + " \"QTime\":4,\n" + " \"params\":{\n" + " \"q\":\"*:*\",\n" + " \"_\":\"1567286252995\"}},\n" + " \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" + " {\n" + " \"type\":[\"model\"],\n" + " \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" + " \"_modified\":[20190831211617],\n" + " \"_created\":[20190831211617],\n" + " \"_version_\":1643418672068296704},\n" + " {\n" + " \"type\":[\"adminp\"],\n" + " \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" + " \"_modified\":[20190831211618],\n" + " \"_created\":[20190831211618],\n" + " \"_version_\":1643418672172105728}]\n" + " }}"; result=solrSearchService.parseQueryResult(testString); Assert.assertEquals(2,result.size()); ItemCollection document=null; document=result.get(0); Assert.assertEquals("model", document.getItemValueString("type")); Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getItemValueString("id")); Calendar cal=Calendar.getInstance(); cal.setTime(document.getItemValueDate("_modified")); Assert.assertEquals(7,cal.get(Calendar.MONTH)); Assert.assertEquals(31,cal.get(Calendar.DAY_OF_MONTH)); document=result.get(1); Assert.assertEquals("adminp", document.getItemValueString("type")); }
#vulnerable code @Test public void testParseResult() { List<ItemCollection> result=null; String testString = "{\n" + " \"responseHeader\":{\n" + " \"status\":0,\n" + " \"QTime\":4,\n" + " \"params\":{\n" + " \"q\":\"*:*\",\n" + " \"_\":\"1567286252995\"}},\n" + " \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" + " {\n" + " \"type\":[\"model\"],\n" + " \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" + " \"_modified\":[20190831211617],\n" + " \"_created\":[20190831211617],\n" + " \"_version_\":1643418672068296704},\n" + " {\n" + " \"type\":[\"adminp\"],\n" + " \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" + " \"_modified\":[20190831211618],\n" + " \"_created\":[20190831211618],\n" + " \"_version_\":1643418672172105728}]\n" + " }}"; result=solrSearchService.parseQueryResult(testString); Assert.assertEquals(2,result.size()); ItemCollection document=null; document=result.get(0); Assert.assertEquals("model", document.getItemValueString("type")); Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getItemValueString("id")); Assert.assertEquals(1567278977000l, document.getItemValueDate("_modified").getTime()); Assert.assertEquals(1567278977000l, document.getItemValueDate("_created").getTime()); document=result.get(1); Assert.assertEquals("adminp", document.getItemValueString("type")); Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457",document.getItemValueString("id")); Assert.assertEquals(1567278978000l, document.getItemValueDate("_created").getTime()); } #location 37 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code Timer createTimerOnInterval(ItemCollection configItemCollection) { // Create an interval timer Date startDate = configItemCollection.getItemValueDate("datstart"); Date endDate = configItemCollection.getItemValueDate("datstop"); long interval = configItemCollection.getItemValueInteger("numInterval"); // if endDate is in the past we do not start the timer! Calendar calNow = Calendar.getInstance(); Calendar calEnd = Calendar.getInstance(); if (endDate != null) calEnd.setTime(endDate); if (calNow.after(calEnd)) { logger.warning("[WorkflowSchedulerService] " + configItemCollection.getItemValueString("txtName") + " stop-date is in the past"); endDate = startDate; } Timer timer = timerService.createTimer(startDate, interval, configItemCollection); return timer; }
#vulnerable code void processWorkList(ItemCollection activityEntity) throws Exception { // get processID int iProcessID = activityEntity.getItemValueInteger("numprocessid"); // get Modelversion String sModelVersion = activityEntity .getItemValueString("$modelversion"); // if a query is defined in the activityEntity then use the EQL // statement // to query the items. Otherwise use standard method // getWorklistByProcessID() String sQuery = activityEntity.getItemValueString("txtscheduledview"); // get all workitems... Collection<ItemCollection> worklist = null; if (sQuery != null && !"".equals(sQuery)) { logger.fine("[WorkflowSchedulerService] Query=" + sQuery); worklist = entityService.findAllEntities(sQuery, 0, -1); } else { logger.fine("[WorkflowSchedulerService] get WorkList for ProcessID:" + iProcessID); worklist = workflowService.getWorkListByProcessID(iProcessID, 0, -1, null, 0); } logger.fine("[WorkflowSchedulerService] " + worklist.size() + " workitems found"); iScheduledWorkItems += worklist.size(); for (ItemCollection workitem : worklist) { // verify processID if (iProcessID == workitem.getItemValueInteger("$processid")) { // verify modelversion if (sModelVersion.equals(workitem .getItemValueString("$modelversion"))) { // verify due date if (workItemInDue(workitem, activityEntity)) { int iActivityID = activityEntity .getItemValueInteger("numActivityID"); workitem.replaceItemValue("$activityid", iActivityID); processWorkitem(workitem); iProcessWorkItems++; } } } } } #location 32 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public int run(ItemCollection adocumentContext, ItemCollection adocumentActivity) throws PluginException { documentContext = adocumentContext; // evaluate new items.... ItemCollection evalItemCollection = new ItemCollection(); evalItemCollection=adocumentContext=evaluateWorkflowResult(adocumentActivity,documentContext); // copy values if (evalItemCollection!=null) { documentContext.replaceAllItems(evalItemCollection.getAllItems()); } return Plugin.PLUGIN_OK; }
#vulnerable code public int run(ItemCollection adocumentContext, ItemCollection adocumentActivity) throws PluginException { documentContext = adocumentContext; // evaluate new items.... ItemCollection evalItemCollection = new ItemCollection(); evalItemCollection=adocumentContext=evaluateWorkflowResult(adocumentActivity,documentContext); // copy values documentContext.replaceAllItems(evalItemCollection.getAllItems()); return Plugin.PLUGIN_OK; } #location 7 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testAddWorkdaysFromMonday() { Calendar startDate = Calendar.getInstance(); // adjust to FRIDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); System.out.println("Startdate=" + startDate.getTime()); Assert.assertEquals(Calendar.TUESDAY, workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.WEDNESDAY, workflowSchedulerService.addWorkDays(startDate, 2).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, 9).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, 10).get(Calendar.DAY_OF_WEEK)); }
#vulnerable code @Test public void testAddWorkdaysFromMonday() { Calendar startDate = Calendar.getInstance(); // adjust to FRIDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); System.out.println("Startdate=" + startDate.getTime()); Assert.assertEquals(Calendar.TUESDAY, WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.WEDNESDAY, WorkflowSchedulerService.addWorkDays(startDate, 2).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, 9).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, 10).get(Calendar.DAY_OF_WEEK)); } #location 26 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code boolean flushEventLogByCount(int count) { Date lastEventDate = null; boolean cacheIsEmpty = true; IndexWriter indexWriter = null; long l = System.currentTimeMillis(); logger.finest("......flush eventlog cache...."); List<EventLogEntry> events = eventLogService.findEvents(count + 1, EVENTLOG_TOPIC_ADD, EVENTLOG_TOPIC_REMOVE); if (events != null && events.size() > 0) { try { indexWriter = createIndexWriter(); int _counter = 0; for (EventLogEntry eventLogEntry : events) { Term term = new Term("$uniqueid", eventLogEntry.getUniqueID()); // lookup the Document Entity... org.imixs.workflow.engine.jpa.Document doc = manager .find(org.imixs.workflow.engine.jpa.Document.class, eventLogEntry.getUniqueID()); // if the document was found we add/update the index. Otherwise we remove the // document form the index. if (doc != null && EVENTLOG_TOPIC_ADD.equals(eventLogEntry.getTopic())) { // add workitem to search index.... long l2 = System.currentTimeMillis(); ItemCollection workitem = new ItemCollection(); workitem.setAllItems(doc.getData()); if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) { indexWriter.updateDocument(term, createDocument(workitem)); logger.finest("......lucene add/update workitem '" + doc.getId() + "' to index in " + (System.currentTimeMillis() - l2) + "ms"); } } else { long l2 = System.currentTimeMillis(); indexWriter.deleteDocuments(term); logger.finest("......lucene remove workitem '" + term + "' from index in " + (System.currentTimeMillis() - l2) + "ms"); } // remove the eventLogEntry. lastEventDate = eventLogEntry.getModified().getTime(); eventLogService.removeEvent(eventLogEntry); // break? _counter++; if (_counter >= count) { // we skipp the last one if the maximum was reached. cacheIsEmpty = false; break; } } } catch (IOException luceneEx) { logger.warning("...unable to flush lucene event log: " + luceneEx.getMessage()); // We just log a warning here and close the flush mode to no longer block the // writer. 
// NOTE: maybe throwing a IndexException would be an alternative: // // throw new IndexException(IndexException.INVALID_INDEX, "Unable to update // lucene search index", // luceneEx); return true; } finally { // close writer! if (indexWriter != null) { logger.finest("......lucene close IndexWriter..."); try { indexWriter.close(); } catch (CorruptIndexException e) { throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e); } catch (IOException e) { throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e); } } } } logger.fine("...flushEventLog - " + events.size() + " events in " + (System.currentTimeMillis() - l) + " ms - last log entry: " + lastEventDate); return cacheIsEmpty; }
#vulnerable code boolean flushEventLogByCount(int count) { Date lastEventDate = null; boolean cacheIsEmpty = true; IndexWriter indexWriter = null; long l = System.currentTimeMillis(); logger.finest("......flush eventlog cache...."); List<org.imixs.workflow.engine.jpa.Document> documentList = eventLogService.findEvents(count + 1, EVENTLOG_TOPIC_ADD, EVENTLOG_TOPIC_REMOVE); if (documentList != null && documentList.size() > 0) { try { indexWriter = createIndexWriter(); int _counter = 0; for (org.imixs.workflow.engine.jpa.Document eventLogEntry : documentList) { String topic = null; String id = eventLogEntry.getId(); // cut prafix... if (id.startsWith(EVENTLOG_TOPIC_ADD)) { id = id.substring(EVENTLOG_TOPIC_ADD.length() + 1); topic = EVENTLOG_TOPIC_ADD; } if (id.startsWith(EVENTLOG_TOPIC_REMOVE)) { id = id.substring(EVENTLOG_TOPIC_REMOVE.length() + 1); topic = EVENTLOG_TOPIC_REMOVE; } // lookup the workitem... org.imixs.workflow.engine.jpa.Document doc = manager .find(org.imixs.workflow.engine.jpa.Document.class, id); Term term = new Term("$uniqueid", id); // if the document was found we add/update the index. Otherwise we remove the // document form the index. if (doc != null && EVENTLOG_TOPIC_ADD.equals(topic)) { // add workitem to search index.... long l2 = System.currentTimeMillis(); ItemCollection workitem = new ItemCollection(); workitem.setAllItems(doc.getData()); if (!workitem.getItemValueBoolean(DocumentService.NOINDEX)) { indexWriter.updateDocument(term, createDocument(workitem)); logger.finest("......lucene add/update workitem '" + id + "' to index in " + (System.currentTimeMillis() - l2) + "ms"); } } else { long l2 = System.currentTimeMillis(); indexWriter.deleteDocuments(term); logger.finest("......lucene remove workitem '" + id + "' from index in " + (System.currentTimeMillis() - l2) + "ms"); } // remove the eventLogEntry. lastEventDate = eventLogEntry.getCreated().getTime(); manager.remove(eventLogEntry); // break? 
_counter++; if (_counter >= count) { // we skipp the last one if the maximum was reached. cacheIsEmpty = false; break; } } } catch (IOException luceneEx) { logger.warning("...unable to flush lucene event log: " + luceneEx.getMessage()); // We just log a warning here and close the flush mode to no longer block the // writer. // NOTE: maybe throwing a IndexException would be an alternative: // // throw new IndexException(IndexException.INVALID_INDEX, "Unable to update // lucene search index", // luceneEx); return true; } finally { // close writer! if (indexWriter != null) { logger.finest("......lucene close IndexWriter..."); try { indexWriter.close(); } catch (CorruptIndexException e) { throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e); } catch (IOException e) { throw new IndexException(IndexException.INVALID_INDEX, "Unable to close lucene IndexWriter: ", e); } } } } logger.fine("...flushEventLog - " + documentList.size() + " events in " + (System.currentTimeMillis() - l) + " ms - last log entry: " + lastEventDate); return cacheIsEmpty; } #location 90 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testAddWorkdaysFromMonday() { Calendar startDate = Calendar.getInstance(); // adjust to FRIDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); System.out.println("Startdate=" + startDate.getTime()); Assert.assertEquals(Calendar.TUESDAY, workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.WEDNESDAY, workflowSchedulerService.addWorkDays(startDate, 2).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, 9).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, 10).get(Calendar.DAY_OF_WEEK)); }
#vulnerable code @Test public void testAddWorkdaysFromMonday() { Calendar startDate = Calendar.getInstance(); // adjust to FRIDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); System.out.println("Startdate=" + startDate.getTime()); Assert.assertEquals(Calendar.TUESDAY, WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.WEDNESDAY, WorkflowSchedulerService.addWorkDays(startDate, 2).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, 9).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, 10).get(Calendar.DAY_OF_WEEK)); } #location 19 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testMinusWorkdaysFromFriday() { Calendar startDate = Calendar.getInstance(); // adjust to FRIDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY); System.out.println("Startdate=" + startDate.getTime()); // adjust -3 Workdays -> THUSEDAY Assert.assertEquals(Calendar.THURSDAY, workflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.WEDNESDAY, workflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK)); // friday - 5 Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK)); }
#vulnerable code @Test public void testMinusWorkdaysFromFriday() { Calendar startDate = Calendar.getInstance(); // adjust to FRIDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY); System.out.println("Startdate=" + startDate.getTime()); // adjust -3 Workdays -> THUSEDAY Assert.assertEquals(Calendar.THURSDAY, WorkflowSchedulerService.addWorkDays(startDate, -1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.WEDNESDAY, WorkflowSchedulerService.addWorkDays(startDate, -2).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, -4).get(Calendar.DAY_OF_WEEK)); // friday - 5 Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, -5).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, -9).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, -10).get(Calendar.DAY_OF_WEEK)); } #location 11 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testParseResult() { List<ItemCollection> result=null; String testString = "{\n" + " \"responseHeader\":{\n" + " \"status\":0,\n" + " \"QTime\":4,\n" + " \"params\":{\n" + " \"q\":\"*:*\",\n" + " \"_\":\"1567286252995\"}},\n" + " \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" + " {\n" + " \"type\":[\"model\"],\n" + " \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" + " \"_modified\":[20190831211617],\n" + " \"_created\":[20190831211617],\n" + " \"_version_\":1643418672068296704},\n" + " {\n" + " \"type\":[\"adminp\"],\n" + " \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" + " \"_modified\":[20190831211618],\n" + " \"_created\":[20190831211618],\n" + " \"_version_\":1643418672172105728}]\n" + " }}"; result=solrSearchService.parseQueryResult(testString); Assert.assertEquals(2,result.size()); ItemCollection document=null; document=result.get(0); Assert.assertEquals("model", document.getItemValueString("type")); Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getItemValueString("id")); Assert.assertEquals(1567278977000l, document.getItemValueDate("_modified").getTime()); Assert.assertEquals(1567278977000l, document.getItemValueDate("_created").getTime()); document=result.get(1); Assert.assertEquals("adminp", document.getItemValueString("type")); Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457",document.getItemValueString("id")); Assert.assertEquals(1567278978000l, document.getItemValueDate("_created").getTime()); }
#vulnerable code @Test public void testParseResult() { List<ItemCollection> result=null; String testString = "{\n" + " \"responseHeader\":{\n" + " \"status\":0,\n" + " \"QTime\":4,\n" + " \"params\":{\n" + " \"q\":\"*:*\",\n" + " \"_\":\"1567286252995\"}},\n" + " \"response\":{\"numFound\":2,\"start\":0,\"docs\":[\n" + " {\n" + " \"type\":[\"model\"],\n" + " \"id\":\"3a182d18-33d9-4951-8970-d9eaf9d337ff\",\n" + " \"_modified\":[20190831211617],\n" + " \"_created\":[20190831211617],\n" + " \"_version_\":1643418672068296704},\n" + " {\n" + " \"type\":[\"adminp\"],\n" + " \"id\":\"60825929-4d7d-4346-9333-afd7dbfca457\",\n" + " \"_modified\":[20190831211618],\n" + " \"_created\":[20190831211618],\n" + " \"_version_\":1643418672172105728}]\n" + " }}"; result=solrSearchService.parseQueryResult(testString); Assert.assertEquals(2,result.size()); ItemCollection document=null; document=result.get(0); Assert.assertEquals("model", document.getItemValueString("type")); Assert.assertEquals("3a182d18-33d9-4951-8970-d9eaf9d337ff", document.getUniqueID()); Assert.assertEquals(1567278977000l, document.getItemValueDate("$modified").getTime()); Assert.assertEquals(1567278977000l, document.getItemValueDate("$created").getTime()); document=result.get(1); Assert.assertEquals("adminp", document.getItemValueString("type")); Assert.assertEquals("60825929-4d7d-4346-9333-afd7dbfca457", document.getUniqueID()); Assert.assertEquals(1567278978000l, document.getItemValueDate("$created").getTime()); } #location 37 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testComplexPluginException() throws ScriptException { ItemCollection adocumentContext = new ItemCollection(); ItemCollection adocumentActivity = new ItemCollection(); // 1) invalid returning one messsage String script = "var a=1;var b=2;var isValid = (a>b);" + " var errorCode='MY_ERROR';" + " var errorMessage='Somehing go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { // test excption Assert.assertEquals("MY_ERROR", e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(1, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); } // 2) invalid returning 2 messages in an array script = "var a=1;var b=2;var isValid = (a>b);" + " var errorMessage = new Array();" + " errorMessage[0]='Somehing go wrong!';" + " errorMessage[1]='Somehingelse go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { // e.printStackTrace(); // test exception Assert.assertEquals(RulePlugin.VALIDATION_ERROR, e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(2, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); Assert.assertEquals("Somehingelse go wrong!", params[1].toString()); } }
#vulnerable code @Test public void testComplexPluginException() throws ScriptException { ItemCollection adocumentContext = new ItemCollection(); ItemCollection adocumentActivity = new ItemCollection(); // 1) invalid returning one messsage String script = "var a=1;var b=2;var isValid = (a>b);" + " var errorCode='MY_ERROR';" + " var errorMessage='Somehing go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { // test excption Assert.assertEquals("MY_ERROR", e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(1, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); } // 2) invalid returning 2 messages in an array script = "var a=1;var b=2;var isValid = (a>b);" + " var errorMessage = new Array();" + " errorMessage[0]='Somehing go wrong!';" + " errorMessage[1]='Somehingelse go wrong!';"; System.out.println("Script=" + script); adocumentActivity.replaceItemValue("txtBusinessRUle", script); try { rulePlugin.run(adocumentContext, adocumentActivity); Assert.fail(); } catch (PluginException e) { //e.printStackTrace(); // test exception Assert.assertEquals(RulePlugin.VALIDATION_ERROR, e.getErrorCode()); Object[] params = e.getErrorParameters(); Assert.assertEquals(2, params.length); Assert.assertEquals("Somehing go wrong!", params[0].toString()); Assert.assertEquals("Somehingelse go wrong!", params[1].toString()); } } #location 21 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Test public void testAddWorkdaysFromSaturday() { Calendar startDate = Calendar.getInstance(); // adjust to SATURDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY); System.out.println("Startdate=" + startDate.getTime()); // adjust -1 Workdays -> TUESDAY Assert.assertEquals(Calendar.TUESDAY, workflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, workflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, workflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); }
#vulnerable code @Test public void testAddWorkdaysFromSaturday() { Calendar startDate = Calendar.getInstance(); // adjust to SATURDAY startDate.set(Calendar.DAY_OF_WEEK, Calendar.SATURDAY); System.out.println("Startdate=" + startDate.getTime()); // adjust -1 Workdays -> TUESDAY Assert.assertEquals(Calendar.TUESDAY, WorkflowSchedulerService.addWorkDays(startDate, 1).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.FRIDAY, WorkflowSchedulerService.addWorkDays(startDate, 4).get(Calendar.DAY_OF_WEEK)); Assert.assertEquals(Calendar.MONDAY, WorkflowSchedulerService.addWorkDays(startDate, 5).get(Calendar.DAY_OF_WEEK)); } #location 18 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code public void removeWorkitem(String uniqueID) throws PluginException { IndexWriter awriter = null; try { awriter = createIndexWriter(); Term term = new Term("$uniqueid", uniqueID); awriter.deleteDocuments(term); } catch (CorruptIndexException e) { throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index", e); } catch (LockObtainFailedException e) { throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index", e); } catch (IOException e) { throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index", e); } }
#vulnerable code public void removeWorkitem(String uniqueID) throws PluginException { IndexWriter awriter = null; Properties prop = propertyService.getProperties(); if (!prop.isEmpty()) { try { awriter = createIndexWriter(prop); Term term = new Term("$uniqueid", uniqueID); awriter.deleteDocuments(term); } catch (CorruptIndexException e) { throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index", e); } catch (LockObtainFailedException e) { throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index", e); } catch (IOException e) { throw new PluginException(LucenePlugin.class.getSimpleName(), INVALID_INDEX, "Unable to remove workitem '" + uniqueID + "' from search index", e); } } } #location 8 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public void endElement(String uri, String localName, String qName) throws SAXException { // end of bpmn2:process if (qName.equalsIgnoreCase("bpmn2:process")) { if (currentWorkflowGroup != null) { currentWorkflowGroup = null; } } // end of bpmn2:task - if (bImixsTask && qName.equalsIgnoreCase("bpmn2:task")) { bImixsTask = false; taskCache.put(bpmnID, currentEntity); } if (qName.equalsIgnoreCase("bpmn2:extensionElements")) { bExtensionElements = false; } // end of bpmn2:intermediateCatchEvent - if (bImixsEvent && (qName.equalsIgnoreCase("bpmn2:intermediateCatchEvent") || qName.equalsIgnoreCase("bpmn2:intermediateThrowEvent"))) { bImixsEvent = false; // we need to cache the activities because the sequenceflows must be // analysed later eventCache.put(bpmnID, currentEntity); } /* * End of a imixs:value */ if (qName.equalsIgnoreCase("imixs:value")) { if (bExtensionElements && bItemValue && currentEntity != null && characterStream != null) { String svalue = characterStream.toString(); List valueList = currentEntity.getItemValue(currentItemName); if ("xs:boolean".equals(currentItemType.toLowerCase())) { valueList.add(Boolean.valueOf(svalue)); } else if ("xs:integer".equals(currentItemType.toLowerCase())) { valueList.add(Integer.valueOf(svalue)); } else { valueList.add(svalue); } // item will only be added if it is not listed in the ignoreItem // List! if (!ignoreItemList.contains(currentItemName)) { currentEntity.replaceItemValue(currentItemName, valueList); } } bItemValue = false; characterStream = null; } if (qName.equalsIgnoreCase("bpmn2:documentation")) { if (currentEntity != null) { currentEntity.replaceItemValue("rtfdescription", characterStream.toString()); } // bpmn2:message? if (bMessage) { // cache the message... messageCache.put(currentMessageName, characterStream.toString()); bMessage = false; } // bpmn2:annotation? 
if (bAnnotation) { // cache the annotation annotationCache.put(currentAnnotationName, characterStream.toString()); bAnnotation = false; } characterStream = null; bdocumentation = false; } // end of bpmn2:intermediateThrowEvent - if (bLinkThrowEvent && !bLinkCatchEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) { bLinkThrowEvent = false; // we need to cache the link name linkThrowEventCache.put(bpmnID, currentLinkName); } // end of bpmn2:intermediateCatchEvent - if (bLinkCatchEvent && !bLinkThrowEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) { bLinkCatchEvent = false; // we need to cache the link name linkCatchEventCache.put(currentLinkName, bpmnID); } // test conditional sequence flow... if (bSequenceFlow && bconditionExpression && qName.equalsIgnoreCase("bpmn2:conditionExpression")) { String svalue = characterStream.toString(); logger.fine("conditional SequenceFlow:" + bpmnID + "=" + svalue); bconditionExpression = false; conditionCache.put(bpmnID, svalue); } }
#vulnerable code @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public void endElement(String uri, String localName, String qName) throws SAXException { // end of bpmn2:process if (qName.equalsIgnoreCase("bpmn2:process")) { if (currentWorkflowGroup != null) { currentWorkflowGroup = null; } } // end of bpmn2:task - if (bImixsTask && qName.equalsIgnoreCase("bpmn2:task")) { bImixsTask = false; taskCache.put(bpmnID, currentEntity); } if (qName.equalsIgnoreCase("bpmn2:extensionElements")) { bExtensionElements = false; } // end of bpmn2:intermediateCatchEvent - if (bImixsEvent && (qName.equalsIgnoreCase("bpmn2:intermediateCatchEvent") || qName.equalsIgnoreCase("bpmn2:intermediateThrowEvent"))) { bImixsEvent = false; // we need to cache the activities because the sequenceflows must be // analysed later eventCache.put(bpmnID, currentEntity); } /* * End of a imixs:value */ if (qName.equalsIgnoreCase("imixs:value")) { if (bExtensionElements && bItemValue && currentEntity != null && characterStream != null) { String svalue = characterStream.toString(); List valueList = currentEntity.getItemValue(currentItemName); if ("xs:boolean".equals(currentItemType.toLowerCase())) { valueList.add(Boolean.valueOf(svalue)); } else if ("xs:integer".equals(currentItemType.toLowerCase())) { valueList.add(Integer.valueOf(svalue)); } else { valueList.add(svalue); } // item will only be added if it is not listed in the ignoreItem // List! if (!ignoreItemList.contains(currentItemName)) { currentEntity.replaceItemValue(currentItemName, valueList); } } bItemValue = false; characterStream = null; } if (qName.equalsIgnoreCase("bpmn2:documentation")) { if (currentEntity != null) { currentEntity.replaceItemValue("rtfdescription", characterStream.toString()); } // bpmn2:message? if (bMessage) { // cache the message... messageCache.put(currentMessageName, characterStream.toString()); bMessage = false; } // bpmn2:annotation? 
if (bAnnotation) { // cache the annotation annotationCache.put(currentAnnotationName, characterStream.toString()); bAnnotation = false; } characterStream = null; bdocumentation = false; } // end of bpmn2:intermediateThrowEvent - if (bLinkThrowEvent && !bLinkCatchEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) { bLinkThrowEvent = false; // we need to cache the link name linkThrowEventCache.put(bpmnID, currentLinkName); } // end of bpmn2:intermediateCatchEvent - if (bLinkCatchEvent && !bLinkThrowEvent && (qName.equalsIgnoreCase("bpmn2:linkEventDefinition"))) { bLinkCatchEvent = false; // we need to cache the link name linkCatchEventCache.put(currentLinkName, bpmnID); } } #location 73 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.