Dataset columns:
output: string (lengths 64 to 73.2k)
input: string (lengths 208 to 73.3k)
instruction: string (1 distinct value)
#fixed code
public void testMultipleRetrieveCache() {
    String randomApiString = this.getRandomString();
    this.api.saveApi(new ApiResult(0, randomApiString, "privateKey", "", ""));

    for (int i = 0; i < 500; i++) {
        Optional<ApiResult> apiByPublicKey = this.api.getApiByPublicKey(randomApiString);
        assertThat(apiByPublicKey.get().getPublicKey()).isEqualTo(randomApiString);
        assertThat(apiByPublicKey.get().getPrivateKey()).isEqualTo("privateKey");
    }

    this.api.deleteApiByPublicKey(randomApiString);
}

#vulnerable code
public void testMultipleRetrieveCache() {
    String randomApiString = this.getRandomString();
    this.api.saveApi(new ApiResult(0, randomApiString, "privateKey", "", ""));

    for (int i = 0; i < 500; i++) {
        ApiResult apiResult = this.api.getApiByPublicKey(randomApiString);
        assertThat(apiResult.getPublicKey()).isEqualTo(randomApiString);
        assertThat(apiResult.getPrivateKey()).isEqualTo("privateKey");
    }

    this.api.deleteApiByPublicKey(randomApiString);
}

#location 9
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
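The fix above works because the lookup now returns Optional<ApiResult>, making the miss case explicit at the call site instead of a silent null. A minimal sketch of the same pattern in isolation; findUser, USERS, and the key names are illustrative assumptions, not code from the record:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class OptionalLookupSketch {
    private static final Map<String, String> USERS = new HashMap<>();

    // Returning Optional instead of a nullable reference forces callers
    // to handle the miss case before dereferencing.
    static Optional<String> findUser(String key) {
        return Optional.ofNullable(USERS.get(key));
    }

    public static void main(String[] args) {
        USERS.put("k1", "alice");
        // orElse supplies a fallback; calling get() without checking
        // isPresent() would reintroduce the NULL_DEREFERENCE risk.
        System.out.println(findUser("k1").orElse("<missing>"));
        System.out.println(findUser("k2").orElse("<missing>"));
    }
}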
#fixed code
public static List<String> readFileLinesGuessEncoding(String filePath, int maxFileLineDepth) throws IOException {
    List<String> fileLines = new ArrayList<>();
    BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), guessCharset(new File(filePath))));

    try {
        String line;
        int lineCount = 0;
        while ((line = reader.readLine()) != null) {
            lineCount++;
            fileLines.add(line);
            if (lineCount == maxFileLineDepth) {
                return fileLines;
            }
        }
    } finally {
        reader.close();
    }

    return fileLines;
}

#vulnerable code
public static List<String> readFileLinesGuessEncoding(String filePath, int maxFileLineDepth) throws IOException {
    BufferedReader reader = new BufferedReader(
            new InputStreamReader(
                    new FileInputStream(filePath), guessCharset(new File(filePath))));

    List<String> fileLines = new ArrayList<>();
    String line = "";
    int lineCount = 0;
    while ((line = reader.readLine()) != null) {
        lineCount++;
        fileLines.add(line);
        if (lineCount == maxFileLineDepth) {
            return fileLines;
        }
    }

    return fileLines;
}

#location 14
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
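The record's fix guarantees cleanup with try/finally; on Java 7+ the same guarantee is usually written with try-with-resources, which closes the reader on every exit path, including the early return. A sketch under that assumption, with a fixed UTF-8 charset standing in for the record's guessCharset helper:

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

public class ReadLinesSketch {
    static List<String> readLines(String filePath, int maxLines) throws IOException {
        List<String> lines = new ArrayList<>();
        // try-with-resources closes the reader whether the loop finishes,
        // breaks early, or throws an IOException.
        try (BufferedReader reader = Files.newBufferedReader(Paths.get(filePath), StandardCharsets.UTF_8)) {
            String line;
            while ((line = reader.readLine()) != null) {
                lines.add(line);
                if (lines.size() == maxLines) {
                    break;
                }
            }
        }
        return lines;
    }
}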
#fixed code
public String getCurrentRevision(String repoLocations, String repoName) {
    String currentRevision = "";

    ProcessBuilder processBuilder = new ProcessBuilder(this.SVNBINARYPATH, "info", "--xml");
    processBuilder.directory(new File(repoLocations + repoName));

    Process process = null;
    BufferedReader bufferedReader = null;

    try {
        process = processBuilder.start();

        InputStream is = process.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        bufferedReader = new BufferedReader(isr);

        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            sb.append(Helpers.removeUTF8BOM(line));
        }

        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();

        Singleton.getLogger().info("getCurrentRevision: " + repoName + " " + sb.toString());

        Document doc = dBuilder.parse(new ByteArrayInputStream(sb.toString().getBytes()));
        doc.getDocumentElement().normalize();

        NodeList nList = doc.getElementsByTagName("entry");

        for (int temp = 0; temp < nList.getLength(); temp++) {
            Node nNode = nList.item(temp);

            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element eElement = (Element) nNode;
                currentRevision = eElement.getAttribute("revision");
            }
        }
    } catch (IOException | ParserConfigurationException | SAXException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass() + " getCurrentRevision for " + repoName + "\n with message: " + ex.getMessage());
    } finally {
        Helpers.closeQuietly(process);
        Helpers.closeQuietly(bufferedReader);
    }

    return currentRevision;
}

#vulnerable code
public String getCurrentRevision(String repoLocations, String repoName) {
    String currentRevision = "";

    ProcessBuilder processBuilder = new ProcessBuilder(this.SVNBINARYPATH, "info", "--xml");
    processBuilder.directory(new File(repoLocations + repoName));

    Process process = null;

    try {
        process = processBuilder.start();

        InputStream is = process.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);

        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = br.readLine()) != null) {
            sb.append(Helpers.removeUTF8BOM(line));
        }

        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();

        Singleton.getLogger().info("getCurrentRevision: " + repoName + " " + sb.toString());

        Document doc = dBuilder.parse(new ByteArrayInputStream(sb.toString().getBytes()));
        doc.getDocumentElement().normalize();

        NodeList nList = doc.getElementsByTagName("entry");

        for (int temp = 0; temp < nList.getLength(); temp++) {
            Node nNode = nList.item(temp);

            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element eElement = (Element) nNode;
                currentRevision = eElement.getAttribute("revision");
            }
        }
    } catch (IOException | ParserConfigurationException | SAXException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass() + " getCurrentRevision for " + repoName + "\n with message: " + ex.getMessage());
    } finally {
        Helpers.closeQuietly(process);
    }

    return currentRevision;
}

#location 39
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void testExecuteNothingInQueue() throws JobExecutionException {
    IndexGitRepoJob indexGitRepoJob = new IndexGitRepoJob();
    IndexGitRepoJob spy = spy(indexGitRepoJob);
    when(spy.getNextQueuedRepo()).thenReturn(new UniqueRepoQueue());

    JobExecutionContext mockContext = Mockito.mock(JobExecutionContext.class);
    JobDetail mockDetail = Mockito.mock(JobDetail.class);
    JobDataMap mockJobDataMap = Mockito.mock(JobDataMap.class);
    CodeIndexer mockCodeIndexer = Mockito.mock(CodeIndexer.class);

    when(mockJobDataMap.get("REPOLOCATIONS")).thenReturn("");
    when(mockJobDataMap.get("LOWMEMORY")).thenReturn("true");
    when(mockDetail.getJobDataMap()).thenReturn(mockJobDataMap);
    when(mockContext.getJobDetail()).thenReturn(mockDetail);
    when(mockCodeIndexer.shouldPauseAdding()).thenReturn(false);

    spy.codeIndexer = mockCodeIndexer;

    spy.execute(mockContext);
    assertThat(spy.haveRepoResult).isFalse();
}

#vulnerable code
public void testExecuteNothingInQueue() throws JobExecutionException {
    IndexGitRepoJob indexGitRepoJob = new IndexGitRepoJob();
    IndexGitRepoJob spy = spy(indexGitRepoJob);
    when(spy.getNextQueuedRepo()).thenReturn(new UniqueRepoQueue());

    spy.execute(null);
    assertThat(spy.haveRepoResult).isFalse();
}

#location 6
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public synchronized void deleteRepoByName(String repositoryName) {
    ConnStmtRs connStmtRs = new ConnStmtRs();

    try {
        connStmtRs.conn = this.dbConfig.getConnection();
        connStmtRs.stmt = connStmtRs.conn.prepareStatement("delete from repo where name=?;");
        connStmtRs.stmt.setString(1, repositoryName);
        connStmtRs.stmt.execute();
    } catch (SQLException ex) {
        this.logger.severe(String.format("8f05a49c::error in class %s exception %s searchcode was unable to delete repository by name %s, this is unlikely to break anything but there should be other errors in the logs", ex.getClass(), ex.getMessage(), repositoryName));
    } finally {
        this.helpers.closeQuietly(connStmtRs, this.dbConfig.closeConnection());
    }
}

#vulnerable code
public synchronized void deleteRepoByName(String repositoryName) {
    Connection connection;
    PreparedStatement preparedStatement = null;
    ResultSet resultSet = null;

    try {
        connection = this.dbConfig.getConnection();
        preparedStatement = connection.prepareStatement("delete from repo where name=?;");
        preparedStatement.setString(1, repositoryName);
        preparedStatement.execute();
    } catch (SQLException ex) {
        this.logger.severe(String.format("8f05a49c::error in class %s exception %s searchcode was unable to delete repository by name %s, this is unlikely to break anything but there should be other errors in the logs", ex.getClass(), ex.getMessage(), repositoryName));
    } finally {
        this.helpers.closeQuietly(resultSet);
        this.helpers.closeQuietly(preparedStatement);
    }
}

#location 14
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
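The ConnStmtRs holder above funnels cleanup through one closeQuietly call; with plain JDBC the same effect is commonly obtained by declaring the Connection and PreparedStatement in a try-with-resources header, since both are AutoCloseable. A sketch assuming a javax.sql.DataSource is available; the SQL mirrors the record but the surrounding class is illustrative:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public class DeleteRepoSketch {
    private final DataSource dataSource;

    DeleteRepoSketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    void deleteRepoByName(String repositoryName) throws SQLException {
        // Both resources are closed in reverse order when the block exits,
        // whether it completes normally or throws SQLException.
        try (Connection conn = dataSource.getConnection();
             PreparedStatement stmt = conn.prepareStatement("delete from repo where name=?;")) {
            stmt.setString(1, repositoryName);
            stmt.execute();
        }
    }
}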
#fixed code
public RepositoryChanged getDiffBetweenRevisions(String repoLocations, String repoName, String startRevision) {
    // svn diff -r 4000:HEAD --summarize --xml
    List<String> changedFiles = new ArrayList<>();
    List<String> deletedFiles = new ArrayList<>();

    ProcessBuilder processBuilder = new ProcessBuilder(this.SVNBINARYPATH, "diff", "-r", startRevision + ":HEAD", "--summarize", "--xml");
    processBuilder.directory(new File(repoLocations + repoName));

    Process process = null;
    BufferedReader bufferedReader = null;

    try {
        process = processBuilder.start();

        InputStream is = process.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        bufferedReader = new BufferedReader(isr);

        String line;
        StringBuffer sb = new StringBuffer();

        while ((line = bufferedReader.readLine()) != null) {
            Singleton.getLogger().info("svn diff: " + line);
            sb.append(Helpers.removeUTF8BOM(line));
        }

        Singleton.getLogger().info("Before XML parsing: " + sb.toString());

        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new ByteArrayInputStream(sb.toString().getBytes()));
        doc.getDocumentElement().normalize();

        Element node = (Element) doc.getElementsByTagName("diff").item(0);
        node = (Element) node.getElementsByTagName("paths").item(0);

        NodeList nList = node.getElementsByTagName("path");

        for (int temp = 0; temp < nList.getLength(); temp++) {
            Node nNode = nList.item(temp);

            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element eElement = (Element) nNode;

                String type = eElement.getAttribute("item");
                String path = eElement.getTextContent();

                if ("modified".equals(type) || "added".equals(type)) {
                    changedFiles.add(path);
                } else {
                    deletedFiles.add(path);
                }
            }
        }
    } catch (IOException | ParserConfigurationException | SAXException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass() + " getDiffBetweenRevisions for " + repoName + "\n with message: " + ex.getMessage());
    } finally {
        Helpers.closeQuietly(process);
        Helpers.closeQuietly(bufferedReader);
    }

    return new RepositoryChanged(true, changedFiles, deletedFiles);
}

#vulnerable code
public RepositoryChanged getDiffBetweenRevisions(String repoLocations, String repoName, String startRevision) {
    // svn diff -r 4000:HEAD --summarize --xml
    List<String> changedFiles = new ArrayList<>();
    List<String> deletedFiles = new ArrayList<>();

    ProcessBuilder processBuilder = new ProcessBuilder(this.SVNBINARYPATH, "diff", "-r", startRevision + ":HEAD", "--summarize", "--xml");
    processBuilder.directory(new File(repoLocations + repoName));

    Process process = null;

    try {
        process = processBuilder.start();

        InputStream is = process.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);

        String line;
        StringBuffer sb = new StringBuffer();

        while ((line = br.readLine()) != null) {
            Singleton.getLogger().info("svn diff: " + line);
            sb.append(Helpers.removeUTF8BOM(line));
        }

        Singleton.getLogger().info("Before XML parsing: " + sb.toString());

        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new ByteArrayInputStream(sb.toString().getBytes()));
        doc.getDocumentElement().normalize();

        Element node = (Element) doc.getElementsByTagName("diff").item(0);
        node = (Element) node.getElementsByTagName("paths").item(0);

        NodeList nList = node.getElementsByTagName("path");

        for (int temp = 0; temp < nList.getLength(); temp++) {
            Node nNode = nList.item(temp);

            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                Element eElement = (Element) nNode;

                String type = eElement.getAttribute("item");
                String path = eElement.getTextContent();

                if ("modified".equals(type) || "added".equals(type)) {
                    changedFiles.add(path);
                } else {
                    deletedFiles.add(path);
                }
            }
        }
    } catch (IOException | ParserConfigurationException | SAXException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass() + " getDiffBetweenRevisions for " + repoName + "\n with message: " + ex.getMessage());
    } finally {
        Helpers.closeQuietly(process);
    }

    return new RepositoryChanged(true, changedFiles, deletedFiles);
}

#location 57
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private String runCommand(String directory, String... command) throws IOException {
    ProcessBuilder processBuilder = new ProcessBuilder(command);
    processBuilder.directory(new File(directory));
    Process process = processBuilder.start();

    InputStream is = process.getInputStream();
    InputStreamReader isr = new InputStreamReader(is, Values.CHARSET_UTF8);
    BufferedReader br = new BufferedReader(isr);
    String line;
    StringBuilder sb = new StringBuilder();
    while ((line = br.readLine()) != null) {
        sb.append(line);
    }

    return sb.toString();
}

#vulnerable code
private String runCommand(String directory, String... command) throws IOException {
    ProcessBuilder processBuilder = new ProcessBuilder(command);
    processBuilder.directory(new File(directory));
    Process process = processBuilder.start();

    InputStream is = process.getInputStream();
    InputStreamReader isr = new InputStreamReader(is);
    BufferedReader br = new BufferedReader(isr);
    String line;
    StringBuilder sb = new StringBuilder();
    while ((line = br.readLine()) != null) {
        sb.append(line);
    }

    return sb.toString();
}

#location 17
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
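Note that even the fixed runCommand never closes the reader or waits for the process, so the labeled leak is only partly addressed; the explicit charset is the actual change. A fuller sketch, offered as an assumption rather than the project's code, closes the stream and reaps the child process:

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class RunCommandSketch {
    static String runCommand(String directory, String... command) throws IOException, InterruptedException {
        Process process = new ProcessBuilder(command)
                .directory(new File(directory))
                .redirectErrorStream(true) // fold stderr in so the child cannot block on a full pipe
                .start();
        StringBuilder sb = new StringBuilder();
        // Closing the reader also closes the process's stdout stream.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                sb.append(line);
            }
        }
        process.waitFor(); // reap the child so it does not linger as a zombie
        return sb.toString();
    }
}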
#fixed code
public void getGitChangeSets() throws IOException, GitAPIException {
    Repository localRepository = new FileRepository(new File("./repo/server/.git"));
    Git git = new Git(localRepository);

    Iterable<RevCommit> logs = git.log().call();

    List<String> revisions = new ArrayList<>();
    for (RevCommit rev : logs) {
        System.out.println(rev.getCommitTime() + " " + rev.getName());
        revisions.add(rev.getName());
    }

    revisions = Lists.reverse(revisions);

    for (int i = 1; i < revisions.size(); i++) {
        System.out.println("///////////////////////////////////////////////");
        this.getRevisionChanges(revisions.get(i - 1), revisions.get(i));
    }
}

#vulnerable code
public void getGitChangeSets() throws IOException, GitAPIException {
    Repository localRepository = new FileRepository(new File("./repo/.timelord/test/.git"));
    Git git = new Git(localRepository);

    Iterable<RevCommit> logs = git.log().call();

    for (RevCommit rev : logs) {
        System.out.println(rev.getName());
        git.checkout().setName(rev.getName()).call();
    }
}

#location 10
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
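In JGit both Git and the underlying Repository are AutoCloseable, so the leak in the vulnerable version (the FileRepository is never closed) is conventionally avoided with try-with-resources. A sketch under that assumption; the path argument is illustrative:

import java.io.File;
import java.io.IOException;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.revwalk.RevCommit;

public class GitLogSketch {
    static void printCommits(String repoDir) throws IOException, GitAPIException {
        // Git.open resolves the repository; closing the Git wrapper also
        // closes the repository it opened, releasing pack-file handles.
        try (Git git = Git.open(new File(repoDir))) {
            for (RevCommit rev : git.log().call()) {
                System.out.println(rev.getCommitTime() + " " + rev.getName());
            }
        }
    }
}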
#fixed code
public SourceCodeDTO saveCode(CodeIndexDocument codeIndexDocument) {
    Optional<SourceCodeDTO> existing = this.getByCodeIndexDocument(codeIndexDocument);
    ConnStmtRs connStmtRs = new ConnStmtRs();

    try {
        connStmtRs.conn = this.dbConfig.getConnection();

        // If the language does not exist then create it
        Optional<LanguageTypeDTO> languageType = this.languageType.createLanguageType(codeIndexDocument.getLanguageName());

        String query = "INSERT INTO `sourcecode` (`id`, `repoid`, `languageid`, `sourceid`, `ownerid`, `licenseid`, `location`, `filename`, `content`, `hash`, `simhash`, `linescount`, `data`) VALUES " +
                "(NULL, ?, ?, ?, ?, ?, ?, ?, COMPRESS(?), ?, ?, ?, ?)";

        // Why is this here and not above??
        if (existing.isPresent()) {
            return existing.get();
        }

        connStmtRs.stmt = connStmtRs.conn.prepareStatement(query, Statement.RETURN_GENERATED_KEYS);

        connStmtRs.stmt.setInt(1, 31337);
        connStmtRs.stmt.setInt(2, languageType.get().getId());
        connStmtRs.stmt.setInt(3, 31337);
        connStmtRs.stmt.setInt(4, 31337);
        connStmtRs.stmt.setInt(5, 31337);
        connStmtRs.stmt.setString(6, this.getLocation(codeIndexDocument));
        connStmtRs.stmt.setString(7, codeIndexDocument.getFileName());
        connStmtRs.stmt.setString(8, codeIndexDocument.getContents());
        connStmtRs.stmt.setString(9, codeIndexDocument.getHash());
        connStmtRs.stmt.setString(10, "simhash");
        connStmtRs.stmt.setInt(11, codeIndexDocument.getLines());
        connStmtRs.stmt.setString(12, "{}");

        connStmtRs.stmt.execute();

        ResultSet tableKeys = connStmtRs.stmt.getGeneratedKeys();
        tableKeys.next();
        int autoGeneratedID = tableKeys.getInt(1);

        return this.getById(autoGeneratedID).get();
    } catch (SQLException ex) {
        this.logger.severe(String.format("4a1aa86d::error in class %s exception %s searchcode save code with name %s", ex.getClass(), ex.getMessage(), codeIndexDocument.getFileName()));
    } finally {
        this.helpers.closeQuietly(connStmtRs, this.dbConfig.closeConnection());
    }

    return null;
}

#vulnerable code
public SourceCodeDTO saveCode(CodeIndexDocument codeIndexDocument) {
    Optional<SourceCodeDTO> existing = this.getByCodeIndexDocument(codeIndexDocument);

    Connection conn = null;
    PreparedStatement stmt = null;

    try {
        conn = this.dbConfig.getConnection();

        // If the language does not exist then create it
        Optional<LanguageTypeDTO> languageType = this.languageType.createLanguageType(codeIndexDocument.getLanguageName());

        String query = "INSERT INTO `sourcecode` (`id`, `repoid`, `languageid`, `sourceid`, `ownerid`, `licenseid`, `location`, `filename`, `content`, `hash`, `simhash`, `linescount`, `data`) VALUES " +
                "(NULL, ?, ?, ?, ?, ?, ?, ?, COMPRESS(?), ?, ?, ?, ?)";

        // Why is this here and not above??
        if (existing.isPresent()) {
            return existing.get();
        }

        stmt = conn.prepareStatement(query, Statement.RETURN_GENERATED_KEYS);

        stmt.setInt(1, 31337);
        stmt.setInt(2, languageType.get().getId());
        stmt.setInt(3, 31337);
        stmt.setInt(4, 31337);
        stmt.setInt(5, 31337);
        stmt.setString(6, this.getLocation(codeIndexDocument));
        stmt.setString(7, codeIndexDocument.getFileName());
        stmt.setString(8, codeIndexDocument.getContents());
        stmt.setString(9, codeIndexDocument.getHash());
        stmt.setString(10, "simhash");
        stmt.setInt(11, codeIndexDocument.getLines());
        stmt.setString(12, "{}");

        stmt.execute();

        ResultSet tableKeys = stmt.getGeneratedKeys();
        tableKeys.next();
        int autoGeneratedID = tableKeys.getInt(1);

        return this.getById(autoGeneratedID).get();
    } catch (SQLException ex) {
        this.logger.severe(String.format("4a1aa86d::error in class %s exception %s searchcode save code with name %s", ex.getClass(), ex.getMessage(), codeIndexDocument.getFileName()));
    } finally {
        this.helpers.closeQuietly(stmt);
        // this.helpers.closeQuietly(conn);
    }

    return null;
}

#location 44
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private void configWriter(String content, Path filePath) {
    if (Files.exists(filePath) && !Files.isWritable(filePath)) {
        log.error("Error file is not writable: " + filePath);
        return;
    }

    if (Files.notExists(filePath.getParent())) {
        try {
            Files.createDirectories(filePath.getParent());
        } catch (IOException e) {
            log.error("Error creating the directory: " + filePath + " message: " + e.getMessage(), e);
        }
    }

    try {
        Path target = null;
        if (Files.exists(filePath)) {
            target = FileSystems.getDefault().getPath(filePath.getParent().toString(), "habridge.config.old");
            Files.move(filePath, target);
        }
        Files.write(filePath, content.getBytes(), StandardOpenOption.CREATE);
        // set attributes to be for user only
        // using PosixFilePermission to set file permissions
        Set<PosixFilePermission> perms = new HashSet<PosixFilePermission>();
        // add owners permission
        perms.add(PosixFilePermission.OWNER_READ);
        perms.add(PosixFilePermission.OWNER_WRITE);
        try {
            String osName = System.getProperty("os.name");
            if (osName.toLowerCase().indexOf("win") < 0)
                Files.setPosixFilePermissions(filePath, perms);
        } catch (UnsupportedOperationException e) {
            log.info("Cannot set permissions for config file on this system as it is not supported. Continuing");
        }
        if (target != null)
            Files.delete(target);
    } catch (IOException e) {
        log.error("Error writing the file: " + filePath + " message: " + e.getMessage(), e);
    }
}

#vulnerable code
private void configWriter(String content, Path filePath) {
    if (Files.exists(filePath) && !Files.isWritable(filePath)) {
        log.error("Error file is not writable: " + filePath);
        return;
    }

    if (Files.notExists(filePath.getParent())) {
        try {
            Files.createDirectories(filePath.getParent());
        } catch (IOException e) {
            log.error("Error creating the directory: " + filePath + " message: " + e.getMessage(), e);
        }
    }

    try {
        Path target = null;
        if (Files.exists(filePath)) {
            target = FileSystems.getDefault().getPath(filePath.getParent().toString(), "habridge.config.old");
            Files.move(filePath, target);
        }
        Files.write(filePath, content.getBytes(), StandardOpenOption.CREATE);
        // set attributes to be for user only
        // using PosixFilePermission to set file permissions
        Set<PosixFilePermission> perms = new HashSet<PosixFilePermission>();
        // add owners permission
        perms.add(PosixFilePermission.OWNER_READ);
        perms.add(PosixFilePermission.OWNER_WRITE);
        try {
            if (System.getProperty("os.name").toLowerCase().indexOf("win") <= 0)
                Files.setPosixFilePermissions(filePath, perms);
        } catch (UnsupportedOperationException e) {
            log.info("Cannot set permissions for config file on this system as it is not supported. Continuing");
        }
        if (target != null)
            Files.delete(target);
    } catch (IOException e) {
        log.error("Error writing the file: " + filePath + " message: " + e.getMessage(), e);
    }
}

#location 31
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private void denormalizeSdata(Sdata theSdata) {
    Map<String, Room> roomMap = new HashMap<String, Room>();
    for (Room i : theSdata.getRooms())
        roomMap.put(i.getId(), i);

    Map<String, Categorie> categoryMap = new HashMap<String, Categorie>();
    for (Categorie i : theSdata.getCategoriess())
        categoryMap.put(i.getId(), i);
    Categorie controllerCat = new Categorie();
    controllerCat.setName("Controller");
    controllerCat.setId("0");
    categoryMap.put(controllerCat.getId(), controllerCat);

    ListIterator<Device> theIterator = theSdata.getDevices().listIterator();
    Device theDevice = null;
    while (theIterator.hasNext()) {
        theDevice = theIterator.next();
        if (theDevice.getRoom() != null && roomMap.get(theDevice.getRoom()) != null)
            theDevice.setRoom(roomMap.get(theDevice.getRoom()).getName());
        else
            theDevice.setRoom("no room");
        if (theDevice.getCategory() != null && categoryMap.get(theDevice.getCategory()) != null)
            theDevice.setCategory(categoryMap.get(theDevice.getCategory()).getName());
        else
            theDevice.setCategory("<unknown>");
    }

    ListIterator<Scene> theSecneIter = theSdata.getScenes().listIterator();
    Scene theScene = null;
    while (theSecneIter.hasNext()) {
        theScene = theSecneIter.next();
        theScene.setRoom(roomMap.get(theScene.getRoom()).getName());
    }
}

#vulnerable code
private void denormalizeSdata(Sdata theSdata) {
    Map<String, Room> roomMap = new HashMap<String, Room>();
    for (Room i : theSdata.getRooms())
        roomMap.put(i.getId(), i);

    Map<String, Categorie> categoryMap = new HashMap<String, Categorie>();
    for (Categorie i : theSdata.getCategoriess())
        categoryMap.put(i.getId(), i);
    Categorie controllerCat = new Categorie();
    controllerCat.setName("Controller");
    controllerCat.setId("0");
    categoryMap.put(controllerCat.getId(), controllerCat);

    ListIterator<Device> theIterator = theSdata.getDevices().listIterator();
    Device theDevice = null;
    while (theIterator.hasNext()) {
        theDevice = theIterator.next();
        if (theDevice.getRoom() != null)
            theDevice.setRoom(roomMap.get(theDevice.getRoom()).getName());
        else
            theDevice.setRoom("<unknown>");
        if (theDevice.getCategory() != null)
            theDevice.setCategory(categoryMap.get(theDevice.getCategory()).getName());
        else
            theDevice.setCategory("<unknown>");
    }

    ListIterator<Scene> theSecneIter = theSdata.getScenes().listIterator();
    Scene theScene = null;
    while (theSecneIter.hasNext()) {
        theScene = theSecneIter.next();
        theScene.setRoom(roomMap.get(theScene.getRoom()).getName());
    }
}

#location 15
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
@Override
public String deviceHandler(CallItem anItem, MultiCommandUtil aMultiUtil, String lightId, int intensity,
        Integer targetBri, Integer targetBriInc, DeviceDescriptor device, String body) {
    Socket dataSendSocket = null;
    log.debug("executing HUE api request to TCP: " + anItem.getItem().getAsString());
    String theUrl = anItem.getItem().getAsString();
    if (theUrl != null && !theUrl.isEmpty() && theUrl.startsWith("tcp://")) {
        String intermediate = theUrl.substring(theUrl.indexOf("://") + 3);
        String hostPortion = intermediate.substring(0, intermediate.indexOf('/'));
        String theUrlBody = intermediate.substring(intermediate.indexOf('/') + 1);
        String hostAddr = null;
        String port = null;
        InetAddress IPAddress = null;
        dataSendSocket = theSockets.get(hostPortion);
        if (dataSendSocket == null) {
            if (hostPortion.contains(":")) {
                hostAddr = hostPortion.substring(0, intermediate.indexOf(':'));
                port = hostPortion.substring(intermediate.indexOf(':') + 1);
            } else
                hostAddr = hostPortion;
            try {
                IPAddress = InetAddress.getByName(hostAddr);
            } catch (UnknownHostException e) {
                // noop
            }
            try {
                dataSendSocket = new Socket(IPAddress, Integer.parseInt(port));
                theSockets.put(hostPortion, dataSendSocket);
            } catch (Exception e) {
                // noop
            }
        }
        theUrlBody = TimeDecode.replaceTimeValue(theUrlBody);
        if (theUrlBody.startsWith("0x")) {
            theUrlBody = BrightnessDecode.calculateReplaceIntensityValue(theUrlBody, intensity, targetBri, targetBriInc, true);
            sendData = DatatypeConverter.parseHexBinary(theUrlBody.substring(2));
        } else {
            theUrlBody = BrightnessDecode.calculateReplaceIntensityValue(theUrlBody, intensity, targetBri, targetBriInc, false);
            sendData = theUrlBody.getBytes();
        }
        try {
            DataOutputStream outToClient = new DataOutputStream(dataSendSocket.getOutputStream());
            outToClient.write(sendData);
            outToClient.flush();
        } catch (Exception e) {
            // noop
        }
    } else
        log.warn("Tcp Call to be presented as tcp://<ip_address>:<port>/payload, format of request unknown: " + theUrl);
    return null;
}

#vulnerable code
@Override
public String deviceHandler(CallItem anItem, MultiCommandUtil aMultiUtil, String lightId, int intensity,
        Integer targetBri, Integer targetBriInc, DeviceDescriptor device, String body) {
    log.debug("executing HUE api request to TCP: " + anItem.getItem().getAsString());
    String theUrl = anItem.getItem().getAsString();
    if (theUrl != null && !theUrl.isEmpty() && theUrl.startsWith("tcp://")) {
        String intermediate = theUrl.substring(theUrl.indexOf("://") + 3);
        String hostPortion = intermediate.substring(0, intermediate.indexOf('/'));
        String theUrlBody = intermediate.substring(intermediate.indexOf('/') + 1);
        String hostAddr = null;
        String port = null;
        InetAddress IPAddress = null;
        if (hostPortion.contains(":")) {
            hostAddr = hostPortion.substring(0, intermediate.indexOf(':'));
            port = hostPortion.substring(intermediate.indexOf(':') + 1);
        } else
            hostAddr = hostPortion;
        try {
            IPAddress = InetAddress.getByName(hostAddr);
        } catch (UnknownHostException e) {
            // noop
        }
        theUrlBody = TimeDecode.replaceTimeValue(theUrlBody);
        if (theUrlBody.startsWith("0x")) {
            theUrlBody = BrightnessDecode.calculateReplaceIntensityValue(theUrlBody, intensity, targetBri, targetBriInc, true);
            sendData = DatatypeConverter.parseHexBinary(theUrlBody.substring(2));
        } else {
            theUrlBody = BrightnessDecode.calculateReplaceIntensityValue(theUrlBody, intensity, targetBri, targetBriInc, false);
            sendData = theUrlBody.getBytes();
        }
        try {
            Socket dataSendSocket = new Socket(IPAddress, Integer.parseInt(port));
            DataOutputStream outToClient = new DataOutputStream(dataSendSocket.getOutputStream());
            outToClient.write(sendData);
            outToClient.flush();
            dataSendSocket.close();
        } catch (Exception e) {
            // noop
        }
    } else
        log.warn("Tcp Call to be presented as tcp://<ip_address>:<port>/payload, format of request unknown: " + theUrl);
    return null;
}

#location 39
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
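Caching sockets in theSockets, as the fixed handler does, removes the per-call churn but shifts the burden to shutdown: something must eventually close every cached socket. A sketch of the kind of close-all helper this design implies; the helper itself is an assumption, not code from the project:

import java.io.IOException;
import java.net.Socket;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SocketCacheSketch {
    // Cached, reused connections keyed by "host:port".
    private final Map<String, Socket> theSockets = new ConcurrentHashMap<>();

    // Hypothetical shutdown hook: close every cached socket, swallowing
    // close failures so one bad socket does not abort the rest of cleanup.
    void closeAll() {
        for (Socket socket : theSockets.values()) {
            try {
                socket.close();
            } catch (IOException ignored) {
                // best-effort cleanup
            }
        }
        theSockets.clear();
    }
}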
#fixed code
@Override
public Home createHome(BridgeSettings bridgeSettings) {
    fhemMap = null;
    validFhem = bridgeSettings.getBridgeSettingsDescriptor().isValidFhem();
    log.info("FHEM Home created." + (validFhem ? "" : " No FHEMs configured."));
    if (validFhem) {
        fhemMap = new HashMap<String, FHEMInstance>();
        httpClient = HTTPHome.getHandler();
        Iterator<NamedIP> theList = bridgeSettings.getBridgeSettingsDescriptor().getFhemaddress().getDevices().iterator();
        while (theList.hasNext() && validFhem) {
            NamedIP aFhem = theList.next();
            try {
                fhemMap.put(aFhem.getName(), new FHEMInstance(aFhem));
            } catch (Exception e) {
                log.error("Cannot get FHEM (" + aFhem.getName() + ") setup, Exiting with message: " + e.getMessage(), e);
                validFhem = false;
            }
        }
    }
    return this;
}

#vulnerable code
@Override
public Home createHome(BridgeSettings bridgeSettings) {
    fhemMap = null;
    validFhem = bridgeSettings.getBridgeSettingsDescriptor().isValidOpenhab();
    log.info("FHEM Home created." + (validFhem ? "" : " No FHEMs configured."));
    if (validFhem) {
        fhemMap = new HashMap<String, FHEMInstance>();
        httpClient = HTTPHome.getHandler();
        Iterator<NamedIP> theList = bridgeSettings.getBridgeSettingsDescriptor().getOpenhabaddress().getDevices().iterator();
        while (theList.hasNext() && validFhem) {
            NamedIP aFhem = theList.next();
            try {
                fhemMap.put(aFhem.getName(), new FHEMInstance(aFhem));
            } catch (Exception e) {
                log.error("Cannot get FHEM (" + aFhem.getName() + ") setup, Exiting with message: " + e.getMessage(), e);
                validFhem = false;
            }
        }
    }
    return this;
}

#location 9
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public HueError[] validateWhitelistUser(String aUser, String userDescription, boolean strict) {
    String validUser = null;
    boolean found = false;
    if (aUser != null && !aUser.equalsIgnoreCase("undefined") && !aUser.equalsIgnoreCase("null") && !aUser.equalsIgnoreCase("")) {
        if (securityDescriptor.getWhitelist() != null) {
            Set<String> theUserIds = securityDescriptor.getWhitelist().keySet();
            Iterator<String> userIterator = theUserIds.iterator();
            while (userIterator.hasNext()) {
                validUser = userIterator.next();
                if (validUser.equals(aUser)) {
                    found = true;
                    log.debug("validateWhitelistUser: found a user <" + aUser + ">");
                }
            }
        }
    }

    if (!found && !strict) {
        log.debug("validateWhitelistUser: a user was not found and it is not strict rules <" + aUser + "> being created");
        newWhitelistUser(aUser, userDescription);
        found = true;
    }

    if (!found) {
        log.debug("validateWhitelistUser: a user was not found and it is strict rules <" + aUser + ">");
        return HueErrorResponse.createResponse("1", "/api/" + aUser == null ? "" : aUser, "unauthorized user", null, null, null).getTheErrors();
    }

    return null;
}

#vulnerable code
public HueError[] validateWhitelistUser(String aUser, String userDescription, boolean strict) {
    String validUser = null;
    boolean found = false;
    if (aUser != null && !aUser.equalsIgnoreCase("undefined") && !aUser.equalsIgnoreCase("null") && !aUser.equalsIgnoreCase("")) {
        if (securityDescriptor.getWhitelist() != null) {
            Set<String> theUserIds = securityDescriptor.getWhitelist().keySet();
            Iterator<String> userIterator = theUserIds.iterator();
            while (userIterator.hasNext()) {
                validUser = userIterator.next();
                if (validUser.equals(aUser))
                    found = true;
            }
        }
    }

    if (!found && !strict) {
        newWhitelistUser(aUser, userDescription);
        found = true;
    }

    if (!found) {
        return HueErrorResponse.createResponse("1", "/api/" + aUser, "unauthorized user", null, null, null).getTheErrors();
    }

    Object anUser = securityDescriptor.getWhitelist().remove(DEPRACATED_INTERNAL_USER);
    if (anUser != null)
        setSettingsChanged(true);

    return null;
}

#location 27
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
@Override
public String deviceHandler(CallItem anItem, MultiCommandUtil aMultiUtil, String lightId, int intensity,
        Integer targetBri, Integer targetBriInc, ColorData colorData, DeviceDescriptor device, String body) {
    log.debug("Exec Request called with url: " + anItem.getItem().getAsString() + " and exec Garden: "
            + (theSettings.getBridgeSecurity().getExecGarden() == null ? "not given" : theSettings.getBridgeSecurity().getExecGarden()));
    String responseString = null;
    String intermediate;
    if (anItem.getItem().getAsString().contains("exec://"))
        intermediate = anItem.getItem().getAsString().substring(anItem.getItem().getAsString().indexOf("://") + 3);
    else
        intermediate = anItem.getItem().getAsString();
    intermediate = BrightnessDecode.calculateReplaceIntensityValue(intermediate, intensity, targetBri, targetBriInc, false);
    if (colorData != null) {
        intermediate = ColorDecode.replaceColorData(intermediate, colorData, BrightnessDecode.calculateIntensity(intensity, targetBri, targetBriInc), false);
    }
    intermediate = DeviceDataDecode.replaceDeviceData(intermediate, device);
    intermediate = TimeDecode.replaceTimeValue(intermediate);

    String execGarden = theSettings.getBridgeSecurity().getExecGarden();
    if (execGarden != null && !execGarden.trim().isEmpty()) {
        intermediate = new File(execGarden.trim(), intermediate).getAbsolutePath();
    }

    String anError = doExecRequest(intermediate, lightId);
    if (anError != null) {
        responseString = anError;
    }
    return responseString;
}

#vulnerable code
@Override
public String deviceHandler(CallItem anItem, MultiCommandUtil aMultiUtil, String lightId, int intensity,
        Integer targetBri, Integer targetBriInc, ColorData colorData, DeviceDescriptor device, String body) {
    log.debug("Exec Request called with url: " + anItem.getItem().getAsString() + " and exec Garden: "
            + (theSettings.getBridgeSecurity().getExecGarden() == null ? "not given" : theSettings.getBridgeSecurity().getExecGarden()));
    String responseString = null;
    String intermediate;
    if (anItem.getItem().getAsString().contains("exec://"))
        intermediate = anItem.getItem().getAsString().substring(anItem.getItem().getAsString().indexOf("://") + 3);
    else
        intermediate = anItem.getItem().getAsString();
    intermediate = BrightnessDecode.calculateReplaceIntensityValue(intermediate, intensity, targetBri, targetBriInc, false);
    if (colorData != null) {
        intermediate = ColorDecode.replaceColorData(intermediate, colorData, BrightnessDecode.calculateIntensity(intensity, targetBri, targetBriInc), false);
    }
    intermediate = DeviceDataDecode.replaceDeviceData(intermediate, device);
    intermediate = TimeDecode.replaceTimeValue(intermediate);

    String execGarden = theSettings.getBridgeSecurity().getExecGarden();
    if (execGarden != null && !execGarden.trim().isEmpty()) {
        if (System.getProperty("os.name").toLowerCase().indexOf("win") >= 0)
            intermediate = execGarden + "\\" + intermediate;
        else
            intermediate = execGarden + "/" + intermediate;
    }

    String anError = doExecRequest(intermediate, lightId);
    if (anError != null) {
        responseString = anError;
    }
    return responseString;
}

#location 18
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public static void main(String[] args) {
    Logger log = LoggerFactory.getLogger(HABridge.class);
    DeviceResource theResources;
    HomeManager homeManager;
    HueMulator theHueMulator;
    UDPDatagramSender udpSender;
    UpnpSettingsResource theSettingResponder;
    UpnpListener theUpnpListener;
    SystemControl theSystem;
    BridgeSettings bridgeSettings;
    Version theVersion;

    theVersion = new Version();

    log.info("HA Bridge (v" + theVersion.getVersion() + ") starting....");

    bridgeSettings = new BridgeSettings();
    // sparkjava config directive to set html static file location for Jetty
    staticFileLocation("/public");

    while (!bridgeSettings.getBridgeControl().isStop()) {
        bridgeSettings.buildSettings();
        bridgeSettings.getBridgeSecurity().removeTestUsers();
        log.info("HA Bridge initializing....");
        // sparkjava config directive to set ip address for the web server to listen on
        ipAddress(bridgeSettings.getBridgeSettingsDescriptor().getWebaddress());
        // sparkjava config directive to set port for the web server to listen on
        port(bridgeSettings.getBridgeSettingsDescriptor().getServerPort());
        if (!bridgeSettings.getBridgeControl().isReinit())
            init();
        bridgeSettings.getBridgeControl().setReinit(false);
        // setup system control api first
        theSystem = new SystemControl(bridgeSettings, theVersion);
        theSystem.setupServer();
        // setup the UDP Datagram socket to be used by the HueMulator and the upnpListener
        udpSender = UDPDatagramSender.createUDPDatagramSender(bridgeSettings.getBridgeSettingsDescriptor().getUpnpResponsePort());
        if (udpSender == null) {
            bridgeSettings.getBridgeControl().setStop(true);
        } else {
            // Setup the device connection homes through the manager
            homeManager = new HomeManager();
            homeManager.buildHomes(bridgeSettings, udpSender);
            // setup the class to handle the resource setup rest api
            theResources = new DeviceResource(bridgeSettings, homeManager);
            // setup the class to handle the upnp response rest api
            theSettingResponder = new UpnpSettingsResource(bridgeSettings.getBridgeSettingsDescriptor());
            theSettingResponder.setupServer();
            // setup the class to handle the hue emulator rest api
            theHueMulator = new HueMulator(bridgeSettings, theResources.getDeviceRepository(), homeManager);
            theHueMulator.setupServer();
            // wait for the sparkjava initialization of the rest api classes to be complete
            awaitInitialization();
            // start the upnp ssdp discovery listener
            theUpnpListener = new UpnpListener(bridgeSettings.getBridgeSettingsDescriptor(), bridgeSettings.getBridgeControl(), udpSender);
            if (theUpnpListener.startListening())
                log.info("HA Bridge (v" + theVersion.getVersion() + ") reinitialization requessted....");
            else
                bridgeSettings.getBridgeControl().setStop(true);
            if (bridgeSettings.getBridgeSettingsDescriptor().isSettingsChanged())
                bridgeSettings.save(bridgeSettings.getBridgeSettingsDescriptor());
            homeManager.closeHomes();
            udpSender.closeResponseSocket();
            udpSender = null;
        }
        stop();
        if (!bridgeSettings.getBridgeControl().isStop()) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }
    bridgeSettings.getBridgeSecurity().removeTestUsers();
    if (bridgeSettings.getBridgeSecurity().isSettingsChanged())
        bridgeSettings.updateConfigFile();
    log.info("HA Bridge (v" + theVersion.getVersion() + ") exiting....");
    System.exit(0);
}

#vulnerable code
public static void main(String[] args) {
    Logger log = LoggerFactory.getLogger(HABridge.class);
    DeviceResource theResources;
    HomeManager homeManager;
    HueMulator theHueMulator;
    UDPDatagramSender udpSender;
    UpnpSettingsResource theSettingResponder;
    UpnpListener theUpnpListener;
    SystemControl theSystem;
    BridgeSettings bridgeSettings;
    Version theVersion;

    theVersion = new Version();

    log.info("HA Bridge (v" + theVersion.getVersion() + ") starting....");

    bridgeSettings = new BridgeSettings();
    // sparkjava config directive to set html static file location for Jetty
    staticFileLocation("/public");

    while (!bridgeSettings.getBridgeControl().isStop()) {
        bridgeSettings.buildSettings();
        log.info("HA Bridge initializing....");
        // sparkjava config directive to set ip address for the web server to listen on
        ipAddress(bridgeSettings.getBridgeSettingsDescriptor().getWebaddress());
        // sparkjava config directive to set port for the web server to listen on
        port(bridgeSettings.getBridgeSettingsDescriptor().getServerPort());
        if (!bridgeSettings.getBridgeControl().isReinit())
            init();
        bridgeSettings.getBridgeControl().setReinit(false);
        // setup system control api first
        theSystem = new SystemControl(bridgeSettings, theVersion);
        theSystem.setupServer();
        // setup the UDP Datagram socket to be used by the HueMulator and the upnpListener
        udpSender = UDPDatagramSender.createUDPDatagramSender(bridgeSettings.getBridgeSettingsDescriptor().getUpnpResponsePort());
        if (udpSender == null) {
            bridgeSettings.getBridgeControl().setStop(true);
        } else {
            // Setup the device connection homes through the manager
            homeManager = new HomeManager();
            homeManager.buildHomes(bridgeSettings, udpSender);
            // setup the class to handle the resource setup rest api
            theResources = new DeviceResource(bridgeSettings, homeManager);
            // setup the class to handle the upnp response rest api
            theSettingResponder = new UpnpSettingsResource(bridgeSettings.getBridgeSettingsDescriptor());
            theSettingResponder.setupServer();
            // setup the class to handle the hue emulator rest api
            theHueMulator = new HueMulator(bridgeSettings, theResources.getDeviceRepository(), homeManager);
            theHueMulator.setupServer();
            // wait for the sparkjava initialization of the rest api classes to be complete
            awaitInitialization();
            // start the upnp ssdp discovery listener
            theUpnpListener = new UpnpListener(bridgeSettings.getBridgeSettingsDescriptor(), bridgeSettings.getBridgeControl(), udpSender);
            if (theUpnpListener.startListening())
                log.info("HA Bridge (v" + theVersion.getVersion() + ") reinitialization requessted....");
            else
                bridgeSettings.getBridgeControl().setStop(true);
            if (bridgeSettings.getBridgeSettingsDescriptor().isSettingsChanged())
                bridgeSettings.save(bridgeSettings.getBridgeSettingsDescriptor());
            homeManager.closeHomes();
            udpSender.closeResponseSocket();
            udpSender = null;
        }
        stop();
        if (!bridgeSettings.getBridgeControl().isStop()) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }
    bridgeSettings.getBridgeSecurity().removeTestUsers();
    if (bridgeSettings.getBridgeSecurity().isSettingsChanged())
        bridgeSettings.updateConfigFile();
    log.info("HA Bridge (v" + theVersion.getVersion() + ") exiting....");
    System.exit(0);
}

#location 26
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
private void denormalizeSdata(Sdata theSdata) {
    Map<String, Room> roomMap = new HashMap<String, Room>();
    for (Room i : theSdata.getRooms())
        roomMap.put(i.getId(), i);

    Map<String, Categorie> categoryMap = new HashMap<String, Categorie>();
    for (Categorie i : theSdata.getCategoriess())
        categoryMap.put(i.getId(), i);
    Categorie controllerCat = new Categorie();
    controllerCat.setName("Controller");
    controllerCat.setId("0");
    categoryMap.put(controllerCat.getId(), controllerCat);

    ListIterator<Device> theIterator = theSdata.getDevices().listIterator();
    Device theDevice = null;
    while (theIterator.hasNext()) {
        theDevice = theIterator.next();
        if (theDevice.getRoom() != null && roomMap.get(theDevice.getRoom()) != null)
            theDevice.setRoom(roomMap.get(theDevice.getRoom()).getName());
        else
            theDevice.setRoom("no room");
        if (theDevice.getCategory() != null && categoryMap.get(theDevice.getCategory()) != null)
            theDevice.setCategory(categoryMap.get(theDevice.getCategory()).getName());
        else
            theDevice.setCategory("<unknown>");
    }

    ListIterator<Scene> theSecneIter = theSdata.getScenes().listIterator();
    Scene theScene = null;
    while (theSecneIter.hasNext()) {
        theScene = theSecneIter.next();
        theScene.setRoom(roomMap.get(theScene.getRoom()).getName());
    }
}

#vulnerable code
private void denormalizeSdata(Sdata theSdata) {
    Map<String, Room> roomMap = new HashMap<String, Room>();
    for (Room i : theSdata.getRooms())
        roomMap.put(i.getId(), i);

    Map<String, Categorie> categoryMap = new HashMap<String, Categorie>();
    for (Categorie i : theSdata.getCategoriess())
        categoryMap.put(i.getId(), i);
    Categorie controllerCat = new Categorie();
    controllerCat.setName("Controller");
    controllerCat.setId("0");
    categoryMap.put(controllerCat.getId(), controllerCat);

    ListIterator<Device> theIterator = theSdata.getDevices().listIterator();
    Device theDevice = null;
    while (theIterator.hasNext()) {
        theDevice = theIterator.next();
        if (theDevice.getRoom() != null)
            theDevice.setRoom(roomMap.get(theDevice.getRoom()).getName());
        else
            theDevice.setRoom("<unknown>");
        if (theDevice.getCategory() != null)
            theDevice.setCategory(categoryMap.get(theDevice.getCategory()).getName());
        else
            theDevice.setCategory("<unknown>");
    }

    ListIterator<Scene> theSecneIter = theSdata.getScenes().listIterator();
    Scene theScene = null;
    while (theSecneIter.hasNext()) {
        theScene = theSecneIter.next();
        theScene.setRoom(roomMap.get(theScene.getRoom()).getName());
    }
}

#location 19
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void saveResource(String resourcePath, boolean replace) {
    if (resourcePath == null || resourcePath.equals("")) {
        throw new IllegalArgumentException("ResourcePath cannot be null or empty");
    }

    resourcePath = resourcePath.replace('\\', '/');
    InputStream in = getResource(resourcePath);
    if (in == null) {
        throw new IllegalArgumentException("The embedded resource '" + resourcePath + "' cannot be found in " + getFile());
    }

    File outFile = new File(getDataFolder(), resourcePath);
    int lastIndex = resourcePath.lastIndexOf('/');
    File outDir = new File(getDataFolder(), resourcePath.substring(0, lastIndex >= 0 ? lastIndex : 0));

    if (!outDir.exists()) {
        outDir.mkdirs();
    }

    try {
        if (!outFile.exists() || replace) {
            OutputStream out = new FileOutputStream(outFile);
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            out.close();
            in.close();
        } else {
            getLogger().log(Level.WARNING, "Could not save " + outFile.getName() + " to " + outFile + " because " + outFile.getName() + " already exists.");
        }
    } catch (IOException ex) {
        getLogger().log(Level.SEVERE, "Could not save " + outFile.getName() + " to " + outFile, ex);
    }
}

#vulnerable code
public void saveResource(String resourcePath, boolean replace) {
    if (resourcePath == null || resourcePath.equals("")) {
        throw new IllegalArgumentException("ResourcePath cannot be null or empty");
    }

    resourcePath = resourcePath.replace('\\', '/');
    InputStream in = getResource(resourcePath);
    if (in == null) {
        throw new IllegalArgumentException("The embedded resource '" + resourcePath + "' cannot be found in " + getFile());
    }

    File outFile = new File(getDataFolder(), resourcePath);
    int lastIndex = resourcePath.lastIndexOf('/');
    File outDir = new File(getDataFolder(), resourcePath.substring(0, lastIndex >= 0 ? lastIndex : 0));

    if (!outDir.exists()) {
        outDir.mkdirs();
    }

    try {
        if (!outFile.exists() || replace) {
            OutputStream out = new FileOutputStream(outFile);
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            out.close();
            in.close();
        } else {
            Logger.getLogger(JavaPlugin.class.getName()).log(Level.WARNING, "Could not save " + outFile.getName() + " to " + outFile + " because " + outFile.getName() + " already exists.");
        }
    } catch (IOException ex) {
        Logger.getLogger(JavaPlugin.class.getName()).log(Level.SEVERE, "Could not save " + outFile.getName() + " to " + outFile, ex);
    }
}

#location 33
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public void load(File file) throws FileNotFoundException, IOException, InvalidConfigurationException {
    Validate.notNull(file, "File cannot be null");

    final FileInputStream stream = new FileInputStream(file);

    load(new InputStreamReader(stream, UTF8_OVERRIDE && !UTF_BIG ? Charsets.UTF_8 : Charset.defaultCharset()));
}

#vulnerable code
public void load(File file) throws FileNotFoundException, IOException, InvalidConfigurationException {
    Validate.notNull(file, "File cannot be null");

    load(new FileInputStream(file));
}

#location 4
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
public Plugin loadPlugin(File file) throws InvalidPluginException, InvalidDescriptionException, UnknownDependencyException {
    return loadPlugin(file, false);
}

#vulnerable code
public Plugin loadPlugin(File file) throws InvalidPluginException, InvalidDescriptionException, UnknownDependencyException {
    JavaPlugin result = null;
    PluginDescriptionFile description = null;

    if (!file.exists()) {
        throw new InvalidPluginException(new FileNotFoundException(String.format("%s does not exist", file.getPath())));
    }
    try {
        JarFile jar = new JarFile(file);
        JarEntry entry = jar.getJarEntry("plugin.yml");

        if (entry == null) {
            throw new InvalidPluginException(new FileNotFoundException("Jar does not contain plugin.yml"));
        }

        InputStream stream = jar.getInputStream(entry);

        description = new PluginDescriptionFile(stream);

        stream.close();
        jar.close();
    } catch (IOException ex) {
        throw new InvalidPluginException(ex);
    } catch (YAMLException ex) {
        throw new InvalidPluginException(ex);
    }

    File dataFolder = new File(file.getParentFile(), description.getName());
    File oldDataFolder = getDataFolder(file);

    // Found old data folder
    if (dataFolder.equals(oldDataFolder)) {
        // They are equal -- nothing needs to be done!
    } else if (dataFolder.isDirectory() && oldDataFolder.isDirectory()) {
        server.getLogger().log(
                Level.INFO,
                String.format(
                        "While loading %s (%s) found old-data folder: %s next to the new one: %s",
                        description.getName(),
                        file,
                        oldDataFolder,
                        dataFolder
                ));
    } else if (oldDataFolder.isDirectory() && !dataFolder.exists()) {
        if (!oldDataFolder.renameTo(dataFolder)) {
            throw new InvalidPluginException(new Exception("Unable to rename old data folder: '" + oldDataFolder + "' to: '" + dataFolder + "'"));
        }
        server.getLogger().log(
                Level.INFO,
                String.format(
                        "While loading %s (%s) renamed data folder: '%s' to '%s'",
                        description.getName(),
                        file,
                        oldDataFolder,
                        dataFolder
                ));
    }

    if (dataFolder.exists() && !dataFolder.isDirectory()) {
        throw new InvalidPluginException(new Exception(String.format(
                "Projected datafolder: '%s' for %s (%s) exists and is not a directory",
                dataFolder,
                description.getName(),
                file
        )));
    }

    ArrayList<String> depend;

    try {
        depend = (ArrayList) description.getDepend();
        if (depend == null) {
            depend = new ArrayList<String>();
        }
    } catch (ClassCastException ex) {
        throw new InvalidPluginException(ex);
    }

    for (String pluginName : depend) {
        if (loaders == null) {
            throw new UnknownDependencyException(pluginName);
        }
        PluginClassLoader current = loaders.get(pluginName);

        if (current == null) {
            throw new UnknownDependencyException(pluginName);
        }
    }

    PluginClassLoader loader = null;

    try {
        URL[] urls = new URL[1];

        urls[0] = file.toURI().toURL();

        loader = new PluginClassLoader(this, urls, getClass().getClassLoader());

        Class<?> jarClass = Class.forName(description.getMain(), true, loader);
        Class<? extends JavaPlugin> plugin = jarClass.asSubclass(JavaPlugin.class);

        Constructor<? extends JavaPlugin> constructor = plugin.getConstructor();

        result = constructor.newInstance();

        result.initialize(this, server, description, dataFolder, file, loader);
    } catch (Throwable ex) {
        throw new InvalidPluginException(ex);
    }

    loaders.put(description.getName(), (PluginClassLoader) loader);

    return (Plugin) result;
}

#location 13
#vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code
@Override
public boolean execute(CommandSender sender, String currentAlias, String[] args) {
    if (!testPermission(sender)) return true;

    if (args.length < 1 || args.length > 4) {
        sender.sendMessage(ChatColor.RED + "Usage: " + usageMessage);
        return false;
    }

    Player player;

    if (args.length == 1 || args.length == 3) {
        if (sender instanceof Player) {
            player = (Player) sender;
        } else {
            sender.sendMessage("Please provide a player!");
            return true;
        }
    } else {
        player = Bukkit.getPlayerExact(args[0]);
    }

    if (player == null) {
        sender.sendMessage("Player not found: " + args[0]);
        return true;
    }

    if (args.length < 3) {
        Player target = Bukkit.getPlayerExact(args[args.length - 1]);
        if (target == null) {
            sender.sendMessage("Can't find user " + args[args.length - 1] + ". No tp.");
            return true;
        }
        player.teleport(target, TeleportCause.COMMAND);
        Command.broadcastCommandMessage(sender, "Teleported " + player.getName() + " to " + target.getName());
    } else if (player.getWorld() != null) {
        int x = getInteger(sender, args[args.length - 3], -30000000, 30000000);
        int y = getInteger(sender, args[args.length - 2], 0, 256);
        int z = getInteger(sender, args[args.length - 1], -30000000, 30000000);

        Location location = new Location(player.getWorld(), x, y, z);
        player.teleport(location);
        Command.broadcastCommandMessage(sender, "Teleported " + player.getName() + " to " + +x + "," + y + "," + z);
    }

    return true;
}

#vulnerable code
@Override
public boolean execute(CommandSender sender, String currentAlias, String[] args) {
    if (!testPermission(sender)) return true;

    if (args.length < 1 || args.length > 4) {
        sender.sendMessage(ChatColor.RED + "Usage: " + usageMessage);
        return false;
    }

    Player player;

    if (args.length == 1 || args.length == 3) {
        if (sender instanceof Player) {
            player = (Player) sender;
        } else {
            sender.sendMessage("Please provide a player!");
            return true;
        }
    } else {
        player = Bukkit.getPlayerExact(args[0]);
    }

    if (player == null) {
        sender.sendMessage("Player not found: " + args[0]);
    }

    if (args.length < 3) {
        Player target = Bukkit.getPlayerExact(args[args.length - 1]);
        if (target == null) {
            sender.sendMessage("Can't find user " + args[args.length - 1] + ". No tp.");
        }
        player.teleport(target, TeleportCause.COMMAND);
        sender.sendMessage("Teleported " + player.getName() + " to " + target.getName());
    } else if (player.getWorld() != null) {
        int x = getInteger(sender, args[args.length - 3], -30000000, 30000000);
        int y = getInteger(sender, args[args.length - 2], 0, 256);
        int z = getInteger(sender, args[args.length - 1], -30000000, 30000000);

        Location location = new Location(player.getWorld(), x, y, z);
        player.teleport(location);
        sender.sendMessage("Teleported " + player.getName() + " to " + x + "," + y + "," + z);
    }

    return true;
}

#location 32
#vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
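The tp fix is a chain of guard clauses: every failed lookup reports and returns before the value can be dereferenced. The shape, reduced to its essentials (the types here are plain placeholders, not Bukkit's):

public class GuardClauseSketch {
    // Each precondition failure reports and returns before the reference is
    // used, which is exactly what the vulnerable version forgot to do.
    static boolean teleport(Object player, Object target) {
        if (player == null) {
            System.out.println("Player not found");
            return true; // command handled, nothing more to do
        }
        if (target == null) {
            System.out.println("Target not found");
            return true;
        }
        // Safe: both references are known non-null past the guards.
        System.out.println("Teleporting " + player + " to " + target);
        return true;
    }
}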
#fixed code
public void recalculatePermissions() {
    clearPermissions();
    Set<Permission> defaults = Bukkit.getServer().getPluginManager().getDefaultPermissions(isOp());
    Bukkit.getServer().getPluginManager().subscribeToDefaultPerms(isOp(), parent);

    for (Permission perm : defaults) {
        String name = perm.getName().toLowerCase();
        permissions.put(name, new PermissionAttachmentInfo(parent, name, null, true));
        Bukkit.getServer().getPluginManager().subscribeToPermission(name, parent);
        calculateChildPermissions(perm.getChildren(), false, null);
    }

    for (PermissionAttachment attachment : attachments) {
        calculateChildPermissions(attachment.getPermissions(), false, attachment);
    }
}

#vulnerable code
public void recalculatePermissions() {
    dirtyPermissions = true;
}

#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code, please generate the patch based on the following information.
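The vulnerable version deferred work through a dirtyPermissions flag that other threads could observe without synchronization; the fix recomputes eagerly instead. Where a deferred design is kept, the minimal safe shape is to guard both the flag and the derived state with one lock. A sketch with illustrative names, not the Bukkit implementation:

import java.util.HashMap;
import java.util.Map;

public class PermissionsSketch {
    private final Map<String, Boolean> permissions = new HashMap<>();
    private boolean dirty = true;

    // synchronized gives both mutual exclusion and visibility, so a
    // recalculation triggered on one thread is fully visible to readers.
    synchronized void markDirty() {
        dirty = true;
    }

    synchronized boolean hasPermission(String name) {
        if (dirty) {
            recalculate();
            dirty = false;
        }
        return Boolean.TRUE.equals(permissions.get(name));
    }

    private void recalculate() {
        permissions.clear();
        permissions.put("example.permission", Boolean.TRUE); // placeholder rebuild
    }
}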
#fixed code
@Test
public void testConsumerNormalOps() throws InterruptedException, ExecutionException {
    // Tests create instance, read, and delete
    final List<ConsumerRecord> referenceRecords = Arrays.asList(
            new ConsumerRecord("k1".getBytes(), "v1".getBytes(), 0, 0),
            new ConsumerRecord("k2".getBytes(), "v2".getBytes(), 1, 0),
            new ConsumerRecord("k3".getBytes(), "v3".getBytes(), 2, 0)
    );
    Map<Integer, List<ConsumerRecord>> referenceSchedule = new HashMap<>();
    referenceSchedule.put(50, referenceRecords);

    Map<String, List<Map<Integer, List<ConsumerRecord>>>> schedules = new HashMap<>();
    schedules.put(topicName, Arrays.asList(referenceSchedule));

    expectCreate(schedules);
    EasyMock.expect(mdObserver.topicExists(topicName)).andReturn(true);

    EasyMock.replay(mdObserver, consumerFactory);

    String cid = consumerManager.createConsumer(groupName, new ConsumerInstanceConfig());

    consumerManager.readTopic(groupName, cid, topicName, new ConsumerManager.ReadCallback() {
        @Override
        public void onCompletion(List<ConsumerRecord> records, Exception e) {
            assertNull(e);
            assertEquals(referenceRecords, records);
        }
    }).get();

    // With # of messages < max per request, this should finish at the per-request timeout
    assertEquals(config.consumerRequestTimeoutMs, config.time.milliseconds());

    consumerManager.commitOffsets(groupName, cid, new ConsumerManager.CommitCallback() {
        @Override
        public void onCompletion(List<TopicPartitionOffset> offsets, Exception e) {
            assertNull(e);
            // Mock consumer doesn't handle offsets, so we just check we get some output for the right partitions
            assertNotNull(offsets);
            assertEquals(3, offsets.size());
        }
    }).get();

    consumerManager.deleteConsumer(groupName, cid);

    EasyMock.verify(mdObserver, consumerFactory);
}

#vulnerable code
@Test
public void testConsumerNormalOps() throws InterruptedException, ExecutionException {
    // Tests create instance, read, and delete
    final List<ConsumerRecord> referenceRecords = Arrays.asList(
            new ConsumerRecord("k1".getBytes(), "v1".getBytes(), 0, 0),
            new ConsumerRecord("k2".getBytes(), "v2".getBytes(), 1, 0),
            new ConsumerRecord("k3".getBytes(), "v3".getBytes(), 2, 0)
    );
    Map<Integer, List<ConsumerRecord>> referenceSchedule = new HashMap<>();
    referenceSchedule.put(50, referenceRecords);

    Map<String, List<Map<Integer, List<ConsumerRecord>>>> schedules = new HashMap<>();
    schedules.put(topicName, Arrays.asList(referenceSchedule));

    expectCreate(schedules);
    EasyMock.expect(mdObserver.topicExists(topicName)).andReturn(true);

    EasyMock.replay(mdObserver, consumerFactory);

    String cid = consumerManager.createConsumer(groupName);

    consumerManager.readTopic(groupName, cid, topicName, new ConsumerManager.ReadCallback() {
        @Override
        public void onCompletion(List<ConsumerRecord> records, Exception e) {
            assertNull(e);
            assertEquals(referenceRecords, records);
        }
    }).get();

    // With # of messages < max per request, this should finish at the per-request timeout
    assertEquals(config.consumerRequestTimeoutMs, config.time.milliseconds());

    consumerManager.commitOffsets(groupName, cid, new ConsumerManager.CommitCallback() {
        @Override
        public void onCompletion(List<TopicPartitionOffset> offsets, Exception e) {
            assertNull(e);
            // Mock consumer doesn't handle offsets, so we just check we get some output for the right partitions
            assertNotNull(offsets);
            assertEquals(3, offsets.size());
        }
    }).get();

    consumerManager.deleteConsumer(groupName, cid);

    EasyMock.verify(mdObserver, consumerFactory);
}

#location 27
#vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void test() { JedisPoolConfig config = new JedisPoolConfig(); // 设置空间连接 config.setMaxIdle(20); config.setMaxWaitMillis(1000); // JedisPool pool = new JedisPool(config, "27.126.180.210", 6379); // System.out.println(pool.getResource()); // Jedis jedis = pool.getResource(); // jedis.set("name", "陈杰"); // System.out.println(jedis.get("name")); }
#vulnerable code @Test public void test() { JedisPoolConfig config = new JedisPoolConfig(); // 设置空间连接 config.setMaxIdle(20); config.setMaxWaitMillis(1000); JedisPool pool = new JedisPool(config, "27.126.180.210", 6379); System.out.println(pool.getResource()); Jedis jedis = pool.getResource(); jedis.set("name", "陈杰"); System.out.println(jedis.get("name")); } #location 12 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
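The fix above simply comments the leaking calls out. The non-leaking way to write the same test is try-with-resources; this sketch assumes a Jedis version in which `JedisPool` and `Jedis` implement `Closeable` (closing a pooled `Jedis` returns it to the pool), and the host and port are placeholders:

    import redis.clients.jedis.Jedis;
    import redis.clients.jedis.JedisPool;
    import redis.clients.jedis.JedisPoolConfig;

    final class JedisExample {
        static String roundTrip() {
            JedisPoolConfig config = new JedisPoolConfig();
            config.setMaxIdle(20);
            config.setMaxWaitMillis(1000);
            try (JedisPool pool = new JedisPool(config, "localhost", 6379);
                 Jedis jedis = pool.getResource()) { // returned to the pool on close
                jedis.set("name", "value");
                return jedis.get("name");
            } // both resources are released even if an exception is thrown
        }
    }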
#fixed code public File saveAs(String path) throws IOException, InterruptedException { byte[] pdf = this.getPDF(); File file = new File(path); BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(file)); bufferedOutputStream.write(pdf); bufferedOutputStream.flush(); bufferedOutputStream.close(); return file; }
#vulnerable code public File saveAs(String path) throws IOException, InterruptedException { Runtime rt = Runtime.getRuntime(); String command = this.commandWithParameters() + Symbol.separator + path; Process proc = rt.exec(command); if(htmlFromString) { OutputStream stdin = proc.getOutputStream(); stdin.write(htmlInput.getBytes()); stdin.close(); } proc.waitFor(); if(proc.exitValue() != 0) { throw new RuntimeException("Process (" + command + ") exited with status code " + proc.exitValue()); } return new File(path); } #location 16 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
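Even the fixed `saveAs` above would leak the stream if `write` threw before `close()`. A variant using try-with-resources closes on every exit path (a sketch, not the library's actual code):

    import java.io.BufferedOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    final class PdfWriter {
        // Write the rendered bytes; the stream is closed on both the normal
        // and the exceptional path, and close() implies flush().
        static File save(byte[] pdf, String path) throws IOException {
            File file = new File(path);
            try (BufferedOutputStream out =
                     new BufferedOutputStream(new FileOutputStream(file))) {
                out.write(pdf);
            }
            return file;
        }
    }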
#fixed code public byte[] getPDF() throws IOException, InterruptedException { return getPDF(STDOUT); }
#vulnerable code public byte[] getPDF() throws IOException, InterruptedException { Runtime runtime = Runtime.getRuntime(); if(htmlFromString && !this.params.contains(new Param("-"))) { this.addParam(new Param("-")); } String command = this.commandWithParameters() + Symbol.separator + "-"; Process process = runtime.exec(command); if(htmlFromString) { OutputStream stdInStream = process.getOutputStream(); stdInStream.write(htmlInput.getBytes()); stdInStream.close(); } InputStream stdOutStream = process.getInputStream(); InputStream stdErrStream = process.getErrorStream(); process.waitFor(); ByteArrayOutputStream stdOut = new ByteArrayOutputStream(); ByteArrayOutputStream stdErr = new ByteArrayOutputStream(); for(int i = 0; i < stdOutStream.available(); i++) { stdOut.write((char) stdOutStream.read()); } stdOutStream.close(); for(int i = 0; i < stdErrStream.available(); i++) { stdErr.write((char) stdErrStream.read()); } stdErrStream.close(); if(process.exitValue() != 0) { throw new RuntimeException("Process (" + command + ") exited with status code " + process.exitValue() + ":\n"+new String(stdErr.toByteArray())); } return stdOut.toByteArray(); } #location 33 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
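Beyond the leaked streams, the vulnerable `getPDF` has two classic process-I/O bugs: it calls `waitFor()` before draining output (a deadlock risk once the pipe buffer fills) and loops on `available()`, which reports only currently buffered bytes and can silently truncate. A sketch of a correct drain loop (names illustrative):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    final class ProcessIo {
        // Read a process stream to completion with a bounded buffer; closing
        // the stream is the caller's job (try-with-resources at the call site).
        static byte[] drain(InputStream in) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            byte[] chunk = new byte[8192];
            int n;
            while ((n = in.read(chunk)) != -1) { // read until EOF, not available()==0
                buf.write(chunk, 0, n);
            }
            return buf.toByteArray();
        }
    }

At the call site, drain stdout and stderr (ideally on separate threads, as the StreamEater-based record below shows) inside try-with-resources, and only then call `process.waitFor()`.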
#fixed code public File saveAs(String path) throws IOException, InterruptedException { byte[] pdf = this.getPDF(); File file = new File(path); BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(new FileOutputStream(file)); bufferedOutputStream.write(pdf); bufferedOutputStream.flush(); bufferedOutputStream.close(); return file; }
#vulnerable code public File saveAs(String path) throws IOException, InterruptedException { Runtime rt = Runtime.getRuntime(); String command = this.commandWithParameters() + Symbol.separator + path; Process proc = rt.exec(command); if(htmlFromString) { OutputStream stdin = proc.getOutputStream(); stdin.write(htmlInput.getBytes()); stdin.close(); } proc.waitFor(); if(proc.exitValue() != 0) { throw new RuntimeException("Process (" + command + ") exited with status code " + proc.exitValue()); } return new File(path); } #location 13 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public byte[] getPDF() throws IOException, InterruptedException { return getPDF(STDOUT); }
#vulnerable code public byte[] getPDF() throws IOException, InterruptedException { Runtime runtime = Runtime.getRuntime(); if(htmlFromString && !this.params.contains(new Param("-"))) { this.addParam(new Param("-")); } String command = this.commandWithParameters() + Symbol.separator + "-"; Process process = runtime.exec(command); if(htmlFromString) { OutputStream stdInStream = process.getOutputStream(); stdInStream.write(htmlInput.getBytes()); stdInStream.close(); } InputStream stdOutStream = process.getInputStream(); InputStream stdErrStream = process.getErrorStream(); process.waitFor(); ByteArrayOutputStream stdOut = new ByteArrayOutputStream(); ByteArrayOutputStream stdErr = new ByteArrayOutputStream(); for(int i = 0; i < stdOutStream.available(); i++) { stdOut.write((char) stdOutStream.read()); } stdOutStream.close(); for(int i = 0; i < stdErrStream.available(); i++) { stdErr.write((char) stdErrStream.read()); } stdErrStream.close(); if(process.exitValue() != 0) { throw new RuntimeException("Process (" + command + ") exited with status code " + process.exitValue() + ":\n"+new String(stdErr.toByteArray())); } return stdOut.toByteArray(); } #location 30 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public byte[] getPDF() throws IOException, InterruptedException { Runtime runtime = Runtime.getRuntime(); Process process = runtime.exec(getCommandAsArray()); StreamEater outputStreamEater = new StreamEater(process.getInputStream()); outputStreamEater.start(); StreamEater errorStreamEater = new StreamEater(process.getErrorStream()); errorStreamEater.start(); outputStreamEater.join(); errorStreamEater.join(); process.waitFor(); if (process.exitValue() != 0) { throw new RuntimeException("Process (" + getCommand() + ") exited with status code " + process.exitValue() + ":\n" + new String(errorStreamEater.getBytes())); } if (outputStreamEater.getError() != null) { throw outputStreamEater.getError(); } if (errorStreamEater.getError() != null) { throw errorStreamEater.getError(); } return outputStreamEater.getBytes(); }
#vulnerable code public byte[] getPDF() throws IOException, InterruptedException { Runtime runtime = Runtime.getRuntime(); Process process = runtime.exec(getCommandAsArray()); for (Page page : pages) { if (page.getType().equals(PageType.htmlAsString)) { OutputStream stdInStream = process.getOutputStream(); stdInStream.write(page.getSource().getBytes("UTF-8")); stdInStream.close(); } } StreamEater outputStreamEater = new StreamEater(process.getInputStream()); outputStreamEater.start(); StreamEater errorStreamEater = new StreamEater(process.getErrorStream()); errorStreamEater.start(); outputStreamEater.join(); errorStreamEater.join(); process.waitFor(); if (process.exitValue() != 0) { throw new RuntimeException("Process (" + getCommand() + ") exited with status code " + process.exitValue() + ":\n" + new String(errorStreamEater.getBytes())); } if (outputStreamEater.getError() != null) { throw outputStreamEater.getError(); } if (errorStreamEater.getError() != null) { throw errorStreamEater.getError(); } return outputStreamEater.getBytes(); } #location 35 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
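The fixed code above drains stdout and stderr on dedicated threads, which is what prevents the child process from blocking when either pipe fills. A minimal stand-in for such a stream-eater thread (not the project's actual `StreamEater`):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Consumes one process stream on its own thread so stdout and stderr
    // never block each other.
    final class StreamDrainer extends Thread {
        private final InputStream in;
        private final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        private volatile IOException error;

        StreamDrainer(InputStream in) { this.in = in; }

        @Override public void run() {
            byte[] chunk = new byte[8192];
            int n;
            try {
                while ((n = in.read(chunk)) != -1) {
                    bytes.write(chunk, 0, n);
                }
            } catch (IOException e) {
                error = e; // surfaced to the owner after join()
            } finally {
                try { in.close(); } catch (IOException ignored) { }
            }
        }

        byte[] getBytes() { return bytes.toByteArray(); } // call after join()
        IOException getError() { return error; }          // call after join()
    }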
#fixed code @Test public void testFieldPopulator() { Album funeral = DataGenerator.funeral(); ResultTraverser traverser = new ResultTraverser(); populatorRegistry.register( new AlbumFieldPopulator() ); YogaRequestContext requestContext = new YogaRequestContext( "test", new GDataSelectorParser(), new DummyHttpServletRequest(), new DummyHttpServletResponse(), new HrefListener( this.populatorRegistry ) ); Map<String, Object> objectTree = doTraverse( funeral, ":", traverser, requestContext ); Assert.assertEquals( "/album/" + funeral.getId() + ".test", objectTree.get( "href" ) ); }
#vulnerable code @Test public void testFieldPopulator() { Album funeral = DataGenerator.funeral(); ResultTraverser traverser = new ResultTraverser(); populatorRegistry.register( new AlbumFieldPopulator() ); YogaRequestContext requestContext = new YogaRequestContext( "test", new DummyHttpServletRequest(), new DummyHttpServletResponse(), new HrefListener( this.populatorRegistry ) ); Map<String, Object> objectTree = doTraverse( funeral, ":", traverser, requestContext ); Assert.assertEquals( "/album/" + funeral.getId() + ".test", objectTree.get( "href" ) ); } #location 12 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
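This and several of the following test fixes pass a previously missing collaborator (here a `GDataSelectorParser`) into the context, so nothing downstream dereferences null. One way to make such omissions fail fast is to validate required constructor arguments; a sketch with hypothetical names:

    import java.util.Objects;

    final class RequestContext {
        private final Object selectorParser;

        // Require mandatory collaborators at construction time so a missing
        // parser surfaces here, not as a null dereference deep in traversal.
        RequestContext(Object selectorParser) {
            this.selectorParser =
                Objects.requireNonNull(selectorParser, "selectorParser");
        }
    }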
#fixed code @Test public void testSelectUnsupportedField() { Album chasingProphecy = DataGenerator.chasingProphecy(); ResultTraverser traverser = new ResultTraverser(); populatorRegistry.register( new AlbumFieldPopulator() ); Map<String,Object> objectTree = doTraverse( chasingProphecy, "id,title,year,artist", traverser ); Assert.assertEquals( 3, objectTree.size() ); Assert.assertEquals( chasingProphecy.getId(), objectTree.get( "id" ) ); Assert.assertEquals( chasingProphecy.getTitle(), objectTree.get( "title" ) ); Map<String,Object> eighthDay = (Map<String, Object>) objectTree.get( "artist" ); Assert.assertEquals( DataGenerator.eigthDay().getName(), eighthDay.get( "name" ) ); }
#vulnerable code @Test public void testSelectUnsupportedField() { Album chasingProphecy = DataGenerator.chasingProphecy(); ResultTraverser traverser = new ResultTraverser(); populatorRegistry.register( new AlbumFieldPopulator() ); Map<String,Object> objectTree = doTraverse( chasingProphecy, ":(id,title,year,artist)", traverser ); Assert.assertEquals( 3, objectTree.size() ); Assert.assertEquals( chasingProphecy.getId(), objectTree.get( "id" ) ); Assert.assertEquals( chasingProphecy.getTitle(), objectTree.get( "title" ) ); Map<String,Object> eighthDay = (Map<String, Object>) objectTree.get( "artist" ); Assert.assertEquals( DataGenerator.eigthDay().getName(), eighthDay.get( "name" ) ); } #location 9 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testComplexCoreFields() { User carter = DataGenerator.carter(); carter.getFavoriteArtists().add( DataGenerator.neutralMilkHotel() ); carter.getFavoriteArtists().add( DataGenerator.arcadeFire() ); ResultTraverser traverser = new ResultTraverser(); traverser.getFieldPopulatorRegistry().register( new UserFieldPopulatorWithArtistCoreField() ); Map<String, Object> objectTree = doTraverse( carter, ":", traverser, _simpleContext ); Assert.assertEquals( 3, objectTree.size() ); Assert.assertEquals( carter.getId(), objectTree.get( "id" ) ); Assert.assertEquals( carter.getName(), objectTree.get( "name" ) ); List<Map<String, Object>> favoriteArtists = getList( objectTree, "favoriteArtists" ); Assert.assertNotNull( favoriteArtists ); Assert.assertEquals( 2, favoriteArtists.size() ); Map<String, Object> neutralMap = findItem( favoriteArtists, "name", "Neutral Milk Hotel" ); Assert.assertEquals( DataGenerator.neutralMilkHotel().getId(), neutralMap.get( "id" ) ); Map<String, Object> arcadeMap = findItem( favoriteArtists, "name", "Arcade Fire" ); Assert.assertEquals( DataGenerator.arcadeFire().getId(), arcadeMap.get( "id" ) ); }
#vulnerable code @Test public void testComplexCoreFields() { User carter = DataGenerator.carter(); carter.getFavoriteArtists().add( DataGenerator.neutralMilkHotel() ); carter.getFavoriteArtists().add( DataGenerator.arcadeFire() ); ResultTraverser traverser = new ResultTraverser(); MapSelector selector = new MapSelector(); selector.register( User.class, "id", "favoriteArtists" ); Map<String, Object> objectTree = doTraverse( carter, traverser, _simpleContext, new CompositeSelector( selector, new CoreSelector() ) ); Assert.assertTrue( objectTree.size() >= 2 ); Assert.assertEquals( carter.getId(), objectTree.get( "id" ) ); Assert.assertEquals( carter.getName(), objectTree.get( "name" ) ); List<Map<String, Object>> favoriteArtists = getList( objectTree, "favoriteArtists" ); Assert.assertNotNull( favoriteArtists ); Assert.assertEquals( 2, favoriteArtists.size() ); Map<String, Object> neutralMap = findItem( favoriteArtists, "name", "Neutral Milk Hotel" ); Assert.assertEquals( DataGenerator.neutralMilkHotel().getId(), neutralMap.get( "id" ) ); Map<String, Object> arcadeMap = findItem( favoriteArtists, "name", "Arcade Fire" ); Assert.assertEquals( DataGenerator.arcadeFire().getId(), arcadeMap.get( "id" ) ); } #location 20 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @SuppressWarnings("unchecked") @Test // Add the MetadataLinkListener to the listener chain. The output will render an href to view the metadata // for the album object. public void testMetadataHref() { String prefixUrl = "/metadata/"; String fileExtension = "test"; Album signOfTheTimes = DataGenerator.signOfTheTimes(); DefaultMetaDataRegistry service = new DefaultMetaDataRegistry(); service.setRootMetaDataUrl( prefixUrl ); service.setCoreSelector( new CoreSelector( populatorRegistry ) ); Map<String,Class<?>> typeMappings = new HashMap<String, Class<?>>(); typeMappings.put( "album", Album.class ); service.setTypeMappings( typeMappings ); MetadataLinkListener metadataLinkListener = new MetadataLinkListener(); metadataLinkListener.setMetaDataRegistry( service ); ResultTraverser traverser = new ResultTraverser(); YogaRequestContext requestContext = new YogaRequestContext( fileExtension, new GDataSelectorParser(), new DummyHttpServletRequest(), new DummyHttpServletResponse(), metadataLinkListener ); Map<String, Object> objectTree = doTraverse( signOfTheTimes, "", traverser, requestContext ); Map<String,String> metadataMap = (Map<String,String>) objectTree.get( "metadata" ); String metadataHref = prefixUrl + "album." + fileExtension; Assert.assertEquals( metadataHref, metadataMap.get( "href" ) ); }
#vulnerable code @SuppressWarnings("unchecked") @Test // Add the MetadataLinkListener to the listener chain. The output will render an href to view the metadata // for the album object. public void testMetadataHref() { String prefixUrl = "/metadata/"; String fileExtension = "test"; Album signOfTheTimes = DataGenerator.signOfTheTimes(); DefaultMetaDataRegistry service = new DefaultMetaDataRegistry(); service.setRootMetaDataUrl( prefixUrl ); service.setCoreSelector( new CoreSelector( populatorRegistry ) ); Map<String,Class<?>> typeMappings = new HashMap<String, Class<?>>(); typeMappings.put( "album", Album.class ); service.setTypeMappings( typeMappings ); MetadataLinkListener metadataLinkListener = new MetadataLinkListener(); metadataLinkListener.setMetaDataRegistry( service ); ResultTraverser traverser = new ResultTraverser(); YogaRequestContext requestContext = new YogaRequestContext( fileExtension, new DummyHttpServletRequest(), new DummyHttpServletResponse(), metadataLinkListener ); Map<String, Object> objectTree = doTraverse( signOfTheTimes, ":", traverser, requestContext ); Map<String,String> metadataMap = (Map<String,String>) objectTree.get( "metadata" ); String metadataHref = prefixUrl + "album." + fileExtension; Assert.assertEquals( metadataHref, metadataMap.get( "href" ) ); } #location 27 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public void init() { try { init( Thread.currentThread().getContextClassLoader().getResourceAsStream( "sampledb.sql" ) ); } catch ( Exception e ) { throw new RuntimeException( e ); } // this doesn't seem to work yet. It reads a partial line and throws up // try // { // init( new GZIPInputStream( new URL( remoteData ).openStream() ) ); // } // catch ( Exception e ) // { // e.printStackTrace(); // } }
#vulnerable code public void init() throws IOException { InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( "sampledb.sql" ); BufferedReader reader = new BufferedReader( new InputStreamReader( is ) ); String line = null; while ( ( line = reader.readLine() ) != null ) { String type = line.replaceFirst( "INSERT INTO ([^(]+).*", "$1" ); String[] values = line.replaceFirst( ".*VALUES\\((.*)\\)", "$1" ).split( ", " ); if ("User".equalsIgnoreCase( type )) { newUser( toLong( values[ 0 ] ), toStr( values[ 1 ] ) ); } else if ("Friend".equalsIgnoreCase( type )) { newFriend( toLong( values[ 0 ] ), toLong( values[ 1 ] ) ); } else if ("Artist".equalsIgnoreCase( type )) { newArtist( toLong( values[ 0 ] ), toStr( values[ 1 ] ) ); } else if ("Fan".equalsIgnoreCase( type )) { newFan( toLong( values[ 0 ] ), toLong( values[ 1 ] ) ); } else if ("Album".equalsIgnoreCase( type )) { newAlbum( toLong( values[ 0 ] ), toStr( values[ 1 ] ), toLong( values[ 2 ] ), new Integer( values[ 3 ].trim() ) ); } else if ("Song".equalsIgnoreCase( type )) { newSong( toLong( values[ 0 ] ), toStr( values[ 1 ] ), toLong( values[ 2 ] ), toLong( values[ 3 ] ) ); } } is.close(); } #location 13 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
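The vulnerable `init` above closes only the raw `InputStream`, and only on the happy path, so the reader chain leaks on any exception. A sketch of the same resource-reading loop with try-with-resources (the charset and helper name are illustrative):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.function.Consumer;

    final class ResourceLines {
        // Stream a classpath resource line by line; try-with-resources closes
        // the whole reader chain on every exit path, and the null check avoids
        // an NPE when the resource is missing.
        static void forEachLine(String resource, Consumer<String> fn)
                throws IOException {
            InputStream is = Thread.currentThread()
                    .getContextClassLoader().getResourceAsStream(resource);
            if (is == null) {
                throw new IOException("resource not found: " + resource);
            }
            try (BufferedReader reader = new BufferedReader(
                     new InputStreamReader(is, StandardCharsets.UTF_8))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    fn.accept(line);
                }
            }
        }
    }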
#fixed code @Test public void testAnnotatedModel() { User solomon = DataGenerator.solomon(); ResultTraverser traverser = new ResultTraverser(); YogaRequestContext requestContext = new YogaRequestContext( "test", new GDataSelectorParser(), new DummyHttpServletRequest(), new DummyHttpServletResponse(), new HrefListener() ); Map<String, Object> objectTree = doTraverse( solomon, ":", traverser, requestContext ); Assert.assertEquals( "/user/" + solomon.getId() + ".test", objectTree.get( "href" ) ); }
#vulnerable code @Test public void testAnnotatedModel() { User solomon = DataGenerator.solomon(); ResultTraverser traverser = new ResultTraverser(); YogaRequestContext requestContext = new YogaRequestContext( "test", new DummyHttpServletRequest(), new DummyHttpServletResponse(), new HrefListener() ); Map<String, Object> objectTree = doTraverse( solomon, ":", traverser, requestContext ); Assert.assertEquals( "/user/" + solomon.getId() + ".test", objectTree.get( "href" ) ); } #location 11 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testView() throws IOException { store.setAttribute("owner:owner", createUserPrincipal("user")); PosixFileAttributeView view = provider.getView(attributeStoreSupplier()); assertNotNull(view); ASSERT.that(view.name()).is("posix"); ASSERT.that(view.getOwner()).is(createUserPrincipal("user")); PosixFileAttributes attrs = view.readAttributes(); ASSERT.that(attrs.fileKey()).is(0L); ASSERT.that(attrs.owner()).is(createUserPrincipal("user")); ASSERT.that(attrs.group()).is(createGroupPrincipal("group")); ASSERT.that(attrs.permissions()).is(PosixFilePermissions.fromString("rw-r--r--")); view.setOwner(createUserPrincipal("root")); ASSERT.that(view.getOwner()).is(createUserPrincipal("root")); ASSERT.that(store.getAttribute("owner:owner")).is(createUserPrincipal("root")); view.setGroup(createGroupPrincipal("root")); ASSERT.that(view.readAttributes().group()).is(createGroupPrincipal("root")); ASSERT.that(store.getAttribute("posix:group")).is(createGroupPrincipal("root")); view.setPermissions(PosixFilePermissions.fromString("rwx------")); ASSERT.that(view.readAttributes().permissions()) .is(PosixFilePermissions.fromString("rwx------")); ASSERT.that(store.getAttribute("posix:permissions")) .is(PosixFilePermissions.fromString("rwx------")); }
#vulnerable code @Test public void testView() throws IOException { PosixFileAttributeView view = service.getFileAttributeView( fileSupplier(), PosixFileAttributeView.class); assertNotNull(view); ASSERT.that(view.name()).is("posix"); ASSERT.that(view.getOwner()).is(createUserPrincipal("user")); PosixFileAttributes attrs = view.readAttributes(); ASSERT.that(attrs.fileKey()).is(0L); ASSERT.that(attrs.owner()).is(createUserPrincipal("user")); ASSERT.that(attrs.group()).is(createGroupPrincipal("group")); ASSERT.that(attrs.permissions()).is(PosixFilePermissions.fromString("rw-r--r--")); FileTime time = FileTime.fromMillis(0L); view.setTimes(time, time, time); assertContainsAll(file, ImmutableMap.<String, Object>of( "posix:creationTime", time, "posix:lastAccessTime", time, "posix:lastModifiedTime", time)); view.setOwner(createUserPrincipal("root")); ASSERT.that(view.getOwner()).is(createUserPrincipal("root")); ASSERT.that(file.getAttribute("owner:owner")).is(createUserPrincipal("root")); view.setGroup(createGroupPrincipal("root")); ASSERT.that(view.readAttributes().group()).is(createGroupPrincipal("root")); ASSERT.that(file.getAttribute("posix:group")).is(createGroupPrincipal("root")); view.setPermissions(PosixFilePermissions.fromString("rwx------")); ASSERT.that(view.readAttributes().permissions()) .is(PosixFilePermissions.fromString("rwx------")); ASSERT.that(file.getAttribute("posix:permissions")) .is(PosixFilePermissions.fromString("rwx------")); } #location 7 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testView() throws IOException { AclFileAttributeView view = provider.getView(attributeStoreSupplier()); assertNotNull(view); ASSERT.that(view.name()).is("acl"); ASSERT.that(view.getAcl()).is(defaultAcl); view.setAcl(ImmutableList.<AclEntry>of()); view.setOwner(FOO); ASSERT.that(view.getAcl()).is(ImmutableList.<AclEntry>of()); ASSERT.that(view.getOwner()).is(FOO); ASSERT.that(store.getAttribute("acl:acl")).is(ImmutableList.<AclEntry>of()); }
#vulnerable code @Test public void testView() throws IOException { AclFileAttributeView view = service.getFileAttributeView(fileSupplier(), AclFileAttributeView.class); assertNotNull(view); ASSERT.that(view.name()).is("acl"); ASSERT.that(view.getAcl()).is(defaultAcl); ASSERT.that(view.getOwner()).is(USER); view.setAcl(ImmutableList.<AclEntry>of()); view.setOwner(FOO); ASSERT.that(view.getAcl()).is(ImmutableList.<AclEntry>of()); ASSERT.that(view.getOwner()).is(FOO); ASSERT.that(file.getAttribute("acl:acl")).is(ImmutableList.<AclEntry>of()); } #location 7 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testClosedChannel() throws Throwable { RegularFile file = regularFile(15); ExecutorService executor = Executors.newSingleThreadExecutor(); try { JimfsAsynchronousFileChannel channel = channel(file, executor, READ, WRITE); channel.close(); assertClosed(channel.read(ByteBuffer.allocate(10), 0)); assertClosed(channel.write(ByteBuffer.allocate(10), 15)); assertClosed(channel.lock()); assertClosed(channel.lock(0, 10, true)); } finally { executor.shutdown(); } }
#vulnerable code @Test public void testClosedChannel() throws IOException, InterruptedException { RegularFile file = regularFile(15); ExecutorService executor = Executors.newSingleThreadExecutor(); try { JimfsAsynchronousFileChannel channel = channel(file, executor, READ, WRITE); channel.close(); assertClosed(channel.read(ByteBuffer.allocate(10), 0)); assertClosed(channel.write(ByteBuffer.allocate(10), 15)); assertClosed(channel.lock()); assertClosed(channel.lock(0, 10, true)); } finally { executor.shutdown(); } } #location 3 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testView() throws IOException { UserDefinedFileAttributeView view = provider.getView(attributeStoreSupplier()); assertNotNull(view); ASSERT.that(view.name()).is("user"); ASSERT.that(view.list()).isEmpty(); byte[] b1 = {0, 1, 2}; byte[] b2 = {0, 1, 2, 3, 4}; view.write("b1", ByteBuffer.wrap(b1)); view.write("b2", ByteBuffer.wrap(b2)); ASSERT.that(view.list()).has().allOf("b1", "b2"); ASSERT.that(store.getAttributeKeys()).has().exactly("user:b1", "user:b2"); ASSERT.that(view.size("b1")).is(3); ASSERT.that(view.size("b2")).is(5); ByteBuffer buf1 = ByteBuffer.allocate(view.size("b1")); ByteBuffer buf2 = ByteBuffer.allocate(view.size("b2")); view.read("b1", buf1); view.read("b2", buf2); ASSERT.that(Arrays.equals(b1, buf1.array())).isTrue(); ASSERT.that(Arrays.equals(b2, buf2.array())).isTrue(); view.delete("b2"); ASSERT.that(view.list()).has().exactly("b1"); ASSERT.that(store.getAttributeKeys()).has().exactly("user:b1"); try { view.size("b2"); fail(); } catch (IllegalArgumentException expected) { ASSERT.that(expected.getMessage()).contains("not set"); } try { view.read("b2", ByteBuffer.allocate(10)); fail(); } catch (IllegalArgumentException expected) { ASSERT.that(expected.getMessage()).contains("not set"); } view.write("b1", ByteBuffer.wrap(b2)); ASSERT.that(view.size("b1")).is(5); view.delete("b2"); // succeeds }
#vulnerable code @Test public void testView() throws IOException { UserDefinedFileAttributeView view = service.getFileAttributeView(fileSupplier(), UserDefinedFileAttributeView.class); assertNotNull(view); ASSERT.that(view.name()).is("user"); ASSERT.that(view.list()).isEmpty(); byte[] b1 = {0, 1, 2}; byte[] b2 = {0, 1, 2, 3, 4}; view.write("b1", ByteBuffer.wrap(b1)); view.write("b2", ByteBuffer.wrap(b2)); ASSERT.that(view.list()).has().allOf("b1", "b2"); ASSERT.that(service.readAttributes(file, "user:*").keySet()) .has().allOf("b1", "b2"); ASSERT.that(view.size("b1")).is(3); ASSERT.that(view.size("b2")).is(5); ByteBuffer buf1 = ByteBuffer.allocate(view.size("b1")); ByteBuffer buf2 = ByteBuffer.allocate(view.size("b2")); view.read("b1", buf1); view.read("b2", buf2); ASSERT.that(Arrays.equals(b1, buf1.array())).isTrue(); ASSERT.that(Arrays.equals(b2, buf2.array())).isTrue(); view.delete("b2"); ASSERT.that(view.list()).has().exactly("b1"); ASSERT.that(service.readAttributes(file, "user:*").keySet()) .has().exactly("b1"); try { view.size("b2"); fail(); } catch (IllegalArgumentException expected) { ASSERT.that(expected.getMessage()).contains("not set"); } try { view.read("b2", ByteBuffer.allocate(10)); fail(); } catch (IllegalArgumentException expected) { ASSERT.that(expected.getMessage()).contains("not set"); } view.write("b1", ByteBuffer.wrap(b2)); ASSERT.that(view.size("b1")).is(5); view.delete("b2"); // succeeds } #location 7 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public static FileSystem newFileSystem(String name) { return newFileSystem(name, Configuration.forCurrentPlatform()); }
#vulnerable code public static FileSystem newFileSystem(String name) { String os = System.getProperty("os.name"); Configuration config; if (os.contains("Windows")) { config = Configuration.windows(); } else if (os.contains("OS X")) { config = Configuration.osX(); } else { config = Configuration.unix(); } return newFileSystem(name, config); } #location 5 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
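The NULL_DEREFERENCE flagged above is the unguarded `System.getProperty("os.name")`, which is nullable in principle; the fix delegates to a platform-aware factory. If the detection were kept inline, a default value keeps the `contains` chain safe. A sketch (the extra "Mac" check is an illustrative addition):

    final class PlatformDetect {
        enum Platform { WINDOWS, OSX, UNIX }

        // getProperty can return null; defaulting to "" keeps the contains()
        // chain safe and falls through to the UNIX default.
        static Platform current() {
            String os = System.getProperty("os.name", "");
            if (os.contains("Windows")) {
                return Platform.WINDOWS;
            } else if (os.contains("OS X") || os.contains("Mac")) {
                return Platform.OSX;
            }
            return Platform.UNIX;
        }
    }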
#fixed code @Test public void testAsynchronousClose() throws Exception { RegularFile file = regularFile(10); final FileChannel channel = channel(file, READ, WRITE); file.writeLock().lock(); // ensure all operations on the channel will block ExecutorService executor = Executors.newCachedThreadPool(); CountDownLatch latch = new CountDownLatch(BLOCKING_OP_COUNT); List<Future<?>> futures = queueAllBlockingOperations(channel, executor, latch); // wait for all the threads to have started running latch.await(); // then ensure time for operations to start blocking Uninterruptibles.sleepUninterruptibly(20, MILLISECONDS); // close channel on this thread channel.close(); // the blocking operations are running on different threads, so they all get // AsynchronousCloseException for (Future<?> future : futures) { try { future.get(); fail(); } catch (ExecutionException expected) { assertThat(expected.getCause()).named("blocking thread exception") .isA(AsynchronousCloseException.class); } } }
#vulnerable code @Test public void testAsynchronousClose() throws IOException, InterruptedException { RegularFile file = regularFile(10); final FileChannel channel = channel(file, READ, WRITE); file.writeLock().lock(); // ensure all operations on the channel will block ExecutorService executor = Executors.newCachedThreadPool(); List<Future<?>> futures = queueAllBlockingOperations(channel, executor); // ensure time for operations to start blocking Uninterruptibles.sleepUninterruptibly(10, MILLISECONDS); channel.close(); for (Future<?> future : futures) { try { future.get(); fail(); } catch (ExecutionException expected) { assertTrue(expected.getCause() instanceof AsynchronousCloseException); } } } #location 3 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
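The fixed test replaces a bare sleep with a `CountDownLatch`, so the channel is only closed after every worker has demonstrably started; a fixed sleep alone races with thread startup. A generic sketch of the pattern (names hypothetical):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    final class LatchedStart {
        // Start N workers and return only once every one of them has reached
        // its blocking call site; far less flaky than a fixed sleep.
        static void runAll(int n, Runnable blockingOp) throws InterruptedException {
            CountDownLatch started = new CountDownLatch(n);
            ExecutorService pool = Executors.newCachedThreadPool();
            try {
                for (int i = 0; i < n; i++) {
                    pool.execute(() -> {
                        started.countDown(); // signal "about to block"
                        blockingOp.run();
                    });
                }
                started.await(); // all workers running before the caller proceeds
            } finally {
                pool.shutdown();
            }
        }
    }

The fixed test still sleeps briefly after `await()`, bridging the gap between "counted down" and "actually blocked inside the call".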
#fixed code @Override public WatchService newWatchService() throws IOException { return watchServiceConfig.newWatchService(defaultView, pathService); }
#vulnerable code @Override public WatchService newWatchService() throws IOException { return new PollingWatchService(defaultView, pathService, fileStore.state()); } #location 3 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public JimfsPath toRealPath( JimfsPath path, PathService pathService, Set<? super LinkOption> options) throws IOException { checkNotNull(path); checkNotNull(options); store.readLock().lock(); try { DirectoryEntry entry = lookUp(path, options) .requireExists(path); List<Name> names = new ArrayList<>(); names.add(entry.name()); while (!entry.file().isRootDirectory()) { entry = entry.directory().entryInParent(); names.add(entry.name()); } // names are ordered last to first in the list, so get the reverse view List<Name> reversed = Lists.reverse(names); Name root = reversed.remove(0); return pathService.createPath(root, reversed); } finally { store.readLock().unlock(); } }
#vulnerable code public JimfsPath toRealPath( JimfsPath path, PathService pathService, Set<? super LinkOption> options) throws IOException { checkNotNull(path); checkNotNull(options); store.readLock().lock(); try { DirectoryEntry entry = lookUp(path, options) .requireExists(path); List<Name> names = new ArrayList<>(); names.add(entry.name()); while (!entry.file().isRootDirectory()) { // entryInParent(), though @Nullable, won't return null here. The only way to get a null // entry is to look up a file relative to a SecureDirectoryStream that is open against a // deleted directory. toRealPath doesn't do this: it looks up a file relative to a Path, // not a SecureDirectoryStream. entry = entry.directory().entryInParent(); names.add(entry.name()); } // names are ordered last to first in the list, so get the reverse view List<Name> reversed = Lists.reverse(names); Name root = reversed.remove(0); return pathService.createPath(root, reversed); } finally { store.readLock().unlock(); } } #location 12 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testCloseByInterrupt() throws Exception { RegularFile file = regularFile(10); final FileChannel channel = channel(file, READ, WRITE); file.writeLock().lock(); // ensure all operations on the channel will block ExecutorService executor = Executors.newCachedThreadPool(); final CountDownLatch threadStartLatch = new CountDownLatch(1); final SettableFuture<Throwable> interruptException = SettableFuture.create(); // This thread, being the first to run, will be blocking on the interruptible lock (the byte // file's write lock) and as such will be interrupted properly... the other threads will be // blocked on the lock that guards the position field and the specification that only one method // on the channel will be in progress at a time. That lock is not interruptible, so we must // interrupt this thread. Thread thread = new Thread(new Runnable() { @Override public void run() { threadStartLatch.countDown(); try { channel.write(ByteBuffer.allocate(20)); interruptException.set(null); } catch (Throwable e) { interruptException.set(e); } } }); thread.start(); // let the thread start running threadStartLatch.await(); // then ensure time for thread to start blocking on the write lock Uninterruptibles.sleepUninterruptibly(10, MILLISECONDS); CountDownLatch blockingStartLatch = new CountDownLatch(BLOCKING_OP_COUNT); List<Future<?>> futures = queueAllBlockingOperations(channel, executor, blockingStartLatch); // wait for all blocking threads to start blockingStartLatch.await(); // then ensure time for the operations to start blocking Uninterruptibles.sleepUninterruptibly(20, MILLISECONDS); // interrupting this blocking thread closes the channel and makes all the other threads // throw AsynchronousCloseException... the operation on this thread should throw // ClosedByInterruptException thread.interrupt(); // get the exception that caused the interrupted operation to terminate assertThat(interruptException.get(200, MILLISECONDS)) .named("interrupted thread exception") .isA(ClosedByInterruptException.class); // check that each other thread got AsynchronousCloseException (since the interrupt, on a // different thread, closed the channel) for (Future<?> future : futures) { try { future.get(); fail(); } catch (ExecutionException expected) { assertThat(expected.getCause()).named("blocking thread exception") .isA(AsynchronousCloseException.class); } } }
#vulnerable code @Test public void testCloseByInterrupt() throws IOException, InterruptedException { RegularFile file = regularFile(10); final FileChannel channel = channel(file, READ, WRITE); file.writeLock().lock(); // ensure all operations on the channel will block ExecutorService executor = Executors.newCachedThreadPool(); final CountDownLatch latch = new CountDownLatch(1); final AtomicReference<Throwable> interruptException = new AtomicReference<>(); // This thread, being the first to run, will be blocking on the interruptible lock (the byte // file's write lock) and as such will be interrupted properly... the other threads will be // blocked on the lock that guards the position field and the specification that only one method // on the channel will be in progress at a time. That lock is not interruptible, so we must // interrupt this thread. Thread thread = new Thread(new Runnable() { @Override public void run() { try { channel.write(ByteBuffer.allocate(20)); latch.countDown(); } catch (Throwable e) { interruptException.set(e); latch.countDown(); } } }); thread.start(); // ensure time for thread to start blocking on the write lock Uninterruptibles.sleepUninterruptibly(5, MILLISECONDS); List<Future<?>> futures = queueAllBlockingOperations(channel, executor); // ensure time for operations to start blocking Uninterruptibles.sleepUninterruptibly(10, MILLISECONDS); // interrupting this blocking thread closes the channel and makes all the other threads // throw AsynchronousCloseException... the operation on this thread should throw // ClosedByInterruptException thread.interrupt(); latch.await(); assertTrue(interruptException.get() instanceof ClosedByInterruptException); for (Future<?> future : futures) { try { future.get(); fail(); } catch (ExecutionException expected) { assertTrue(expected.getCause() instanceof AsynchronousCloseException); } } } #location 3 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code @Test public void testView() throws IOException { DosFileAttributeView view = provider.getView(attributeStoreSupplier()); assertNotNull(view); ASSERT.that(view.name()).is("dos"); DosFileAttributes attrs = view.readAttributes(); ASSERT.that(attrs.isHidden()).isFalse(); ASSERT.that(attrs.isArchive()).isFalse(); ASSERT.that(attrs.isReadOnly()).isFalse(); ASSERT.that(attrs.isSystem()).isFalse(); view.setArchive(true); view.setReadOnly(true); view.setHidden(true); view.setSystem(false); ASSERT.that(attrs.isHidden()).isFalse(); ASSERT.that(attrs.isArchive()).isFalse(); ASSERT.that(attrs.isReadOnly()).isFalse(); attrs = view.readAttributes(); ASSERT.that(attrs.isHidden()).isTrue(); ASSERT.that(attrs.isArchive()).isTrue(); ASSERT.that(attrs.isReadOnly()).isTrue(); ASSERT.that(attrs.isSystem()).isFalse(); view.setTimes(FileTime.fromMillis(0L), null, null); ASSERT.that(view.readAttributes().lastModifiedTime()) .is(FileTime.fromMillis(0L)); }
#vulnerable code @Test public void testView() throws IOException { DosFileAttributeView view = service.getFileAttributeView(fileSupplier(), DosFileAttributeView.class); assertNotNull(view); ASSERT.that(view.name()).is("dos"); DosFileAttributes attrs = view.readAttributes(); ASSERT.that(attrs.isHidden()).isFalse(); ASSERT.that(attrs.isArchive()).isFalse(); ASSERT.that(attrs.isReadOnly()).isFalse(); ASSERT.that(attrs.isSystem()).isFalse(); view.setArchive(true); view.setReadOnly(true); view.setHidden(true); view.setSystem(false); ASSERT.that(attrs.isHidden()).isFalse(); ASSERT.that(attrs.isArchive()).isFalse(); ASSERT.that(attrs.isReadOnly()).isFalse(); attrs = view.readAttributes(); ASSERT.that(attrs.isHidden()).isTrue(); ASSERT.that(attrs.isArchive()).isTrue(); ASSERT.that(attrs.isReadOnly()).isTrue(); ASSERT.that(attrs.isSystem()).isFalse(); view.setTimes(FileTime.fromMillis(0L), null, null); ASSERT.that(view.readAttributes().lastModifiedTime()) .is(FileTime.fromMillis(0L)); } #location 7 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public static void main(String[] args) { loadProperties(); int concurrents = Integer.parseInt(properties.getProperty("concurrents")); int runtime = Integer.parseInt(properties.getProperty("runtime")); String classname = properties.getProperty("classname"); String params = properties.getProperty("params"); isMultiClient = Boolean.parseBoolean(properties.getProperty("isMultiClient")); if (args.length == 5) { concurrents = Integer.parseInt(args[0]); runtime = Integer.parseInt(args[1]); classname = args[2]; params = args[3]; isMultiClient = Boolean.parseBoolean(args[4]); } ApplicationContext applicationContext = new ClassPathXmlApplicationContext( new String[]{"classpath*:motan-benchmark-client.xml"}); benchmarkService = (BenchmarkService) applicationContext.getBean("motanBenchmarkReferer"); new MotanBenchmarkClient().start(concurrents, runtime, classname, params); }
#vulnerable code public static void main(String[] args) { int concurrents = Integer.parseInt(properties.getProperty("concurrents")); int runtime = Integer.parseInt(properties.getProperty("runtime")); String classname = properties.getProperty("classname"); String params = properties.getProperty("params"); isMultiClient = Boolean.parseBoolean(properties.getProperty("isMultiClient")); if (args.length == 5) { concurrents = Integer.parseInt(args[0]); runtime = Integer.parseInt(args[1]); classname = args[2]; params = args[3]; isMultiClient = Boolean.parseBoolean(args[4]); } ApplicationContext applicationContext = new ClassPathXmlApplicationContext(new String[]{"classpath*:motan-benchmark-client.xml"}); benchmarkService = (BenchmarkService) applicationContext.getBean("motanBenchmarkReferer"); new MotanBenchmarkClient().start(concurrents, runtime, classname, params); } #location 16 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
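The leak above is the never-closed `ClassPathXmlApplicationContext`. In Spring 4+ the context is `Closeable`, so short-lived uses fit try-with-resources; the XML location and bean name below are taken from the record, the rest is a sketch:

    import org.springframework.context.support.ClassPathXmlApplicationContext;

    final class ClientBootstrap {
        static void run(String[] args) {
            try (ClassPathXmlApplicationContext ctx =
                     new ClassPathXmlApplicationContext(
                         "classpath*:motan-benchmark-client.xml")) {
                Object referer = ctx.getBean("motanBenchmarkReferer");
                // ... drive the benchmark with the referer ...
            } // ctx.close() destroys singletons and releases resources
        }
    }

For a context that must live as long as the JVM, `ctx.registerShutdownHook()` is the usual alternative to an explicit close.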
#fixed code private void register(List<URL> registryUrls, URL serviceUrl) { for (URL url : registryUrls) { // 根据check参数的设置,register失败可能会抛异常,上层应该知晓 RegistryFactory registryFactory = ExtensionLoader.getExtensionLoader(RegistryFactory.class).getExtension(url.getProtocol()); if (registryFactory == null) { throw new MotanFrameworkException(new MotanErrorMsg(500, MotanErrorMsgConstant.FRAMEWORK_REGISTER_ERROR_CODE, "register error! Could not find extension for registry protocol:" + url.getProtocol() + ", make sure registry module for " + url.getProtocol() + " is in classpath!")); } Registry registry = registryFactory.getRegistry(url); registry.register(serviceUrl); } }
#vulnerable code private void register(List<URL> registryUrls, URL serviceUrl) { for (URL url : registryUrls) { // 根据check参数的设置,register失败可能会抛异常,上层应该知晓 RegistryFactory registryFactory = ExtensionLoader.getExtensionLoader(RegistryFactory.class).getExtension(url.getProtocol()); Registry registry = registryFactory.getRegistry(url); registry.register(serviceUrl); } } #location 5 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
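The fix above turns a silent null from the extension lookup into a descriptive exception. A generic stand-in for that guard (the record's code throws its own `MotanFrameworkException`; `Objects.requireNonNull` is just the lightweight equivalent):

    import java.util.Objects;

    final class Lookups {
        // Fail fast with a message that names the missing extension instead of
        // letting the caller hit an anonymous NullPointerException later.
        static <T> T requireExtension(T extension, String protocol) {
            return Objects.requireNonNull(extension,
                "no registry extension for protocol '" + protocol
                    + "'; is the module for it on the classpath?");
        }
    }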
#fixed code @Before public void setup() throws DBException { orientDBClient = new OrientDBClient(); Properties p = new Properties(); // TODO: Extract the property names into final variables in OrientDBClient p.setProperty("orientdb.url", TEST_DB_URL); orientDBClient.setProperties(p); orientDBClient.init(); }
#vulnerable code @Before public void setup() throws DBException { orientDBClient = new OrientDBClient(); Properties p = new Properties(); // TODO: Extract the property names into final variables in OrientDBClient p.setProperty("orientdb.url", TEST_DB_URL); orientDBClient.setProperties(p); orientDBClient.init(); orientDBDictionary = orientDBClient.getDB().getDictionary(); } #location 11 #vulnerability type NULL_DEREFERENCE
Above is the vulnerable code; please generate the patch based on the information provided.
#fixed code public void cleanup() throws DBException { // Get the measurements instance as this is the only client that should // count clean up time like an update since autoflush is off. Measurements _measurements = Measurements.getMeasurements(); try { long st=System.nanoTime(); if (_hTable != null) { _hTable.flushCommits(); } synchronized(THREAD_COUNT) { int threadCount = THREAD_COUNT.decrementAndGet(); if (threadCount <= 0 && _hConn != null) { _hConn.close(); } } long en=System.nanoTime(); _measurements.measure("UPDATE", (int)((en-st)/1000)); } catch (IOException e) { throw new DBException(e); } }
#vulnerable code public void cleanup() throws DBException { // Get the measurements instance as this is the only client that should // count clean up time like an update since autoflush is off. Measurements _measurements = Measurements.getMeasurements(); try { long st=System.nanoTime(); if (_hTable != null) { _hTable.flushCommits(); } if (_hConn != null) { _hConn.close(); } long en=System.nanoTime(); _measurements.measure("UPDATE", (int)((en-st)/1000)); } catch (IOException e) { throw new DBException(e); } } #location 9 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
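The fixed `cleanup` above closes the shared connection only when the last thread releases it, doing the decrement and the close under one lock. A sketch of the same idea; since every access is under the lock, a plain `int` counter suffices where the record combines `synchronized` with an `AtomicInteger`:

    import java.io.Closeable;
    import java.io.IOException;

    final class SharedConnection {
        private static int users;            // guarded by SharedConnection.class
        private static Closeable connection; // guarded by SharedConnection.class

        static void acquire(Closeable conn) {
            synchronized (SharedConnection.class) {
                if (connection == null) {
                    connection = conn;
                }
                users++;
            }
        }

        // The decrement and the close happen under one lock, so the "last user
        // closes" decision cannot race with a concurrent acquire or release.
        static void release() throws IOException {
            synchronized (SharedConnection.class) {
                if (--users <= 0 && connection != null) {
                    connection.close();
                    connection = null;
                }
            }
        }
    }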
#fixed code @Override public int read(String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) { try { MongoCollection<Document> collection = database .getCollection(table); Document query = new Document("_id", key); FindIterable<Document> findIterable = collection.find(query); Document queryResult = null; if (fields != null) { Document projection = new Document(); for (String field : fields) { projection.put(field, INCLUDE); } findIterable.projection(projection); } queryResult = findIterable.first(); if (queryResult != null) { fillMap(result, queryResult); } return queryResult != null ? 0 : 1; } catch (Exception e) { System.err.println(e.toString()); return 1; } }
#vulnerable code @Override public int read(String table, String key, Set<String> fields, HashMap<String, ByteIterator> result) { try { MongoCollection<Document> collection = database .getCollection(table); Document query = new Document("_id", key); FindIterable<Document> findIterable = collection .withReadPreference(readPreference) .find(query); Document queryResult = null; if (fields != null) { Document projection = new Document(); for (String field : fields) { projection.put(field, INCLUDE); } findIterable.projection(projection); } queryResult = findIterable.first(); if (queryResult != null) { fillMap(result, queryResult); } return queryResult != null ? 0 : 1; } catch (Exception e) { System.err.println(e.toString()); return 1; } } #location 10 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
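The fix above stops re-deriving a read-preference-bound collection on every call from a mutable field. The usual alternative is to bind the preference once, on a handle created during init; a sketch against the MongoDB 3.x driver API (the preference chosen is illustrative):

    import com.mongodb.MongoClient;
    import com.mongodb.ReadPreference;
    import com.mongodb.client.MongoDatabase;

    final class MongoSetup {
        // Configure the read preference once, on an immutable handle created
        // during init; per-call reads of a mutable field were what the
        // analyzer flagged.
        static MongoDatabase open(MongoClient client, String dbName) {
            return client.getDatabase(dbName)
                         .withReadPreference(ReadPreference.secondaryPreferred());
        }
    }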
#fixed code public static RemoteCacheManager getInstance(Properties props){ RemoteCacheManager result = cacheManager; if(result == null){ synchronized (RemoteCacheManagerHolder.class) { result = cacheManager; if (result == null) { cacheManager = result = new RemoteCacheManager(props); } } } return result; }
#vulnerable code public static RemoteCacheManager getInstance(Properties props){ if(cacheManager == null){ synchronized (RemoteCacheManager.class) { cacheManager = new RemoteCacheManager(props); } } return cacheManager; } #location 7 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
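The fixed `getInstance` above is the double-checked locking idiom with a local variable. Note that the idiom is only correct under the Java memory model when the field is declared `volatile`, which the excerpt does not show; a complete sketch:

    final class Managers {
        // volatile is what makes double-checked locking safe; the local
        // variable just saves one volatile read on the fast path.
        private static volatile Object instance;

        static Object getInstance() {
            Object result = instance;          // first (unsynchronized) check
            if (result == null) {
                synchronized (Managers.class) {
                    result = instance;         // second check, under the lock
                    if (result == null) {
                        instance = result = new Object(); // placeholder construction
                    }
                }
            }
            return result;
        }
    }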
#fixed code public void init() throws DBException { if ( (getProperties().getProperty("debug")!=null) && (getProperties().getProperty("debug").compareTo("true")==0) ) { _debug=true; } if (getProperties().containsKey("clientbuffering")) { _clientSideBuffering = Boolean.parseBoolean(getProperties().getProperty("clientbuffering")); } if (getProperties().containsKey("writebuffersize")) { _writeBufferSize = Long.parseLong(getProperties().getProperty("writebuffersize")); } if ("false".equals(getProperties().getProperty("hbase.usepagefilter", "true"))) { _usePageFilter = false; } if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) { config.set("hadoop.security.authentication", "Kerberos"); UserGroupInformation.setConfiguration(config); } if ( (getProperties().getProperty("principal")!=null) && (getProperties().getProperty("keytab")!=null) ){ try { UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"), getProperties().getProperty("keytab")); } catch (IOException e) { System.err.println("Keytab file is not readable or not found"); throw new DBException(e); } } try { _hConn = HConnectionManager.createConnection(config); } catch (IOException e) { System.err.println("Connection to HBase was not successful"); throw new DBException(e); } _columnFamily = getProperties().getProperty("columnfamily"); if (_columnFamily == null) { System.err.println("Error, must specify a columnfamily for HBase table"); throw new DBException("No columnfamily specified"); } _columnFamilyBytes = Bytes.toBytes(_columnFamily); // Terminate right now if table does not exist, since the client // will not propagate this error upstream once the workload // starts. String table = com.yahoo.ycsb.workloads.CoreWorkload.table; try { HTableInterface ht = _hConn.getTable(table); ht.getTableDescriptor(); } catch (IOException e) { throw new DBException(e); } }
#vulnerable code public void init() throws DBException { if ( (getProperties().getProperty("debug")!=null) && (getProperties().getProperty("debug").compareTo("true")==0) ) { _debug=true; } if (getProperties().containsKey("clientbuffering")) { _clientSideBuffering = Boolean.parseBoolean(getProperties().getProperty("clientbuffering")); } if (getProperties().containsKey("writebuffersize")) { _writeBufferSize = Long.parseLong(getProperties().getProperty("writebuffersize")); } if ("false".equals(getProperties().getProperty("hbase.usepagefilter", "true"))) { _usePageFilter = false; } _columnFamily = getProperties().getProperty("columnfamily"); if (_columnFamily == null) { System.err.println("Error, must specify a columnfamily for HBase table"); throw new DBException("No columnfamily specified"); } _columnFamilyBytes = Bytes.toBytes(_columnFamily); // Terminate right now if table does not exist, since the client // will not propagate this error upstream once the workload // starts. String table = com.yahoo.ycsb.workloads.CoreWorkload.table; try { HTable ht = new HTable(config, table); ht.getTableDescriptor(); } catch (IOException e) { throw new DBException(e); } } #location 36 #vulnerability type RESOURCE_LEAK
Above is the vulnerable code; please generate the patch based on the information provided.
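The vulnerable `init` above leaks the probe `HTable` it creates just to verify the table exists. In the pre-1.0 client API the record uses, the handle is `Closeable` and should be released in a `finally`; a sketch:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HTableInterface;

    final class HBaseProbe {
        // Verify a table exists without leaking the handle; table handles are
        // lightweight in this API but still must be closed.
        static void checkTable(HConnection conn, String table) throws IOException {
            HTableInterface ht = conn.getTable(table);
            try {
                ht.getTableDescriptor(); // throws IOException if the table is absent
            } finally {
                ht.close();
            }
        }
    }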
#fixed code @Override public Status scan(String table, String startkey, int recordcount, Set<String> fields, Vector<HashMap<String, ByteIterator>> result) { try { PreparedStatement stmt = (fields == null) ? scanAllStmt.get() : scanStmts.get(fields); // Prepare statement on demand if (stmt == null) { Select.Builder selectBuilder; if (fields == null) { selectBuilder = QueryBuilder.select().all(); } else { selectBuilder = QueryBuilder.select(); for (String col : fields) { ((Select.Selection) selectBuilder).column(col); } } Select selectStmt = selectBuilder.from(table); // The statement builder is not setup right for tokens. // So, we need to build it manually. String initialStmt = selectStmt.toString(); StringBuilder scanStmt = new StringBuilder(); scanStmt.append(initialStmt.substring(0, initialStmt.length() - 1)); scanStmt.append(" WHERE "); scanStmt.append(QueryBuilder.token(YCSB_KEY)); scanStmt.append(" >= "); scanStmt.append("token("); scanStmt.append(QueryBuilder.bindMarker()); scanStmt.append(")"); scanStmt.append(" LIMIT "); scanStmt.append(QueryBuilder.bindMarker()); stmt = session.prepare(scanStmt.toString()); stmt.setConsistencyLevel(readConsistencyLevel); if (trace) { stmt.enableTracing(); } PreparedStatement prevStmt = (fields == null) ? scanAllStmt.getAndSet(stmt) : scanStmts.putIfAbsent(new HashSet(fields), stmt); if (prevStmt != null) { stmt = prevStmt; } } logger.debug(stmt.getQueryString()); logger.debug("startKey = {}, recordcount = {}", startkey, recordcount); ResultSet rs = session.execute(stmt.bind(startkey, Integer.valueOf(recordcount))); HashMap<String, ByteIterator> tuple; while (!rs.isExhausted()) { Row row = rs.one(); tuple = new HashMap<String, ByteIterator>(); ColumnDefinitions cd = row.getColumnDefinitions(); for (ColumnDefinitions.Definition def : cd) { ByteBuffer val = row.getBytesUnsafe(def.getName()); if (val != null) { tuple.put(def.getName(), new ByteArrayByteIterator(val.array())); } else { tuple.put(def.getName(), null); } } result.add(tuple); } return Status.OK; } catch (Exception e) { logger.error( MessageFormatter.format("Error scanning with startkey: {}", startkey).getMessage(), e); return Status.ERROR; } }
#vulnerable code @Override public Status scan(String table, String startkey, int recordcount, Set<String> fields, Vector<HashMap<String, ByteIterator>> result) { try { Statement stmt; Select.Builder selectBuilder; if (fields == null) { selectBuilder = QueryBuilder.select().all(); } else { selectBuilder = QueryBuilder.select(); for (String col : fields) { ((Select.Selection) selectBuilder).column(col); } } stmt = selectBuilder.from(table); // The statement builder is not setup right for tokens. // So, we need to build it manually. String initialStmt = stmt.toString(); StringBuilder scanStmt = new StringBuilder(); scanStmt.append(initialStmt.substring(0, initialStmt.length() - 1)); scanStmt.append(" WHERE "); scanStmt.append(QueryBuilder.token(YCSB_KEY)); scanStmt.append(" >= "); scanStmt.append("token('"); scanStmt.append(startkey); scanStmt.append("')"); scanStmt.append(" LIMIT "); scanStmt.append(recordcount); stmt = new SimpleStatement(scanStmt.toString()); stmt.setConsistencyLevel(readConsistencyLevel); if (debug) { System.out.println(stmt.toString()); } if (trace) { stmt.enableTracing(); } ResultSet rs = session.execute(stmt); HashMap<String, ByteIterator> tuple; while (!rs.isExhausted()) { Row row = rs.one(); tuple = new HashMap<String, ByteIterator>(); ColumnDefinitions cd = row.getColumnDefinitions(); for (ColumnDefinitions.Definition def : cd) { ByteBuffer val = row.getBytesUnsafe(def.getName()); if (val != null) { tuple.put(def.getName(), new ByteArrayByteIterator(val.array())); } else { tuple.put(def.getName(), null); } } result.add(tuple); } return Status.OK; } catch (Exception e) { e.printStackTrace(); System.out.println("Error scanning with startkey: " + startkey); return Status.ERROR; } } #location 37 #vulnerability type THREAD_SAFETY_VIOLATION
Above is the vulnerable code; please generate the patch based on the information provided.
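The fixed `scan` above caches prepared statements in a concurrent map and resolves the prepare-twice race with `putIfAbsent`, so all threads converge on one instance. A generic sketch of that cache:

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.function.Function;

    final class StatementCache<S> {
        private final ConcurrentMap<Set<String>, S> cache = new ConcurrentHashMap<>();

        // Two threads may both prepare the same statement; putIfAbsent makes
        // the race benign by ensuring everyone uses a single shared instance.
        S get(Set<String> fields, Function<Set<String>, S> prepare) {
            S stmt = cache.get(fields);
            if (stmt == null) {
                S fresh = prepare.apply(fields);
                S prev = cache.putIfAbsent(fields, fresh);
                stmt = (prev != null) ? prev : fresh;
            }
            return stmt;
        }
    }

`computeIfAbsent` would be the one-liner alternative, at the cost of holding the map bin lock while the statement is prepared.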
#fixed code @Override public Status read(String table, String key, Set<String> fields, Map<String, ByteIterator> result) { try { PreparedStatement stmt = (fields == null) ? readAllStmt.get() : readStmts.get(fields); // Prepare statement on demand if (stmt == null) { Select.Builder selectBuilder; if (fields == null) { selectBuilder = QueryBuilder.select().all(); } else { selectBuilder = QueryBuilder.select(); for (String col : fields) { ((Select.Selection) selectBuilder).column(col); } } stmt = session.prepare(selectBuilder.from(table) .where(QueryBuilder.eq(YCSB_KEY, QueryBuilder.bindMarker())) .limit(1)); stmt.setConsistencyLevel(readConsistencyLevel); if (trace) { stmt.enableTracing(); } PreparedStatement prevStmt = (fields == null) ? readAllStmt.getAndSet(stmt) : readStmts.putIfAbsent(new HashSet(fields), stmt); if (prevStmt != null) { stmt = prevStmt; } } logger.debug(stmt.getQueryString()); logger.debug("key = {}", key); ResultSet rs = session.execute(stmt.bind(key)); if (rs.isExhausted()) { return Status.NOT_FOUND; } // Should be only 1 row Row row = rs.one(); ColumnDefinitions cd = row.getColumnDefinitions(); for (ColumnDefinitions.Definition def : cd) { ByteBuffer val = row.getBytesUnsafe(def.getName()); if (val != null) { result.put(def.getName(), new ByteArrayByteIterator(val.array())); } else { result.put(def.getName(), null); } } return Status.OK; } catch (Exception e) { logger.error(MessageFormatter.format("Error reading key: {}", key).getMessage(), e); return Status.ERROR; } }
#vulnerable code @Override public Status read(String table, String key, Set<String> fields, Map<String, ByteIterator> result) { try { Statement stmt; Select.Builder selectBuilder; if (fields == null) { selectBuilder = QueryBuilder.select().all(); } else { selectBuilder = QueryBuilder.select(); for (String col : fields) { ((Select.Selection) selectBuilder).column(col); } } stmt = selectBuilder.from(table).where(QueryBuilder.eq(YCSB_KEY, key)) .limit(1); stmt.setConsistencyLevel(readConsistencyLevel); if (debug) { System.out.println(stmt.toString()); } if (trace) { stmt.enableTracing(); } ResultSet rs = session.execute(stmt); if (rs.isExhausted()) { return Status.NOT_FOUND; } // Should be only 1 row Row row = rs.one(); ColumnDefinitions cd = row.getColumnDefinitions(); for (ColumnDefinitions.Definition def : cd) { ByteBuffer val = row.getBytesUnsafe(def.getName()); if (val != null) { result.put(def.getName(), new ByteArrayByteIterator(val.array())); } else { result.put(def.getName(), null); } } return Status.OK; } catch (Exception e) { e.printStackTrace(); System.out.println("Error reading key: " + key); return Status.ERROR; } } #location 21 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public int scan(String table, String startkey, int recordcount, Set<String> fields, Vector<HashMap<String, ByteIterator>> result) { MongoCursor<Document> cursor = null; try { MongoCollection<Document> collection = database .getCollection(table); Document scanRange = new Document("$gte", startkey); Document query = new Document("_id", scanRange); Document sort = new Document("_id", INCLUDE); Document projection = null; if (fields != null) { projection = new Document(); for (String fieldName : fields) { projection.put(fieldName, INCLUDE); } } cursor = collection.find(query) .projection(projection).sort(sort).limit(recordcount).iterator(); if (!cursor.hasNext()) { System.err.println("Nothing found in scan for key " + startkey); return 1; } while (cursor.hasNext()) { HashMap<String, ByteIterator> resultMap = new HashMap<String, ByteIterator>(); Document obj = cursor.next(); fillMap(resultMap, obj); result.add(resultMap); } return 0; } catch (Exception e) { System.err.println(e.toString()); return 1; } finally { if (cursor != null) { cursor.close(); } } }
#vulnerable code @Override public int scan(String table, String startkey, int recordcount, Set<String> fields, Vector<HashMap<String, ByteIterator>> result) { MongoCursor<Document> cursor = null; try { MongoCollection<Document> collection = database .getCollection(table); Document scanRange = new Document("$gte", startkey); Document query = new Document("_id", scanRange); Document sort = new Document("_id", INCLUDE); Document projection = null; if (fields != null) { projection = new Document(); for (String fieldName : fields) { projection.put(fieldName, INCLUDE); } } cursor = collection.withReadPreference(readPreference).find(query) .projection(projection).sort(sort).limit(recordcount).iterator(); if (!cursor.hasNext()) { System.err.println("Nothing found in scan for key " + startkey); return 1; } while (cursor.hasNext()) { HashMap<String, ByteIterator> resultMap = new HashMap<String, ByteIterator>(); Document obj = cursor.next(); fillMap(resultMap, obj); result.add(resultMap); } return 0; } catch (Exception e) { System.err.println(e.toString()); return 1; } finally { if (cursor != null) { cursor.close(); } } } #location 20 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
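The scan fix stops deriving a read-preference view of the collection on every call, which implies the preference is applied once when the client is initialized. A small sketch of that configure-once shape, with a stand-in Collection type rather than the MongoDB driver's:

// Configure-once sketch: derive the configured handle a single time during
// init and reuse it, instead of re-deriving it per request.
public class ConfigureOnceSketch {
  static final class Collection {
    final String readPreference;
    Collection(String readPreference) { this.readPreference = readPreference; }
    String find(String key) { return "find(" + key + ") with " + readPreference; }
  }

  private Collection collection; // set once during single-threaded init()

  void init(String readPreference) {
    // Built before any worker thread runs; the view itself is immutable.
    collection = new Collection(readPreference);
  }

  String scan(String startKey) {
    return collection.find(startKey); // no per-call reconfiguration
  }

  public static void main(String[] args) {
    ConfigureOnceSketch db = new ConfigureOnceSketch();
    db.init("primaryPreferred");
    System.out.println(db.scan("user1"));
  }
}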
#fixed code @Override public void init() throws DBException { Properties props = getProperties(); String url = props.getProperty(URL_PROPERTY); String user = props.getProperty(USER_PROPERTY, USER_PROPERTY_DEFAULT); String password = props.getProperty(PASSWORD_PROPERTY, PASSWORD_PROPERTY_DEFAULT); Boolean newdb = Boolean.parseBoolean(props.getProperty(NEWDB_PROPERTY, NEWDB_PROPERTY_DEFAULT)); String remoteStorageType = props.getProperty(STORAGE_TYPE_PROPERTY); String intent = props.getProperty(INTENT_PROPERTY, INTENT_PROPERTY_DEFAULT); Boolean dotransactions = Boolean.parseBoolean( props.getProperty(DO_TRANSACTIONS_PROPERTY, DO_TRANSACTIONS_PROPERTY_DEFAULT)); if (url == null) { throw new DBException(String.format("Required property \"%s\" missing for OrientDBClient", URL_PROPERTY)); } log.info("OrientDB loading database url = " + url); // If using a remote database, use the OServerAdmin interface to connect if (url.startsWith(OEngineRemote.NAME)) { isRemote = true; if (remoteStorageType == null) { throw new DBException("When connecting to a remote OrientDB instance, " + "specify a database storage type (plocal or memory) with " + STORAGE_TYPE_PROPERTY); } try { OServerAdmin server = new OServerAdmin(url).connect(user, password); if (server.existsDatabase()) { if (newdb && !dotransactions) { log.info("OrientDB dropping and recreating fresh db on remote server."); server.dropDatabase(remoteStorageType); server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType); } } else { log.info("OrientDB database not found, creating fresh db"); server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType); } server.close(); db = new ODatabaseDocumentTx(url).open(user, password); } catch (IOException | OException e) { throw new DBException(String.format("Error interfacing with %s", url), e); } } else { try { db = new ODatabaseDocumentTx(url); if (db.exists()) { db.open(user, password); if (newdb && !dotransactions) { log.info("OrientDB dropping and recreating fresh db."); db.drop(); db.create(); } } else { log.info("OrientDB database not found, creating fresh db"); db.create(); } } catch (ODatabaseException e) { throw new DBException(String.format("Error interfacing with %s", url), e); } } log.info("OrientDB connection created with " + url); dictionary = db.getMetadata().getIndexManager().getDictionary(); if (!db.getMetadata().getSchema().existsClass(CLASS)) { db.getMetadata().getSchema().createClass(CLASS); } if (intent.equals(ORIENTDB_MASSIVEINSERT)) { log.info("Declaring intent of MassiveInsert."); db.declareIntent(new OIntentMassiveInsert()); } else if (intent.equals(ORIENTDB_MASSIVEREAD)) { log.info("Declaring intent of MassiveRead."); db.declareIntent(new OIntentMassiveRead()); } else if (intent.equals(ORIENTDB_NOCACHE)) { log.info("Declaring intent of NoCache."); db.declareIntent(new OIntentNoCache()); } }
#vulnerable code @Override public void init() throws DBException { Properties props = getProperties(); String url = props.getProperty(URL_PROPERTY); String user = props.getProperty(USER_PROPERTY, USER_PROPERTY_DEFAULT); String password = props.getProperty(PASSWORD_PROPERTY, PASSWORD_PROPERTY_DEFAULT); Boolean newdb = Boolean.parseBoolean(props.getProperty(NEWDB_PROPERTY, NEWDB_PROPERTY_DEFAULT)); String remoteStorageType = props.getProperty(STORAGE_TYPE_PROPERTY); Boolean dotransactions = Boolean.parseBoolean( props.getProperty(DO_TRANSACTIONS_PROPERTY, DO_TRANSACTIONS_PROPERTY_DEFAULT)); if (url == null) { throw new DBException(String.format("Required property \"%s\" missing for OrientDBClient", URL_PROPERTY)); } log.info("OrientDB loading database url = " + url); // If using a remote database, use the OServerAdmin interface to connect if (url.startsWith(OEngineRemote.NAME)) { isRemote = true; if (remoteStorageType == null) { throw new DBException("When connecting to a remote OrientDB instance, " + "specify a database storage type (plocal or memory) with " + STORAGE_TYPE_PROPERTY); } try { OServerAdmin server = new OServerAdmin(url).connect(user, password); if (server.existsDatabase()) { if (newdb && !dotransactions) { log.info("OrientDB dropping and recreating fresh db on remote server."); server.dropDatabase(remoteStorageType); server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType); } } else { log.info("OrientDB database not found, creating fresh db"); server.createDatabase(server.getURL(), ORIENTDB_DOCUMENT_TYPE, remoteStorageType); } server.close(); db = new ODatabaseDocumentTx(url).open(user, password); } catch (IOException | OException e) { throw new DBException(String.format("Error interfacing with %s", url), e); } } else { try { db = new ODatabaseDocumentTx(url); if (db.exists()) { db.open(user, password); if (newdb && !dotransactions) { log.info("OrientDB dropping and recreating fresh db."); db.drop(); db.create(); } } else { log.info("OrientDB database not found, creating fresh db"); db.create(); } } catch (ODatabaseException e) { throw new DBException(String.format("Error interfacing with %s", url), e); } } log.info("OrientDB connection created with " + url); dictionary = db.getMetadata().getIndexManager().getDictionary(); if (!db.getMetadata().getSchema().existsClass(CLASS)) { db.getMetadata().getSchema().createClass(CLASS); } db.declareIntent(new OIntentMassiveInsert()); } #location 42 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public Status delete(String table, String key) { try { PreparedStatement stmt = deleteStmt.get(); // Prepare statement on demand if (stmt == null) { stmt = session.prepare(QueryBuilder.delete().from(table) .where(QueryBuilder.eq(YCSB_KEY, QueryBuilder.bindMarker()))); stmt.setConsistencyLevel(writeConsistencyLevel); if (trace) { stmt.enableTracing(); } PreparedStatement prevStmt = deleteStmt.getAndSet(stmt); if (prevStmt != null) { stmt = prevStmt; } } logger.debug(stmt.getQueryString()); logger.debug("key = {}", key); session.execute(stmt.bind(key)); return Status.OK; } catch (Exception e) { logger.error(MessageFormatter.format("Error deleting key: {}", key).getMessage(), e); } return Status.ERROR; }
#vulnerable code @Override public Status delete(String table, String key) { try { Statement stmt; stmt = QueryBuilder.delete().from(table) .where(QueryBuilder.eq(YCSB_KEY, key)); stmt.setConsistencyLevel(writeConsistencyLevel); if (debug) { System.out.println(stmt.toString()); } if (trace) { stmt.enableTracing(); } session.execute(stmt); return Status.OK; } catch (Exception e) { e.printStackTrace(); System.out.println("Error deleting key: " + key); } return Status.ERROR; } #location 11 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
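For the single delete statement, the patch uses what appears to be an AtomicReference: getAndSet publishes the freshly prepared statement, and a non-null previous value is reused so racing threads converge on one instance. A compact sketch of that single-slot variant; the field name and the String stand-in for a prepared statement are assumptions:

import java.util.concurrent.atomic.AtomicReference;

// Single-slot prepare-once idiom: the lone delete statement is published
// through an AtomicReference, and the loser of a race reuses whatever
// getAndSet() reports was already there, mirroring the patch above.
public class SingleSlotCacheSketch {
  private final AtomicReference<String> deleteStmt = new AtomicReference<>();

  String statement() {
    String stmt = deleteStmt.get();
    if (stmt == null) {
      String built = "DELETE FROM usertable WHERE y_id = ?"; // stand-in for session.prepare(...)
      String prev = deleteStmt.getAndSet(built);
      if (prev != null) {
        stmt = prev; // another thread prepared first; reuse its instance
      } else {
        stmt = built;
      }
    }
    return stmt;
  }

  public static void main(String[] args) {
    System.out.println(new SingleSlotCacheSketch().statement());
  }
}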
#fixed code @Before public void setUp() throws Exception { session = cassandraUnit.getSession(); Properties p = new Properties(); p.setProperty("hosts", HOST); p.setProperty("port", Integer.toString(PORT)); p.setProperty("table", TABLE); Measurements.setProperties(p); final CoreWorkload workload = new CoreWorkload(); workload.init(p); client = new CassandraCQLClient(); client.setProperties(p); client.init(); }
#vulnerable code @Before public void setUp() throws Exception { // check that this is Java 8+ int javaVersion = Integer.parseInt(System.getProperty("java.version").split("\\.")[1]); Assume.assumeTrue(javaVersion >= 8); session = cassandraUnit.getSession(); Properties p = new Properties(); p.setProperty("hosts", HOST); p.setProperty("port", Integer.toString(PORT)); p.setProperty("table", TABLE); Measurements.setProperties(p); final CoreWorkload workload = new CoreWorkload(); workload.init(p); client = new CassandraCQLClient(); client.setProperties(p); client.init(); } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
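The fix simply removes the version probe: System.getProperty can return null, and splitting "java.version" on dots also breaks on Java 9+, where the value can be plain "9". If such a gate were still wanted, a null-safe variant might look like the following sketch (not the project's code):

// Null-safe version probing: both failure modes of the removed check are
// guarded here, i.e. a missing property and a dotless version string.
public class JavaVersionSketch {
  static int majorVersion() {
    String v = System.getProperty("java.specification.version");
    if (v == null) {
      return -1; // property missing: report "unknown" instead of throwing NPE
    }
    // Pre-9 releases look like "1.8"; 9 and later look like "9", "11", ...
    String major = v.startsWith("1.") ? v.substring(2) : v;
    try {
      return Integer.parseInt(major);
    } catch (NumberFormatException e) {
      return -1;
    }
  }

  public static void main(String[] args) {
    System.out.println("Detected major version: " + majorVersion());
  }
}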
#fixed code protected net.spy.memcached.MemcachedClient createMemcachedClient() throws Exception { ConnectionFactoryBuilder connectionFactoryBuilder = new ConnectionFactoryBuilder(); connectionFactoryBuilder.setReadBufferSize(Integer.parseInt( getProperties().getProperty(READ_BUFFER_SIZE_PROPERTY, DEFAULT_READ_BUFFER_SIZE))); connectionFactoryBuilder.setOpTimeout(Integer.parseInt( getProperties().getProperty(OP_TIMEOUT_PROPERTY, DEFAULT_OP_TIMEOUT))); String protocolString = getProperties().getProperty(PROTOCOL_PROPERTY); connectionFactoryBuilder.setProtocol( protocolString == null ? DEFAULT_PROTOCOL : ConnectionFactoryBuilder.Protocol.valueOf(protocolString.toUpperCase())); String failureString = getProperties().getProperty(FAILURE_MODE_PROPERTY); connectionFactoryBuilder.setFailureMode( failureString == null ? FAILURE_MODE_PROPERTY_DEFAULT : FailureMode.valueOf(failureString)); // Note: this only works with IPv4 addresses due to its assumption of // ":" being the separator of hostname/IP and port; this is not the case // when dealing with IPv6 addresses. // // TODO(mbrukman): fix this. List<InetSocketAddress> addresses = new ArrayList<InetSocketAddress>(); String[] hosts = getProperties().getProperty(HOSTS_PROPERTY).split(","); for (String address : hosts) { int colon = address.indexOf(":"); int port = DEFAULT_PORT; String host = address; if (colon != -1) { port = Integer.parseInt(address.substring(colon + 1)); host = address.substring(0, colon); } addresses.add(new InetSocketAddress(host, port)); } return new net.spy.memcached.MemcachedClient( connectionFactoryBuilder.build(), addresses); }
#vulnerable code protected net.spy.memcached.MemcachedClient createMemcachedClient() throws Exception { ConnectionFactoryBuilder connectionFactoryBuilder = new ConnectionFactoryBuilder(); connectionFactoryBuilder.setReadBufferSize(Integer.parseInt( getProperties().getProperty(READ_BUFFER_SIZE_PROPERTY, DEFAULT_READ_BUFFER_SIZE))); connectionFactoryBuilder.setOpTimeout(Integer.parseInt( getProperties().getProperty(OP_TIMEOUT_PROPERTY, DEFAULT_OP_TIMEOUT))); String failureString = getProperties().getProperty(FAILURE_MODE_PROPERTY); connectionFactoryBuilder.setFailureMode( failureString == null ? FAILURE_MODE_PROPERTY_DEFAULT : FailureMode.valueOf(failureString)); // Note: this only works with IPv4 addresses due to its assumption of // ":" being the separator of hostname/IP and port; this is not the case // when dealing with IPv6 addresses. // // TODO(mbrukman): fix this. List<InetSocketAddress> addresses = new ArrayList<InetSocketAddress>(); String[] hosts = getProperties().getProperty(HOSTS_PROPERTY).split(","); for (String address : hosts) { int colon = address.indexOf(":"); int port = DEFAULT_PORT; String host = address; if (colon != -1) { port = Integer.parseInt(address.substring(colon + 1)); host = address.substring(0, colon); } addresses.add(new InetSocketAddress(host, port)); } return new net.spy.memcached.MemcachedClient( connectionFactoryBuilder.build(), addresses); } #location 24 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
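The added protocol handling shows the defaulting pattern for nullable lookups: Properties.getProperty returns null for absent keys, so every dereference needs a default or an explicit error first. A self-contained sketch of that pattern with hypothetical property names:

import java.util.Properties;

// Defaulting pattern: guard every getProperty result before calling
// methods such as toUpperCase(), valueOf(), or split() on it.
public class PropertyDefaultsSketch {
  enum Protocol { TEXT, BINARY }

  public static void main(String[] args) {
    Properties props = new Properties(); // "protocol" and "hosts" unset
    String protocolString = props.getProperty("protocol");
    Protocol protocol = (protocolString == null)
        ? Protocol.TEXT // documented default instead of an NPE
        : Protocol.valueOf(protocolString.toUpperCase());
    String hosts = props.getProperty("hosts", "localhost:11211"); // two-arg default
    for (String address : hosts.split(",")) {
      System.out.println(protocol + " -> " + address);
    }
  }
}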
#fixed code @Override public boolean doTransaction(DB db, Object threadstate) { String operation = operationchooser.nextString(); if (operation == null) { return false; } switch (operation) { case "UPDATE": doTransactionUpdate(db); break; case "INSERT": doTransactionInsert(db); break; case "DELETE": doTransactionDelete(db); break; default: doTransactionRead(db); } return true; }
#vulnerable code @Override public boolean doTransaction(DB db, Object threadstate) { switch (operationchooser.nextString()) { case "UPDATE": doTransactionUpdate(db); break; case "INSERT": doTransactionInsert(db); break; case "DELETE": doTransactionDelete(db); break; default: doTransactionRead(db); } return true; } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
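The patch captures the chooser's result once, bails out on null, and only then switches; switching directly on a null String throws a NullPointerException. A runnable sketch of that guard, with a stand-in chooser that occasionally yields null:

import java.util.Random;

// Guard pattern: bind the possibly-null choice to a local, return early,
// and only then switch on it. The chooser below stands in for the
// workload's operationchooser.
public class NullGuardSketch {
  private final Random random = new Random(42);

  String nextOperation() {
    // Real choosers can be exhausted; model that with an occasional null.
    return random.nextInt(10) == 0 ? null : "READ";
  }

  boolean doTransaction() {
    String operation = nextOperation();
    if (operation == null) {
      return false; // signal "no more work" instead of NPE-ing in the switch
    }
    switch (operation) {
    case "READ":
      // ... perform the read ...
      break;
    default:
      // ... other operations ...
    }
    return true;
  }

  public static void main(String[] args) {
    NullGuardSketch s = new NullGuardSketch();
    int completed = 0;
    while (s.doTransaction() && completed < 100) {
      completed++;
    }
    System.out.println("Completed " + completed + " transactions");
  }
}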
#fixed code @Override public void cleanup() throws DBException { // Get the measurements instance as this is the only client that should // count clean up time like an update if client-side buffering is // enabled. Measurements measurements = Measurements.getMeasurements(); try { long st = System.nanoTime(); if (bufferedMutator != null) { bufferedMutator.close(); } if (currentTable != null) { currentTable.close(); } long en = System.nanoTime(); final String type = clientSideBuffering ? "UPDATE" : "CLEANUP"; measurements.measure(type, (int) ((en - st) / 1000)); threadCount.decrementAndGet(); if (threadCount.get() <= 0) { // Means we are done so ok to shut down the Connection. synchronized (CONNECTION_LOCK) { if (connection != null) { connection.close(); connection = null; } } } } catch (IOException e) { throw new DBException(e); } }
#vulnerable code @Override public void cleanup() throws DBException { // Get the measurements instance as this is the only client that should // count clean up time like an update if client-side buffering is // enabled. Measurements measurements = Measurements.getMeasurements(); try { long st = System.nanoTime(); if (bufferedMutator != null) { bufferedMutator.close(); } if (currentTable != null) { currentTable.close(); } long en = System.nanoTime(); final String type = clientSideBuffering ? "UPDATE" : "CLEANUP"; measurements.measure(type, (int) ((en - st) / 1000)); synchronized(threadCount) { --threadCount; if (threadCount <= 0 && connection != null) { connection.close(); connection = null; } } } catch (IOException e) { throw new DBException(e); } } #location 18 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
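The underlying problem with synchronized(threadCount) is that incrementing an Integer field rebinds it to a different object, so threads can end up locking different monitors; the patch therefore moves to an AtomicInteger plus a never-reassigned lock object. A sketch of that last-one-out shutdown shape, where the field names mirror the patch and the String connection is a stand-in:

import java.util.concurrent.atomic.AtomicInteger;

// AtomicInteger tracks live threads; CONNECTION_LOCK is a stable monitor
// (never reassigned), unlike a boxed Integer that ++ replaces each time.
public class SharedShutdownSketch {
  private static final AtomicInteger THREAD_COUNT = new AtomicInteger(0);
  private static final Object CONNECTION_LOCK = new Object();
  private static String connection;

  static void init() {
    THREAD_COUNT.incrementAndGet();
    synchronized (CONNECTION_LOCK) {
      if (connection == null) {
        connection = "open"; // create the shared resource exactly once
      }
    }
  }

  static void cleanup() {
    if (THREAD_COUNT.decrementAndGet() <= 0) {
      synchronized (CONNECTION_LOCK) { // same monitor on the close path
        connection = null; // last thread out closes the shared connection
      }
    }
  }

  public static void main(String[] args) throws InterruptedException {
    Runnable worker = () -> { init(); cleanup(); };
    Thread a = new Thread(worker);
    Thread b = new Thread(worker);
    a.start();
    b.start();
    a.join();
    b.join();
    System.out.println("connection after shutdown: " + connection); // null
  }
}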
#fixed code private Map<String, byte[]> convertToBytearrayMap(Map<String,ByteIterator> values) { Map<String, byte[]> retVal = new HashMap<String, byte[]>(); for (Map.Entry<String, ByteIterator> entry : values.entrySet()) { retVal.put(entry.getKey(), entry.getValue().toArray()); } return retVal; }
#vulnerable code private Map<String, byte[]> convertToBytearrayMap(Map<String,ByteIterator> values) { Map<String, byte[]> retVal = new HashMap<String, byte[]>(); for (String key : values.keySet()) { retVal.put(key, values.get(key).toArray()); } return retVal; } #location 4 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
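The rewritten loop iterates entrySet() instead of pairing keySet() with get(), so each mapping is read once and there is no second lookup that could surface null. A trivial demonstration:

import java.util.HashMap;
import java.util.Map;

// entrySet() hands back key and value as one pair, avoiding the re-lookup
// that made Infer flag values.get(key) as a possible null dereference.
public class EntrySetSketch {
  public static void main(String[] args) {
    Map<String, byte[]> values = new HashMap<>();
    values.put("field0", new byte[] {1, 2, 3});
    values.put("field1", new byte[] {4});

    Map<String, Integer> lengths = new HashMap<>();
    for (Map.Entry<String, byte[]> entry : values.entrySet()) {
      lengths.put(entry.getKey(), entry.getValue().length); // single lookup
    }
    System.out.println(lengths);
  }
}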
#fixed code @Override public Status delete(String table, String key) { if (debug) { System.out.println("Doing delete for key: " + key); } setTable(table); final MutateRowRequest.Builder rowMutation = MutateRowRequest.newBuilder() .setRowKey(ByteString.copyFromUtf8(key)) .setTableNameBytes(ByteStringer.wrap(lastTableBytes)); rowMutation.addMutationsBuilder().setDeleteFromRow( DeleteFromRow.getDefaultInstance()); try { if (clientSideBuffering) { bulkMutation.add(rowMutation.build()); } else { client.mutateRow(rowMutation.build()); } return Status.OK; } catch (RuntimeException e) { System.err.println("Failed to delete key: " + key + " " + e.getMessage()); return Status.ERROR; } }
#vulnerable code @Override public Status delete(String table, String key) { if (debug) { System.out.println("Doing delete for key: " + key); } setTable(table); final MutateRowRequest.Builder rowMutation = MutateRowRequest.newBuilder() .setRowKey(ByteString.copyFromUtf8(key)) .setTableNameBytes(ByteStringer.wrap(lastTableBytes)); rowMutation.addMutationsBuilder().setDeleteFromRow( DeleteFromRow.getDefaultInstance()); try { if (clientSideBuffering) { asyncExecutor.mutateRowAsync(rowMutation.build()); } else { client.mutateRow(rowMutation.build()); } return Status.OK; } catch (ServiceException e) { System.err.println("Failed to delete key: " + key + " " + e.getMessage()); return Status.ERROR; } catch (InterruptedException e) { System.err.println("Interrupted while delete key: " + key + " " + e.getMessage()); Thread.currentThread().interrupt(); return Status.ERROR; // never get here, but lets make the compiler happy } } #location 17 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public void cleanup() throws DBException { if (asyncExecutor != null) { try { asyncExecutor.flush(); } catch (IOException e) { throw new DBException(e); } } synchronized (CONFIG) { --threadCount; if (threadCount <= 0) { try { session.close(); } catch (IOException e) { throw new DBException(e); } } } }
#vulnerable code @Override public void cleanup() throws DBException { if (asyncExecutor != null) { try { asyncExecutor.flush(); } catch (IOException e) { throw new DBException(e); } } synchronized (threadCount) { --threadCount; if (threadCount <= 0) { try { session.close(); } catch (IOException e) { throw new DBException(e); } } } } #location 10 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public void init() throws DBException { if ("true" .equals(getProperties().getProperty("clientbuffering", "false"))) { this.clientSideBuffering = true; } if (getProperties().containsKey("writebuffersize")) { writeBufferSize = Long.parseLong(getProperties().getProperty("writebuffersize")); } if (getProperties().getProperty("durability") != null) { this.durability = Durability.valueOf(getProperties().getProperty("durability")); } if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) { config.set("hadoop.security.authentication", "Kerberos"); UserGroupInformation.setConfiguration(config); } if ((getProperties().getProperty("principal")!=null) && (getProperties().getProperty("keytab")!=null)) { try { UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"), getProperties().getProperty("keytab")); } catch (IOException e) { System.err.println("Keytab file is not readable or not found"); throw new DBException(e); } } try { threadCount.getAndIncrement(); synchronized (CONNECTION_LOCK) { if (connection == null) { // Initialize if not set up already. connection = ConnectionFactory.createConnection(config); } } } catch (java.io.IOException e) { throw new DBException(e); } if ((getProperties().getProperty("debug") != null) && (getProperties().getProperty("debug").compareTo("true") == 0)) { debug = true; } if ("false" .equals(getProperties().getProperty("hbase.usepagefilter", "true"))) { usePageFilter = false; } columnFamily = getProperties().getProperty("columnfamily"); if (columnFamily == null) { System.err.println("Error, must specify a columnfamily for HBase table"); throw new DBException("No columnfamily specified"); } columnFamilyBytes = Bytes.toBytes(columnFamily); // Terminate right now if table does not exist, since the client // will not propagate this error upstream once the workload // starts. String table = com.yahoo.ycsb.workloads.CoreWorkload.table; try { final TableName tName = TableName.valueOf(table); synchronized (CONNECTION_LOCK) { connection.getTable(tName).getTableDescriptor(); } } catch (IOException e) { throw new DBException(e); } }
#vulnerable code @Override public void init() throws DBException { if ("true" .equals(getProperties().getProperty("clientbuffering", "false"))) { this.clientSideBuffering = true; } if (getProperties().containsKey("writebuffersize")) { writeBufferSize = Long.parseLong(getProperties().getProperty("writebuffersize")); } if (getProperties().getProperty("durability") != null) { this.durability = Durability.valueOf(getProperties().getProperty("durability")); } if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) { config.set("hadoop.security.authentication", "Kerberos"); UserGroupInformation.setConfiguration(config); } if ((getProperties().getProperty("principal")!=null) && (getProperties().getProperty("keytab")!=null)) { try { UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"), getProperties().getProperty("keytab")); } catch (IOException e) { System.err.println("Keytab file is not readable or not found"); throw new DBException(e); } } try { synchronized(threadCount) { ++threadCount; if (connection == null) { connection = ConnectionFactory.createConnection(config); } } } catch (java.io.IOException e) { throw new DBException(e); } if ((getProperties().getProperty("debug") != null) && (getProperties().getProperty("debug").compareTo("true") == 0)) { debug = true; } if ("false" .equals(getProperties().getProperty("hbase.usepagefilter", "true"))) { usePageFilter = false; } columnFamily = getProperties().getProperty("columnfamily"); if (columnFamily == null) { System.err.println("Error, must specify a columnfamily for HBase table"); throw new DBException("No columnfamily specified"); } columnFamilyBytes = Bytes.toBytes(columnFamily); // Terminate right now if table does not exist, since the client // will not propagate this error upstream once the workload // starts. String table = com.yahoo.ycsb.workloads.CoreWorkload.table; try { final TableName tName = TableName.valueOf(table); connection.getTable(tName).getTableDescriptor(); } catch (IOException e) { throw new DBException(e); } } #location 34 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public void getHTable(String table) throws IOException { final TableName tName = TableName.valueOf(table); this.currentTable = connection.getTable(tName); if (clientSideBuffering) { final BufferedMutatorParams p = new BufferedMutatorParams(tName); p.writeBufferSize(writeBufferSize); this.bufferedMutator = connection.getBufferedMutator(p); } }
#vulnerable code public void getHTable(String table) throws IOException { final TableName tName = TableName.valueOf(table); synchronized (CONNECTION_LOCK) { this.currentTable = connection.getTable(tName); if (clientSideBuffering) { final BufferedMutatorParams p = new BufferedMutatorParams(tName); p.writeBufferSize(writeBufferSize); this.bufferedMutator = connection.getBufferedMutator(p); } } } #location 7 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public void init() throws DBException { Properties props = getProperties(); String url = props.getProperty(URL_PROPERTY); String user = props.getProperty(USER_PROPERTY, USER_PROPERTY_DEFAULT); String password = props.getProperty(PASSWORD_PROPERTY, PASSWORD_PROPERTY_DEFAULT); Boolean newdb = Boolean.parseBoolean(props.getProperty(NEWDB_PROPERTY, NEWDB_PROPERTY_DEFAULT)); if (url == null) { throw new DBException(String.format("Required property \"%s\" missing for OrientDBClient", URL_PROPERTY)); } try { System.out.println("OrientDB loading database url = " + url); OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false); db = new ODatabaseDocumentTx(url); if (db.exists()) { db.open(user, password); if (newdb) { System.out.println("OrientDB drop and recreate fresh db"); db.drop(); db.create(); } } else { System.out.println("OrientDB database not found, create fresh db"); db.create(); } System.out.println("OrientDB connection created with " + url); dictionary = db.getMetadata().getIndexManager().getDictionary(); if (!db.getMetadata().getSchema().existsClass(CLASS)) db.getMetadata().getSchema().createClass(CLASS); db.declareIntent(new OIntentMassiveInsert()); } catch (Exception e1) { System.err.println("Could not initialize OrientDB connection pool for Loader: " + e1.toString()); e1.printStackTrace(); return; } }
#vulnerable code public void init() throws DBException { // initialize OrientDB driver Properties props = getProperties(); String url; if (System.getProperty("os.name").toLowerCase().contains("win")) url = props.getProperty("orientdb.url", "plocal:C:/temp/databases/ycsb"); else url = props.getProperty("orientdb.url", "plocal:/temp/databases/ycsb"); String user = props.getProperty("orientdb.user", "admin"); String password = props.getProperty("orientdb.password", "admin"); Boolean newdb = Boolean.parseBoolean(props.getProperty("orientdb.newdb", "false")); try { System.out.println("OrientDB loading database url = " + url); OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false); db = new ODatabaseDocumentTx(url); if (db.exists()) { db.open(user, password); if (newdb) { System.out.println("OrientDB drop and recreate fresh db"); db.drop(); db.create(); } } else { System.out.println("OrientDB database not found, create fresh db"); db.create(); } System.out.println("OrientDB connection created with " + url); dictionary = db.getMetadata().getIndexManager().getDictionary(); if (!db.getMetadata().getSchema().existsClass(CLASS)) db.getMetadata().getSchema().createClass(CLASS); db.declareIntent(new OIntentMassiveInsert()); } catch (Exception e1) { System.err.println("Could not initialize OrientDB connection pool for Loader: " + e1.toString()); e1.printStackTrace(); return; } } #location 6 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public Status update(String table, String key, HashMap<String, ByteIterator> values) { if (debug) { System.out.println("Setting up put for key: " + key); } setTable(table); final MutateRowRequest.Builder rowMutation = MutateRowRequest.newBuilder(); rowMutation.setRowKey(ByteString.copyFromUtf8(key)); rowMutation.setTableNameBytes(ByteStringer.wrap(lastTableBytes)); for (final Entry<String, ByteIterator> entry : values.entrySet()) { final Mutation.Builder mutationBuilder = rowMutation.addMutationsBuilder(); final SetCell.Builder setCellBuilder = mutationBuilder.getSetCellBuilder(); setCellBuilder.setFamilyNameBytes(ByteStringer.wrap(columnFamilyBytes)); setCellBuilder.setColumnQualifier(ByteStringer.wrap(entry.getKey().getBytes())); setCellBuilder.setValue(ByteStringer.wrap(entry.getValue().toArray())); // Bigtable uses a 1ms granularity setCellBuilder.setTimestampMicros(System.currentTimeMillis() * 1000); } try { if (clientSideBuffering) { bulkMutation.add(rowMutation.build()); } else { client.mutateRow(rowMutation.build()); } return Status.OK; } catch (RuntimeException e) { System.err.println("Failed to insert key: " + key + " " + e.getMessage()); return Status.ERROR; } }
#vulnerable code @Override public Status update(String table, String key, HashMap<String, ByteIterator> values) { if (debug) { System.out.println("Setting up put for key: " + key); } setTable(table); final MutateRowRequest.Builder rowMutation = MutateRowRequest.newBuilder(); rowMutation.setRowKey(ByteString.copyFromUtf8(key)); rowMutation.setTableNameBytes(ByteStringer.wrap(lastTableBytes)); for (final Entry<String, ByteIterator> entry : values.entrySet()) { final Mutation.Builder mutationBuilder = rowMutation.addMutationsBuilder(); final SetCell.Builder setCellBuilder = mutationBuilder.getSetCellBuilder(); setCellBuilder.setFamilyNameBytes(ByteStringer.wrap(columnFamilyBytes)); setCellBuilder.setColumnQualifier(ByteStringer.wrap(entry.getKey().getBytes())); setCellBuilder.setValue(ByteStringer.wrap(entry.getValue().toArray())); // Bigtable uses a 1ms granularity setCellBuilder.setTimestampMicros(System.currentTimeMillis() * 1000); } try { if (clientSideBuffering) { asyncExecutor.mutateRowAsync(rowMutation.build()); } else { client.mutateRow(rowMutation.build()); } return Status.OK; } catch (ServiceException e) { System.err.println("Failed to insert key: " + key + " " + e.getMessage()); return Status.ERROR; } catch (InterruptedException e) { System.err.println("Interrupted while inserting key: " + key + " " + e.getMessage()); Thread.currentThread().interrupt(); return Status.ERROR; // never get here, but lets make the compiler happy } } #location 28 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public void acknowledge(int value) { // read volatile variable to see other threads' changes limit = limit; if (value > limit + WINDOW_SIZE) { throw new RuntimeException("Too many unacknowledged insertion keys."); } window[value % WINDOW_SIZE] = true; if (lock.tryLock()) { // move a contiguous sequence from the window // over to the "limit" variable try { int index; for (index = limit + 1; index <= value; ++index) { int slot = index % WINDOW_SIZE; if (!window[slot]) { break; } window[slot] = false; } limit = index - 1; } finally { lock.unlock(); } } // write volatile variable to make other threads see changes limit = limit; }
#vulnerable code public void acknowledge(int value) { if (value > limit + WINDOW_SIZE) { throw new RuntimeException("This should be a different exception."); } window[value % WINDOW_SIZE] = true; if (lock.tryLock()) { // move a contiguous sequence from the window // over to the "limit" variable try { int index; for (index = limit + 1; index <= value; ++index) { int slot = index % WINDOW_SIZE; if (!window[slot]) { break; } window[slot] = false; } limit = index - 1; } finally { lock.unlock(); } } } #location 3 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
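The patched acknowledge() marks a slot in a fixed window, lets one thread at a time sweep the contiguous prefix under tryLock, and leans on volatile reads and writes of limit (the self-assignments read as deliberate volatile fences). A simplified, runnable model of that design, assuming limit is declared volatile:

import java.util.concurrent.locks.ReentrantLock;

// Windowed acknowledgment: slots are marked as values arrive; whichever
// thread wins tryLock advances the volatile limit over the contiguous
// prefix of acknowledged slots and clears them for reuse.
public class AckWindowSketch {
  private static final int WINDOW_SIZE = 1 << 10;
  private final ReentrantLock lock = new ReentrantLock();
  private final boolean[] window = new boolean[WINDOW_SIZE];
  private volatile int limit = -1; // highest contiguously acknowledged value

  void acknowledge(int value) {
    if (value > limit + WINDOW_SIZE) {
      throw new IllegalStateException("too many unacknowledged values");
    }
    window[value % WINDOW_SIZE] = true;
    if (lock.tryLock()) { // only one thread compacts at a time
      try {
        int index = limit + 1;
        while (index <= value && window[index % WINDOW_SIZE]) {
          window[index % WINDOW_SIZE] = false;
          index++;
        }
        limit = index - 1; // volatile write publishes the cleared slots
      } finally {
        lock.unlock();
      }
    }
  }

  public static void main(String[] args) {
    AckWindowSketch w = new AckWindowSketch();
    w.acknowledge(1); // out of order: limit stays at -1
    w.acknowledge(0); // contiguous prefix is now {0}: limit becomes 0
    w.acknowledge(2); // sweep also picks up the earlier 1: limit becomes 2
    System.out.println(w.limit); // 2
  }
}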
#fixed code @Override public void init() throws DBException { Properties props = getProperties(); // Defaults the user can override if needed CONFIG.set("google.bigtable.auth.service.account.enable", "true"); // make it easy on ourselves by copying all CLI properties into the config object. final Iterator<Entry<Object, Object>> it = props.entrySet().iterator(); while (it.hasNext()) { Entry<Object, Object> entry = it.next(); CONFIG.set((String)entry.getKey(), (String)entry.getValue()); } clientSideBuffering = getProperties().getProperty(CLIENT_SIDE_BUFFERING, "false") .equals("true") ? true : false; System.err.println("Running Google Bigtable with Proto API" + (clientSideBuffering ? " and client side buffering." : ".")); synchronized (CONFIG) { ++threadCount; if (session == null) { try { options = BigtableOptionsFactory.fromConfiguration(CONFIG); session = new BigtableSession(options); // important to instantiate the first client here, otherwise the // other threads may receive an NPE from the options when they try // to read the cluster name. client = session.getDataClient(); } catch (IOException e) { throw new DBException("Error loading options from config: ", e); } } else { client = session.getDataClient(); } if (clientSideBuffering) { heapSizeManager = new HeapSizeManager( Long.parseLong( getProperties().getProperty(ASYNC_MUTATOR_MAX_MEMORY, Long.toString(AsyncExecutor.ASYNC_MUTATOR_MAX_MEMORY_DEFAULT))), Integer.parseInt( getProperties().getProperty(ASYNC_MAX_INFLIGHT_RPCS, Integer.toString(AsyncExecutor.MAX_INFLIGHT_RPCS_DEFAULT)))); asyncExecutor = new AsyncExecutor(client, heapSizeManager); } } if ((getProperties().getProperty("debug") != null) && (getProperties().getProperty("debug").compareTo("true") == 0)) { debug = true; } final String columnFamily = getProperties().getProperty("columnfamily"); if (columnFamily == null) { System.err.println("Error, must specify a columnfamily for Bigtable table"); throw new DBException("No columnfamily specified"); } columnFamilyBytes = Bytes.toBytes(columnFamily); }
#vulnerable code @Override public void init() throws DBException { Properties props = getProperties(); // Defaults the user can override if needed CONFIG.set("google.bigtable.auth.service.account.enable", "true"); // make it easy on ourselves by copying all CLI properties into the config object. final Iterator<Entry<Object, Object>> it = props.entrySet().iterator(); while (it.hasNext()) { Entry<Object, Object> entry = it.next(); CONFIG.set((String)entry.getKey(), (String)entry.getValue()); } clientSideBuffering = getProperties().getProperty(CLIENT_SIDE_BUFFERING, "false") .equals("true") ? true : false; System.err.println("Running Google Bigtable with Proto API" + (clientSideBuffering ? " and client side buffering." : ".")); synchronized (threadCount) { ++threadCount; if (session == null) { try { options = BigtableOptionsFactory.fromConfiguration(CONFIG); session = new BigtableSession(options); // important to instantiate the first client here, otherwise the // other threads may receive an NPE from the options when they try // to read the cluster name. client = session.getDataClient(); } catch (IOException e) { throw new DBException("Error loading options from config: ", e); } } else { client = session.getDataClient(); } if (clientSideBuffering) { heapSizeManager = new HeapSizeManager( Long.parseLong( getProperties().getProperty(ASYNC_MUTATOR_MAX_MEMORY, Long.toString(AsyncExecutor.ASYNC_MUTATOR_MAX_MEMORY_DEFAULT))), Integer.parseInt( getProperties().getProperty(ASYNC_MAX_INFLIGHT_RPCS, Integer.toString(AsyncExecutor.MAX_INFLIGHT_RPCS_DEFAULT)))); asyncExecutor = new AsyncExecutor(client, heapSizeManager); } } if ((getProperties().getProperty("debug") != null) && (getProperties().getProperty("debug").compareTo("true") == 0)) { debug = true; } final String columnFamily = getProperties().getProperty("columnfamily"); if (columnFamily == null) { System.err.println("Error, must specify a columnfamily for Bigtable table"); throw new DBException("No columnfamily specified"); } columnFamilyBytes = Bytes.toBytes(columnFamily); } #location 21 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public void init() throws DBException { if ("true" .equals(getProperties().getProperty("clientbuffering", "false"))) { this.clientSideBuffering = true; } if (getProperties().containsKey("writebuffersize")) { writeBufferSize = Long.parseLong(getProperties().getProperty("writebuffersize")); } if (getProperties().getProperty("durability") != null) { this.durability = Durability.valueOf(getProperties().getProperty("durability")); } if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) { config.set("hadoop.security.authentication", "Kerberos"); UserGroupInformation.setConfiguration(config); } if ((getProperties().getProperty("principal")!=null) && (getProperties().getProperty("keytab")!=null)) { try { UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"), getProperties().getProperty("keytab")); } catch (IOException e) { System.err.println("Keytab file is not readable or not found"); throw new DBException(e); } } try { threadCount.getAndIncrement(); synchronized (CONNECTION_LOCK) { if (connection == null) { // Initialize if not set up already. connection = ConnectionFactory.createConnection(config); } } } catch (java.io.IOException e) { throw new DBException(e); } if ((getProperties().getProperty("debug") != null) && (getProperties().getProperty("debug").compareTo("true") == 0)) { debug = true; } if ("false" .equals(getProperties().getProperty("hbase.usepagefilter", "true"))) { usePageFilter = false; } columnFamily = getProperties().getProperty("columnfamily"); if (columnFamily == null) { System.err.println("Error, must specify a columnfamily for HBase table"); throw new DBException("No columnfamily specified"); } columnFamilyBytes = Bytes.toBytes(columnFamily); // Terminate right now if table does not exist, since the client // will not propagate this error upstream once the workload // starts. String table = com.yahoo.ycsb.workloads.CoreWorkload.table; try { final TableName tName = TableName.valueOf(table); synchronized (CONNECTION_LOCK) { connection.getTable(tName).getTableDescriptor(); } } catch (IOException e) { throw new DBException(e); } }
#vulnerable code @Override public void init() throws DBException { if ("true" .equals(getProperties().getProperty("clientbuffering", "false"))) { this.clientSideBuffering = true; } if (getProperties().containsKey("writebuffersize")) { writeBufferSize = Long.parseLong(getProperties().getProperty("writebuffersize")); } if (getProperties().getProperty("durability") != null) { this.durability = Durability.valueOf(getProperties().getProperty("durability")); } if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) { config.set("hadoop.security.authentication", "Kerberos"); UserGroupInformation.setConfiguration(config); } if ((getProperties().getProperty("principal")!=null) && (getProperties().getProperty("keytab")!=null)) { try { UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"), getProperties().getProperty("keytab")); } catch (IOException e) { System.err.println("Keytab file is not readable or not found"); throw new DBException(e); } } try { synchronized(threadCount) { ++threadCount; if (connection == null) { connection = ConnectionFactory.createConnection(config); } } } catch (java.io.IOException e) { throw new DBException(e); } if ((getProperties().getProperty("debug") != null) && (getProperties().getProperty("debug").compareTo("true") == 0)) { debug = true; } if ("false" .equals(getProperties().getProperty("hbase.usepagefilter", "true"))) { usePageFilter = false; } columnFamily = getProperties().getProperty("columnfamily"); if (columnFamily == null) { System.err.println("Error, must specify a columnfamily for HBase table"); throw new DBException("No columnfamily specified"); } columnFamilyBytes = Bytes.toBytes(columnFamily); // Terminate right now if table does not exist, since the client // will not propagate this error upstream once the workload // starts. String table = com.yahoo.ycsb.workloads.CoreWorkload.table; try { final TableName tName = TableName.valueOf(table); connection.getTable(tName).getTableDescriptor(); } catch (IOException e) { throw new DBException(e); } } #location 67 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Before public void setup() throws DBException { orientDBClient = new OrientDBClient(); Properties p = new Properties(); // TODO: Extract the property names into final variables in OrientDBClient p.setProperty("orientdb.url", TEST_DB_URL); orientDBClient.setProperties(p); orientDBClient.init(); orientDBDictionary = orientDBClient.db.getDictionary(); }
#vulnerable code @Before public void setup() throws DBException { orientDBClient = new OrientDBClient(); Properties p = new Properties(); // TODO: Extract the property names into final variables in OrientDBClient p.setProperty("orientdb.url", TEST_DB_URL); orientDBClient.setProperties(p); orientDBClient.init(); orientDBConnection = new ODatabaseDocumentTx(TEST_DB_URL).open("admin","admin"); orientDBDictionary = orientDBConnection.getMetadata().getIndexManager().getDictionary(); } #location 11 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code. Please generate the patch based on the following information.
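The leak came from opening a second ODatabaseDocumentTx that nothing closed; the fix reuses the client's own handle instead. As a general discipline, any AutoCloseable fixture can be scoped with try-with-resources, as in this generic sketch (the Connection type is a stand-in, not the OrientDB API):

// try-with-resources closes the handle on every path, including the
// exceptional ones, so no second unclosed handle can escape a setup method.
public class ResourceDisciplineSketch {
  static final class Connection implements AutoCloseable {
    Connection() { System.out.println("opened"); }
    String dictionary() { return "dictionary"; }
    @Override public void close() { System.out.println("closed"); }
  }

  public static void main(String[] args) {
    try (Connection conn = new Connection()) {
      System.out.println(conn.dictionary());
    } // closed here even if dictionary() threw
  }
}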
#fixed code @Override public Status update(String table, String key, Map<String, ByteIterator> values) { // Azure Cosmos does not have patch support. Until then we need to read // the document, update in place, and then write back. // This could actually be made more efficient by using a stored procedure // and doing the read/modify write on the server side. Perhaps // that will be a future improvement. String documentLink = getDocumentLink(this.databaseName, table, key); ResourceResponse<Document> updatedResource = null; ResourceResponse<Document> readResouce = null; RequestOptions reqOptions = null; Document document = null; try { reqOptions = getRequestOptions(key); readResouce = AzureCosmosClient.client.readDocument(documentLink, reqOptions); document = readResouce.getResource(); } catch (DocumentClientException e) { if (!this.includeExceptionStackInLog) { e = null; } LOGGER.error("Failed to read key {} in collection {} in database {} during update operation", key, table, this.databaseName, e); return Status.ERROR; } // Update values for (Entry<String, ByteIterator> entry : values.entrySet()) { document.set(entry.getKey(), entry.getValue().toString()); } AccessCondition accessCondition = new AccessCondition(); accessCondition.setCondition(document.getETag()); accessCondition.setType(AccessConditionType.IfMatch); reqOptions.setAccessCondition(accessCondition); try { updatedResource = AzureCosmosClient.client.replaceDocument(documentLink, document, reqOptions); } catch (DocumentClientException e) { if (!this.includeExceptionStackInLog) { e = null; } LOGGER.error("Failed to update key {}", key, e); return Status.ERROR; } return Status.OK; }
#vulnerable code @Override public Status update(String table, String key, Map<String, ByteIterator> values) { String documentLink = getDocumentLink(this.databaseName, table, key); Document document = getDocumentDefinition(key, values); RequestOptions reqOptions = getRequestOptions(key); if (reqOptions == null) { reqOptions = new RequestOptions(); } AccessCondition accessCondition = new AccessCondition(); accessCondition.setCondition(document.getETag()); accessCondition.setType(AccessConditionType.IfMatch); reqOptions.setAccessCondition(accessCondition); ResourceResponse<Document> updatedResource = null; long startTime = System.nanoTime(); try { updatedResource = AzureCosmosClient.client.replaceDocument(documentLink, document, reqOptions); } catch (DocumentClientException e) { if (!this.includeExceptionStackInLog) { e = null; } LOGGER.error("Failed to update key {}", key, e); return Status.ERROR; } finally { long elapsed = (System.nanoTime() - startTime) / NS_IN_US; LOGGER.debug("Updated key {} in {}us - ActivityID: {}", key, elapsed, updatedResource != null ? updatedResource.getActivityId() : NA_STRING); } return Status.OK; } #location 4 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
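The patch turns a blind replace into read-modify-write guarded by the document's ETag via an IfMatch access condition, which is optimistic concurrency control. A miniature in-memory model of the same check, with a version number standing in for the ETag (the store below is a stand-in, not the Cosmos SDK):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Optimistic concurrency in miniature: each write bumps a version number,
// and a replace succeeds only if the caller's snapshot version still
// matches, just like the IfMatch condition in the patch.
public class EtagSketch {
  static final class Doc {
    final String value;
    final long version;
    Doc(String value, long version) { this.value = value; this.version = version; }
  }

  private final ConcurrentMap<String, Doc> store = new ConcurrentHashMap<>();

  Doc read(String key) { return store.get(key); }

  synchronized boolean replaceIfMatch(String key, Doc snapshot, String newValue) {
    Doc current = store.get(key);
    if (current == null || current.version != snapshot.version) {
      return false; // someone updated since our read: reject, caller retries
    }
    store.put(key, new Doc(newValue, snapshot.version + 1));
    return true;
  }

  public static void main(String[] args) {
    EtagSketch db = new EtagSketch();
    db.store.put("user1", new Doc("a", 1));
    Doc snapshot = db.read("user1");              // read ...
    String updated = snapshot.value + "-updated"; // ... modify ...
    System.out.println(db.replaceIfMatch("user1", snapshot, updated)); // true
    System.out.println(db.replaceIfMatch("user1", snapshot, "stale")); // false: stale version
  }
}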
#fixed code public void getHTable(String table) throws IOException { final TableName tName = TableName.valueOf(table); synchronized (CONNECTION_LOCK) { this.currentTable = connection.getTable(tName); if (clientSideBuffering) { final BufferedMutatorParams p = new BufferedMutatorParams(tName); p.writeBufferSize(writeBufferSize); this.bufferedMutator = connection.getBufferedMutator(p); } } }
#vulnerable code public void getHTable(String table) throws IOException { final TableName tName = TableName.valueOf(table); this.currentTable = this.connection.getTable(tName); // suggestions from // http://ryantwopointoh.blogspot.com/2009/01/ // performance-of-hbase-importing.html if (clientSideBuffering) { final BufferedMutatorParams p = new BufferedMutatorParams(tName); p.writeBufferSize(writeBufferSize); this.bufferedMutator = this.connection.getBufferedMutator(p); } } #location 10 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public boolean doTransaction(DB db, Object threadstate) { String operation = operationchooser.nextString(); if(operation == null) { return false; } switch (operation) { case "READ": doTransactionRead(db); break; case "UPDATE": doTransactionUpdate(db); break; case "INSERT": doTransactionInsert(db); break; case "SCAN": doTransactionScan(db); break; default: doTransactionReadModifyWrite(db); } return true; }
#vulnerable code @Override public boolean doTransaction(DB db, Object threadstate) { switch (operationchooser.nextString()) { case "READ": doTransactionRead(db); break; case "UPDATE": doTransactionUpdate(db); break; case "INSERT": doTransactionInsert(db); break; case "SCAN": doTransactionScan(db); break; default: doTransactionReadModifyWrite(db); } return true; } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public int insert(String table, String key, HashMap<String, ByteIterator> values) { try { MongoCollection<Document> collection = database .getCollection(table); Document toInsert = new Document("_id", key); for (Map.Entry<String, ByteIterator> entry : values.entrySet()) { toInsert.put(entry.getKey(), entry.getValue().toArray()); } bulkInserts.add(toInsert); if (bulkInserts.size() == batchSize) { collection.insertMany(bulkInserts, INSERT_MANY_OPTIONS); bulkInserts.clear(); } return 0; } catch (Exception e) { System.err.println("Exception while trying bulk insert with " + bulkInserts.size()); e.printStackTrace(); return 1; } }
#vulnerable code @Override public int insert(String table, String key, HashMap<String, ByteIterator> values) { try { MongoCollection<Document> collection = database .getCollection(table); Document toInsert = new Document("_id", key); for (Map.Entry<String, ByteIterator> entry : values.entrySet()) { toInsert.put(entry.getKey(), entry.getValue().toArray()); } bulkInserts.add(toInsert); if (bulkInserts.size() == batchSize) { collection.withWriteConcern(writeConcern) .insertMany(bulkInserts, INSERT_MANY_OPTIONS); bulkInserts.clear(); } return 0; } catch (Exception e) { System.err.println("Exception while trying bulk insert with " + bulkInserts.size()); e.printStackTrace(); return 1; } } #location 14 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code public void getHTable(String table) throws IOException { final TableName tName = TableName.valueOf(table); this.currentTable = connection.getTable(tName); if (clientSideBuffering) { final BufferedMutatorParams p = new BufferedMutatorParams(tName); p.writeBufferSize(writeBufferSize); this.bufferedMutator = connection.getBufferedMutator(p); } }
#vulnerable code public void getHTable(String table) throws IOException { final TableName tName = TableName.valueOf(table); synchronized (CONNECTION_LOCK) { this.currentTable = connection.getTable(tName); if (clientSideBuffering) { final BufferedMutatorParams p = new BufferedMutatorParams(tName); p.writeBufferSize(writeBufferSize); this.bufferedMutator = connection.getBufferedMutator(p); } } } #location 5 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
#fixed code @Override public Status scan(String table, String startkey, int recordcount, Set<String> fields, Vector<HashMap<String, ByteIterator>> result) { List<Document> documents; FeedResponse<Document> feedResponse = null; try { FeedOptions feedOptions = new FeedOptions(); feedOptions.setEnableCrossPartitionQuery(true); feedOptions.setMaxDegreeOfParallelism(this.maxDegreeOfParallelismForQuery); feedResponse = AzureCosmosClient.client.queryDocuments(getDocumentCollectionLink(this.databaseName, table), new SqlQuerySpec("SELECT TOP @recordcount * FROM root r WHERE r.id >= @startkey", new SqlParameterCollection(new SqlParameter("@recordcount", recordcount), new SqlParameter("@startkey", startkey))), feedOptions); documents = feedResponse.getQueryIterable().toList(); } catch (Exception e) { if (!this.includeExceptionStackInLog) { e = null; } LOGGER.error("Failed to scan with startKey={}, recordCount={}", startkey, recordcount, e); return Status.ERROR; } if (documents != null) { for (Document document : documents) { result.add(this.extractResult(document)); } } return Status.OK; }
#vulnerable code @Override public Status scan(String table, String startkey, int recordcount, Set<String> fields, Vector<HashMap<String, ByteIterator>> result) { List<Document> documents; FeedResponse<Document> feedResponse = null; try { feedResponse = AzureCosmosClient.client.queryDocuments(getDocumentCollectionLink(this.databaseName, table), new SqlQuerySpec(queryText, new SqlParameterCollection(new SqlParameter("@recordcount", recordcount), new SqlParameter("@startkey", startkey))), getFeedOptions(startkey)); documents = feedResponse.getQueryIterable().toList(); } catch (Exception e) { if (!this.includeExceptionStackInLog) { e = null; } LOGGER.error("Failed to scan with startKey={}, recordCount={}", startkey, recordcount, e); return Status.ERROR; } if (documents != null) { for (Document document : documents) { result.add(this.extractResult(document)); } } return Status.OK; } #location 11 #vulnerability type THREAD_SAFETY_VIOLATION
Below is the vulnerable code. Please generate the patch based on the following information.
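The scan fix builds its FeedOptions locally inside the call instead of fetching them from a shared helper; per-call objects are confined to the calling thread's stack and need no locking. A small sketch of that confinement, with a stand-in Options type rather than the SDK's:

// Thread confinement: per-request options live on the calling thread's
// stack, so concurrent scans cannot observe each other's mutations.
public class LocalOptionsSketch {
  static final class Options {
    boolean crossPartition;
    int parallelism;
  }

  static String scan(String startKey, int parallelism) {
    Options options = new Options(); // fresh per call: nothing shared
    options.crossPartition = true;
    options.parallelism = parallelism;
    return "scan from " + startKey + " (parallelism=" + options.parallelism + ")";
  }

  public static void main(String[] args) {
    System.out.println(scan("user1", 4));
    System.out.println(scan("user2", 8)); // independent options instance
  }
}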
#fixed code public void writeTo( Writer writer, WriterConfig config ) throws IOException { if( writer == null ) { throw new NullPointerException( "writer is null" ); } if( config == null ) { throw new NullPointerException( "config is null" ); } WritingBuffer buffer = new WritingBuffer( writer, 128 ); write( config.createWriter( buffer ) ); buffer.flush(); }
#vulnerable code public void writeTo( Writer writer, WriterConfig config ) throws IOException { WritingBuffer buffer = new WritingBuffer( writer, 128 ); write( config == null ? new JsonWriter( buffer ) : config.createWriter( buffer ) ); buffer.flush(); } #location 4 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code. Please generate the patch based on the following information.
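The fixed writeTo validates both arguments before the WritingBuffer exists, so a bad call can no longer allocate a resource that the ensuing NullPointerException would strand. The fail-fast precondition shape in general, sketched against plain java.io types:

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

// Fail-fast preconditions: reject null arguments before any resource is
// created, so nothing is left dangling when the caller passed garbage.
public class PreconditionSketch {
  static void writeTo(Writer writer, String config) throws IOException {
    if (writer == null) {
      throw new NullPointerException("writer is null");
    }
    if (config == null) {
      throw new NullPointerException("config is null");
    }
    // Only now is it safe to allocate buffers or wrap the writer.
    writer.write(config);
    writer.flush();
  }

  public static void main(String[] args) throws IOException {
    StringWriter out = new StringWriter();
    writeTo(out, "{\"ok\":true}");
    System.out.println(out);
  }
}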
#fixed code public long query(int end) { end--; final int blockIndex = end / sqrt; final int freq[] = new int[powers.length]; final int queue[] = new int[sqrt]; int count = 0; final int endIndex = end % sqrt; for (int i = 0; i <= endIndex; i++) { if (freq[a[blockIndex * sqrt + i]] == 0) { queue[count++] = a[blockIndex * sqrt + i]; } freq[a[blockIndex * sqrt + i]]++; } long result = blockIndex > 0 ? blocks[blockIndex - 1] : 1; for (int i = 0; i < count; i++) { final int previous = blockIndex > 0 ? frequencies[blockIndex - 1][queue[i]] : 0; result = ((((result * invPowers[previous]) % mod) * powers[previous + freq[queue[i]]]) % mod); } return result; }
#vulnerable code public long query(int end) { end--; final int blockIndex = end / sqrt; final Map<Integer, Integer> map = new HashMap<>(); final int[][] elements = new int[sqrt][2]; int count = 0; final int endIndex = end % sqrt; for (int i = 0; i <= endIndex; i++) { if (!map.containsKey(a[blockIndex * sqrt + i])) { map.put(a[blockIndex * sqrt + i], count); elements[count][0] = a[blockIndex * sqrt + i]; count++; } elements[map.get(a[blockIndex * sqrt + i])][1]++; } BigInteger result = blockIndex > 0 ? blocks[blockIndex - 1] : BigInteger.ONE; for (final Map.Entry<Integer, Integer> entry : map.entrySet()) { final int previous = blockIndex > 0 ? frequencies[blockIndex - 1][entry.getKey()] : 0; result = result .multiply(invPowers[previous]) .mod(modo) .multiply(powers[previous + elements[entry.getValue()][1]]) .mod(modo); } return result.longValue(); } #location 14 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code. Please generate the patch based on the following information.
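The rewrite trades HashMap lookups, where get() on a missing key returns a null that explodes at the unboxing site, for primitive frequency arrays that simply read zero; it also swaps BigInteger for precomputed modular powers, which this sketch does not reproduce. The map-versus-array slice, runnable:

import java.util.HashMap;
import java.util.Map;

// Frequency counting two ways: map.get() on a missing key yields null and
// fails at the unboxing site, while a primitive array just reads 0.
public class FrequencySketch {
  public static void main(String[] args) {
    int[] a = {2, 3, 2, 5};

    // Map style: every lookup needs a null-safe default.
    Map<Integer, Integer> mapFreq = new HashMap<>();
    for (int x : a) {
      mapFreq.merge(x, 1, Integer::sum);
    }
    int missing = mapFreq.getOrDefault(7, 0); // plain get(7) would be null

    // Array style from the patch: indices are bounded, cells default to 0.
    int[] freq = new int[10];
    for (int x : a) {
      freq[x]++;
    }
    System.out.println(mapFreq + " missing=" + missing + " freq[2]=" + freq[2]);
  }
}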
#fixed code public static void main(String[] args) throws IOException { final InputReader in = new InputReader(System.in); V = in.readInt(); final int Q = in.readInt(); final Query queries[] = new Query[MAX]; final Update updates[] = new Update[MAX]; final Map<Integer, Integer> map = new HashMap<>(); // Used to compress the keys adj = new ArrayList[V + 1]; for (int i = 1; i <= V; i++) { adj[i] = new ArrayList<>(); } val = new int[V + 1]; for (int i = 1; i <= V; i++) { val[i] = in.readInt(); } for (int i = 1; i <= V; i++) { if (!map.containsKey(val[i])) { map.put(val[i], map.size()); } val[i] = map.get(val[i]); } final int currVal[] = new int[V + 1]; System.arraycopy(val, 0, currVal, 0, V + 1); final int edges = V - 1; for (int i = 0; i < edges; i++) { final int u = in.readInt(); final int v = in.readInt(); adj[u].add(v); adj[v].add(u); } start = new int[V + 1]; end = new int[V + 1]; eulerTour = new int[2 * (V + 1)]; level = new int[V + 1]; marked = new boolean[V + 1]; DP = new int[log(V) + 1][V + 1]; parent = new int[V + 1]; final int block[] = new int[2 * (V + 1)]; dfs(1, 0, 0); binaryLift(); int numberOfQueries = 0, numberOfUpdates = 0; for (int i = 0; i < Q; i++) { if (in.readInt() == 1) { // Query final int u = in.readInt(); final int v = in.readInt(); final Query q; if (end[u] < start[v]) // Cousin Nodes { q = new Query(end[u], start[v], numberOfUpdates, LCA(u, v), numberOfQueries); } else if (start[u] > end[v]) { q = new Query(end[v], start[u], numberOfUpdates, LCA(u, v), numberOfQueries); } else // Ancestors { q = new Query(Math.min(start[u], start[v]), Math.max(start[u], start[v]), numberOfUpdates, -1, numberOfQueries); } queries[numberOfQueries++] = q; } else { final int idx = in.readInt(); int newVal = in.readInt(); if (!map.containsKey(newVal)) { map.put(newVal, map.size()); } newVal = map.get(newVal); updates[numberOfUpdates++] = new Update(idx, newVal, currVal[idx]); currVal[idx] = newVal; } } freq = new int[map.size()]; final int BLOCK_SIZE = (int) (Math.pow(2 * V, 2.0 / 3.0) + 1); for (int i = 0; i < block.length; i++) { block[i] = i / BLOCK_SIZE; } Arrays.sort(queries, 0, numberOfQueries, (o1, o2) -> { if (block[o1.L] != block[o2.L]) { return block[o1.L] - block[o2.L]; } else if (block[o1.R] != block[o2.R]) { return block[o1.R] - block[o2.R]; } else { return o1.updatesTillNow - o2.updatesTillNow; } }); final int ans[] = new int[numberOfQueries]; int moLeft = -1, moRight = -1; int currentUpdateCount = 0; for (int i = 0; i < numberOfQueries; i++) { final Query query = queries[i]; while (currentUpdateCount < query.updatesTillNow) { final Update update = updates[currentUpdateCount]; update(update.idx, update.newVal); currentUpdateCount++; } while (currentUpdateCount > query.updatesTillNow) { currentUpdateCount--; final Update update = updates[currentUpdateCount]; update(update.idx, update.prevVal); } while (moLeft < query.L - 1) { moLeft++; visit(eulerTour[moLeft]); } while (moLeft >= query.L) { visit(eulerTour[moLeft]); moLeft--; } while (moRight < query.R) { moRight++; visit(eulerTour[moRight]); } while (moRight > query.R) { visit(eulerTour[moRight]); moRight--; } if (query.LCA != -1) { visit(query.LCA); } ans[query.id] = distinctCount; if (query.LCA != -1) { visit(query.LCA); } } final StringBuilder stringBuilder=new StringBuilder(); for (final int a : ans) { stringBuilder.append(a).append('\n'); } System.out.println(stringBuilder); }
#vulnerable code public static void main(String[] args) throws IOException { final InputReader in = new InputReader(System.in); int qSZ = 0, uSZ = 0; V = in.readInt(); int Q = in.readInt(); int E = V - 1; Query queries[] = new Query[MAX]; Update updates[] = new Update[MAX]; map = new HashMap<>(); // Used to compress the keys adj = new ArrayList[V + 1]; for (int i = 1; i <= V; i++) { adj[i] = new ArrayList<>(); } val = new int[V + 1]; for (int i = 1; i <= V; i++) { val[i] = in.readInt(); } for (int i = 1; i <= V; i++) { if (!map.containsKey(val[i])) { map.put(val[i], map.size()); } val[i] = map.get(val[i]); } int currVal[] = new int[V + 1]; System.arraycopy(val, 0, currVal, 0, V + 1); while (E-- > 0) { final int u = in.readInt(); final int v = in.readInt(); adj[u].add(v); adj[v].add(u); } start = new int[V + 1]; end = new int[V + 1]; eulerTour = new int[2 * (V + 1)]; level = new int[V + 1]; marked = new boolean[V + 1]; DP = new int[log(V) + 1][V + 1]; parent = new int[V + 1]; blockCache = new int[2 * (V + 1)]; dfs(1, 0, 0); binaryLift(); while (Q-- > 0) { if (in.readInt() == 1) { // Query final int u = in.readInt(); final int v = in.readInt(); final Query q; if (end[u] < start[v]) // Cousin Nodes { q = new Query(end[u], start[v], uSZ, LCA(u, v), qSZ); } else if (start[u] > end[v]) { q = new Query(end[v], start[u], uSZ, LCA(u, v), qSZ); } else // Ancestors { q = new Query(Math.min(start[u], start[v]), Math.max(start[u], start[v]), uSZ, -1, qSZ); } queries[qSZ++] = q; } else { final int idx = in.readInt(); int newVal = in.readInt(); if (!map.containsKey(newVal)) { map.put(newVal, map.size()); } newVal = map.get(newVal); updates[uSZ++] = new Update(idx, newVal, currVal[idx]); currVal[idx] = newVal; } } freq = new int[map.size()]; BLOCK_SIZE = (int) (Math.pow(2 * V, 2.0 / 3.0) + 1); for (int i = 0; i < blockCache.length; i++) { blockCache[i] = i / BLOCK_SIZE; } Arrays.sort(queries, 0, qSZ, new MoComparator()); final int ans[] = new int[qSZ]; int moLeft = -1, moRight = -1; int currUpd = 0; for (int i = 0; i < qSZ; i++) { final Query q = queries[i]; while (currUpd < q.numUpdatesLess) { final Update u = updates[currUpd]; update(u.idx, u.newVal); currUpd++; } while (currUpd > q.numUpdatesLess) { final Update u = updates[currUpd - 1]; update(u.idx, u.prevVal); currUpd--; } while (moLeft < q.L - 1) { moLeft++; visit(eulerTour[moLeft]); } while (moLeft >= q.L) { visit(eulerTour[moLeft]); moLeft--; } while (moRight < q.R) { moRight++; visit(eulerTour[moRight]); } while (moRight > q.R) { visit(eulerTour[moRight]); moRight--; } if (q.LCA != -1) { visit(q.LCA); } ans[q.id] = distinctCount; if (q.LCA != -1) { visit(q.LCA); } } for (final int a : ans) { System.out.println(a); } } #location 23 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
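The patched main above replaces the standalone MoComparator with an inline three-key ordering: block of L, then block of R, then the number of updates applied before the query. The same ordering can be written with Comparator chaining; the sketch below is illustrative only, using a hypothetical Q holder and a toy block size rather than the roughly (2N)^(2/3) block used in Mo's algorithm with updates:

import java.util.Arrays;
import java.util.Comparator;

public class MoOrdering {
    // Hypothetical query holder; the real solution also carries an id and an LCA.
    static final class Q {
        final int l, r, t;
        Q(int l, int r, int t) { this.l = l; this.r = r; this.t = t; }
    }

    public static void main(String[] args) {
        final int blockSize = 4;
        Q[] qs = { new Q(9, 12, 1), new Q(1, 3, 0), new Q(2, 8, 0) };
        // Block of L, then block of R, then updates seen so far -- the same
        // three keys the lambda comparator in the fix compares in order.
        Arrays.sort(qs, Comparator.<Q>comparingInt(q -> q.l / blockSize)
                .thenComparingInt(q -> q.r / blockSize)
                .thenComparingInt(q -> q.t));
        for (Q q : qs) {
            System.out.println(q.l + " " + q.r + " " + q.t);
        }
    }
}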
#fixed code @Override @CacheEvict(value={"metaCaches","metaCache"},allEntries=true,beforeInvocation=true) public void saveMeta(String type, String name, Integer mid) { if (StringUtils.isNotBlank(type) && StringUtils.isNotBlank(name)){ MetaCond metaCond = new MetaCond(); metaCond.setName(name); metaCond.setType(type); List<MetaDomain> metas = metaDao.getMetasByCond(metaCond); if (null == metas || metas.size() == 0){ MetaDomain metaDomain = new MetaDomain(); metaDomain.setName(name); if (null != mid){ MetaDomain meta = metaDao.getMetaById(mid); if (null != meta) metaDomain.setMid(mid); metaDao.updateMeta(metaDomain); /* update the category of existing posts */ if(meta !=null) { contentService.updateCategory(meta.getName(), name); } } else { metaDomain.setType(type); metaDao.addMeta(metaDomain); } } else { throw BusinessException.withErrorCode(ErrorConstant.Meta.META_IS_EXIST); } } }
#vulnerable code @Override @CacheEvict(value={"metaCaches","metaCache"},allEntries=true,beforeInvocation=true) public void saveMeta(String type, String name, Integer mid) { if (StringUtils.isNotBlank(type) && StringUtils.isNotBlank(name)){ MetaCond metaCond = new MetaCond(); metaCond.setName(name); metaCond.setType(type); List<MetaDomain> metas = metaDao.getMetasByCond(metaCond); if (null == metas || metas.size() == 0){ MetaDomain metaDomain = new MetaDomain(); metaDomain.setName(name); if (null != mid){ MetaDomain meta = metaDao.getMetaById(mid); if (null != meta) metaDomain.setMid(mid); metaDao.updateMeta(metaDomain); /* update the category of existing posts */ contentService.updateCategory(meta.getName(), name); } else { metaDomain.setType(type); metaDao.addMeta(metaDomain); } } else { throw BusinessException.withErrorCode(ErrorConstant.Meta.META_IS_EXIST); } } } #location 19 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
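The fix above guards meta before touching it, because metaDao.getMetaById(mid) can return null for a stale id. The same shape in isolation — a plain Map stands in for the DAO, and all names are illustrative:

import java.util.HashMap;
import java.util.Map;

public class NullGuardDemo {
    // Stand-in for metaDao.getMetaById(mid): may return null.
    static String findById(Map<Integer, String> repo, int id) {
        return repo.get(id);
    }

    public static void main(String[] args) {
        Map<Integer, String> repo = new HashMap<>();
        String name = findById(repo, 42);
        // Mirror of the "if (meta != null)" guard added by the patch.
        if (name != null) {
            System.out.println(name.toUpperCase());
        } else {
            System.out.println("no such record");
        }
    }
}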
#fixed code public String indexDataset(Dataset dataset) { logger.info("indexing dataset " + dataset.getId()); /** * @todo should we use solrDocIdentifierDataset or * IndexableObject.IndexableTypes.DATASET.getName() + "_" ? */ // String solrIdPublished = solrDocIdentifierDataset + dataset.getId(); String solrIdPublished = determinePublishedDatasetSolrDocId(dataset); String solrIdDraftDataset = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.WORKING_COPY.getSuffix(); // String solrIdDeaccessioned = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.DEACCESSIONED.getSuffix(); String solrIdDeaccessioned = determineDeaccesionedDatasetId(dataset); StringBuilder debug = new StringBuilder(); debug.append("\ndebug:\n"); int numReleasedVersions = 0; List<DatasetVersion> versions = dataset.getVersions(); for (DatasetVersion datasetVersion : versions) { Long versionDatabaseId = datasetVersion.getId(); String versionTitle = datasetVersion.getTitle(); String semanticVersion = datasetVersion.getSemanticVersion(); DatasetVersion.VersionState versionState = datasetVersion.getVersionState(); if (versionState.equals(DatasetVersion.VersionState.RELEASED)) { /** * @todo for performance, should just query this rather than * iterating. Would need a new SQL query/method */ numReleasedVersions += 1; } debug.append("version found with database id " + versionDatabaseId + "\n"); debug.append("- title: " + versionTitle + "\n"); debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n"); List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas(); List<String> fileInfo = new ArrayList<>(); for (FileMetadata fileMetadata : fileMetadatas) { fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel()); } int numFiles = 0; if (fileMetadatas != null) { numFiles = fileMetadatas.size(); } debug.append("- files: " + numFiles + " " + fileInfo.toString() + "\n"); } DatasetVersion latestVersion = dataset.getLatestVersion(); String latestVersionStateString = latestVersion.getVersionState().name(); DatasetVersion.VersionState latestVersionState = latestVersion.getVersionState(); DatasetVersion releasedVersion = dataset.getReleasedVersion(); if (releasedVersion != null) { if (releasedVersion.getVersionState().equals(DatasetVersion.VersionState.DEACCESSIONED)) { DatasetVersion lookupAttempt2 = releasedVersion.getMostRecentlyReleasedVersion(); String message = "WARNING: called dataset.getReleasedVersion() but version returned was deaccessioned (database id " + releasedVersion.getId() + "). (releasedVersion.getMostRecentlyReleasedVersion() returns database id " + lookupAttempt2.getId() + " so that method may be better?). Look out for strange indexing results."; logger.severe(message); debug.append(message); } } Map<DatasetVersion.VersionState, Boolean> desiredCards = new LinkedHashMap<>(); /** * @todo refactor all of this below and have a single method that takes * the map of desired cards (which correspond to Solr documents) as one * of the arguments and does all the operations necessary to achieve the * desired state. 
*/ StringBuilder results = new StringBuilder(); if (numReleasedVersions == 0) { results.append("No published version, nothing will be indexed as ") .append(solrIdPublished).append("\n"); if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) { desiredCards.put(DatasetVersion.VersionState.DRAFT, true); IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion); String indexDraftResult = addOrUpdateDataset(indexableDraftVersion); results.append("The latest version is a working copy (latestVersionState: ") .append(latestVersionStateString).append(") and indexing was attempted for ") .append(solrIdDraftDataset).append(" (limited discoverability). Result: ") .append(indexDraftResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("Draft exists, no need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(" (and files). Result: ").append(deleteDeaccessionedResult); desiredCards.put(DatasetVersion.VersionState.RELEASED, false); /** * @todo delete published? */ /** * Desired state for existence of cards: {DRAFT=true, * DEACCESSIONED=false, RELEASED=false} * * No published version, nothing will be indexed as dataset_17 * * The latest version is a working copy (latestVersionState: * DRAFT) and indexing was attempted for dataset_17_draft * (limited discoverability). Result: indexed dataset 17 as * dataset_17_draft. filesIndexed: [datafile_18_draft] * * Draft exists, no need for deaccessioned version. Deletion * attempted for dataset_17_deaccessioned (and files). Result: * Attempted to delete dataset_17_deaccessioned from Solr index. * updateReponse was: * {responseHeader={status=0,QTime=0}}Attempted to delete * datafile_18_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else if (latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, true); IndexableDataset indexableDeaccessionedVersion = new IndexableDataset(latestVersion); String indexDeaccessionedVersionResult = addOrUpdateDataset(indexableDeaccessionedVersion); results.append("No draft version. Attempting to index as deaccessioned. Result: ").append(indexDeaccessionedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.RELEASED, false); String deletePublishedResults = removePublished(dataset); results.append("No published version. Attempting to delete traces of published version from index. Result: ").append(deletePublishedResults); desiredCards.put(DatasetVersion.VersionState.DRAFT, false); /** * @todo delete drafts? */ /** * Desired state for existence of cards: {DEACCESSIONED=true, * RELEASED=false, DRAFT=false} * * No published version, nothing will be indexed as dataset_17 * * No draft version. Attempting to index as deaccessioned. * Result: indexed dataset 17 as dataset_17_deaccessioned. * filesIndexed: [] * * No published version. Attempting to delete traces of * published version from index. Result: Attempted to delete * dataset_17 from Solr index. updateReponse was: * {responseHeader={status=0,QTime=1}}Attempted to delete * datafile_18 from Solr index. 
updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else { return "No-op. Unexpected condition reached: No released version and latest version is neither draft nor deaccesioned"; } } else if (numReleasedVersions > 0) { results.append("Released versions found: ").append(numReleasedVersions) .append(". Will attempt to index as ").append(solrIdPublished).append(" (discoverable by anonymous)\n"); if (latestVersionState.equals(DatasetVersion.VersionState.RELEASED) || latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { desiredCards.put(DatasetVersion.VersionState.RELEASED, true); IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion); String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion); results.append("Attempted to index " + solrIdPublished).append(". Result: ").append(indexReleasedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DRAFT, false); List<String> solrDocIdsForDraftFilesToDelete = findSolrDocIdsForDraftFilesToDelete(dataset); String deleteDraftDatasetVersionResult = removeSolrDocFromIndex(solrIdDraftDataset); StringBuilder deleteDraftFilesResults = new StringBuilder(); for (String doomed : solrDocIdsForDraftFilesToDelete) { String result = removeSolrDocFromIndex(doomed); deleteDraftFilesResults.append(result); } results.append("The latest version is published. Attempting to delete drafts. Result: ") .append(deleteDraftDatasetVersionResult).append(deleteDraftFilesResults).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("No need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult); /** * Desired state for existence of cards: {RELEASED=true, * DRAFT=false, DEACCESSIONED=false} * * Released versions found: 1. Will attempt to index as * dataset_17 (discoverable by anonymous) * * Attempted to index dataset_17. Result: indexed dataset 17 as * dataset_17. filesIndexed: [datafile_18] * * The latest version is published. Attempting to delete drafts. * Result: Attempted to delete dataset_17_draft from Solr index. * updateReponse was: {responseHeader={status=0,QTime=1}} * * No need for deaccessioned version. Deletion attempted for * dataset_17_deaccessioned. Result: Attempted to delete * dataset_17_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=1}}Attempted to delete * datafile_18_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) { IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion); desiredCards.put(DatasetVersion.VersionState.DRAFT, true); String indexDraftResult = addOrUpdateDataset(indexableDraftVersion); results.append("The latest version is a working copy (latestVersionState: ") .append(latestVersionStateString).append(") and will be indexed as ") .append(solrIdDraftDataset).append(" (limited visibility). 
Result: ").append(indexDraftResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.RELEASED, true); IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion); String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion); results.append("There is a published version we will attempt to index. Result: ").append(indexReleasedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("No need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult); /** * Desired state for existence of cards: {DRAFT=true, * RELEASED=true, DEACCESSIONED=false} * * Released versions found: 1. Will attempt to index as * dataset_17 (discoverable by anonymous) * * The latest version is a working copy (latestVersionState: * DRAFT) and will be indexed as dataset_17_draft (limited * visibility). Result: indexed dataset 17 as dataset_17_draft. * filesIndexed: [datafile_18_draft] * * There is a published version we will attempt to index. * Result: indexed dataset 17 as dataset_17. filesIndexed: * [datafile_18] * * No need for deaccessioned version. Deletion attempted for * dataset_17_deaccessioned. Result: Attempted to delete * dataset_17_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=1}}Attempted to delete * datafile_18_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else { return "No-op. Unexpected condition reached: There is at least one published version but the latest version is neither published nor draft"; } } else { return "No-op. Unexpected condition reached: Negative number of released versions? Count was: " + numReleasedVersions; } }
#vulnerable code public String indexDataset(Dataset dataset) { logger.info("indexing dataset " + dataset.getId()); /** * @todo should we use solrDocIdentifierDataset or * IndexableObject.IndexableTypes.DATASET.getName() + "_" ? */ // String solrIdPublished = solrDocIdentifierDataset + dataset.getId(); String solrIdPublished = determinePublishedDatasetSolrDocId(dataset); String solrIdDraftDataset = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.WORKING_COPY.getSuffix(); // String solrIdDeaccessioned = IndexableObject.IndexableTypes.DATASET.getName() + "_" + dataset.getId() + IndexableDataset.DatasetState.DEACCESSIONED.getSuffix(); String solrIdDeaccessioned = determineDeaccesionedDatasetId(dataset); StringBuilder debug = new StringBuilder(); debug.append("\ndebug:\n"); int numReleasedVersions = 0; List<DatasetVersion> versions = dataset.getVersions(); for (DatasetVersion datasetVersion : versions) { Long versionDatabaseId = datasetVersion.getId(); String versionTitle = datasetVersion.getTitle(); String semanticVersion = datasetVersion.getSemanticVersion(); DatasetVersion.VersionState versionState = datasetVersion.getVersionState(); if (versionState.equals(DatasetVersion.VersionState.RELEASED)) { /** * @todo for performance, should just query this rather than * iterating. Would need a new SQL query/method */ numReleasedVersions += 1; } debug.append("version found with database id " + versionDatabaseId + "\n"); debug.append("- title: " + versionTitle + "\n"); debug.append("- semanticVersion-VersionState: " + semanticVersion + "-" + versionState + "\n"); List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas(); List<String> fileInfo = new ArrayList<>(); for (FileMetadata fileMetadata : fileMetadatas) { fileInfo.add(fileMetadata.getDataFile().getId() + ":" + fileMetadata.getLabel()); } int numFiles = 0; if (fileMetadatas != null) { numFiles = fileMetadatas.size(); } debug.append("- files: " + numFiles + " " + fileInfo.toString() + "\n"); } DatasetVersion latestVersion = dataset.getLatestVersion(); String latestVersionStateString = latestVersion.getVersionState().name(); DatasetVersion.VersionState latestVersionState = latestVersion.getVersionState(); DatasetVersion releasedVersion = dataset.getReleasedVersion(); if (releasedVersion.getVersionState().equals(DatasetVersion.VersionState.DEACCESSIONED)) { logger.severe("WARNING: called dataset.getReleasedVersion() but version returned was deaccessioned. Look out for strange indexing results."); } Map<DatasetVersion.VersionState, Boolean> desiredCards = new LinkedHashMap<>(); /** * @todo refactor all of this below and have a single method that takes * the map of desired cards (which correspond to Solr documents) as one * of the arguments and does all the operations necessary to achieve the * desired state. */ StringBuilder results = new StringBuilder(); if (numReleasedVersions == 0) { results.append("No published version, nothing will be indexed as ") .append(solrIdPublished).append("\n"); if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) { desiredCards.put(DatasetVersion.VersionState.DRAFT, true); IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion); String indexDraftResult = addOrUpdateDataset(indexableDraftVersion); results.append("The latest version is a working copy (latestVersionState: ") .append(latestVersionStateString).append(") and indexing was attempted for ") .append(solrIdDraftDataset).append(" (limited discoverability). 
Result: ") .append(indexDraftResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("Draft exists, no need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(" (and files). Result: ").append(deleteDeaccessionedResult); desiredCards.put(DatasetVersion.VersionState.RELEASED, false); /** * @todo delete published? */ /** * Desired state for existence of cards: {DRAFT=true, * DEACCESSIONED=false, RELEASED=false} * * No published version, nothing will be indexed as dataset_17 * * The latest version is a working copy (latestVersionState: * DRAFT) and indexing was attempted for dataset_17_draft * (limited discoverability). Result: indexed dataset 17 as * dataset_17_draft. filesIndexed: [datafile_18_draft] * * Draft exists, no need for deaccessioned version. Deletion * attempted for dataset_17_deaccessioned (and files). Result: * Attempted to delete dataset_17_deaccessioned from Solr index. * updateReponse was: * {responseHeader={status=0,QTime=0}}Attempted to delete * datafile_18_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else if (latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, true); IndexableDataset indexableDeaccessionedVersion = new IndexableDataset(latestVersion); String indexDeaccessionedVersionResult = addOrUpdateDataset(indexableDeaccessionedVersion); results.append("No draft version. Attempting to index as deaccessioned. Result: ").append(indexDeaccessionedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.RELEASED, false); String deletePublishedResults = removePublished(dataset); results.append("No published version. Attempting to delete traces of published version from index. Result: ").append(deletePublishedResults); desiredCards.put(DatasetVersion.VersionState.DRAFT, false); /** * @todo delete drafts? */ /** * Desired state for existence of cards: {DEACCESSIONED=true, * RELEASED=false, DRAFT=false} * * No published version, nothing will be indexed as dataset_17 * * No draft version. Attempting to index as deaccessioned. * Result: indexed dataset 17 as dataset_17_deaccessioned. * filesIndexed: [] * * No published version. Attempting to delete traces of * published version from index. Result: Attempted to delete * dataset_17 from Solr index. updateReponse was: * {responseHeader={status=0,QTime=1}}Attempted to delete * datafile_18 from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else { return "No-op. Unexpected condition reached: No released version and latest version is neither draft nor deaccesioned"; } } else if (numReleasedVersions > 0) { results.append("Released versions found: ").append(numReleasedVersions) .append(". 
Will attempt to index as ").append(solrIdPublished).append(" (discoverable by anonymous)\n"); if (latestVersionState.equals(DatasetVersion.VersionState.RELEASED) || latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { desiredCards.put(DatasetVersion.VersionState.RELEASED, true); IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion); String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion); results.append("Attempted to index " + solrIdPublished).append(". Result: ").append(indexReleasedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DRAFT, false); List<String> solrDocIdsForDraftFilesToDelete = findSolrDocIdsForDraftFilesToDelete(dataset); String deleteDraftDatasetVersionResult = removeSolrDocFromIndex(solrIdDraftDataset); StringBuilder deleteDraftFilesResults = new StringBuilder(); for (String doomed : solrDocIdsForDraftFilesToDelete) { String result = removeSolrDocFromIndex(doomed); deleteDraftFilesResults.append(result); } results.append("The latest version is published. Attempting to delete drafts. Result: ") .append(deleteDraftDatasetVersionResult).append(deleteDraftFilesResults).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("No need for deaccessioned version. Deletion attempted for ") .append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult); /** * Desired state for existence of cards: {RELEASED=true, * DRAFT=false, DEACCESSIONED=false} * * Released versions found: 1. Will attempt to index as * dataset_17 (discoverable by anonymous) * * Attempted to index dataset_17. Result: indexed dataset 17 as * dataset_17. filesIndexed: [datafile_18] * * The latest version is published. Attempting to delete drafts. * Result: Attempted to delete dataset_17_draft from Solr index. * updateReponse was: {responseHeader={status=0,QTime=1}} * * No need for deaccessioned version. Deletion attempted for * dataset_17_deaccessioned. Result: Attempted to delete * dataset_17_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=1}}Attempted to delete * datafile_18_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) { IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion); desiredCards.put(DatasetVersion.VersionState.DRAFT, true); String indexDraftResult = addOrUpdateDataset(indexableDraftVersion); results.append("The latest version is a working copy (latestVersionState: ") .append(latestVersionStateString).append(") and will be indexed as ") .append(solrIdDraftDataset).append(" (limited visibility). Result: ").append(indexDraftResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.RELEASED, true); IndexableDataset indexableReleasedVersion = new IndexableDataset(releasedVersion); String indexReleasedVersionResult = addOrUpdateDataset(indexableReleasedVersion); results.append("There is a published version we will attempt to index. Result: ").append(indexReleasedVersionResult).append("\n"); desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, false); String deleteDeaccessionedResult = removeDeaccessioned(dataset); results.append("No need for deaccessioned version. 
Deletion attempted for ") .append(solrIdDeaccessioned).append(". Result: ").append(deleteDeaccessionedResult); /** * Desired state for existence of cards: {DRAFT=true, * RELEASED=true, DEACCESSIONED=false} * * Released versions found: 1. Will attempt to index as * dataset_17 (discoverable by anonymous) * * The latest version is a working copy (latestVersionState: * DRAFT) and will be indexed as dataset_17_draft (limited * visibility). Result: indexed dataset 17 as dataset_17_draft. * filesIndexed: [datafile_18_draft] * * There is a published version we will attempt to index. * Result: indexed dataset 17 as dataset_17. filesIndexed: * [datafile_18] * * No need for deaccessioned version. Deletion attempted for * dataset_17_deaccessioned. Result: Attempted to delete * dataset_17_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=1}}Attempted to delete * datafile_18_deaccessioned from Solr index. updateReponse was: * {responseHeader={status=0,QTime=0}} */ String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString(); logger.info(result); return result; } else { return "No-op. Unexpected condition reached: There is at least one published version but the latest version is neither published nor draft"; } } else { return "No-op. Unexpected condition reached: Negative number of released versions? Count was: " + numReleasedVersions; } } #location 46 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
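The patch above null-checks dataset.getReleasedVersion() — and logs a warning about the deaccessioned edge case — before reading its version state. Optional.ofNullable makes the two branches explicit; a minimal sketch assuming Java 9+ for ifPresentOrElse, with a stand-in getter rather than the real Dataverse API:

import java.util.Optional;
import java.util.logging.Logger;

public class ReleasedVersionCheck {
    private static final Logger logger = Logger.getLogger("index");

    // Stand-in for dataset.getReleasedVersion(), which may return null.
    static String getReleasedVersion(boolean released) {
        return released ? "1.0" : null;
    }

    public static void main(String[] args) {
        Optional.ofNullable(getReleasedVersion(false))
                .ifPresentOrElse(
                        v -> System.out.println("indexing released version " + v),
                        () -> logger.warning("no released version; skipping that card"));
    }
}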
#fixed code public String save() { Command<Dataverse> cmd = null; /* TODO change to Create - for now the page is expecting INFO instead. */ if (dataverse.getId() == null){ dataverse.setOwner(ownerId != null ? dataverseService.find(ownerId) : null); cmd = new CreateDataverseCommand(dataverse, session.getUser()); } else { cmd = new UpdateDataverseCommand(dataverse, facets.getTarget(), session.getUser()); } try { dataverse = commandEngine.submit(cmd); userNotificationService.sendNotification(session.getUser(), dataverse.getCreateDate(), Type.CREATEDV, dataverse.getId()); editMode = null; } catch (CommandException ex) { JH.addMessage(FacesMessage.SEVERITY_ERROR, ex.getMessage()); return null; } return "/dataverse.xhtml?id=" + dataverse.getId() +"&faces-redirect=true"; }
#vulnerable code public String save() { Command<Dataverse> cmd = null; if ( editMode == EditMode.INFO ) { dataverse.setOwner(ownerId != null ? dataverseService.find(ownerId) : null); cmd = new CreateDataverseCommand(dataverse, session.getUser()); } else if ( editMode == EditMode.SETUP ) { cmd = new UpdateDataverseCommand(dataverse, facets.getTarget(), session.getUser()); } try { dataverse = commandEngine.submit(cmd); userNotificationService.sendNotification(session.getUser(), dataverse.getCreateDate(), Type.CREATEDV, dataverse.getId()); editMode = null; } catch (CommandException ex) { JH.addMessage(FacesMessage.SEVERITY_ERROR, ex.getMessage()); return null; } return "/dataverse.xhtml?id=" + dataverse.getId() +"&faces-redirect=true"; } #location 13 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
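The vulnerable save() leaves cmd null whenever editMode is neither INFO nor SETUP, so commandEngine.submit(cmd) can dereference null; the fix branches on dataverse.getId(), a condition with exactly two outcomes, so cmd is always assigned. The pattern in miniature, with illustrative names:

public class CommandChoice {
    public static void main(String[] args) {
        Long id = null; // stand-in for dataverse.getId()
        // A two-way condition guarantees definite assignment, unlike
        // branching on an enum that might hold a third value.
        String cmd = (id == null) ? "create" : "update";
        System.out.println("submitting " + cmd + " command");
    }
}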
#fixed code public void init() { if (dataset.getId() != null) { /* view mode for a dataset */ dataset = datasetService.find(dataset.getId()); editVersion = dataset.getLatestVersion(); editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues()); editValues = editVersion.getDatasetFieldValues(); citationValues = extractValues(editValues, true); otherMetadataValues = extractValues(editValues, false); ownerId = dataset.getOwner().getId(); } else if (ownerId != null) { /* create mode for a new child dataset */ editMode = EditMode.CREATE; dataset.setOwner(dataverseService.find(ownerId)); dataset.setVersions(new ArrayList()); editVersion.setDataset(dataset); editVersion.setFileMetadatas(new ArrayList()); editVersion.setDatasetFieldValues(null); editVersion.setVersionState(VersionState.DRAFT); editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues()); editVersion.setVersionNumber(new Long(1)); editValues = editVersion.getDatasetFieldValues(); citationValues = extractValues(editValues, true); otherMetadataValues = extractValues(editValues, false); dataset.getVersions().add(editVersion); } else { throw new RuntimeException("On Dataset page without id or ownerid."); /* improve error handling */ } setCitationFields(dataverseService.findCitationDatasetFieldsByDataverseId(ownerId)); setOtherMetadataFields(dataverseService.findOtherMetadataDatasetFieldsByDataverseId(ownerId)); }
#vulnerable code public void init() { if (dataset.getId() != null) { /* view mode for a dataset */ dataset = datasetService.find(dataset.getId()); editVersion = dataset.getLatestVersion(); editValues = editVersion.getDatasetFieldValues(); citationValues = extractValues(editValues, true); otherMetadataValues = extractValues(editValues, false); ownerId = dataset.getOwner().getId(); } else if (ownerId != null) { /* create mode for a new child dataset */ editMode = EditMode.CREATE; dataset.setOwner(dataverseService.find(ownerId)); dataset.setVersions(new ArrayList()); editVersion.setDataset(dataset); editVersion.setFileMetadatas(new ArrayList()); editVersion.setDatasetFieldValues(null); editVersion.setVersionState(VersionState.DRAFT); editVersion.setDatasetFieldValues(editVersion.initDatasetFieldValues()); editVersion.setVersionNumber(new Long(1)); editValues = editVersion.getDatasetFieldValues(); citationValues = extractValues(editValues, true); otherMetadataValues = extractValues(editValues, false); dataset.getVersions().add(editVersion); } else { throw new RuntimeException("On Dataset page without id or ownerid."); /* improve error handling */ } setCitationFields(dataverseService.findCitationDatasetFieldsByDataverseId(ownerId)); setOtherMetadataFields(dataverseService.findOtherMetadataDatasetFieldsByDataverseId(ownerId)); } #location 6 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
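The fix calls editVersion.initDatasetFieldValues() and stores the result before anything reads getDatasetFieldValues(), so view mode never sees a null collection. An initialize-before-read sketch with hypothetical field names:

import java.util.ArrayList;
import java.util.List;

public class LazyFieldInit {
    private List<String> values; // starts null, like the version's field values

    // Populate the collection before anyone reads it, as the patch does.
    List<String> initValues() {
        if (values == null) {
            values = new ArrayList<>();
        }
        return values;
    }

    public static void main(String[] args) {
        LazyFieldInit page = new LazyFieldInit();
        page.initValues().add("title"); // safe: never null after init
        System.out.println(page.values);
    }
}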
#fixed code @Override public void writeTo(DownloadInstance di, Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream outstream) throws IOException, WebApplicationException { if (di.getDownloadInfo() != null && di.getDownloadInfo().getDataFile() != null) { DataAccessRequest daReq = new DataAccessRequest(); DataFile sf = di.getDownloadInfo().getDataFile(); DataAccessObject accessObject = DataAccess.createDataAccessObject(sf, daReq); if (accessObject != null) { accessObject.open(); if (di.getConversionParam() != null) { // Image Thumbnail conversion: if (di.getConversionParam().equals("imageThumb")) { accessObject = ImageThumbConverter.getImageThumb(sf, (FileAccessObject)accessObject); } /* No other download services are supported just yet. else if (di.getConversionParam().equals("TermsOfUse")) { accessObject = ExportTermsOfUse.export(sf.getStudy()); } else if (di.getConversionParam().equals("package")) { if ("WithTermsOfUse".equals(di.getConversionParamValue())) { accessObject = PackageWithTermsOfUse.repackage(sf, (FileAccessObject)accessObject); } } */ if (sf.isTabularData()) { if (di.getConversionParam().equals("noVarHeader")) { accessObject.setNoVarHeader(Boolean.TRUE); accessObject.setVarHeader(null); } else if (di.getConversionParam().equals("fileFormat")) { if ("original".equals(di.getConversionParamValue())) { accessObject = StoredOriginalFile.retrieve(sf, (FileAccessObject)accessObject); } /* else { // Other format conversions: String requestedMimeType = di.getServiceFormatType(di.getConversionParam(), di.getConversionParamValue()); if (requestedMimeType == null) { // default mime type, in case real type is unknown; // (this shouldn't happen in real life - but just in case): requestedMimeType = "application/octet-stream"; } accessObject = DataFileConverter.performFormatConversion( sf, (FileAccessObject)accessObject, di.getConversionParamValue(), requestedMimeType); } */ } } if (accessObject == null) { throw new WebApplicationException(Response.Status.SERVICE_UNAVAILABLE); } } InputStream instream = accessObject.getInputStream(); if (instream != null) { // headers: String fileName = accessObject.getFileName(); String mimeType = accessObject.getMimeType(); // Provide both the "Content-disposition" and "Content-Type" headers, // to satisfy the widest selection of browsers out there. httpHeaders.add("Content-disposition", "attachment; filename=\"" + fileName + "\""); httpHeaders.add("Content-Type", mimeType + "; name=\"" + fileName); // (the httpHeaders map must be modified *before* writing any // data in the output stream! int bufsize; byte [] bffr = new byte[4*8192]; // before writing out any bytes from the input stream, flush // any extra content, such as the variable header for the // subsettable files: if (accessObject.getVarHeader() != null) { outstream.write(accessObject.getVarHeader().getBytes()); } while ((bufsize = instream.read(bffr)) != -1) { outstream.write(bffr, 0, bufsize); } instream.close(); return; } } } throw new WebApplicationException(Response.Status.NOT_FOUND); }
#vulnerable code @Override public void writeTo(DownloadInstance di, Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream outstream) throws IOException, WebApplicationException { if (di.getDownloadInfo() != null && di.getDownloadInfo().getDataFile() != null) { DataAccessRequest daReq = new DataAccessRequest(); DataFile sf = di.getDownloadInfo().getDataFile(); DataAccessObject accessObject = DataAccess.createDataAccessObject(sf, daReq); if (accessObject != null) { accessObject.open(); if (di.getConversionParam() != null) { // Image Thumbnail conversion: if (di.getConversionParam().equals("imageThumb")) { accessObject = ImageThumbConverter.getImageThumb(sf, (FileAccessObject)accessObject); } /* No other download services are supported just yet. else if (di.getConversionParam().equals("TermsOfUse")) { accessObject = ExportTermsOfUse.export(sf.getStudy()); } else if (di.getConversionParam().equals("package")) { if ("WithTermsOfUse".equals(di.getConversionParamValue())) { accessObject = PackageWithTermsOfUse.repackage(sf, (FileAccessObject)accessObject); } } */ /* No special services for "Subsettable" files just yet: if (sf.isTabularData()) { if (di.getConversionParam().equals("noVarHeader")) { accessObject.setNoVarHeader(Boolean.TRUE); accessObject.setVarHeader(null); } else if (di.getConversionParam().equals("fileFormat")) { if ("original".equals(di.getConversionParamValue())) { accessObject = StoredOriginalFile.retrieve(sf, (FileAccessObject)accessObject); } else { // Other format conversions: String requestedMimeType = di.getServiceFormatType(di.getConversionParam(), di.getConversionParamValue()); if (requestedMimeType == null) { // default mime type, in case real type is unknown; // (this shouldn't happen in real life - but just in case): requestedMimeType = "application/octet-stream"; } accessObject = DataFileConverter.performFormatConversion( sf, (FileAccessObject)accessObject, di.getConversionParamValue(), requestedMimeType); } } } */ if (accessObject == null) { throw new WebApplicationException(Response.Status.SERVICE_UNAVAILABLE); } } InputStream instream = accessObject.getInputStream(); if (instream != null) { // headers: String fileName = accessObject.getFileName(); String mimeType = accessObject.getMimeType(); // Provide both the "Content-disposition" and "Content-Type" headers, // to satisfy the widest selection of browsers out there. httpHeaders.add("Content-disposition", "attachment; filename=\"" + fileName + "\""); httpHeaders.add("Content-Type", mimeType + "; name=\"" + fileName); // (the httpHeaders map must be modified *before* writing any // data in the output stream! int bufsize; byte [] bffr = new byte[4*8192]; // before writing out any bytes from the input stream, flush // any extra content, such as the variable header for the // subsettable files: if (accessObject.getVarHeader() != null) { outstream.write(accessObject.getVarHeader().getBytes()); } while ((bufsize = instream.read(bffr)) != -1) { outstream.write(bffr, 0, bufsize); } instream.close(); return; } } } throw new WebApplicationException(Response.Status.NOT_FOUND); } #location 101 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
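The leaked resource here is the InputStream, which is only closed on the successful path; any exception thrown between open and close abandons it. Independent of the project's actual remediation, the idiomatic guard is try-with-resources, sketched below with in-memory streams:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CopyStream {
    // try-with-resources closes the input even if read() or write() throws.
    static void copy(InputStream in, OutputStream out) throws IOException {
        try (InputStream instream = in) {
            byte[] bffr = new byte[4 * 8192];
            int bufsize;
            while ((bufsize = instream.read(bffr)) != -1) {
                out.write(bffr, 0, bufsize);
            }
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        copy(new ByteArrayInputStream("payload".getBytes()), out);
        System.out.println(out.size() + " bytes copied");
    }
}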
#fixed code public String indexDataset(Dataset dataset) { logger.info("indexing dataset " + dataset.getId()); Collection<SolrInputDocument> docs = new ArrayList<>(); List<String> dataversePathSegmentsAccumulator = new ArrayList<>(); List<String> dataverseSegments = null; try { dataverseSegments = findPathSegments(dataset.getOwner(), dataversePathSegmentsAccumulator); } catch (Exception ex) { logger.info("failed to find dataverseSegments for dataversePaths for " + SearchFields.SUBTREE + ": " + ex); } List<String> dataversePaths = getDataversePathsFromSegments(dataverseSegments); SolrInputDocument solrInputDocument = new SolrInputDocument(); solrInputDocument.addField(SearchFields.ID, "dataset_" + dataset.getId()); solrInputDocument.addField(SearchFields.ENTITY_ID, dataset.getId()); solrInputDocument.addField(SearchFields.TYPE, "datasets"); /** * @todo: should we assign a dataset title to name like this? */ if (dataset.getLatestVersion() != null) { if (dataset.getLatestVersion().getMetadata() != null) { if (dataset.getLatestVersion().getMetadata().getAuthorsStr() != null) { if (!dataset.getLatestVersion().getMetadata().getAuthorsStr().isEmpty()) { solrInputDocument.addField(SearchFields.AUTHOR_STRING, dataset.getLatestVersion().getMetadata().getAuthorsStr()); } else { logger.info("author string was empty"); } } else { logger.info("dataset.getLatestVersion().getMetadata().getAuthorsStr() was null"); } if (dataset.getLatestVersion().getMetadata().getTitle() != null) { if (!dataset.getLatestVersion().getMetadata().getTitle().isEmpty()) { solrInputDocument.addField(SearchFields.TITLE, dataset.getLatestVersion().getMetadata().getTitle()); } else { logger.info("title was empty"); } } if (dataset.getLatestVersion().getMetadata().getProductionDate() != null) { /** * @todo: clean this up, DRY */ SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); try { Date citationDate = inputDateyyyy.parse(dataset.getLatestVersion().getMetadata().getProductionDate()); solrInputDocument.addField(SearchFields.CITATION_DATE, citationDate); SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); String citationYear = yearOnly.format(citationDate); solrInputDocument.addField(SearchFields.CITATION_YEAR, Integer.parseInt(citationYear)); } catch (Exception ex) { logger.info("Can't convert " + dataset.getLatestVersion().getMetadata().getProductionDate() + " to a YYYY date from dataset " + dataset.getId()); } SimpleDateFormat inputDateyyyyMMdd = new SimpleDateFormat("yyyy-MM-dd", Locale.ENGLISH); try { Date citationDate = inputDateyyyyMMdd.parse(dataset.getLatestVersion().getMetadata().getProductionDate()); solrInputDocument.addField(SearchFields.CITATION_DATE, citationDate); SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); String citationYear = yearOnly.format(citationDate); solrInputDocument.addField(SearchFields.CITATION_YEAR, Integer.parseInt(citationYear)); } catch (Exception ex) { logger.info("Can't convert " + dataset.getLatestVersion().getMetadata().getProductionDate() + " to a YYYY-MM-DD date from dataset " + dataset.getId()); } } else { logger.info("dataset.getLatestVersion().getMetadata().getTitle() was null"); } } else { logger.info("dataset.getLatestVersion().getMetadata() was null"); } } else { logger.info("dataset.getLatestVersion() was null"); } /** * @todo: don't use distributor for category. 
testing facets */ // solrInputDocument.addField(SearchFields.CATEGORY, dataset.getDistributor()); if (dataset.getDescription() != null && !dataset.getDescription().isEmpty()) { solrInputDocument.addField(SearchFields.DESCRIPTION, dataset.getDescription()); } solrInputDocument.addField(SearchFields.SUBTREE, dataversePaths); solrInputDocument.addField(SearchFields.ORIGINAL_DATAVERSE, dataset.getOwner().getName()); solrInputDocument.addField(SearchFields.PARENT_TYPE, "datasets"); solrInputDocument.addField(SearchFields.PARENT_ID, dataset.getOwner().getId()); solrInputDocument.addField(SearchFields.PARENT_NAME, dataset.getOwner().getName()); docs.add(solrInputDocument); List<DataFile> files = dataset.getFiles(); for (DataFile dataFile : files) { SolrInputDocument datafileSolrInputDocument = new SolrInputDocument(); datafileSolrInputDocument.addField(SearchFields.ID, "datafile_" + dataFile.getId()); datafileSolrInputDocument.addField(SearchFields.ENTITY_ID, dataFile.getId()); datafileSolrInputDocument.addField(SearchFields.TYPE, "files"); datafileSolrInputDocument.addField(SearchFields.NAME, dataFile.getName()); datafileSolrInputDocument.addField(SearchFields.FILE_TYPE, dataFile.getContentType()); datafileSolrInputDocument.addField(SearchFields.FILE_TYPE_GROUP, dataFile.getContentType().split("/")[0]); datafileSolrInputDocument.addField(SearchFields.SUBTREE, dataversePaths); datafileSolrInputDocument.addField(SearchFields.ORIGINAL_DATAVERSE, dataFile.getOwner().getOwner().getName()); datafileSolrInputDocument.addField(SearchFields.PARENT_TYPE, "datasets"); // datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getDataset().getTitle()); datafileSolrInputDocument.addField(SearchFields.PARENT_ID, dataFile.getOwner().getId()); if (!dataFile.getOwner().getLatestVersion().getMetadata().getTitle().isEmpty()) { datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getOwner().getLatestVersion().getMetadata().getTitle()); } docs.add(datafileSolrInputDocument); } /** * @todo allow for configuration of hostname and port */ SolrServer server = new HttpSolrServer("http://localhost:8983/solr/"); try { server.add(docs); } catch (SolrServerException | IOException ex) { return ex.toString(); } try { server.commit(); } catch (SolrServerException | IOException ex) { return ex.toString(); } return "indexed dataset " + dataset.getId(); // + ":" + dataset.getTitle(); }
#vulnerable code public String indexDataset(Dataset dataset) { logger.info("indexing dataset " + dataset.getId()); Collection<SolrInputDocument> docs = new ArrayList<>(); List<String> dataversePathSegmentsAccumulator = new ArrayList<>(); // List<String> dataverseSegments = null; // try { // dataverseSegments = findPathSegments(dataset.getOwner(), dataversePathSegmentsAccumulator); // } catch (Exception ex) { // logger.info("failed to find dataverseSegments for dataversePaths for " + SearchFields.SUBTREE + ": " + ex); // } // List<String> dataversePaths = getDataversePathsFromSegments(dataverseSegments); SolrInputDocument solrInputDocument = new SolrInputDocument(); solrInputDocument.addField(SearchFields.ID, "dataset_" + dataset.getId()); solrInputDocument.addField(SearchFields.ENTITY_ID, dataset.getId()); solrInputDocument.addField(SearchFields.TYPE, "datasets"); /** * @todo: should we assign a dataset title to name like this? */ if (dataset.getLatestVersion() != null) { if (dataset.getLatestVersion().getMetadata() != null) { if (dataset.getLatestVersion().getMetadata().getAuthorsStr() != null) { if (!dataset.getLatestVersion().getMetadata().getAuthorsStr().isEmpty()) { solrInputDocument.addField(SearchFields.AUTHOR_STRING, dataset.getLatestVersion().getMetadata().getAuthorsStr()); } else { logger.info("author string was empty"); } } else { logger.info("dataset.getLatestVersion().getMetadata().getAuthorsStr() was null"); } if (dataset.getLatestVersion().getMetadata().getTitle() != null) { if (!dataset.getLatestVersion().getMetadata().getTitle().isEmpty()) { solrInputDocument.addField(SearchFields.TITLE, dataset.getLatestVersion().getMetadata().getTitle()); } else { logger.info("title was empty"); } } if (dataset.getLatestVersion().getMetadata().getProductionDate() != null) { /** * @todo: clean this up, DRY */ SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); try { Date citationDate = inputDateyyyy.parse(dataset.getLatestVersion().getMetadata().getProductionDate()); solrInputDocument.addField(SearchFields.CITATION_DATE, citationDate); SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); String citationYear = yearOnly.format(citationDate); solrInputDocument.addField(SearchFields.CITATION_YEAR, Integer.parseInt(citationYear)); } catch (Exception ex) { logger.info("Can't convert " + dataset.getLatestVersion().getMetadata().getProductionDate() + " to a YYYY date from dataset " + dataset.getId()); } SimpleDateFormat inputDateyyyyMMdd = new SimpleDateFormat("yyyy-MM-dd", Locale.ENGLISH); try { Date citationDate = inputDateyyyyMMdd.parse(dataset.getLatestVersion().getMetadata().getProductionDate()); solrInputDocument.addField(SearchFields.CITATION_DATE, citationDate); SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); String citationYear = yearOnly.format(citationDate); solrInputDocument.addField(SearchFields.CITATION_YEAR, Integer.parseInt(citationYear)); } catch (Exception ex) { logger.info("Can't convert " + dataset.getLatestVersion().getMetadata().getProductionDate() + " to a YYYY-MM-DD date from dataset " + dataset.getId()); } } else { logger.info("dataset.getLatestVersion().getMetadata().getTitle() was null"); } } else { logger.info("dataset.getLatestVersion().getMetadata() was null"); } } else { logger.info("dataset.getLatestVersion() was null"); } /** * @todo: don't use distributor for category. 
testing facets */ // solrInputDocument.addField(SearchFields.CATEGORY, dataset.getDistributor()); if (dataset.getDescription() != null && !dataset.getDescription().isEmpty()) { solrInputDocument.addField(SearchFields.DESCRIPTION, dataset.getDescription()); } // solrInputDocument.addField(SearchFields.SUBTREE, dataversePaths); solrInputDocument.addField(SearchFields.ORIGINAL_DATAVERSE, dataset.getOwner().getName()); solrInputDocument.addField(SearchFields.PARENT_TYPE, "datasets"); solrInputDocument.addField(SearchFields.PARENT_ID, dataset.getOwner().getId()); solrInputDocument.addField(SearchFields.PARENT_NAME, dataset.getOwner().getName()); docs.add(solrInputDocument); List<DataFile> files = dataset.getFiles(); for (DataFile dataFile : files) { SolrInputDocument datafileSolrInputDocument = new SolrInputDocument(); datafileSolrInputDocument.addField(SearchFields.ID, "datafile_" + dataFile.getId()); datafileSolrInputDocument.addField(SearchFields.ENTITY_ID, dataFile.getId()); datafileSolrInputDocument.addField(SearchFields.TYPE, "files"); datafileSolrInputDocument.addField(SearchFields.NAME, dataFile.getName()); datafileSolrInputDocument.addField(SearchFields.FILE_TYPE, dataFile.getContentType()); datafileSolrInputDocument.addField(SearchFields.FILE_TYPE_GROUP, dataFile.getContentType().split("/")[0]); // datafileSolrInputDocument.addField(SearchFields.SUBTREE, dataversePaths); datafileSolrInputDocument.addField(SearchFields.ORIGINAL_DATAVERSE, dataFile.getOwner().getOwner().getName()); datafileSolrInputDocument.addField(SearchFields.PARENT_TYPE, "datasets"); // datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getDataset().getTitle()); datafileSolrInputDocument.addField(SearchFields.PARENT_ID, dataFile.getOwner().getId()); if (!dataFile.getOwner().getLatestVersion().getMetadata().getTitle().isEmpty()) { datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getOwner().getLatestVersion().getMetadata().getTitle()); } docs.add(datafileSolrInputDocument); } /** * @todo allow for configuration of hostname and port */ SolrServer server = new HttpSolrServer("http://localhost:8983/solr/"); try { server.add(docs); } catch (SolrServerException | IOException ex) { return ex.toString(); } try { server.commit(); } catch (SolrServerException | IOException ex) { return ex.toString(); } return "indexed dataset " + dataset.getId(); // + ":" + dataset.getTitle(); } #location 81 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
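Both versions above lean on long dereference chains such as dataset.getLatestVersion().getMetadata().getTitle(), which throw the moment any hop is null — hence the nested null checks in the fixed version. Optional.map gives the same protection declaratively; an illustrative sketch (Java 16 records used only for brevity):

import java.util.Optional;

public class SafeChain {
    record Metadata(String title) {}
    record Version(Metadata metadata) {}

    // Each map() hop short-circuits on null instead of throwing.
    static String titleOrDefault(Version latest) {
        return Optional.ofNullable(latest)
                .map(Version::metadata)
                .map(Metadata::title)
                .orElse("(untitled)");
    }

    public static void main(String[] args) {
        System.out.println(titleOrDefault(new Version(null)));                    // (untitled)
        System.out.println(titleOrDefault(new Version(new Metadata("My Data")))); // My Data
    }
}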
#fixed code public String releaseDraft() { if (releaseRadio == 1) { return releaseDataset(false); } else { return releaseDataset(true); } }
#vulnerable code public String releaseDraft() { if (releaseRadio == 1) { dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue() + 1)); dataset.getEditVersion().setMinorVersionNumber(new Long(0)); } else { dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue())); dataset.getEditVersion().setMinorVersionNumber(new Long(dataset.getReleasedVersion().getMinorVersionNumber().intValue() + 1)); } return releaseDataset(false); } #location 6 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
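The vulnerable releaseDraft dereferences dataset.getReleasedVersion() even when nothing has been released yet; the fix pushes the version arithmetic into releaseDataset, whose boolean argument presumably selects a minor release, so this method never touches the possibly-null version. A toy version of that idea, with hypothetical parameters (previousMajor < 0 meaning "never released"):

public class VersionBump {
    static String next(int previousMajor, int previousMinor, boolean minorBump) {
        if (previousMajor < 0) {
            return "1.0"; // first release: nothing to bump, nothing to dereference
        }
        return minorBump ? previousMajor + "." + (previousMinor + 1)
                         : (previousMajor + 1) + ".0";
    }

    public static void main(String[] args) {
        System.out.println(next(-1, 0, false)); // 1.0
        System.out.println(next(1, 0, true));   // 1.1
        System.out.println(next(1, 1, false));  // 2.0
    }
}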
#fixed code public String releaseDraft() { if (releaseRadio == 1) { return releaseDataset(false); } else { return releaseDataset(true); } }
#vulnerable code public String releaseDraft() { if (releaseRadio == 1) { dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue() + 1)); dataset.getEditVersion().setMinorVersionNumber(new Long(0)); } else { dataset.getEditVersion().setVersionNumber(new Long(dataset.getReleasedVersion().getVersionNumber().intValue())); dataset.getEditVersion().setMinorVersionNumber(new Long(dataset.getReleasedVersion().getMinorVersionNumber().intValue() + 1)); } return releaseDataset(false); } #location 3 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
#fixed code @Override public void deleteContainer(String uri, AuthCredentials authCredentials, SwordConfiguration sc) throws SwordError, SwordServerException, SwordAuthException { // swordConfiguration = (SwordConfigurationImpl) sc; DataverseUser vdcUser = swordAuth.auth(authCredentials); logger.fine("deleteContainer called with url: " + uri); urlManager.processUrl(uri); logger.fine("original url: " + urlManager.getOriginalUrl()); if (!"edit".equals(urlManager.getServlet())) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "edit servlet expected, not " + urlManager.getServlet()); } String targetType = urlManager.getTargetType(); if (!targetType.isEmpty()) { logger.fine("operating on target type: " + urlManager.getTargetType()); // StudyServiceLocal studyService; Context ctx; try { ctx = new InitialContext(); // studyService = (StudyServiceLocal) ctx.lookup("java:comp/env/studyService"); } catch (NamingException ex) { logger.info("problem looking up studyService"); throw new SwordServerException("problem looking up studyService"); } if ("dataverse".equals(targetType)) { /** * @todo throw SWORD error recommending use of 4.0 "native" API * to delete dataverses */ // String dvAlias = urlManager.getTargetIdentifier(); // List<VDC> userVDCs = vdcService.getUserVDCs(vdcUser.getId()); // VDC dataverseToEmpty = vdcService.findByAlias(dvAlias); // if (dataverseToEmpty != null) { // if ("Admin".equals(vdcUser.getNetworkRole().getName())) { // if (swordConfiguration.allowNetworkAdminDeleteAllStudies()) { // // /** // * @todo: this is the deleteContainer method... // * should move this to some sort of "emptyContainer" // * method // */ // // curl --insecure -s -X DELETE https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/edit/dataverse/sword // Collection<Study> studies = dataverseToEmpty.getOwnedStudies(); // for (Study study : studies) { // logger.info("In dataverse " + dataverseToEmpty.getAlias() + " about to delete study id " + study.getId()); // studyService.deleteStudy(study.getId()); // } // } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "DELETE on a dataverse is not supported"); // } // } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Role was " + vdcUser.getNetworkRole().getName() + " but admin required."); // } // } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't find dataverse to delete from URL: " + uri); // } } else if ("study".equals(targetType)) { String globalId = urlManager.getTargetIdentifier(); logger.info("globalId: " + globalId); if (globalId != null) { Dataset study = null; try { study = datasetService.findByGlobalId(globalId); } catch (EJBException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri); } if (study != null) { Dataverse dvThatOwnsStudy = study.getOwner(); if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { DatasetVersion.VersionState studyState = study.getLatestVersion().getVersionState(); if (studyState.equals(DatasetVersion.VersionState.DRAFT)) { logger.info("destroying working copy version of study " + study.getGlobalId()); /** * @todo in DVN 3.x we had a convenient * destroyWorkingCopyVersion method but the * DeleteDatasetCommand is pretty scary... what * if a released study has a new draft version? * What we need is a * DeleteDatasetVersionCommand, I suppose... 
*/ // studyService.destroyWorkingCopyVersion(study.getLatestVersion().getId()); try { engineSvc.submit(new DeleteDatasetCommand(study, vdcUser)); /** * @todo re-index after deletion * https://redmine.hmdc.harvard.edu/issues/3544#note-21 */ logger.info("dataset deleted"); } catch (CommandExecutionException ex) { // internal error throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage()); } catch (CommandException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage()); } /** * @todo think about how to handle non-drafts */ } else if (studyState.equals(DatasetVersion.VersionState.RELEASED)) { // logger.fine("deaccessioning latest version of study " + study.getGlobalId()); // studyService.deaccessionStudy(study.getLatestVersion()); } else if (studyState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has already been deaccessioned."); } else if (studyState.equals(DatasetVersion.VersionState.ARCHIVED)) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has been archived and can not be deleted or deaccessioned."); } else if (studyState.equals(DatasetVersion.VersionState.IN_REVIEW)) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " is in review and can not be deleted or deaccessioned."); } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for study " + study.getGlobalId() + " in state " + studyState); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsStudy.getAlias()); } } else { throw new SwordError(404); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study to delete from URL: " + uri); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported delete target in URL:" + uri); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target for deletion specified"); } }
#vulnerable code @Override public void deleteContainer(String uri, AuthCredentials authCredentials, SwordConfiguration sc) throws SwordError, SwordServerException, SwordAuthException { // swordConfiguration = (SwordConfigurationImpl) sc; DataverseUser vdcUser = swordAuth.auth(authCredentials); logger.fine("deleteContainer called with url: " + uri); urlManager.processUrl(uri); logger.fine("original url: " + urlManager.getOriginalUrl()); if (!"edit".equals(urlManager.getServlet())) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "edit servlet expected, not " + urlManager.getServlet()); } String targetType = urlManager.getTargetType(); if (!targetType.isEmpty()) { logger.fine("operating on target type: " + urlManager.getTargetType()); // StudyServiceLocal studyService; Context ctx; try { ctx = new InitialContext(); // studyService = (StudyServiceLocal) ctx.lookup("java:comp/env/studyService"); } catch (NamingException ex) { logger.info("problem looking up studyService"); throw new SwordServerException("problem looking up studyService"); } if ("dataverse".equals(targetType)) { /** * @todo throw SWORD error recommending use of 4.0 "native" API * to delete dataverses */ // String dvAlias = urlManager.getTargetIdentifier(); // List<VDC> userVDCs = vdcService.getUserVDCs(vdcUser.getId()); // VDC dataverseToEmpty = vdcService.findByAlias(dvAlias); // if (dataverseToEmpty != null) { // if ("Admin".equals(vdcUser.getNetworkRole().getName())) { // if (swordConfiguration.allowNetworkAdminDeleteAllStudies()) { // // /** // * @todo: this is the deleteContainer method... // * should move this to some sort of "emptyContainer" // * method // */ // // curl --insecure -s -X DELETE https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/edit/dataverse/sword // Collection<Study> studies = dataverseToEmpty.getOwnedStudies(); // for (Study study : studies) { // logger.info("In dataverse " + dataverseToEmpty.getAlias() + " about to delete study id " + study.getId()); // studyService.deleteStudy(study.getId()); // } // } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "DELETE on a dataverse is not supported"); // } // } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Role was " + vdcUser.getNetworkRole().getName() + " but admin required."); // } // } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't find dataverse to delete from URL: " + uri); // } } else if ("study".equals(targetType)) { String globalId = urlManager.getTargetIdentifier(); logger.info("globalId: " + globalId); if (globalId != null) { Dataset study = null; try { /** * @todo don't hard code this, obviously. 
In DVN 3.x we * had a method for * studyService.getStudyByGlobalId(globalId) */ // study = studyService.getStudyByGlobalId(globalId); long databaseIdForRoastingAtHomeDataset = 10; study = datasetService.find(databaseIdForRoastingAtHomeDataset); } catch (EJBException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri); } if (study != null) { Dataverse dvThatOwnsStudy = study.getOwner(); if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { DatasetVersion.VersionState studyState = study.getLatestVersion().getVersionState(); if (studyState.equals(DatasetVersion.VersionState.DRAFT)) { /** * @todo use getGlobalId when it's available */ logger.info("destroying working copy version of study " + study.getIdentifier()); /** * @todo in DVN 3.x we had a convenient * destroyWorkingCopyVersion method but the * DeleteDatasetCommand is pretty scary... what * if a released study has a new draft version? * What we need is a * DeleteDatasetVersionCommand, I suppose... */ // studyService.destroyWorkingCopyVersion(study.getLatestVersion().getId()); try { engineSvc.submit(new DeleteDatasetCommand(study, vdcUser)); /** * @todo re-index after deletion * https://redmine.hmdc.harvard.edu/issues/3544#note-21 */ logger.info("dataset deleted"); } catch (CommandExecutionException ex) { // internal error throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage()); } catch (CommandException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage()); } /** * @todo think about how to handle non-drafts */ } else if (studyState.equals(DatasetVersion.VersionState.RELEASED)) { // logger.fine("deaccessioning latest version of study " + study.getGlobalId()); // studyService.deaccessionStudy(study.getLatestVersion()); } else if (studyState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has already been deaccessioned."); } else if (studyState.equals(DatasetVersion.VersionState.ARCHIVED)) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has been archived and can not be deleted or deaccessioned."); } else if (studyState.equals(DatasetVersion.VersionState.IN_REVIEW)) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " is in review and can not be deleted or deaccessioned."); } else { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for study " + study.getGlobalId() + " in state " + studyState); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsStudy.getAlias()); } } else { throw new SwordError(404); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study to delete from URL: " + uri); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported delete target in URL:" + uri); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target for deletion specified"); } } #location 78 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
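The essential change in this patch is that the fixed deleteContainer resolves the dataset with datasetService.findByGlobalId(globalId), using the identifier parsed from the request URL, where the vulnerable version ignored that identifier and loaded a hard-coded database id (10); the result is then null-guarded before any dereference. Below is a minimal sketch of that lookup-then-guard pattern; DatasetService and the exception choice are hypothetical stand-ins, not the real Dataverse API.

final class GlobalIdLookupSketch {

    // Hypothetical stand-in for the injected datasetService bean.
    interface DatasetService {
        Object findByGlobalId(String globalId); // may return null on a miss
    }

    // Resolve the target by the identifier parsed from the request URL
    // instead of a hard-coded database id, and fail fast on a miss so no
    // later statement can dereference a null dataset.
    static Object resolveOrFail(DatasetService svc, String globalId) {
        Object dataset = svc.findByGlobalId(globalId);
        if (dataset == null) {
            throw new IllegalArgumentException("no dataset for global id " + globalId);
        }
        return dataset;
    }
}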
#fixed code public String indexDataset(Dataset dataset) { logger.info("indexing dataset " + dataset.getId()); String solrIdDraftStudy = "dataset_" + dataset.getId() + "_draft"; String solrIdPublishedStudy = "dataset_" + dataset.getId(); StringBuilder sb = new StringBuilder(); sb.append("rationale:\n"); List<DatasetVersion> versions = dataset.getVersions(); for (DatasetVersion datasetVersion : versions) { Long versionDatabaseId = datasetVersion.getId(); String versionTitle = datasetVersion.getTitle(); String semanticVersion = datasetVersion.getSemanticVersion(); String versionState = datasetVersion.getVersionState().name(); boolean versionIsReleased = datasetVersion.isReleased(); boolean versionIsWorkingCopy = datasetVersion.isWorkingCopy(); sb.append("version found with database id " + versionDatabaseId + "\n"); sb.append("- title: " + versionTitle + "\n"); sb.append("- semanticVersion-STATE: " + semanticVersion + "-" + versionState + "\n"); sb.append("- isWorkingCopy: " + versionIsWorkingCopy + "\n"); sb.append("- isReleased: " + versionIsReleased + "\n"); } DatasetVersion latestVersion = dataset.getLatestVersion(); String latestVersionState = latestVersion.getVersionState().name(); DatasetVersion releasedVersion = dataset.getReleasedVersion(); if (latestVersion.isWorkingCopy()) { sb.append("The latest version is a working copy (latestVersionState: " + latestVersionState + ") and will be indexed as " + solrIdDraftStudy + " (only visible by creator)\n"); if (releasedVersion != null) { String releasedVersionState = releasedVersion.getVersionState().name(); String semanticVersion = releasedVersion.getSemanticVersion(); sb.append("The released version is " + semanticVersion + " (releasedVersionState: " + releasedVersionState + ") and will be indexed as " + solrIdPublishedStudy + " (visible by anonymous)"); /** * The latest version is a working copy (latestVersionState: * DRAFT) and will be indexed as dataset_17_draft (only visible * by creator) * * The released version is 1.0 (releasedVersionState: RELEASED) * and will be indexed as dataset_17 (visible by anonymous) */ logger.info(sb.toString()); String indexDraftResult = indexDatasetAddOrUpdate(dataset); String indexReleasedVersionResult = indexDatasetAddOrUpdate(dataset); return "indexDraftResult:" + indexDraftResult + ", indexReleasedVersionResult:" + indexReleasedVersionResult + ", " + sb.toString(); } else { sb.append("There is no released version yet so nothing will be indexed as " + solrIdPublishedStudy); /** * The latest version is a working copy (latestVersionState: * DRAFT) and will be indexed as dataset_33_draft (only visible * by creator) * * There is no released version yet so nothing will be indexed * as dataset_33 */ logger.info(sb.toString()); String indexDraftResult = indexDatasetAddOrUpdate(dataset); return "indexDraftResult:" + indexDraftResult + ", " + sb.toString(); } } else { sb.append("The latest version is not a working copy (latestVersionState: " + latestVersionState + ") and will be indexed as " + solrIdPublishedStudy + " (visible by anonymous) and we will be deleting " + solrIdDraftStudy + "\n"); if (releasedVersion != null) { String releasedVersionState = releasedVersion.getVersionState().name(); String semanticVersion = releasedVersion.getSemanticVersion(); sb.append("The released version is " + semanticVersion + " (releasedVersionState: " + releasedVersionState + ") and will be (again) indexed as " + solrIdPublishedStudy + " (visible by anonymous)"); /** * The latest version is not a working copy 
(latestVersionState: * RELEASED) and will be indexed as dataset_34 (visible by * anonymous) and we will be deleting dataset_34_draft * * The released version is 1.0 (releasedVersionState: RELEASED) * and will be (again) indexed as dataset_34 (visible by anonymous) */ logger.info(sb.toString()); String deleteDraftVersionResult = removeDatasetDraftFromIndex(solrIdDraftStudy); String indexReleasedVersionResult = indexDatasetAddOrUpdate(dataset); return "deleteDraftVersionResult: " + deleteDraftVersionResult + ", indexReleasedVersionResult:" + indexReleasedVersionResult + ", " + sb.toString(); } else { sb.append("We don't ever expect to get here. Why is there no released version if the latest version is not a working copy? The latestVersionState is " + latestVersionState + " and we don't know what to do with it. Nothing will be added or deleted from the index."); logger.info(sb.toString()); return sb.toString(); } } }
#vulnerable code public String indexDataset(Dataset dataset) { logger.info("indexing dataset " + dataset.getId()); Collection<SolrInputDocument> docs = new ArrayList<>(); List<String> dataversePathSegmentsAccumulator = new ArrayList<>(); List<String> dataverseSegments = new ArrayList<>(); try { dataverseSegments = findPathSegments(dataset.getOwner(), dataversePathSegmentsAccumulator); } catch (Exception ex) { logger.info("failed to find dataverseSegments for dataversePaths for " + SearchFields.SUBTREE + ": " + ex); } List<String> dataversePaths = getDataversePathsFromSegments(dataverseSegments); SolrInputDocument solrInputDocument = new SolrInputDocument(); solrInputDocument.addField(SearchFields.ID, "dataset_" + dataset.getId()); solrInputDocument.addField(SearchFields.ENTITY_ID, dataset.getId()); solrInputDocument.addField(SearchFields.TYPE, "datasets"); if (dataset.isReleased()) { solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getPublicationDate()); solrInputDocument.addField(SearchFields.PERMS, publicGroupString); } else if (dataset.getOwner().getCreator() != null) { /** * todo why is dataset.getCreateDate() null? For now I guess we'll * use the createDate of it's parent dataverse?! https://redmine.hmdc.harvard.edu/issues/3806 */ // solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getCreateDate()); solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getOwner().getCreateDate()); solrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + dataset.getOwner().getCreator().getId()); /** * @todo: replace this fake version of granting users access to * dataverses with the real thing, when it's available in the app */ if (dataset.getOwner().getCreator().getUserName().equals("pete")) { // figure out if cathy is around DataverseUser cathy = dataverseUserServiceBean.findByUserName("cathy"); if (cathy != null) { // let cathy see all of pete's dataverses solrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + cathy.getId()); } } } else { /** * todo why is dataset.getCreateDate() null? For now I guess we'll * use the createDate of it's parent dataverse?! https://redmine.hmdc.harvard.edu/issues/3806 */ // solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getCreateDate()); solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataset.getOwner().getCreateDate()); /** * @todo: remove this once everyone has dropped their database and * won't get NPE's from dataverse.getCreator */ solrInputDocument.addField(SearchFields.PERMS, npeGetCreator); } /** * @todo: remove this fake "has access to all data" group */ solrInputDocument.addField(SearchFields.PERMS, groupPrefix + tmpNsaGroupId); addDatasetReleaseDateToSolrDoc(solrInputDocument, dataset); if (dataset.getLatestVersion() != null) { DatasetVersionUI datasetVersionUI = null; try { datasetVersionUI = new DatasetVersionUI(dataset.getLatestVersion()); } catch (NullPointerException ex) { logger.info("Caught exception trying to instantiate DatasetVersionUI for dataset " + dataset.getId() + ". : " + ex); } if (datasetVersionUI != null) { String citation = null; try { citation = datasetVersionUI.getCitation(); if (citation != null) { solrInputDocument.addField(SearchFields.CITATION, citation); } } catch (NullPointerException ex) { logger.info("Caught exception trying to get citation for dataset " + dataset.getId() + ". 
: " + ex); } } for (DatasetField dsf : dataset.getLatestVersion().getFlatDatasetFields()) { DatasetFieldType dsfType = dsf.getDatasetFieldType(); String solrFieldSearchable = dsfType.getSolrField().getNameSearchable(); String solrFieldFacetable = dsfType.getSolrField().getNameFacetable(); if (dsf.getValues() != null && !dsf.getValues().isEmpty() && dsf.getValues().get(0) != null && solrFieldSearchable != null) { logger.info("indexing " + dsf.getDatasetFieldType().getName() + ":" + dsf.getValues() + " into " + solrFieldSearchable + " and maybe " + solrFieldFacetable); if (dsfType.getSolrField().getSolrType().equals(SolrField.SolrType.INTEGER)) { String dateAsString = dsf.getValues().get(0); logger.info("date as string: " + dateAsString); if (dateAsString != null && !dateAsString.isEmpty()) { SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); try { /** * @todo when bean validation is working we * won't have to convert strings into dates */ logger.info("Trying to convert " + dateAsString + " to a YYYY date from dataset " + dataset.getId()); Date dateAsDate = inputDateyyyy.parse(dateAsString); SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); String datasetFieldFlaggedAsDate = yearOnly.format(dateAsDate); logger.info("YYYY only: " + datasetFieldFlaggedAsDate); solrInputDocument.addField(solrFieldSearchable, Integer.parseInt(datasetFieldFlaggedAsDate)); if (dsfType.getSolrField().isFacetable()) { solrInputDocument.addField(solrFieldFacetable, Integer.parseInt(datasetFieldFlaggedAsDate)); } } catch (Exception ex) { logger.info("unable to convert " + dateAsString + " into YYYY format and couldn't index it (" + dsfType.getName() + ")"); } } } else { // _s (dynamic string) and all other Solr fields if (dsf.getDatasetFieldType().getName().equals("authorAffiliation")) { /** * @todo think about how to tie the fact that this * needs to be multivalued (_ss) because a * multivalued facet (authorAffilition_ss) is being * collapsed into here at index time. The business * logic to determine if a data-driven metadata * field should be indexed into Solr as a single or * multiple value lives in the getSolrField() method * of DatasetField.java */ solrInputDocument.addField(SearchFields.AFFILIATION, dsf.getValues()); } else if (dsf.getDatasetFieldType().getName().equals("title")) { // datasets have titles not names but index title under name as well so we can sort datasets by name along dataverses and files solrInputDocument.addField(SearchFields.NAME_SORT, dsf.getValues()); } if (dsfType.isControlledVocabulary()) { for (ControlledVocabularyValue controlledVocabularyValue : dsf.getControlledVocabularyValues()) { solrInputDocument.addField(solrFieldSearchable, controlledVocabularyValue.getStrValue()); if (dsfType.getSolrField().isFacetable()) { solrInputDocument.addField(solrFieldFacetable, controlledVocabularyValue.getStrValue()); } } } else { solrInputDocument.addField(solrFieldSearchable, dsf.getValues()); if (dsfType.getSolrField().isFacetable()) { solrInputDocument.addField(solrFieldFacetable, dsf.getValues()); } } } } /** * @todo: review all code below... commented out old indexing of * hard coded fields. Also, should we respect the * isAdvancedSearchField boolean? 
*/ // if (datasetField.isAdvancedSearchField()) { // advancedSearchFields.add(idDashName); // logger.info(idDashName + " is an advanced search field (" + title + ")"); // if (name.equals(DatasetFieldConstant.title)) { // String toIndexTitle = datasetFieldValue.getStrValue(); // if (toIndexTitle != null && !toIndexTitle.isEmpty()) { // solrInputDocument.addField(SearchFields.TITLE, toIndexTitle); // } // } else if (name.equals(DatasetFieldConstant.authorName)) { // String toIndexAuthor = datasetFieldValue.getStrValue(); // if (toIndexAuthor != null && !toIndexAuthor.isEmpty()) { // logger.info("index this author: " + toIndexAuthor); // solrInputDocument.addField(SearchFields.AUTHOR_STRING, toIndexAuthor); // } // } else if (name.equals(DatasetFieldConstant.productionDate)) { // String toIndexProductionDateString = datasetFieldValue.getStrValue(); // logger.info("production date: " + toIndexProductionDateString); // if (toIndexProductionDateString != null && !toIndexProductionDateString.isEmpty()) { // SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); // try { // logger.info("Trying to convert " + toIndexProductionDateString + " to a YYYY date from dataset " + dataset.getId()); // Date productionDate = inputDateyyyy.parse(toIndexProductionDateString); // SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); // String productionYear = yearOnly.format(productionDate); // logger.info("YYYY only: " + productionYear); // solrInputDocument.addField(SearchFields.PRODUCTION_DATE_YEAR_ONLY, Integer.parseInt(productionYear)); // solrInputDocument.addField(SearchFields.PRODUCTION_DATE_ORIGINAL, productionDate); // } catch (Exception ex) { // logger.info("unable to convert " + toIndexProductionDateString + " into YYYY format"); // } // } // /** // * @todo: DRY! this is the same as above! 
// */ // } else if (name.equals(DatasetFieldConstant.distributionDate)) { // String toIndexdistributionDateString = datasetFieldValue.getStrValue(); // logger.info("distribution date: " + toIndexdistributionDateString); // if (toIndexdistributionDateString != null && !toIndexdistributionDateString.isEmpty()) { // SimpleDateFormat inputDateyyyy = new SimpleDateFormat("yyyy", Locale.ENGLISH); // try { // logger.info("Trying to convert " + toIndexdistributionDateString + " to a YYYY date from dataset " + dataset.getId()); // Date distributionDate = inputDateyyyy.parse(toIndexdistributionDateString); // SimpleDateFormat yearOnly = new SimpleDateFormat("yyyy"); // String distributionYear = yearOnly.format(distributionDate); // logger.info("YYYY only: " + distributionYear); // solrInputDocument.addField(SearchFields.DISTRIBUTION_DATE_YEAR_ONLY, Integer.parseInt(distributionYear)); // solrInputDocument.addField(SearchFields.DISTRIBUTION_DATE_ORIGINAL, distributionDate); // } catch (Exception ex) { // logger.info("unable to convert " + toIndexdistributionDateString + " into YYYY format"); // } // } // } else if (name.equals(DatasetFieldConstant.keywordValue)) { // String toIndexKeyword = datasetFieldValue.getStrValue(); // if (toIndexKeyword != null && !toIndexKeyword.isEmpty()) { // solrInputDocument.addField(SearchFields.KEYWORD, toIndexKeyword); // } // } else if (name.equals(DatasetFieldConstant.distributorName)) { // String toIndexDistributor = datasetFieldValue.getStrValue(); // if (toIndexDistributor != null && !toIndexDistributor.isEmpty()) { // solrInputDocument.addField(SearchFields.DISTRIBUTOR, toIndexDistributor); // } // } else if (name.equals(DatasetFieldConstant.description)) { // String toIndexDescription = datasetFieldValue.getStrValue(); // if (toIndexDescription != null && !toIndexDescription.isEmpty()) { // solrInputDocument.addField(SearchFields.DESCRIPTION, toIndexDescription); // } // } // } else { // notAdvancedSearchFields.add(idDashName); // logger.info(idDashName + " is not an advanced search field (" + title + ")"); // } } } solrInputDocument.addField(SearchFields.SUBTREE, dataversePaths); // solrInputDocument.addField(SearchFields.HOST_DATAVERSE, dataset.getOwner().getName()); solrInputDocument.addField(SearchFields.PARENT_ID, dataset.getOwner().getId()); solrInputDocument.addField(SearchFields.PARENT_NAME, dataset.getOwner().getName()); docs.add(solrInputDocument); List<DataFile> files = dataset.getFiles(); for (DataFile dataFile : files) { SolrInputDocument datafileSolrInputDocument = new SolrInputDocument(); datafileSolrInputDocument.addField(SearchFields.ID, "datafile_" + dataFile.getId()); datafileSolrInputDocument.addField(SearchFields.ENTITY_ID, dataFile.getId()); datafileSolrInputDocument.addField(SearchFields.TYPE, "files"); datafileSolrInputDocument.addField(SearchFields.NAME, dataFile.getName()); datafileSolrInputDocument.addField(SearchFields.NAME_SORT, dataFile.getName()); if (dataset.isReleased()) { /** * @todo: are datafiles supposed to have release dates? It's * null. For now just set something: https://redmine.hmdc.harvard.edu/issues/3806 */ // datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getReleaseDate()); datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getOwner().getOwner().getCreateDate()); datafileSolrInputDocument.addField(SearchFields.PERMS, publicGroupString); } else if (dataset.getOwner().getCreator() != null) { /** * todo why is dataFile.getCreateDate() null? 
For now I guess * we'll use the createDate of its parent datase's dataverset?! https://redmine.hmdc.harvard.edu/issues/3806 */ // datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getCreateDate()); datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getOwner().getOwner().getCreateDate()); datafileSolrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + dataset.getOwner().getCreator().getId()); /** * @todo: replace this fake version of granting users access to * dataverses with the real thing, when it's available in the * app */ if (dataset.getOwner().getCreator().getUserName().equals("pete")) { // figure out if cathy is around DataverseUser cathy = dataverseUserServiceBean.findByUserName("cathy"); if (cathy != null) { // let cathy see all of pete's dataverses datafileSolrInputDocument.addField(SearchFields.PERMS, groupPerUserPrefix + cathy.getId()); } } } else { /** * @todo: remove this once everyone has dropped their database * and won't get NPE's from dataverse.getCreator */ /** * todo why is dataFile.getCreateDate() null? For now I guess * we'll use the createDate of its parent dataset's dataverse?! https://redmine.hmdc.harvard.edu/issues/3806 */ // datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getCreateDate()); datafileSolrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataFile.getOwner().getOwner().getCreateDate()); datafileSolrInputDocument.addField(SearchFields.PERMS, npeGetCreator); } /** * @todo: remove this fake "has access to all data" group */ datafileSolrInputDocument.addField(SearchFields.PERMS, groupPrefix + tmpNsaGroupId); // For the mime type, we are going to index the "friendly" version, e.g., // "PDF File" instead of "application/pdf", "MS Excel" instead of // "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" (!), etc., // if available: datafileSolrInputDocument.addField(SearchFields.FILE_TYPE_MIME, dataFile.getFriendlyType()); // For the file type facets, we have a property file that maps mime types // to facet-friendly names; "application/fits" should become "FITS", etc.: datafileSolrInputDocument.addField(SearchFields.FILE_TYPE, FileUtil.getFacetFileType(dataFile)); datafileSolrInputDocument.addField(SearchFields.DESCRIPTION, dataFile.getDescription()); datafileSolrInputDocument.addField(SearchFields.SUBTREE, dataversePaths); // datafileSolrInputDocument.addField(SearchFields.HOST_DATAVERSE, dataFile.getOwner().getOwner().getName()); // datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getDataset().getTitle()); datafileSolrInputDocument.addField(SearchFields.PARENT_ID, dataFile.getOwner().getId()); if (!dataFile.getOwner().getLatestVersion().getTitle().isEmpty()) { datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getOwner().getLatestVersion().getTitle()); } // If this is a tabular data file -- i.e., if there are data // variables associated with this file, we index the variable // names and labels: if (dataFile.isTabularData()) { List<DataVariable> variables = dataFile.getDataTable().getDataVariables(); String variableNamesToIndex = null; String variableLabelsToIndex = null; for (DataVariable var : variables) { // Hard-coded search fields, for now: // TODO: immediately: define these as constants in SearchFields; // TODO: eventually: review, decide how datavariables should // be handled for indexing purposes. (should it be a fixed // setup, defined in the code? should it be flexible? 
unlikely // that this needs to be domain-specific... since these data // variables are quite specific to tabular data, which in turn // is something social science-specific... // anyway -- needs to be reviewed. -- L.A. 4.0alpha1 if (var.getName() != null && !var.getName().equals("")) { if (variableNamesToIndex == null) { variableNamesToIndex = var.getName(); } else { variableNamesToIndex = variableNamesToIndex + " " + var.getName(); } } if (var.getLabel() != null && !var.getLabel().equals("")) { if (variableLabelsToIndex == null) { variableLabelsToIndex = var.getLabel(); } else { variableLabelsToIndex = variableLabelsToIndex + " " + var.getLabel(); } } } if (variableNamesToIndex != null) { logger.info("indexing " + variableNamesToIndex.length() + " bytes"); datafileSolrInputDocument.addField("varname_s", variableNamesToIndex); } if (variableLabelsToIndex != null) { logger.info("indexing " + variableLabelsToIndex.length() + " bytes"); datafileSolrInputDocument.addField("varlabel_s", variableLabelsToIndex); } } // And if the file has indexable file-level metadata associated // with it, we'll index that too: List<FileMetadataFieldValue> fileMetadataFieldValues = dataFile.getFileMetadataFieldValues(); if (fileMetadataFieldValues != null && fileMetadataFieldValues.size() > 0) { for (int j = 0; j < fileMetadataFieldValues.size(); j++) { String fieldValue = fileMetadataFieldValues.get(j).getStrValue(); FileMetadataField fmf = fileMetadataFieldValues.get(j).getFileMetadataField(); String fileMetadataFieldName = fmf.getName(); String fileMetadataFieldFormatName = fmf.getFileFormatName(); String fieldName = fileMetadataFieldFormatName + "-" + fileMetadataFieldName + "_s"; datafileSolrInputDocument.addField(fieldName, fieldValue); } } docs.add(datafileSolrInputDocument); } /** * @todo allow for configuration of hostname and port */ SolrServer server = new HttpSolrServer("http://localhost:8983/solr/"); try { server.add(docs); } catch (SolrServerException | IOException ex) { return ex.toString(); } try { server.commit(); } catch (SolrServerException | IOException ex) { return ex.toString(); } return "indexed dataset " + dataset.getId(); // + ":" + dataset.getTitle(); } #location 7 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
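The rewritten indexDataset decides what to index from getLatestVersion() and getReleasedVersion(), and explicitly tolerates a null released version (an unpublished dataset), whereas the vulnerable version navigated long accessor chains such as dataset.getOwner().getCreator() that can blow up when an intermediate object is null. A minimal sketch of that decision table, with Version as a hypothetical stand-in for DatasetVersion:

final class VersionIndexPlanSketch {

    // Hypothetical stand-in for DatasetVersion.
    interface Version {
        boolean isWorkingCopy();
    }

    // The released version may legitimately be null for a dataset that has
    // never been published, so it is checked before every use.
    static String plan(Version latest, Version released) {
        if (latest.isWorkingCopy()) {
            return released != null
                    ? "index draft (creator-only) and released (public) docs"
                    : "index draft doc only; nothing public yet";
        }
        return released != null
                ? "index released doc and delete any stale draft doc"
                : "unexpected: latest is not a working copy but nothing is released";
    }
}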
#fixed code public synchronized String CalculateMD5 (String datafile) { FileInputStream fis = null; try { fis = new FileInputStream(datafile); } catch (FileNotFoundException ex) { throw new RuntimeException(ex); } return CalculateMD5(fis); /* byte[] dataBytes = new byte[1024]; int nread; try { while ((nread = fis.read(dataBytes)) != -1) { md.update(dataBytes, 0, nread); } } catch (IOException ex) { throw new RuntimeException(ex); } byte[] mdbytes = md.digest(); StringBuilder sb = new StringBuilder(""); for (int i = 0; i < mdbytes.length; i++) { sb.append(Integer.toString((mdbytes[i] & 0xff) + 0x100, 16).substring(1)); } return sb.toString(); */ }
#vulnerable code public synchronized String CalculateMD5 (String datafile) { MessageDigest md = null; try { md = MessageDigest.getInstance("MD5"); } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } FileInputStream fis = null; try { fis = new FileInputStream(datafile); } catch (FileNotFoundException ex) { throw new RuntimeException(ex); } byte[] dataBytes = new byte[1024]; int nread; try { while ((nread = fis.read(dataBytes)) != -1) { md.update(dataBytes, 0, nread); } } catch (IOException ex) { throw new RuntimeException(ex); } byte[] mdbytes = md.digest(); StringBuilder sb = new StringBuilder(""); for (int i = 0; i < mdbytes.length; i++) { sb.append(Integer.toString((mdbytes[i] & 0xff) + 0x100, 16).substring(1)); } return sb.toString(); } #location 22 #vulnerability type RESOURCE_LEAK
Below is the vulnerable code, please generate the patch based on the following information.
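The RESOURCE_LEAK here is the FileInputStream that the vulnerable CalculateMD5 opens but never closes; the patch routes the stream through an InputStream-based overload instead. An equivalent, self-contained way to guarantee the stream is closed on every path is try-with-resources, sketched below as a generic MD5 helper, not the project's actual MD5Checksum class.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

final class Md5Sketch {

    static String md5Hex(String path) throws IOException {
        MessageDigest md;
        try {
            md = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            // MD5 is a mandatory JCA algorithm, so this should not happen.
            throw new IllegalStateException(e);
        }
        // try-with-resources closes the stream whether reading succeeds or throws.
        try (InputStream in = new FileInputStream(path)) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
                md.update(buf, 0, n);
            }
        }
        StringBuilder sb = new StringBuilder();
        for (byte b : md.digest()) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }
}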
#fixed code public String save() { // Validate boolean dontSave = false; ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); Validator validator = factory.getValidator(); for (DatasetField dsf : editVersion.getFlatDatasetFields()) { for (DatasetFieldValue dsfv : dsf.getDatasetFieldValues()) { // dsfv.setValidationMessage(null); // clear out any existing validation message Set<ConstraintViolation<DatasetFieldValue>> constraintViolations = validator.validate(dsfv); for (ConstraintViolation<DatasetFieldValue> constraintViolation : constraintViolations) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Validation Error", constraintViolation.getMessage())); // dsfv.setValidationMessage(constraintViolation.getMessage()); dontSave = true; } } } if (dontSave) { return ""; } dataset.setOwner(dataverseService.find(ownerId)); //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); dataset.setIdentifier("5555"); /* * Save and/or ingest files, if there are any: */ if (newFiles != null && newFiles.size() > 0) { try { if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { /* Note that "createDirectories()" must be used - not * "createDirectory()", to make sure all the parent * directories that may not yet exist are created as well. */ Files.createDirectories(dataset.getFileSystemDirectory()); } } catch (IOException dirEx) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); } if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { for (DataFile dFile : newFiles) { String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); // These are all brand new files, so they should all have // one filemetadata total. You do NOT want to use // getLatestFilemetadata() here - because it relies on // comparing the object IDs of the corresponding datasetversions... // Which may not have been persisted yet. // -- L.A. 4.0 beta. FileMetadata fileMetadata = dFile.getFileMetadatas().get(0); String fileName = fileMetadata.getLabel(); //boolean ingestedAsTabular = false; boolean metadataExtracted = false; datasetService.generateFileSystemName(dFile); if (ingestService.ingestableAsTabular(dFile)) { /* * Note that we don't try to ingest the file right away - * instead we mark it as "scheduled for ingest", then at * the end of the save process it will be queued for async. * ingest in the background. In the meantime, the file * will be ingested as a regular, non-tabular file, and * appear as such to the user, until the ingest job is * finished with the Ingest Service. 
*/ dFile.SetIngestScheduled(); } else if (ingestService.fileMetadataExtractable(dFile)) { try { dFile.setContentType("application/fits"); metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); } catch (IOException mex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex); } if (metadataExtracted) { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName); } else { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName); } } // Try to save the file in its permanent location: //if (!ingestedAsTabular) { try { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); MD5Checksum md5Checksum = new MD5Checksum(); try { dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); } catch (Exception md5ex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName); } } catch (IOException ioex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); } //} // Any necessary post-processing: ingestService.performPostProcessingTasks(dFile); } } } Command<Dataset> cmd; try { if (editMode == EditMode.CREATE) { cmd = new CreateDatasetCommand(dataset, session.getUser()); } else { cmd = new UpdateDatasetCommand(dataset, session.getUser()); } dataset = commandEngine.submit(cmd); } catch (EJBException ex) { StringBuilder error = new StringBuilder(); error.append(ex + " "); error.append(ex.getMessage() + " "); Throwable cause = ex; while (cause.getCause() != null) { cause = cause.getCause(); error.append(cause + " "); error.append(cause.getMessage() + " "); } logger.info("Couldn't save dataset: " + error.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString())); return null; } catch (CommandException ex) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex); } newFiles.clear(); editMode = null; // Queue the ingest jobs for asynchronous execution: for (DataFile dataFile : dataset.getFiles()) { if (dataFile.isIngestScheduled()) { dataFile.SetIngestInProgress(); Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest."); ingestService.asyncIngestAsTabular(dataFile); } } return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true"; }
#vulnerable code public String save() { dataset.setOwner(dataverseService.find(ownerId)); //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); dataset.setIdentifier("5555"); /* * Save and/or ingest files, if there are any: */ if (newFiles != null && newFiles.size() > 0) { try { if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { /* Note that "createDirectories()" must be used - not * "createDirectory()", to make sure all the parent * directories that may not yet exist are created as well. */ Files.createDirectories(dataset.getFileSystemDirectory()); } } catch (IOException dirEx) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); } if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { for (DataFile dFile : newFiles) { String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); // These are all brand new files, so they should all have // one filemetadata total. You do NOT want to use // getLatestFilemetadata() here - because it relies on // comparing the object IDs of the corresponding datasetversions... // Which may not have been persisted yet. // -- L.A. 4.0 beta. FileMetadata fileMetadata = dFile.getFileMetadatas().get(0); String fileName = fileMetadata.getLabel(); //boolean ingestedAsTabular = false; boolean metadataExtracted = false; datasetService.generateFileSystemName(dFile); if (ingestService.ingestableAsTabular(dFile)) { /* * Note that we don't try to ingest the file right away - * instead we mark it as "scheduled for ingest", then at * the end of the save process it will be queued for async. * ingest in the background. In the meantime, the file * will be ingested as a regular, non-tabular file, and * appear as such to the user, until the ingest job is * finished with the Ingest Service. 
*/ dFile.SetIngestScheduled(); } else if (ingestService.fileMetadataExtractable(dFile)) { try { dFile.setContentType("application/fits"); metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); } catch (IOException mex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex); } if (metadataExtracted) { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName); } else { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName); } } // Try to save the file in its permanent location: //if (!ingestedAsTabular) { try { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); MD5Checksum md5Checksum = new MD5Checksum(); try { dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); } catch (Exception md5ex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName); } } catch (IOException ioex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); } //} // Any necessary post-processing: ingestService.performPostProcessingTasks(dFile); } } } Command<Dataset> cmd; try { if (editMode == EditMode.CREATE) { cmd = new CreateDatasetCommand(dataset, session.getUser()); } else { cmd = new UpdateDatasetCommand(dataset, session.getUser()); } dataset = commandEngine.submit(cmd); } catch (EJBException ex) { StringBuilder error = new StringBuilder(); error.append(ex + " "); error.append(ex.getMessage() + " "); Throwable cause = ex; while (cause.getCause() != null) { cause = cause.getCause(); error.append(cause + " "); error.append(cause.getMessage() + " "); } logger.info("Couldn't save dataset: " + error.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString())); return null; } catch (CommandException ex) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex); } newFiles.clear(); editMode = null; // Queue the ingest jobs for asynchronous execution: for (DataFile dataFile : dataset.getFiles()) { if (dataFile.isIngestScheduled()) { dataFile.SetIngestInProgress(); Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest."); ingestService.asyncIngestAsTabular(dataFile); } } return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true"; } #location 126 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
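The main addition in the fixed save() is a Bean Validation pass over every DatasetFieldValue before anything is persisted, aborting the save when any constraint fails. A minimal sketch of that guard using the standard javax.validation API follows; it assumes a Bean Validation provider (such as Hibernate Validator) is on the classpath, and uses a generic bean type rather than the real DatasetFieldValue.

import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;

final class ValidateBeforeSaveSketch {

    // Returns true only when the bean passes all of its declared constraints;
    // callers should skip the save (as the patched save() does) on false.
    static <T> boolean isValid(T bean) {
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        Set<ConstraintViolation<T>> violations = validator.validate(bean);
        for (ConstraintViolation<T> v : violations) {
            System.err.println("Validation error: " + v.getMessage());
        }
        factory.close();
        return violations.isEmpty();
    }
}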
#fixed code public void init() { if (dataset.getId() != null) { // view mode for a dataset dataset = datasetService.find(dataset.getId()); editVersion = dataset.getLatestVersion(); ownerId = dataset.getOwner().getId(); editVersion.setDatasetFields(editVersion.initDatasetFields()); datasetVersionUI = new DatasetVersionUI(editVersion); } else if (ownerId != null) { // create mode for a new child dataset editMode = EditMode.CREATE; dataset.setOwner(dataverseService.find(ownerId)); dataset.setVersions(new ArrayList()); editVersion.setDataset(dataset); editVersion.setFileMetadatas(new ArrayList()); editVersion.setVersionState(VersionState.DRAFT); editVersion.setDatasetFields(editVersion.initDatasetFields()); editVersion.setVersionNumber(new Long(1)); datasetVersionUI = new DatasetVersionUI(editVersion); //TODO add call to initDepositFields if it's decided that they are indeed metadata //initDepositFields(); dataset.getVersions().add(editVersion); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Add New Dataset", " - Enter metadata to create the dataset's citation. You can add more metadata about this dataset after it's created.")); } else { throw new RuntimeException("On Dataset page without id or ownerid."); // improve error handling } }
#vulnerable code public void init() { if (dataset.getId() != null) { // view mode for a dataset dataset = datasetService.find(dataset.getId()); editVersion = dataset.getLatestVersion(); ownerId = dataset.getOwner().getId(); editVersion.setDatasetFields(editVersion.initDatasetFields()); datasetVersionUI = new DatasetVersionUI(editVersion); } else if (ownerId != null) { // create mode for a new child dataset editMode = EditMode.CREATE; dataset.setOwner(dataverseService.find(ownerId)); dataset.setVersions(new ArrayList()); editVersion.setDataset(dataset); editVersion.setFileMetadatas(new ArrayList()); editVersion.setDatasetFields(null); editVersion.setVersionState(VersionState.DRAFT); editVersion.setDatasetFields(editVersion.initDatasetFields()); editVersion.setVersionNumber(new Long(1)); datasetVersionUI = new DatasetVersionUI(editVersion); //TODO add call to initDepositFields if it's decided that they are indeed metadata //initDepositFields(); dataset.getVersions().add(editVersion); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, "Add New Dataset", " - Enter metadata to create the dataset's citation. You can add more metadata about this dataset after it's created.")); } else { throw new RuntimeException("On Dataset page without id or ownerid."); // improve error handling } } #location 7 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
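The one-line difference in this pair is that the vulnerable init() called editVersion.setDatasetFields(null) immediately before initDatasetFields(); if the initializer reads the current field list (an assumption about its internals, which are not shown here), that null gets dereferenced. The fix simply drops the redundant null assignment. A small sketch of why nulling state ahead of an initializer that builds on it is dangerous:

import java.util.ArrayList;
import java.util.List;

final class InitFieldsSketch {

    private List<String> fields = new ArrayList<>();

    // An initializer that extends whatever is already present, as
    // initDatasetFields is assumed to do.
    List<String> initFields() {
        fields.add("defaultField"); // throws NullPointerException if a caller nulled the list first
        return fields;
    }

    void badInit() {
        fields = null;          // the pattern the patch removes
        fields = initFields();  // NPE inside initFields()
    }

    void goodInit() {
        fields = initFields();  // initialize in one step
    }
}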
#fixed code public void handleDropBoxUpload(ActionEvent e) { // Read JSON object from the output of the DropBox Chooser: JsonReader dbJsonReader = Json.createReader(new StringReader(dropBoxSelection)); JsonArray dbArray = dbJsonReader.readArray(); dbJsonReader.close(); for (int i = 0; i < dbArray.size(); i++) { JsonObject dbObject = dbArray.getJsonObject(i); // Extract the payload: String fileLink = dbObject.getString("link"); String fileName = dbObject.getString("name"); int fileSize = dbObject.getInt("bytes"); logger.info("DropBox url: " + fileLink + ", filename: " + fileName + ", size: " + fileSize); DataFile dFile = null; // Make http call, download the file: GetMethod dropBoxMethod = new GetMethod(fileLink); int status = 0; InputStream dropBoxStream = null; try { status = getClient().executeMethod(dropBoxMethod); if (status == 200) { dropBoxStream = dropBoxMethod.getResponseBodyAsStream(); // If we've made it this far, we must have been able to // make a successful HTTP call to the DropBox server and // obtain an InputStream - so we can now create a new // DataFile object: dFile = ingestService.createDataFile(editVersion, dropBoxStream, fileName, null); newFiles.add(dFile); } } catch (IOException ex) { logger.warning("Failed to access DropBox url: " + fileLink + "!"); continue; } finally { if (dropBoxMethod != null) { dropBoxMethod.releaseConnection(); } if (dropBoxStream != null) { try { dropBoxStream.close(); } catch (Exception ex) { //logger.whocares("..."); } } } } }
#vulnerable code public void handleDropBoxUpload(ActionEvent e) { // Read JSON object from the output of the DropBox Chooser: JsonReader dbJsonReader = Json.createReader(new StringReader(dropBoxSelection)); JsonArray dbArray = dbJsonReader.readArray(); dbJsonReader.close(); for (int i = 0; i < dbArray.size(); i++) { JsonObject dbObject = dbArray.getJsonObject(i); // Extract the payload: String fileLink = dbObject.getString("link"); String fileName = dbObject.getString("name"); int fileSize = dbObject.getInt("bytes"); logger.info("DropBox url: " + fileLink + ", filename: " + fileName + ", size: " + fileSize); DataFile dFile = null; // Make http call, download the file: GetMethod dropBoxMethod = new GetMethod(fileLink); int status = 0; InputStream dropBoxStream = null; try { status = getClient().executeMethod(dropBoxMethod); if (status == 200) { dropBoxStream = dropBoxMethod.getResponseBodyAsStream(); dFile = new DataFile("application/octet-stream"); dFile.setOwner(dataset); // save the file, in the temporary location for now: datasetService.generateFileSystemName(dFile); if (ingestService.getFilesTempDirectory() != null) { logger.info("Will attempt to save the DropBox file as: " + ingestService.getFilesTempDirectory() + "/" + dFile.getFileSystemName()); Files.copy(dropBoxStream, Paths.get(ingestService.getFilesTempDirectory(), dFile.getFileSystemName()), StandardCopyOption.REPLACE_EXISTING); File tempFile = Paths.get(ingestService.getFilesTempDirectory(), dFile.getFileSystemName()).toFile(); if (tempFile.exists()) { long writtenBytes = tempFile.length(); logger.info("File size, expected: " + fileSize + ", written: " + writtenBytes); } else { throw new IOException(); } } } } catch (IOException ex) { logger.warning("Failed to access DropBox url: " + fileLink + "!"); continue; } finally { if (dropBoxMethod != null) { dropBoxMethod.releaseConnection(); } if (dropBoxStream != null) { try { dropBoxStream.close(); } catch (Exception ex) { } } } // If we've made it this far, we must have downloaded the file // successfully, so let's finish processing it as a new DataFile // object: FileMetadata fmd = new FileMetadata(); fmd.setDataFile(dFile); dFile.getFileMetadatas().add(fmd); fmd.setLabel(fileName); fmd.setCategory(dFile.getContentType()); if (editVersion.getFileMetadatas() == null) { editVersion.setFileMetadatas(new ArrayList()); } editVersion.getFileMetadatas().add(fmd); fmd.setDatasetVersion(editVersion); dataset.getFiles().add(dFile); // When uploading files from dropBox, we don't get the benefit of // having the browser recognize the mime type of the file. So we'll // have to rely on our own utilities (Jhove, etc.) to try and determine // what it is. String fileType = null; try { fileType = FileUtil.determineFileType(Paths.get(ingestService.getFilesTempDirectory(), dFile.getFileSystemName()).toFile(), fileName); logger.fine("File utility recognized the file as " + fileType); if (fileType != null && !fileType.equals("")) { dFile.setContentType(fileType); } } catch (IOException ex) { logger.warning("Failed to run the file utility mime type check on file " + fileName); } newFiles.add(dFile); } } #location 64 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
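In the vulnerable loop, dFile is assigned only inside the status == 200 branch, yet the FileMetadata wiring after the try/finally uses it unconditionally, so any non-200 response leaves dFile null for the dereference flagged at location 64. The fix moves every use of dFile into the branch that creates it. The sketch below shows the shape of that control flow, with Fetcher and FileService as hypothetical stand-ins for the HTTP client and ingest service.

import java.io.IOException;
import java.io.InputStream;

final class DownloadLoopSketch {

    interface Fetcher {
        InputStream open(String url) throws IOException; // hypothetical HTTP helper
    }

    interface FileService {
        Object createFile(InputStream in, String name);  // hypothetical ingest helper
    }

    // The downloaded file is only touched inside the scope where it was
    // actually created; a failed fetch skips to the next item instead of
    // leaving a null reference for later code to dereference.
    static void fetchAll(Fetcher fetcher, FileService files, String[] urls) {
        for (String url : urls) {
            try (InputStream in = fetcher.open(url)) {
                Object file = files.createFile(in, url); // non-null on this path
                System.out.println("stored " + file);
            } catch (IOException ex) {
                System.err.println("failed to fetch " + url + ", skipping");
            }
        }
    }
}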
#fixed code public String save() { dataset.setOwner(dataverseService.find(ownerId)); //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); dataset.setIdentifier("5555"); //TODO add replication for logic if necessary if (replicationFor){ //dataset.getVersions().get(0).getDatasetFields(). } //Todo pre populate deposit date //If new ds get create date user if (dataset.getId() == null){ dataset.setCreator(session.getUser()); dataset.setCreateDate(new Timestamp(new Date().getTime())); } if (!(dataset.getVersions().get(0).getFileMetadatas() == null) && !dataset.getVersions().get(0).getFileMetadatas().isEmpty()) { int fmdIndex = 0; for (FileMetadata fmd : dataset.getVersions().get(0).getFileMetadatas()) { for (FileMetadata fmdTest : editVersion.getFileMetadatas()) { if (fmd.equals(fmdTest)) { dataset.getVersions().get(0).getFileMetadatas().get(fmdIndex).setDataFile(fmdTest.getDataFile()); } } fmdIndex++; } } /* * Save and/or ingest files, if there are any: */ if (newFiles != null && newFiles.size() > 0) { try { if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { /* Note that "createDirectories()" must be used - not * "createDirectory()", to make sure all the parent * directories that may not yet exist are created as well. */ Files.createDirectories(dataset.getFileSystemDirectory()); } } catch (IOException dirEx) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); } if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { for (UploadedFile uFile : newFiles.keySet()) { DataFile dFile = newFiles.get(uFile); String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); boolean ingestedAsTabular = false; boolean metadataExtracted = false; datasetService.generateFileSystemName(dFile); if (ingestService.ingestableAsTabular(dFile)) { /* try { ingestedAsTabular = ingestService.ingestAsTabular(tempFileLocation, dFile); dFile.setContentType("text/tab-separated-values"); } catch (IOException iex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, iex); ingestedAsTabular = false; } */ dFile.SetIngestScheduled(); } else if (ingestService.fileMetadataExtractable(dFile)) { try { dFile.setContentType("application/fits"); metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); } catch (IOException mex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + dFile.getName(), mex); } if (metadataExtracted) { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + dFile.getName()); } else { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + dFile.getName()); } } /* Try to save the file in its permanent location: * (unless it was already ingested and saved as tabular data) */ if (!ingestedAsTabular) { try { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); Files.copy(uFile.getInputstream(), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); MD5Checksum md5Checksum = new MD5Checksum(); try { dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); } catch 
(Exception md5ex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + dFile.getName()); } } catch (IOException ioex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); } } // Any necessary post-processing: ingestService.performPostProcessingTasks(dFile); } } } try { dataset = datasetService.save(dataset); } catch (EJBException ex) { StringBuilder error = new StringBuilder(); error.append(ex + " "); error.append(ex.getMessage() + " "); Throwable cause = ex; while (cause.getCause() != null) { cause = cause.getCause(); error.append(cause + " "); error.append(cause.getMessage() + " "); } logger.info("Couldn't save dataset: " + error.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString())); return null; } newFiles.clear(); editMode = null; // Queue the ingest jobs for asynchronous execution: for (DataFile dataFile : dataset.getFiles()) { if (dataFile.isIngestScheduled()) { dataFile.SetIngestInProgress(); Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getName() + " for ingest."); ingestService.asyncIngestAsTabular(dataFile); } } return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true"; }
#vulnerable code public String save() { dataset.setOwner(dataverseService.find(ownerId)); //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); dataset.setIdentifier("5555"); //TODO add replication for logic if necessary if (replicationFor){ //dataset.getVersions().get(0).getDatasetFields(). } //Todo pre populate deposit date //If new ds get create date user if (dataset.getId() == null){ dataset.setCreator(session.getUser()); dataset.setCreateDate(new Timestamp(new Date().getTime())); } if (!(dataset.getVersions().get(0).getFileMetadatas() == null) && !dataset.getVersions().get(0).getFileMetadatas().isEmpty()) { int fmdIndex = 0; for (FileMetadata fmd : dataset.getVersions().get(0).getFileMetadatas()) { for (FileMetadata fmdTest : editVersion.getFileMetadatas()) { if (fmd.equals(fmdTest)) { dataset.getVersions().get(0).getFileMetadatas().get(fmdIndex).setDataFile(fmdTest.getDataFile()); } } fmdIndex++; } } /* * Save and/or ingest files, if there are any: */ if (newFiles != null && newFiles.size() > 0) { try { if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { /* Note that "createDirectories()" must be used - not * "createDirectory()", to make sure all the parent * directories that may not yet exist are created as well. */ Files.createDirectories(dataset.getFileSystemDirectory()); } } catch (IOException dirEx) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); } if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { for (UploadedFile uFile : newFiles.keySet()) { DataFile dFile = newFiles.get(uFile); String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); boolean ingestedAsTabular = false; boolean metadataExtracted = false; datasetService.generateFileSystemName(dFile); if (ingestService.ingestableAsTabular(dFile)) { try { ingestedAsTabular = ingestService.ingestAsTabular(tempFileLocation, dFile); dFile.setContentType("text/tab-separated-values"); } catch (IOException iex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, iex); ingestedAsTabular = false; } } else if (ingestService.fileMetadataExtractable(dFile)) { try { dFile.setContentType("application/fits"); metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); } catch (IOException mex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + dFile.getName(), mex); } if (metadataExtracted) { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + dFile.getName()); } else { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + dFile.getName()); } } /* Try to save the file in its permanent location: * (unless it was already ingested and saved as tabular data) */ if (!ingestedAsTabular) { try { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); Files.copy(uFile.getInputstream(), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); MD5Checksum md5Checksum = new MD5Checksum(); try { dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); } catch (Exception md5ex) { 
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + dFile.getName()); } } catch (IOException ioex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); } } // Any necessary post-processing: ingestService.performPostProcessingTasks(dFile); } } } try { dataset = datasetService.save(dataset); } catch (EJBException ex) { StringBuilder error = new StringBuilder(); error.append(ex + " "); error.append(ex.getMessage() + " "); Throwable cause = ex; while (cause.getCause() != null) { cause = cause.getCause(); error.append(cause + " "); error.append(cause.getMessage() + " "); } logger.info("Couldn't save dataset: " + error.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString())); return null; } newFiles.clear(); editMode = null; return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true"; } #location 87 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
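The NULL_DEREFERENCE in the row above follows a common shape: a value such as dFile.getFileSystemLocation(), which may not have been assigned yet at that point in the flow, is dereferenced without a check. Below is a minimal sketch of the guard pattern; it is not code from the Dataverse repository, and SafeFileSave and saveTo are hypothetical names chosen for illustration.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.logging.Level;
import java.util.logging.Logger;

public class SafeFileSave {

    private static final Logger logger = Logger.getLogger(SafeFileSave.class.getName());

    /**
     * Copies an uploaded stream to a permanent location, but only after
     * verifying that a location has actually been assigned. Returns false
     * instead of throwing a NullPointerException when it has not.
     */
    public static boolean saveTo(Path location, InputStream in) {
        if (location == null) {
            // The vulnerable pattern calls location.toString() (or copies to it)
            // without this check and crashes here instead.
            logger.log(Level.WARNING, "No file-system location assigned; skipping save.");
            return false;
        }
        try {
            Files.copy(in, location, StandardCopyOption.REPLACE_EXISTING);
            return true;
        } catch (IOException ioex) {
            logger.log(Level.WARNING, "Failed to save the file " + location, ioex);
            return false;
        }
    }
}

The same check-before-dereference shape works for any nullable handle; the design choice worth noting is to log and degrade gracefully rather than let the NullPointerException escape the save loop.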
#fixed code @Path("dsPreview/{datasetId}") @GET @Produces({ "image/png" }) public InputStream dsPreview(@PathParam("datasetId") Long datasetId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ { Dataset dataset = datasetService.find(datasetId); if (dataset == null) { logger.warning("Preview: dataset service could not locate a Dataset object for id "+datasetId+"!"); return null; } String imageThumbFileName = null; List<DataFile> dataFiles = dataset.getFiles(); for (DataFile dataFile : dataFiles) { if (dataFile.isImage()) { imageThumbFileName = ImageThumbConverter.generateImageThumb(dataFile.getFileSystemLocation().toString(), 48); break; } } if (imageThumbFileName == null) { imageThumbFileName = getWebappImageResource (DEFAULT_DATASET_ICON); } if (imageThumbFileName != null) { InputStream in; try { in = new FileInputStream(imageThumbFileName); } catch (Exception ex) { // We don't particularly care what the reason why we have // failed to access the file was. // From the point of view of the download subsystem, it's a // binary operation -- it's either successfull or not. // If we can't access it for whatever reason, we are saying // it's 404 NOT FOUND in our HTTP response. return null; } return in; } return null; }
#vulnerable code @Path("dsPreview/{datasetId}") @GET @Produces({ "image/png" }) public InputStream dsPreview(@PathParam("datasetId") Long datasetId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ { Dataset dataset = datasetService.find(datasetId); String imageThumbFileName = null; List<DataFile> dataFiles = dataset.getFiles(); for (DataFile dataFile : dataFiles) { if (dataFile.isImage()) { imageThumbFileName = ImageThumbConverter.generateImageThumb(dataFile.getFileSystemLocation().toString(), 48); break; } } if (imageThumbFileName == null) { imageThumbFileName = getWebappImageResource (DEFAULT_DATASET_ICON); } if (imageThumbFileName != null) { InputStream in; try { in = new FileInputStream(imageThumbFileName); } catch (Exception ex) { // We don't particularly care what the reason why we have // failed to access the file was. // From the point of view of the download subsystem, it's a // binary operation -- it's either successfull or not. // If we can't access it for whatever reason, we are saying // it's 404 NOT FOUND in our HTTP response. return null; } return in; } return null; } #location 14 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
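This row's fix illustrates the standard remedy for a lookup that can return null: check the result of datasetService.find(datasetId) and bail out before touching dataset.getFiles(). An alternative is to wrap the null-returning lookup in an Optional at the boundary, as in the sketch below. LookupGuard and its in-memory store are hypothetical stand-ins, not Dataverse code.

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

public class LookupGuard {

    // Stand-in for an entity service whose find() may return null,
    // like datasetService.find(datasetId) in the row above.
    private static final Map<Long, String> store = new ConcurrentHashMap<>();

    /** Wraps the null-returning lookup in an Optional at the boundary. */
    static Optional<String> find(Long id) {
        return Optional.ofNullable(store.get(id));
    }

    public static void main(String[] args) {
        store.put(1L, "dataset-1");

        // The caller must now handle the missing case explicitly instead of
        // dereferencing a null result as the vulnerable code did.
        String label = find(42L).orElse("not found");
        System.out.println(label); // prints "not found" rather than throwing
    }
}

Pushing Optional to the service boundary makes the "entity may be missing" case part of the method signature, so callers cannot forget the check the way the vulnerable dsPreview did.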
#fixed code @Override public Statement getStatement(String editUri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException { this.swordConfiguration = (SwordConfigurationImpl) swordConfiguration; swordConfiguration = (SwordConfigurationImpl) swordConfiguration; if (authCredentials == null) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "auth credentials are null"); } if (swordAuth == null) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "swordAuth is null"); } DataverseUser vdcUser = swordAuth.auth(authCredentials); urlManager.processUrl(editUri); String globalId = urlManager.getTargetIdentifier(); if (urlManager.getTargetType().equals("study") && globalId != null) { logger.fine("request for sword statement by user " + vdcUser.getUserName()); Dataset study = datasetService.findByGlobalId(globalId); // try { // study = studyService.getStudyByGlobalId(globalId); // } catch (EJBException ex) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + editUri); // } Long studyId; try { studyId = study.getId(); } catch (NullPointerException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId); } Dataverse dvThatOwnsStudy = study.getOwner(); if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getGlobalId(); /** * @todo is it safe to use this? */ String author = study.getLatestVersion().getAuthorsStr(); String title = study.getLatestVersion().getTitle(); Date lastUpdated = study.getLatestVersion().getLastUpdateTime(); if (lastUpdated == null) { /** * @todo why is this date null? */ logger.info("why is lastUpdated null?"); lastUpdated = new Date(); } AtomDate atomDate = new AtomDate(lastUpdated); // AtomDate atomDate = new AtomDate(study.getLatestVersion().getLastUpdateTime()); String datedUpdated = atomDate.toString(); Statement statement = new AtomStatement(feedUri, author, title, datedUpdated); Map<String, String> states = new HashMap<String, String>(); states.put("latestVersionState", study.getLatestVersion().getVersionState().toString()); /** * @todo DVN 3.x had a studyLock. What's the equivalent in 4.0? */ // StudyLock lock = study.getStudyLock(); // if (lock != null) { // states.put("locked", "true"); // states.put("lockedDetail", lock.getDetail()); // states.put("lockedStartTime", lock.getStartTime().toString()); // } else { // states.put("locked", "false"); // } statement.setStates(states); List<FileMetadata> fileMetadatas = study.getLatestVersion().getFileMetadatas(); for (FileMetadata fileMetadata : fileMetadatas) { DataFile studyFile = fileMetadata.getDataFile(); // We are exposing the filename for informational purposes. The file id is what you // actually operate on to delete a file, etc. 
// // Replace spaces to avoid IRISyntaxException String fileNameFinal = fileMetadata.getLabel().replace(' ', '_'); String studyFileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + studyFile.getId() + "/" + fileNameFinal; IRI studyFileUrl; try { studyFileUrl = new IRI(studyFileUrlString); } catch (IRISyntaxException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL for file ( " + studyFileUrlString + " ) resulted in " + ex.getMessage()); } ResourcePart resourcePart = new ResourcePart(studyFileUrl.toString()); /** * @todo get this working. show the actual file type */ // resourcePart.setMediaType(studyFile.getOriginalFileFormat()); resourcePart.setMediaType("application/octet-stream"); /** * @todo: Why are properties set on a ResourcePart not * exposed when you GET a Statement? */ // Map<String, String> properties = new HashMap<String, String>(); // properties.put("filename", studyFile.getFileName()); // properties.put("category", studyFile.getLatestCategory()); // properties.put("originalFileType", studyFile.getOriginalFileType()); // properties.put("id", studyFile.getId().toString()); // properties.put("UNF", studyFile.getUnf()); // resourcePart.setProperties(properties); statement.addResource(resourcePart); } return statement; } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to view study with global ID " + globalId); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + editUri); } }
#vulnerable code @Override public Statement getStatement(String editUri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException { this.swordConfiguration = (SwordConfigurationImpl) swordConfiguration; swordConfiguration = (SwordConfigurationImpl) swordConfiguration; if (authCredentials == null) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "auth credentials are null"); } if (swordAuth == null) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "swordAuth is null"); } DataverseUser vdcUser = swordAuth.auth(authCredentials); urlManager.processUrl(editUri); String globalId = urlManager.getTargetIdentifier(); if (urlManager.getTargetType().equals("study") && globalId != null) { logger.fine("request for sword statement by user " + vdcUser.getUserName()); // Study study = null; /** * @todo don't hard code this, obviously. In DVN 3.x we had a method * for editStudyService.getStudyByGlobalId(globalId) */ // Study study = editStudyService.getStudyByGlobalId(globalId); long databaseIdForRoastingAtHomeDataset = 10; Dataset study = datasetService.find(databaseIdForRoastingAtHomeDataset); // try { // study = studyService.getStudyByGlobalId(globalId); // } catch (EJBException ex) { // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + editUri); // } Long studyId; try { studyId = study.getId(); } catch (NullPointerException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId); } Dataverse dvThatOwnsStudy = study.getOwner(); if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { /** * @todo getIdentifier is equivalent to getGlobalId, right? */ // String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getGlobalId(); String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getIdentifier(); /** * @todo is it safe to use this? */ String author = study.getLatestVersion().getAuthorsStr(); String title = study.getLatestVersion().getTitle(); Date lastUpdated = study.getLatestVersion().getLastUpdateTime(); if (lastUpdated == null) { /** * @todo why is this date null? */ logger.info("why is lastUpdated null?"); lastUpdated = new Date(); } AtomDate atomDate = new AtomDate(lastUpdated); // AtomDate atomDate = new AtomDate(study.getLatestVersion().getLastUpdateTime()); String datedUpdated = atomDate.toString(); Statement statement = new AtomStatement(feedUri, author, title, datedUpdated); Map<String, String> states = new HashMap<String, String>(); states.put("latestVersionState", study.getLatestVersion().getVersionState().toString()); /** * @todo DVN 3.x had a studyLock. What's the equivalent in 4.0? */ // StudyLock lock = study.getStudyLock(); // if (lock != null) { // states.put("locked", "true"); // states.put("lockedDetail", lock.getDetail()); // states.put("lockedStartTime", lock.getStartTime().toString()); // } else { // states.put("locked", "false"); // } statement.setStates(states); List<FileMetadata> fileMetadatas = study.getLatestVersion().getFileMetadatas(); for (FileMetadata fileMetadata : fileMetadatas) { DataFile studyFile = fileMetadata.getDataFile(); // We are exposing the filename for informational purposes. The file id is what you // actually operate on to delete a file, etc. 
// // Replace spaces to avoid IRISyntaxException String fileNameFinal = fileMetadata.getLabel().replace(' ', '_'); String studyFileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + studyFile.getId() + "/" + fileNameFinal; IRI studyFileUrl; try { studyFileUrl = new IRI(studyFileUrlString); } catch (IRISyntaxException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL for file ( " + studyFileUrlString + " ) resulted in " + ex.getMessage()); } ResourcePart resourcePart = new ResourcePart(studyFileUrl.toString()); /** * @todo get this working. show the actual file type */ // resourcePart.setMediaType(studyFile.getOriginalFileFormat()); resourcePart.setMediaType("application/octet-stream"); /** * @todo: Why are properties set on a ResourcePart not * exposed when you GET a Statement? */ // Map<String, String> properties = new HashMap<String, String>(); // properties.put("filename", studyFile.getFileName()); // properties.put("category", studyFile.getLatestCategory()); // properties.put("originalFileType", studyFile.getOriginalFileType()); // properties.put("id", studyFile.getId().toString()); // properties.put("UNF", studyFile.getUnf()); // resourcePart.setProperties(properties); statement.addResource(resourcePart); } return statement; } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to view study with global ID " + globalId); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + editUri); } } #location 40 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
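The fix above stops the crash by reaching study.getId() inside a try block and catching the resulting NullPointerException. That works, but using exceptions as control flow obscures intent; an explicit pre-check expresses the same thing directly. The sketch below is a hypothetical rework, not the committed patch; Study, requireStudyId, and the exception choice are illustrative only.

import java.util.logging.Logger;

public class ExplicitNullCheck {

    private static final Logger logger = Logger.getLogger(ExplicitNullCheck.class.getName());

    /** Hypothetical record standing in for a dataset looked up by global id. */
    record Study(Long id) {}

    /**
     * Fails fast with a clear message when the lookup came back empty,
     * instead of catching a NullPointerException after the fact.
     */
    static Long requireStudyId(Study study, String globalId) {
        if (study == null) {
            throw new IllegalArgumentException(
                    "couldn't find study with global ID of " + globalId);
        }
        return study.id();
    }

    public static void main(String[] args) {
        try {
            requireStudyId(null, "doi:10.5072/FK2/XXXXX");
        } catch (IllegalArgumentException ex) {
            logger.warning(ex.getMessage()); // clear failure, no NPE stack trace
        }
    }
}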
#fixed code public String save() { dataset.setOwner(dataverseService.find(ownerId)); //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); dataset.setIdentifier("5555"); /* * Save and/or ingest files, if there are any: */ if (newFiles != null && newFiles.size() > 0) { try { if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { /* Note that "createDirectories()" must be used - not * "createDirectory()", to make sure all the parent * directories that may not yet exist are created as well. */ Files.createDirectories(dataset.getFileSystemDirectory()); } } catch (IOException dirEx) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); } if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { for (DataFile dFile : newFiles) { String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); // These are all brand new files, so they should all have // one filemetadata total. You do NOT want to use // getLatestFilemetadata() here - because it relies on // comparing the object IDs of the corresponding datasetversions... // Which may not have been persisted yet. // -- L.A. 4.0 beta. FileMetadata fileMetadata = dFile.getFileMetadatas().get(0); String fileName = fileMetadata.getLabel(); //boolean ingestedAsTabular = false; boolean metadataExtracted = false; datasetService.generateFileSystemName(dFile); if (ingestService.ingestableAsTabular(dFile)) { /* * Note that we don't try to ingest the file right away - * instead we mark it as "scheduled for ingest", then at * the end of the save process it will be queued for async. * ingest in the background. In the meantime, the file * will be ingested as a regular, non-tabular file, and * appear as such to the user, until the ingest job is * finished with the Ingest Service. 
*/ dFile.SetIngestScheduled(); } else if (ingestService.fileMetadataExtractable(dFile)) { try { dFile.setContentType("application/fits"); metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); } catch (IOException mex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex); } if (metadataExtracted) { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName); } else { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName); } } // Try to save the file in its permanent location: //if (!ingestedAsTabular) { try { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); MD5Checksum md5Checksum = new MD5Checksum(); try { dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); } catch (Exception md5ex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName); } } catch (IOException ioex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); } //} // Any necessary post-processing: ingestService.performPostProcessingTasks(dFile); } } } Command<Dataset> cmd; try { if (editMode == EditMode.CREATE) { cmd = new CreateDatasetCommand(dataset, session.getUser()); } else { cmd = new UpdateDatasetCommand(dataset, session.getUser()); } dataset = commandEngine.submit(cmd); } catch (EJBException ex) { StringBuilder error = new StringBuilder(); error.append(ex + " "); error.append(ex.getMessage() + " "); Throwable cause = ex; while (cause.getCause() != null) { cause = cause.getCause(); error.append(cause + " "); error.append(cause.getMessage() + " "); } logger.info("Couldn't save dataset: " + error.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString())); return null; } catch (CommandException ex) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex); } newFiles.clear(); editMode = null; // Queue the ingest jobs for asynchronous execution: for (DataFile dataFile : dataset.getFiles()) { if (dataFile.isIngestScheduled()) { dataFile.SetIngestInProgress(); Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest."); ingestService.asyncIngestAsTabular(dataFile); } } return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true"; }
#vulnerable code public String save() { dataset.setOwner(dataverseService.find(ownerId)); //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); dataset.setIdentifier("5555"); //TODO update title in page itself if (replicationFor) { updateTitle(); } /* * Save and/or ingest files, if there are any: */ if (newFiles != null && newFiles.size() > 0) { try { if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { /* Note that "createDirectories()" must be used - not * "createDirectory()", to make sure all the parent * directories that may not yet exist are created as well. */ Files.createDirectories(dataset.getFileSystemDirectory()); } } catch (IOException dirEx) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); } if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { for (DataFile dFile : newFiles) { String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); // These are all brand new files, so they should all have // one filemetadata total. You do NOT want to use // getLatestFilemetadata() here - because it relies on // comparing the object IDs of the corresponding datasetversions... // Which may not have been persisted yet. // -- L.A. 4.0 beta. FileMetadata fileMetadata = dFile.getFileMetadatas().get(0); String fileName = fileMetadata.getLabel(); //boolean ingestedAsTabular = false; boolean metadataExtracted = false; datasetService.generateFileSystemName(dFile); if (ingestService.ingestableAsTabular(dFile)) { /* * Note that we don't try to ingest the file right away - * instead we mark it as "scheduled for ingest", then at * the end of the save process it will be queued for async. * ingest in the background. In the meantime, the file * will be ingested as a regular, non-tabular file, and * appear as such to the user, until the ingest job is * finished with the Ingest Service. 
*/ dFile.SetIngestScheduled(); } else if (ingestService.fileMetadataExtractable(dFile)) { try { dFile.setContentType("application/fits"); metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); } catch (IOException mex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex); } if (metadataExtracted) { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName); } else { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName); } } // Try to save the file in its permanent location: //if (!ingestedAsTabular) { try { Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); MD5Checksum md5Checksum = new MD5Checksum(); try { dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); } catch (Exception md5ex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName); } } catch (IOException ioex) { Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); } //} // Any necessary post-processing: ingestService.performPostProcessingTasks(dFile); } } } Command<Dataset> cmd; try { if (editMode == EditMode.CREATE) { cmd = new CreateDatasetCommand(dataset, session.getUser()); } else { cmd = new UpdateDatasetCommand(dataset, session.getUser()); } dataset = commandEngine.submit(cmd); } catch (EJBException ex) { StringBuilder error = new StringBuilder(); error.append(ex + " "); error.append(ex.getMessage() + " "); Throwable cause = ex; while (cause.getCause() != null) { cause = cause.getCause(); error.append(cause + " "); error.append(cause.getMessage() + " "); } logger.info("Couldn't save dataset: " + error.toString()); FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + error.toString())); return null; } catch (CommandException ex) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex); } newFiles.clear(); editMode = null; // Queue the ingest jobs for asynchronous execution: for (DataFile dataFile : dataset.getFiles()) { if (dataFile.isIngestScheduled()) { dataFile.SetIngestInProgress(); Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest."); ingestService.asyncIngestAsTabular(dataFile); } } return "/dataset.xhtml?id=" + dataset.getId() + "&faces-redirect=true"; } #location 130 #vulnerability type NULL_DEREFERENCE
Below is the vulnerable code, please generate the patch based on the following information.
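A recurring source of NULL_DEREFERENCE findings in code like the save() above is a chain of getters (dataFile.getFileMetadata().getLabel(), for example) where any link can be null. Optional.map() walks the same chain but short-circuits to a fallback instead of throwing. The sketch below uses hypothetical Metadata and DataFile records, not the Dataverse entities.

import java.util.Optional;

public class NestedAccess {

    /** Hypothetical stand-ins for a file and its possibly absent metadata. */
    record Metadata(String label) {}
    record DataFile(Metadata metadata) {}

    public static void main(String[] args) {
        DataFile withoutMetadata = new DataFile(null);

        // A raw chain like file.metadata().label() throws as soon as one
        // link is null; the Optional chain returns the fallback instead.
        String label = Optional.ofNullable(withoutMetadata)
                .map(DataFile::metadata)
                .map(Metadata::label)
                .orElse("(unnamed file)");

        System.out.println(label); // prints "(unnamed file)"
    }
}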