input
stringlengths 205
73.3k
| output
stringlengths 64
73.2k
| instruction
stringclasses 1
value |
---|---|---|
#vulnerable code
@Test
public void whenDesiredStateIsAdmin_serverStartupCreatesJavaOptionsEnvironment() {
configureServer("ms1").withDesiredState(ADMIN_STATE);
addWlsServer("ms1");
invokeStep();
assertThat(
getServerStartupInfo("ms1").getEnvironment(),
hasItem(envVar("JAVA_OPTIONS", "-Dweblogic.management.startupMode=ADMIN")));
}
#location 9
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void whenDesiredStateIsAdmin_serverStartupCreatesJavaOptionsEnvironment() {
configureServer("wls1").withDesiredState(ADMIN_STATE);
addWlsServer("wls1");
invokeStep();
assertThat(
getServerStartupInfo("wls1").getEnvironment(),
hasItem(envVar("JAVA_OPTIONS", "-Dweblogic.management.startupMode=ADMIN")));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testTwoDomainsManagedByTwoOperators() throws Exception {
Assume.assumeFalse(QUICKTEST);
String testMethodName = new Object() {}.getClass().getEnclosingMethod().getName();
logTestBegin(testMethodName);
logger.info("Creating Domain domain4 & verifing the domain creation");
logger.info("Checking if operator1 and domain1 are running, if not creating");
if (operator1 == null) {
operator1 = TestUtils.createOperator(opManagingdefaultAndtest1NSYamlFile);
}
Domain domain1 = null, domain2 = null;
boolean testCompletedSuccessfully = false;
try {
// load input yaml to map and add configOverrides
Map<String, Object> wlstDomainMap = TestUtils.loadYaml(domainOnPVUsingWLSTYamlFile);
wlstDomainMap.put("domainUID", "domain1onpvwlst");
domain1 = TestUtils.createDomain(wlstDomainMap);
domain1.verifyDomainCreated();
testBasicUseCases(domain1);
logger.info("Checking if operator2 is running, if not creating");
if (operator2 == null) {
operator2 = TestUtils.createOperator(opManagingtest2NSYamlFile);
}
// create domain5 with configured cluster
// ToDo: configured cluster support is removed from samples, modify the test to create
// configured cluster
Map<String, Object> wdtDomainMap = TestUtils.loadYaml(domainOnPVUsingWDTYamlFile);
wdtDomainMap.put("domainUID", "domain2onpvwdt");
// wdtDomainMap.put("clusterType", "Configured");
domain2 = TestUtils.createDomain(wdtDomainMap);
domain2.verifyDomainCreated();
testBasicUseCases(domain2);
logger.info("Verify the only remaining running domain domain1 is unaffected");
domain1.verifyDomainCreated();
testClusterScaling(operator2, domain2);
logger.info("Verify the only remaining running domain domain1 is unaffected");
domain1.verifyDomainCreated();
logger.info("Destroy and create domain4 and verify no impact on domain2");
domain1.destroy();
domain1.create();
logger.info("Verify no impact on domain2");
domain2.verifyDomainCreated();
testCompletedSuccessfully = true;
} finally {
String domainUidsToBeDeleted = "";
if (domain1 != null && (JENKINS || testCompletedSuccessfully)) {
// domain1.destroy();
// TestUtils.verifyBeforeDeletion(domain1);
domainUidsToBeDeleted = domain1.getDomainUid();
}
if (domain2 != null && (JENKINS || testCompletedSuccessfully)) {
// domain2.destroy();
domainUidsToBeDeleted = domainUidsToBeDeleted + "," + domain2.getDomainUid();
}
if (!domainUidsToBeDeleted.equals("")) {
logger.info("About to delete domains: " + domainUidsToBeDeleted);
TestUtils.deleteWeblogicDomainResources(domainUidsToBeDeleted);
TestUtils.verifyAfterDeletion(domain1);
TestUtils.verifyAfterDeletion(domain2);
}
}
logger.info("SUCCESS - " + testMethodName);
}
#location 68
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testTwoDomainsManagedByTwoOperators() throws Exception {
Assume.assumeFalse(QUICKTEST);
String testMethodName = new Object() {}.getClass().getEnclosingMethod().getName();
logTestBegin(testMethodName);
logger.info("Creating Domain domain4 & verifing the domain creation");
logger.info("Checking if operator1 and domain1 are running, if not creating");
if (operator1 == null) {
operator1 = TestUtils.createOperator(opManagingdefaultAndtest1NSYamlFile);
}
Domain domain1 = null, domain2 = null;
boolean testCompletedSuccessfully = false;
try {
// load input yaml to map and add configOverrides
Map<String, Object> wlstDomainMap = TestUtils.loadYaml(domainOnPVUsingWLSTYamlFile);
wlstDomainMap.put("domainUID", "domain1onpvwlst");
wlstDomainMap.put("adminNodePort", new Integer("30702"));
wlstDomainMap.put("t3ChannelPort", new Integer("30031"));
domain1 = TestUtils.createDomain(wlstDomainMap);
domain1.verifyDomainCreated();
testBasicUseCases(domain1);
logger.info("Checking if operator2 is running, if not creating");
if (operator2 == null) {
operator2 = TestUtils.createOperator(opManagingtest2NSYamlFile);
}
// create domain5 with configured cluster
// ToDo: configured cluster support is removed from samples, modify the test to create
// configured cluster
Map<String, Object> wdtDomainMap = TestUtils.loadYaml(domainOnPVUsingWDTYamlFile);
wdtDomainMap.put("domainUID", "domain2onpvwdt");
wdtDomainMap.put("adminNodePort", new Integer("30703"));
wdtDomainMap.put("t3ChannelPort", new Integer("30041"));
// wdtDomainMap.put("clusterType", "Configured");
domain2 = TestUtils.createDomain(wdtDomainMap);
domain2.verifyDomainCreated();
testBasicUseCases(domain2);
logger.info("Verify the only remaining running domain domain1 is unaffected");
domain1.verifyDomainCreated();
testClusterScaling(operator2, domain2);
logger.info("Verify the only remaining running domain domain1 is unaffected");
domain1.verifyDomainCreated();
logger.info("Destroy and create domain4 and verify no impact on domain2");
domain1.destroy();
domain1.create();
logger.info("Verify no impact on domain2");
domain2.verifyDomainCreated();
testCompletedSuccessfully = true;
} finally {
String domainUidsToBeDeleted = "";
if (domain1 != null && (JENKINS || testCompletedSuccessfully)) {
domainUidsToBeDeleted = domain1.getDomainUid();
}
if (domain2 != null && (JENKINS || testCompletedSuccessfully)) {
domainUidsToBeDeleted = domainUidsToBeDeleted + "," + domain2.getDomainUid();
}
if (!domainUidsToBeDeleted.equals("")) {
logger.info("About to delete domains: " + domainUidsToBeDeleted);
TestUtils.deleteWeblogicDomainResources(domainUidsToBeDeleted);
logger.info("domain1 domainMap " + domain1.getDomainMap());
TestUtils.verifyAfterDeletion(domain1);
logger.info("domain2 domainMap " + domain2.getDomainMap());
TestUtils.verifyAfterDeletion(domain2);
}
}
logger.info("SUCCESS - " + testMethodName);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void whenWlsServerNotInCluster_serverStartupInfoHasNoClusterConfig() {
configureServer("ms1");
addWlsServer("ms1");
invokeStep();
assertThat(getServerStartupInfo("ms1").getClusterName(), nullValue());
}
#location 8
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void whenWlsServerNotInCluster_serverStartupInfoHasNoClusterConfig() {
configureServer("wls1");
addWlsServer("wls1");
invokeStep();
assertThat(getServerStartupInfo("wls1").getClusterName(), nullValue());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testNamespaceWatch() {
Assume.assumeTrue(System.getProperty("os.name").toLowerCase().contains("nix"));
ClientHolder client = ClientHelper.getInstance().take();
try {
Watch<V1Namespace> watch = Watch.createWatch(
client.getApiClient(),
client.getCoreApiClient().listNamespaceCall(null,
null,
null,
null,
null,
5,
null,
60,
Boolean.TRUE,
null,
null),
new TypeToken<Watch.Response<V1Namespace>>() {
}.getType());
for (Watch.Response<V1Namespace> item : watch) {
System.out.printf("%s : %s%n", item.type, item.object.getMetadata().getName());
}
} catch (ApiException e) {
fail();
} catch (RuntimeException e) {
System.out.println("stream finished");
}
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testNamespaceWatch() throws Exception {
Assume.assumeTrue(TestUtils.isKubernetesAvailable());
ClientHolder client = ClientHelper.getInstance().take();
Watch<V1Namespace> watch = Watch.createWatch(
client.getApiClient(),
client.getCoreApiClient().listNamespaceCall(null,
null,
null,
null,
null,
5,
null,
60,
Boolean.TRUE,
null,
null),
new TypeToken<Watch.Response<V1Namespace>>() {
}.getType());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void stop() throws IOException {
final List<Channel> channels;
synchronized (managedChannelSet) {
channels = new ArrayList<Channel>(managedChannelSet);
managedChannelSet.clear();
}
for (Channel channel : channels) {
IoUtils.safeClose(channel);
}
for (NioSelectorRunnable runnable : readers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : writers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : connectors) {
runnable.shutdown();
}
readers.clear();
writers.clear();
connectors.clear();
if (executorService != null) {
try {
AccessController.doPrivileged(new PrivilegedAction<Void>() {
public Void run() {
executorService.shutdown();
return null;
}
});
} catch (Throwable t) {
log.trace(t, "Failed to shut down executor service");
} finally {
executorService = null;
}
}
}
#location 33
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void stop() throws IOException {
final List<Channel> channels;
synchronized (managedChannelSet) {
channels = new ArrayList<Channel>(managedChannelSet);
managedChannelSet.clear();
}
for (Channel channel : channels) {
IoUtils.safeClose(channel);
}
for (NioSelectorRunnable runnable : readers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : writers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : connectors) {
runnable.shutdown();
}
readers.clear();
writers.clear();
connectors.clear();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public MulticastMessageChannel createUdpServer(final InetSocketAddress bindAddress, final ReadChannelThread readThread, final WriteChannelThread writeThread, final ChannelListener<? super MulticastMessageChannel> bindListener, final OptionMap optionMap) throws IOException {
if (optionMap.get(Options.MULTICAST, false)) {
return new BioMulticastUdpChannel(optionMap.get(Options.SEND_BUFFER, 8192), optionMap.get(Options.RECEIVE_BUFFER, 8192), new MulticastSocket());
} else {
final DatagramChannel channel = DatagramChannel.open();
channel.configureBlocking(false);
channel.socket().bind(bindAddress);
final NioUdpChannel udpChannel = new NioUdpChannel(this, channel);
udpChannel.setReadThread(readThread);
udpChannel.setWriteThread(writeThread);
//noinspection unchecked
ChannelListeners.invokeChannelListener(udpChannel, bindListener);
return udpChannel;
}
}
#location 13
#vulnerability type RESOURCE_LEAK | #fixed code
public MulticastMessageChannel createUdpServer(final InetSocketAddress bindAddress, final ReadChannelThread readThread, final WriteChannelThread writeThread, final ChannelListener<? super MulticastMessageChannel> bindListener, final OptionMap optionMap) throws IOException {
if (optionMap.get(Options.MULTICAST, false)) {
final MulticastSocket socket = new MulticastSocket(bindAddress);
final BioMulticastUdpChannel channel = new BioMulticastUdpChannel(optionMap.get(Options.SEND_BUFFER, 8192), optionMap.get(Options.RECEIVE_BUFFER, 8192), socket);
channel.setReadThread(readThread);
channel.setWriteThread(writeThread);
channel.open();
//noinspection unchecked
ChannelListeners.invokeChannelListener(channel, bindListener);
return channel;
} else {
final DatagramChannel channel = DatagramChannel.open();
channel.configureBlocking(false);
channel.socket().bind(bindAddress);
final NioUdpChannel udpChannel = new NioUdpChannel(this, channel);
udpChannel.setReadThread(readThread);
udpChannel.setWriteThread(writeThread);
//noinspection unchecked
ChannelListeners.invokeChannelListener(udpChannel, bindListener);
return udpChannel;
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void start() throws IOException {
if (selectorThreadFactory == null) {
selectorThreadFactory = Executors.defaultThreadFactory();
}
if (executor == null) {
executor = executorService = Executors.newCachedThreadPool();
}
for (int i = 0; i < readSelectorThreads; i ++) {
readers.add(new NioSelectorRunnable());
}
for (int i = 0; i < writeSelectorThreads; i ++) {
writers.add(new NioSelectorRunnable());
}
for (int i = 0; i < connectionSelectorThreads; i ++) {
connectors.add(new NioSelectorRunnable());
}
for (NioSelectorRunnable runnable : readers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : writers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : connectors) {
selectorThreadFactory.newThread(runnable).start();
}
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void start() throws IOException {
if (selectorThreadFactory == null) {
selectorThreadFactory = Executors.defaultThreadFactory();
}
if (executor == null) {
executor = IoUtils.directExecutor();
}
for (int i = 0; i < readSelectorThreads; i ++) {
readers.add(new NioSelectorRunnable());
}
for (int i = 0; i < writeSelectorThreads; i ++) {
writers.add(new NioSelectorRunnable());
}
for (int i = 0; i < connectionSelectorThreads; i ++) {
connectors.add(new NioSelectorRunnable());
}
for (NioSelectorRunnable runnable : readers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : writers) {
selectorThreadFactory.newThread(runnable).start();
}
for (NioSelectorRunnable runnable : connectors) {
selectorThreadFactory.newThread(runnable).start();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public MulticastMessageChannel createUdpServer(final InetSocketAddress bindAddress, final ReadChannelThread readThread, final WriteChannelThread writeThread, final ChannelListener<? super MulticastMessageChannel> bindListener, final OptionMap optionMap) throws IOException {
if (optionMap.get(Options.MULTICAST, false)) {
return new BioMulticastUdpChannel(optionMap.get(Options.SEND_BUFFER, 8192), optionMap.get(Options.RECEIVE_BUFFER, 8192), new MulticastSocket());
} else {
final DatagramChannel channel = DatagramChannel.open();
channel.configureBlocking(false);
channel.socket().bind(bindAddress);
final NioUdpChannel udpChannel = new NioUdpChannel(this, channel);
udpChannel.setReadThread(readThread);
udpChannel.setWriteThread(writeThread);
//noinspection unchecked
ChannelListeners.invokeChannelListener(udpChannel, bindListener);
return udpChannel;
}
}
#location 3
#vulnerability type RESOURCE_LEAK | #fixed code
public MulticastMessageChannel createUdpServer(final InetSocketAddress bindAddress, final ReadChannelThread readThread, final WriteChannelThread writeThread, final ChannelListener<? super MulticastMessageChannel> bindListener, final OptionMap optionMap) throws IOException {
if (optionMap.get(Options.MULTICAST, false)) {
final MulticastSocket socket = new MulticastSocket(bindAddress);
final BioMulticastUdpChannel channel = new BioMulticastUdpChannel(optionMap.get(Options.SEND_BUFFER, 8192), optionMap.get(Options.RECEIVE_BUFFER, 8192), socket);
channel.setReadThread(readThread);
channel.setWriteThread(writeThread);
channel.open();
//noinspection unchecked
ChannelListeners.invokeChannelListener(channel, bindListener);
return channel;
} else {
final DatagramChannel channel = DatagramChannel.open();
channel.configureBlocking(false);
channel.socket().bind(bindAddress);
final NioUdpChannel udpChannel = new NioUdpChannel(this, channel);
udpChannel.setReadThread(readThread);
udpChannel.setWriteThread(writeThread);
//noinspection unchecked
ChannelListeners.invokeChannelListener(udpChannel, bindListener);
return udpChannel;
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public NioSocketStreamConnection accept() throws IOException {
final WorkerThread current = WorkerThread.getCurrent();
final NioTcpServerHandle handle = handles[current.getNumber()];
if (! handle.getConnection()) {
return null;
}
final SocketChannel accepted;
boolean ok = false;
try {
accepted = channel.accept();
if (accepted != null) try {
final SocketAddress localAddress = accepted.getLocalAddress();
int hash;
if (localAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) localAddress;
hash = address.getAddress().hashCode() * 23 + address.getPort();
} else if (localAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) localAddress).getName().hashCode();
} else {
hash = localAddress.hashCode();
}
final SocketAddress remoteAddress = accepted.getRemoteAddress();
if (remoteAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) remoteAddress;
hash = (address.getAddress().hashCode() * 23 + address.getPort()) * 23 + hash;
} else if (remoteAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) remoteAddress).getName().hashCode() * 23 + hash;
} else {
hash = localAddress.hashCode() * 23 + hash;
}
accepted.configureBlocking(false);
final Socket socket = accepted.socket();
socket.setKeepAlive(keepAlive != 0);
socket.setOOBInline(oobInline != 0);
socket.setTcpNoDelay(tcpNoDelay != 0);
final int sendBuffer = this.sendBuffer;
if (sendBuffer > 0) socket.setSendBufferSize(sendBuffer);
final WorkerThread ioThread = worker.getIoThread(hash);
final SelectionKey selectionKey = ioThread.registerChannel(accepted);
final NioSocketStreamConnection newConnection = new NioSocketStreamConnection(ioThread, selectionKey, handle);
newConnection.setOption(Options.READ_TIMEOUT, Integer.valueOf(readTimeout));
newConnection.setOption(Options.WRITE_TIMEOUT, Integer.valueOf(writeTimeout));
ok = true;
return newConnection;
} finally {
if (! ok) safeClose(accepted);
}
} catch (IOException e) {
return null;
} finally {
if (! ok) {
handle.freeConnection();
}
}
// by contract, only a resume will do
return null;
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
public NioSocketStreamConnection accept() throws IOException {
final WorkerThread current = WorkerThread.getCurrent();
if (current == null) {
return null;
}
final NioTcpServerHandle handle = handles[current.getNumber()];
if (! handle.getConnection()) {
return null;
}
final SocketChannel accepted;
boolean ok = false;
try {
accepted = channel.accept();
if (accepted != null) try {
final SocketAddress localAddress = accepted.getLocalAddress();
int hash;
if (localAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) localAddress;
hash = address.getAddress().hashCode() * 23 + address.getPort();
} else if (localAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) localAddress).getName().hashCode();
} else {
hash = localAddress.hashCode();
}
final SocketAddress remoteAddress = accepted.getRemoteAddress();
if (remoteAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) remoteAddress;
hash = (address.getAddress().hashCode() * 23 + address.getPort()) * 23 + hash;
} else if (remoteAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) remoteAddress).getName().hashCode() * 23 + hash;
} else {
hash = localAddress.hashCode() * 23 + hash;
}
accepted.configureBlocking(false);
final Socket socket = accepted.socket();
socket.setKeepAlive(keepAlive != 0);
socket.setOOBInline(oobInline != 0);
socket.setTcpNoDelay(tcpNoDelay != 0);
final int sendBuffer = this.sendBuffer;
if (sendBuffer > 0) socket.setSendBufferSize(sendBuffer);
final WorkerThread ioThread = worker.getIoThread(hash);
final SelectionKey selectionKey = ioThread.registerChannel(accepted);
final NioSocketStreamConnection newConnection = new NioSocketStreamConnection(ioThread, selectionKey, handle);
newConnection.setOption(Options.READ_TIMEOUT, Integer.valueOf(readTimeout));
newConnection.setOption(Options.WRITE_TIMEOUT, Integer.valueOf(writeTimeout));
ok = true;
return newConnection;
} finally {
if (! ok) safeClose(accepted);
}
} catch (IOException e) {
return null;
} finally {
if (! ok) {
handle.freeConnection();
}
}
// by contract, only a resume will do
return null;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public NioSocketStreamConnection accept() throws IOException {
final WorkerThread current = WorkerThread.getCurrent();
final NioTcpServerHandle handle = handles[current.getNumber()];
if (! handle.getConnection()) {
return null;
}
final SocketChannel accepted;
boolean ok = false;
try {
accepted = channel.accept();
if (accepted != null) try {
final SocketAddress localAddress = accepted.getLocalAddress();
int hash;
if (localAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) localAddress;
hash = address.getAddress().hashCode() * 23 + address.getPort();
} else if (localAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) localAddress).getName().hashCode();
} else {
hash = localAddress.hashCode();
}
final SocketAddress remoteAddress = accepted.getRemoteAddress();
if (remoteAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) remoteAddress;
hash = (address.getAddress().hashCode() * 23 + address.getPort()) * 23 + hash;
} else if (remoteAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) remoteAddress).getName().hashCode() * 23 + hash;
} else {
hash = localAddress.hashCode() * 23 + hash;
}
accepted.configureBlocking(false);
final Socket socket = accepted.socket();
socket.setKeepAlive(keepAlive != 0);
socket.setOOBInline(oobInline != 0);
socket.setTcpNoDelay(tcpNoDelay != 0);
final int sendBuffer = this.sendBuffer;
if (sendBuffer > 0) socket.setSendBufferSize(sendBuffer);
final WorkerThread ioThread = worker.getIoThread(hash);
final SelectionKey selectionKey = ioThread.registerChannel(accepted);
final NioSocketStreamConnection newConnection = new NioSocketStreamConnection(ioThread, selectionKey, handle);
newConnection.setOption(Options.READ_TIMEOUT, Integer.valueOf(readTimeout));
newConnection.setOption(Options.WRITE_TIMEOUT, Integer.valueOf(writeTimeout));
ok = true;
return newConnection;
} finally {
if (! ok) safeClose(accepted);
}
} catch (IOException e) {
return null;
} finally {
if (! ok) {
handle.freeConnection();
}
}
// by contract, only a resume will do
return null;
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
public NioSocketStreamConnection accept() throws IOException {
final WorkerThread current = WorkerThread.getCurrent();
if (current == null) {
return null;
}
final NioTcpServerHandle handle = handles[current.getNumber()];
if (! handle.getConnection()) {
return null;
}
final SocketChannel accepted;
boolean ok = false;
try {
accepted = channel.accept();
if (accepted != null) try {
final SocketAddress localAddress = accepted.getLocalAddress();
int hash;
if (localAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) localAddress;
hash = address.getAddress().hashCode() * 23 + address.getPort();
} else if (localAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) localAddress).getName().hashCode();
} else {
hash = localAddress.hashCode();
}
final SocketAddress remoteAddress = accepted.getRemoteAddress();
if (remoteAddress instanceof InetSocketAddress) {
final InetSocketAddress address = (InetSocketAddress) remoteAddress;
hash = (address.getAddress().hashCode() * 23 + address.getPort()) * 23 + hash;
} else if (remoteAddress instanceof LocalSocketAddress) {
hash = ((LocalSocketAddress) remoteAddress).getName().hashCode() * 23 + hash;
} else {
hash = localAddress.hashCode() * 23 + hash;
}
accepted.configureBlocking(false);
final Socket socket = accepted.socket();
socket.setKeepAlive(keepAlive != 0);
socket.setOOBInline(oobInline != 0);
socket.setTcpNoDelay(tcpNoDelay != 0);
final int sendBuffer = this.sendBuffer;
if (sendBuffer > 0) socket.setSendBufferSize(sendBuffer);
final WorkerThread ioThread = worker.getIoThread(hash);
final SelectionKey selectionKey = ioThread.registerChannel(accepted);
final NioSocketStreamConnection newConnection = new NioSocketStreamConnection(ioThread, selectionKey, handle);
newConnection.setOption(Options.READ_TIMEOUT, Integer.valueOf(readTimeout));
newConnection.setOption(Options.WRITE_TIMEOUT, Integer.valueOf(writeTimeout));
ok = true;
return newConnection;
} finally {
if (! ok) safeClose(accepted);
}
} catch (IOException e) {
return null;
} finally {
if (! ok) {
handle.freeConnection();
}
}
// by contract, only a resume will do
return null;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void stop() throws IOException {
final List<Channel> channels;
synchronized (managedChannelSet) {
channels = new ArrayList<Channel>(managedChannelSet);
managedChannelSet.clear();
}
for (Channel channel : channels) {
IoUtils.safeClose(channel);
}
for (NioSelectorRunnable runnable : readers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : writers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : connectors) {
runnable.shutdown();
}
readers.clear();
writers.clear();
connectors.clear();
if (executorService != null) {
try {
AccessController.doPrivileged(new PrivilegedAction<Void>() {
public Void run() {
executorService.shutdown();
return null;
}
});
} catch (Throwable t) {
log.trace(t, "Failed to shut down executor service");
} finally {
executorService = null;
}
}
}
#location 22
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void stop() throws IOException {
final List<Channel> channels;
synchronized (managedChannelSet) {
channels = new ArrayList<Channel>(managedChannelSet);
managedChannelSet.clear();
}
for (Channel channel : channels) {
IoUtils.safeClose(channel);
}
for (NioSelectorRunnable runnable : readers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : writers) {
runnable.shutdown();
}
for (NioSelectorRunnable runnable : connectors) {
runnable.shutdown();
}
readers.clear();
writers.clear();
connectors.clear();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
SchemaUtil.checkSchema(result);
return result;
}
#location 81
#vulnerability type NULL_DEREFERENCE | #fixed code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<Integer> categories = LabelUtil.createTargetCategories(numClasses);
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
checkSchema(result);
return result;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static
private MapValues createMapValues(FieldName name, String identifier, List<Feature> features, List<Double> coefficients){
DocumentBuilder documentBuilder = DOMUtil.createDocumentBuilder();
InlineTable inlineTable = new InlineTable();
List<String> columns = Arrays.asList("input", "output");
ListIterator<Feature> featureIt = features.listIterator();
ListIterator<Double> coefficientIt = coefficients.listIterator();
PMMLEncoder encoder = null;
while(featureIt.hasNext()){
Feature feature = featureIt.next();
Double coefficient = coefficientIt.next();
if(!(feature instanceof BinaryFeature)){
continue;
}
BinaryFeature binaryFeature = (BinaryFeature)feature;
if(!(name).equals(binaryFeature.getName())){
continue;
}
featureIt.remove();
coefficientIt.remove();
if(encoder == null){
encoder = binaryFeature.getEncoder();
}
Row row = DOMUtil.createRow(documentBuilder, columns, Arrays.asList(binaryFeature.getValue(), coefficient));
inlineTable.addRows(row);
}
MapValues mapValues = new MapValues()
.addFieldColumnPairs(new FieldColumnPair(name, columns.get(0)))
.setOutputColumn(columns.get(1))
.setInlineTable(inlineTable)
.setDefaultValue(ValueUtil.formatValue(0d));
DerivedField derivedField = encoder.createDerivedField(FieldName.create("lookup(" + name.getValue() + (identifier != null ? (", " + identifier) : "") + ")"), OpType.CONTINUOUS, DataType.DOUBLE, mapValues);
featureIt.add(new ContinuousFeature(encoder, derivedField));
coefficientIt.add(1d);
return mapValues;
}
#location 45
#vulnerability type NULL_DEREFERENCE | #fixed code
static
private MapValues createMapValues(FieldName name, String identifier, List<Feature> features, List<Double> coefficients){
ListIterator<Feature> featureIt = features.listIterator();
ListIterator<Double> coefficientIt = coefficients.listIterator();
PMMLEncoder encoder = null;
List<String> inputValues = new ArrayList<>();
List<String> outputValues = new ArrayList<>();
while(featureIt.hasNext()){
Feature feature = featureIt.next();
Double coefficient = coefficientIt.next();
if(!(feature instanceof BinaryFeature)){
continue;
}
BinaryFeature binaryFeature = (BinaryFeature)feature;
if(!(name).equals(binaryFeature.getName())){
continue;
}
featureIt.remove();
coefficientIt.remove();
if(encoder == null){
encoder = binaryFeature.getEncoder();
}
inputValues.add(binaryFeature.getValue());
outputValues.add(ValueUtil.formatValue(coefficient));
}
MapValues mapValues = PMMLUtil.createMapValues(name, inputValues, outputValues)
.setDefaultValue(ValueUtil.formatValue(0d));
DerivedField derivedField = encoder.createDerivedField(FieldName.create("lookup(" + name.getValue() + (identifier != null ? (", " + identifier) : "") + ")"), OpType.CONTINUOUS, DataType.DOUBLE, mapValues);
featureIt.add(new ContinuousFeature(encoder, derivedField));
coefficientIt.add(1d);
return mapValues;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static
private MapValues createMapValues(FieldName name, Object identifier, List<Feature> features, List<Double> coefficients){
ListIterator<Feature> featureIt = features.listIterator();
ListIterator<Double> coefficientIt = coefficients.listIterator();
PMMLEncoder encoder = null;
List<Object> inputValues = new ArrayList<>();
List<Double> outputValues = new ArrayList<>();
while(featureIt.hasNext()){
Feature feature = featureIt.next();
Double coefficient = coefficientIt.next();
if(!(feature instanceof BinaryFeature)){
continue;
}
BinaryFeature binaryFeature = (BinaryFeature)feature;
if(!(name).equals(binaryFeature.getName())){
continue;
}
featureIt.remove();
coefficientIt.remove();
if(encoder == null){
encoder = binaryFeature.getEncoder();
}
inputValues.add(binaryFeature.getValue());
outputValues.add(coefficient);
}
MapValues mapValues = PMMLUtil.createMapValues(name, inputValues, outputValues)
.setDefaultValue(0d);
DerivedField derivedField = encoder.createDerivedField(FieldName.create("lookup(" + name.getValue() + (identifier != null ? (", " + identifier) : "") + ")"), OpType.CONTINUOUS, DataType.DOUBLE, mapValues);
featureIt.add(new ContinuousFeature(encoder, derivedField));
coefficientIt.add(1d);
return mapValues;
}
#location 38
#vulnerability type NULL_DEREFERENCE | #fixed code
static
private MapValues createMapValues(FieldName name, Object identifier, List<Feature> features, List<Double> coefficients){
ListIterator<Feature> featureIt = features.listIterator();
ListIterator<Double> coefficientIt = coefficients.listIterator();
PMMLEncoder encoder = null;
List<Object> inputValues = new ArrayList<>();
List<Double> outputValues = new ArrayList<>();
while(featureIt.hasNext()){
Feature feature = featureIt.next();
Double coefficient = coefficientIt.next();
if(!(feature instanceof BinaryFeature)){
continue;
}
BinaryFeature binaryFeature = (BinaryFeature)feature;
if(!(name).equals(binaryFeature.getName())){
continue;
}
featureIt.remove();
coefficientIt.remove();
if(encoder == null){
encoder = binaryFeature.getEncoder();
}
inputValues.add(binaryFeature.getValue());
outputValues.add(coefficient);
}
MapValues mapValues = PMMLUtil.createMapValues(name, inputValues, outputValues)
.setDefaultValue(0d)
.setDataType(DataType.DOUBLE);
DerivedField derivedField = encoder.createDerivedField(FieldName.create("lookup(" + name.getValue() + (identifier != null ? (", " + identifier) : "") + ")"), OpType.CONTINUOUS, DataType.DOUBLE, mapValues);
featureIt.add(new ContinuousFeature(encoder, derivedField));
coefficientIt.add(1d);
return mapValues;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
int numClasses = classificationModel.numClasses();
if(numClasses != categoricalLabel.size()){
throw new IllegalArgumentException("Expected " + numClasses + " target categories, got " + categoricalLabel.size() + " target categories");
}
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1 && features.size() != numFeatures){
throw new IllegalArgumentException("Expected " + numFeatures + " features, got " + features.size() + " features");
}
}
Schema result = new Schema(label, features);
return result;
}
#location 80
#vulnerability type NULL_DEREFERENCE | #fixed code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
return result;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
SortedMap<Long, Long> getOffsets(MovieFragmentBox moof, long trackId) {
isoBufferWrapper = moof.getIsoFile().getOriginalIso();
SortedMap<Long, Long> offsets2Sizes = new TreeMap<Long, Long>();
List<TrackFragmentBox> traf = moof.getBoxes(TrackFragmentBox.class);
assert traf.size() == 1 : "I cannot deal with movie fragments containing more than one track fragment";
for (TrackFragmentBox trackFragmentBox : traf) {
if (trackFragmentBox.getTrackFragmentHeaderBox().getTrackId() == trackId) {
long baseDataOffset;
if (trackFragmentBox.getTrackFragmentHeaderBox().hasBaseDataOffset()) {
baseDataOffset = trackFragmentBox.getTrackFragmentHeaderBox().getBaseDataOffset();
} else {
baseDataOffset = moof.getOffset();
}
TrackRunBox trun = trackFragmentBox.getTrackRunBox();
long sampleBaseOffset = baseDataOffset + trun.getDataOffset();
long[] sampleOffsets = trun.getSampleOffsets();
long[] sampleSizes = trun.getSampleSizes();
for (int i = 0; i < sampleSizes.length; i++) {
offsets2Sizes.put(sampleOffsets[i] + sampleBaseOffset, sampleSizes[i]);
}
}
}
return offsets2Sizes;
}
#location 15
#vulnerability type NULL_DEREFERENCE | #fixed code
SortedMap<Long, Long> getOffsets(MovieFragmentBox moof, long trackId) {
isoBufferWrapper = moof.getIsoFile().getOriginalIso();
SortedMap<Long, Long> offsets2Sizes = new TreeMap<Long, Long>();
List<TrackFragmentBox> traf = moof.getBoxes(TrackFragmentBox.class);
for (TrackFragmentBox trackFragmentBox : traf) {
if (trackFragmentBox.getTrackFragmentHeaderBox().getTrackId() == trackId) {
long baseDataOffset;
if (trackFragmentBox.getTrackFragmentHeaderBox().hasBaseDataOffset()) {
baseDataOffset = trackFragmentBox.getTrackFragmentHeaderBox().getBaseDataOffset();
} else {
baseDataOffset = moof.getOffset();
}
for (TrackRunBox trun: trackFragmentBox.getBoxes(TrackRunBox.class)) {
long sampleBaseOffset = baseDataOffset + trun.getDataOffset();
long[] sampleOffsets = trun.getSampleOffsets();
long[] sampleSizes = trun.getSampleSizes();
for (int i = 0; i < sampleSizes.length; i++) {
offsets2Sizes.put(sampleOffsets[i] + sampleBaseOffset, sampleSizes[i]);
}
}
}
}
return offsets2Sizes;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void testRoundTrip_1(String resource) throws Exception {
File originalFile = File.createTempFile("pdcf", "original");
FileOutputStream fos = new FileOutputStream(originalFile);
byte[] content = read(getClass().getResourceAsStream(resource));
fos.write(content);
fos.close();
IsoFile isoFile = new IsoFile(InputStreamIsoBufferHelper.get(getClass().getResourceAsStream(resource), 20000));
isoFile.parse();
Walk.through(isoFile);
isoFile.parseMdats();
// isoFile.switchToAutomaticChunkOffsetBox();
// isoFile.getBoxes(MediaDataBox.class)[0].getSample(0).toString();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
isoFile.write(baos);
new FileOutputStream("/home/sannies/a").write(baos.toByteArray());
ArrayAssert.assertEquals(content, baos.toByteArray());
}
#location 18
#vulnerability type RESOURCE_LEAK | #fixed code
public void testRoundTrip_1(String resource) throws Exception {
File originalFile = File.createTempFile("pdcf", "original");
FileOutputStream fos = new FileOutputStream(originalFile);
byte[] content = read(getClass().getResourceAsStream(resource));
fos.write(content);
fos.close();
IsoFile isoFile = new IsoFile(InputStreamIsoBufferHelper.get(getClass().getResourceAsStream(resource), 20000));
isoFile.parse();
Walk.through(isoFile);
isoFile.parseMdats();
isoFile.switchToAutomaticChunkOffsetBox();
isoFile.getBoxes(MediaDataBox.class)[0].getSample(0).toString();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
isoFile.write(baos);
new FileOutputStream("/home/sannies/a").write(baos.toByteArray());
ArrayAssert.assertEquals(content, baos.toByteArray());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void parseMdat(MediaDataBox<TrackBox> mdat) {
mdat.getTrackMap().clear();
TreeMap<Long, Track<TrackBox>> trackIdsToTracksWithChunks = new TreeMap<Long, Track<TrackBox>>();
long[] trackNumbers = getTrackNumbers();
Map<Long, Long> trackToSampleCount = new HashMap<Long, Long>(trackNumbers.length);
Map<Long, List<Long>> trackToSyncSamples = new HashMap<Long, List<Long>>();
for (long trackNumber : trackNumbers) {
TrackMetaData<TrackBox> trackMetaData = getTrackMetaData(trackNumber);
trackIdsToTracksWithChunks.put(trackNumber, new Track<TrackBox>(trackNumber, trackMetaData, mdat));
SyncSampleBox syncSampleBox = trackMetaData.getSyncSampleBox();
if (syncSampleBox != null) {
long[] sampleNumbers = syncSampleBox.getSampleNumber();
ArrayList<Long> sampleNumberList = new ArrayList<Long>(sampleNumbers.length);
for (long sampleNumber : sampleNumbers) {
sampleNumberList.add(sampleNumber);
}
trackToSyncSamples.put(trackNumber, sampleNumberList);
}
trackToSampleCount.put(trackNumber, 1L);
}
long[] chunkOffsets = getChunkOffsets();
for (long chunkOffset : chunkOffsets) {
//chunk inside this mdat?
if (mdat.getStartOffset() > chunkOffset || chunkOffset > mdat.getStartOffset() + mdat.getSizeIfNotParsed()) {
System.out.println("Chunk offset " + chunkOffset + " not contained in " + this);
continue;
}
long track = getTrackIdForChunk(chunkOffset);
long[] sampleOffsets = getSampleOffsetsForChunk(chunkOffset);
long[] sampleSizes = getSampleSizesForChunk(chunkOffset);
for (int i = 1; i < sampleSizes.length; i++) {
assert sampleOffsets[i] == sampleSizes[i - 1] + sampleOffsets[i - 1];
}
Track<TrackBox> parentTrack = trackIdsToTracksWithChunks.get(track);
Chunk<TrackBox> chunk = new Chunk<TrackBox>(parentTrack, mdat, sampleSizes.length);
parentTrack.addChunk(chunk);
long trackId = parentTrack.getTrackId();
mdat.getTrackMap().put(trackId, parentTrack);
for (int i = 0; i < sampleOffsets.length; i++) {
Long currentSample = trackToSampleCount.get(trackId);
List<Long> syncSamples = trackToSyncSamples.get(trackId);
boolean syncSample = syncSamples != null && syncSamples.contains(currentSample);
MediaDataBox.SampleHolder<TrackBox> sh =
new MediaDataBox.SampleHolder<TrackBox>(
new SampleImpl<TrackBox>(isoBufferWrapper, chunkOffset + sampleOffsets[i], sampleSizes[i], chunk, syncSample));
mdat.getSampleList().add(sh);
chunk.addSample(sh);
trackToSampleCount.put(trackId, currentSample + 1);
}
}
}
#location 61
#vulnerability type NULL_DEREFERENCE | #fixed code
public void parseMdat(MediaDataBox<TrackBox> mdat) {
mdat.getTrackMap().clear();
TreeMap<Long, Track<TrackBox>> trackIdsToTracksWithChunks = new TreeMap<Long, Track<TrackBox>>();
long[] trackNumbers = getTrackNumbers();
Map<Long, Long> trackToSampleCount = new HashMap<Long, Long>(trackNumbers.length);
Map<Long, List<Long>> trackToSyncSamples = new HashMap<Long, List<Long>>();
for (long trackNumber : trackNumbers) {
TrackMetaData<TrackBox> trackMetaData = getTrackMetaData(trackNumber);
trackIdsToTracksWithChunks.put(trackNumber, new Track<TrackBox>(trackNumber, trackMetaData, mdat));
SyncSampleBox syncSampleBox = trackMetaData.getSyncSampleBox();
if (syncSampleBox != null) {
long[] sampleNumbers = syncSampleBox.getSampleNumber();
ArrayList<Long> sampleNumberList = new ArrayList<Long>(sampleNumbers.length);
for (long sampleNumber : sampleNumbers) {
sampleNumberList.add(sampleNumber);
}
trackToSyncSamples.put(trackNumber, sampleNumberList);
}
trackToSampleCount.put(trackNumber, 1L);
}
long[] chunkOffsets = getChunkOffsets();
for (long chunkOffset : chunkOffsets) {
//chunk inside this mdat?
if (mdat.getStartOffset() > chunkOffset || chunkOffset > mdat.getStartOffset() + mdat.getSizeIfNotParsed()) {
System.out.println("Chunk offset " + chunkOffset + " not contained in " + this);
continue;
}
long track = getTrackIdForChunk(chunkOffset);
long[] sampleOffsets = getSampleOffsetsForChunk(chunkOffset);
long[] sampleSizes = getSampleSizesForChunk(chunkOffset);
for (int i = 1; i < sampleSizes.length; i++) {
assert sampleOffsets[i] == sampleSizes[i - 1] + sampleOffsets[i - 1];
}
Track<TrackBox> parentTrack = trackIdsToTracksWithChunks.get(track);
Chunk<TrackBox> chunk = new Chunk<TrackBox>(parentTrack, mdat, sampleSizes.length);
parentTrack.addChunk(chunk);
long trackId = parentTrack.getTrackId();
mdat.getTrackMap().put(trackId, parentTrack);
for (int i = 0; i < sampleOffsets.length; i++) {
Long currentSample = trackToSampleCount.get(trackId);
List<Long> syncSamples = trackToSyncSamples.get(trackId);
boolean syncSample = syncSamples != null && syncSamples.contains(currentSample);
SampleImpl<TrackBox> sample = createSample(isoBufferWrapper, chunkOffset + sampleOffsets[i], sampleOffsets[i], sampleSizes[i], parentTrack, chunk, currentSample, syncSample);
MediaDataBox.SampleHolder<TrackBox> sh = new MediaDataBox.SampleHolder<TrackBox>(sample);
mdat.getSampleList().add(sh);
chunk.addSample(sh);
trackToSampleCount.put(trackId, currentSample + 1);
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static boolean setDependencyVersion( final ModifiedPomXMLEventReader pom, final String groupId,
final String artifactId, final String oldVersion,
final String newVersion )
throws XMLStreamException
{
Stack<String> stack = new Stack<String>();
String path = "";
final Pattern matchScopeRegex;
final Pattern matchTargetRegex;
boolean inMatchScope = false;
boolean madeReplacement = false;
boolean haveGroupId = false;
boolean haveArtifactId = false;
boolean haveOldVersion = false;
matchScopeRegex = Pattern.compile( "/project" + "(/profiles/profile)?" +
"((/dependencyManagement)|(/build(/pluginManagement)?/plugins/plugin))?"
+ "/dependencies/dependency" );
matchTargetRegex = Pattern.compile( "/project" + "(/profiles/profile)?" +
"((/dependencyManagement)|(/build(/pluginManagement)?/plugins/plugin))?"
+ "/dependencies/dependency" +
"((/groupId)|(/artifactId)|(/version))" );
pom.rewind();
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartElement() )
{
stack.push( path );
final String elementName = event.asStartElement().getName().getLocalPart();
path = new StringBuilder().append( path ).append( "/" ).append( elementName ).toString();
if ( matchScopeRegex.matcher( path ).matches() )
{
// we're in a new match scope
// reset any previous partial matches
inMatchScope = true;
pom.clearMark( 0 );
pom.clearMark( 1 );
haveGroupId = false;
haveArtifactId = false;
haveOldVersion = false;
}
else if ( inMatchScope && matchTargetRegex.matcher( path ).matches() )
{
if ( "groupId".equals( elementName ) )
{
haveGroupId = groupId.equals( pom.getElementText().trim() );
path = stack.pop();
}
else if ( "artifactId".equals( elementName ) )
{
haveArtifactId = artifactId.equals( pom.getElementText().trim() );
path = stack.pop();
}
else if ( "version".equals( elementName ) )
{
pom.mark( 0 );
}
}
}
if ( event.isEndElement() )
{
if ( matchTargetRegex.matcher( path ).matches() && "version".equals(
event.asEndElement().getName().getLocalPart() ) )
{
pom.mark( 1 );
String compressedPomVersion = StringUtils.deleteWhitespace( pom.getBetween( 0, 1 ).trim() );
String compressedOldVersion = StringUtils.deleteWhitespace( oldVersion );
try
{
haveOldVersion = isVersionOverlap( compressedOldVersion, compressedPomVersion );
}
catch ( InvalidVersionSpecificationException e )
{
// fall back to string comparison
haveOldVersion = compressedOldVersion.equals( compressedPomVersion );
}
}
else if ( matchScopeRegex.matcher( path ).matches() )
{
if ( inMatchScope && pom.hasMark( 0 ) && pom.hasMark( 1 ) && haveGroupId && haveArtifactId &&
haveOldVersion )
{
pom.replaceBetween( 0, 1, newVersion );
madeReplacement = true;
}
pom.clearMark( 0 );
pom.clearMark( 1 );
haveArtifactId = false;
haveGroupId = false;
haveOldVersion = false;
inMatchScope = false;
}
path = stack.pop();
}
}
return madeReplacement;
}
#location 30
#vulnerability type NULL_DEREFERENCE | #fixed code
public static boolean setDependencyVersion( final ModifiedPomXMLEventReader pom, final String groupId,
final String artifactId, final String oldVersion,
final String newVersion )
throws XMLStreamException
{
Stack<String> stack = new Stack<String>();
String path = "";
Set<String> implicitPaths = new HashSet<String>(
Arrays.<String>asList( "/project/parent/groupId", "/project/parent/artifactId", "/project/parent/version",
"/project/groupId", "/project/artifactId", "/project/version" ) );
Map<String, String> implicitProperties = new HashMap<String, String>();
pom.rewind();
while ( pom.hasNext() )
{
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartElement() )
{
stack.push( path );
final String elementName = event.asStartElement().getName().getLocalPart();
path = path + "/" + elementName;
if ( implicitPaths.contains( path ) )
{
final String elementText = pom.getElementText().trim();
implicitProperties.put( path.substring( 1 ).replace( '/', '.' ), elementText );
path = stack.pop();
}
}
if ( event.isEndElement() )
{
path = stack.pop();
}
}
}
boolean modified = true;
while ( modified )
{
modified = false;
for ( Map.Entry<String, String> entry : implicitProperties.entrySet() )
{
if ( entry.getKey().contains( ".parent" ) )
{
String child = entry.getKey().replace( ".parent", "" );
if ( !implicitProperties.containsKey( child ) )
{
implicitProperties.put( child, entry.getValue() );
modified = true;
break;
}
}
}
}
System.out.println( "Props: " + implicitProperties );
stack = new Stack<String>();
path = "";
boolean inMatchScope = false;
boolean madeReplacement = false;
boolean haveGroupId = false;
boolean haveArtifactId = false;
boolean haveOldVersion = false;
final Pattern matchScopeRegex = Pattern.compile( "/project" + "(/profiles/profile)?" +
"((/dependencyManagement)|(/build(/pluginManagement)?/plugins/plugin))?"
+ "/dependencies/dependency" );
final Pattern matchTargetRegex = Pattern.compile( "/project" + "(/profiles/profile)?" +
"((/dependencyManagement)|(/build(/pluginManagement)?/plugins/plugin))?"
+ "/dependencies/dependency" +
"((/groupId)|(/artifactId)|(/version))" );
pom.rewind();
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartElement() )
{
stack.push( path );
final String elementName = event.asStartElement().getName().getLocalPart();
path = path + "/" + elementName;
if ( matchScopeRegex.matcher( path ).matches() )
{
// we're in a new match scope
// reset any previous partial matches
inMatchScope = true;
pom.clearMark( 0 );
pom.clearMark( 1 );
haveGroupId = false;
haveArtifactId = false;
haveOldVersion = false;
}
else if ( inMatchScope && matchTargetRegex.matcher( path ).matches() )
{
if ( "groupId".equals( elementName ) )
{
haveGroupId = groupId.equals( evaluate( pom.getElementText().trim(), implicitProperties ) );
path = stack.pop();
}
else if ( "artifactId".equals( elementName ) )
{
haveArtifactId =
artifactId.equals( evaluate( pom.getElementText().trim(), implicitProperties ) );
path = stack.pop();
}
else if ( "version".equals( elementName ) )
{
pom.mark( 0 );
}
}
}
if ( event.isEndElement() )
{
if ( matchTargetRegex.matcher( path ).matches() && "version".equals(
event.asEndElement().getName().getLocalPart() ) )
{
pom.mark( 1 );
String compressedPomVersion = StringUtils.deleteWhitespace( pom.getBetween( 0, 1 ).trim() );
String compressedOldVersion = StringUtils.deleteWhitespace( oldVersion );
try
{
haveOldVersion = isVersionOverlap( compressedOldVersion, compressedPomVersion );
}
catch ( InvalidVersionSpecificationException e )
{
// fall back to string comparison
haveOldVersion = compressedOldVersion.equals( compressedPomVersion );
}
}
else if ( matchScopeRegex.matcher( path ).matches() )
{
if ( inMatchScope && pom.hasMark( 0 ) && pom.hasMark( 1 ) && haveGroupId && haveArtifactId &&
haveOldVersion )
{
pom.replaceBetween( 0, 1, newVersion );
madeReplacement = true;
}
pom.clearMark( 0 );
pom.clearMark( 1 );
haveArtifactId = false;
haveGroupId = false;
haveOldVersion = false;
inMatchScope = false;
}
path = stack.pop();
}
}
return madeReplacement;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void useLatestReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
Artifact artifact = this.findArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
try
{
ArtifactVersion[] newer = versions.getNewerVersions( artifact.getSelectedVersion(), false );
if ( newer.length > 0 )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newer[newer.length - 1].toString() ) )
{
getLog().debug(
"Version set to " + newer[newer.length - 1].toString() + " for dependnecy: " + dep );
}
}
}
catch ( OverConstrainedVersionException e )
{
getLog().warn( "This should never happen as your build should not work at all if this is thrown",
e );
}
}
}
}
#location 25
#vulnerability type NULL_DEREFERENCE | #fixed code
private void useLatestReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
getLog().debug( "Looking for newer versions of " + toString( dep ) );
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
ArtifactVersion[] newer = versions.getNewerVersions( version, false );
if ( newer.length > 0 )
{
String newVersion = newer[newer.length - 1].toString();
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + newVersion );
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void useReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( versionMatcher.matches() )
{
String releaseVersion = versionMatcher.group( 1 );
Artifact artifact = this.findArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
if ( versions.containsVersion( releaseVersion ) )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
releaseVersion ) )
{
getLog().debug( "Version set to " + releaseVersion + " for dependnecy: " + dep );
}
}
}
}
}
#location 26
#vulnerability type NULL_DEREFERENCE | #fixed code
private void useReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( versionMatcher.matches() )
{
String releaseVersion = versionMatcher.group( 1 );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
getLog().debug( "Looking for a release of " + toString( dep ) );
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
if ( versions.containsVersion( releaseVersion ) )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
releaseVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + releaseVersion );
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void execute()
throws MojoExecutionException, MojoFailureException
{
Set childModules = getAllChildModules( getProject() );
removeMissingChildModules( getProject(), childModules );
Iterator i = childModules.iterator();
MojoExecutionException pbe = null;
while ( i.hasNext() )
{
String modulePath = (String) i.next();
File moduleDir = new File( getProject().getBasedir(), modulePath );
File moduleProjectFile;
if ( moduleDir.isDirectory() )
{
moduleProjectFile = new File( moduleDir, "pom.xml" );
}
else
{
// i don't think this should ever happen... but just in case
// the module references the file-name
moduleProjectFile = moduleDir;
}
try
{
// the aim of this goal is to fix problems when the project cannot be parsed by Maven
// so we have to parse the file by hand!
StringBuffer childPom = readFile( moduleProjectFile );
ModifiedPomXMLEventReader pom = newModifiedPomXER( childPom );
Stack stack = new Stack();
String path = "";
String groupId = null;
String artifactId = null;
String version = null;
Pattern pattern = Pattern.compile( "/project/parent/(groupId|artifactId|version)" );
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartDocument() )
{
path = "";
stack.clear();
}
else if ( event.isStartElement() )
{
stack.push( path );
path = path + "/" + event.asStartElement().getName().getLocalPart();
if ( pattern.matcher( path ).matches() )
{
String text = pom.getElementText().trim();
if ( path.endsWith( "groupId" ) )
{
groupId = text;
}
else if ( path.endsWith( "artifactId" ) )
{
artifactId = text;
}
else if ( path.endsWith( "version" ) )
{
version = text;
}
path = (String) stack.pop();
}
}
else if ( event.isEndElement() )
{
if ( "/project/parent".equals( path ) )
{
getLog().info( "Module: " + modulePath );
if ( getProject().getGroupId().equals( groupId ) && getProject().getArtifactId().equals(
artifactId ) )
{
if ( getProject().getVersion().equals( version ) )
{
getLog().info( " Parent is "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
}
else
{
getLog().info( " Parent was "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":" + version
+ ", now " + ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
process( moduleProjectFile );
}
}
else
{
getLog().info( " does not use "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + " as its parent" );
}
}
path = (String) stack.pop();
}
}
}
catch ( XMLStreamException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
catch ( IOException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
}
if ( pbe != null )
{
// ok, now throw the first one to blow up.
throw pbe;
}
}
#location 47
#vulnerability type NULL_DEREFERENCE | #fixed code
public void execute()
throws MojoExecutionException, MojoFailureException
{
Set childModules = PomHelper.getAllChildModules( getProject(), getLog() );
PomHelper.removeMissingChildModules( getLog(), getProject(), childModules );
Iterator i = childModules.iterator();
MojoExecutionException pbe = null;
while ( i.hasNext() )
{
String modulePath = (String) i.next();
File moduleDir = new File( getProject().getBasedir(), modulePath );
File moduleProjectFile;
if ( moduleDir.isDirectory() )
{
moduleProjectFile = new File( moduleDir, "pom.xml" );
}
else
{
// i don't think this should ever happen... but just in case
// the module references the file-name
moduleProjectFile = moduleDir;
}
try
{
// the aim of this goal is to fix problems when the project cannot be parsed by Maven
// so we have to parse the file by hand!
StringBuffer childPom = readFile( moduleProjectFile );
ModifiedPomXMLEventReader pom = newModifiedPomXER( childPom );
Artifact parent = PomHelper.getProjectParent( pom, getHelper() );
if ( parent == null )
{
getLog().info( "Module: " + modulePath + " does not have a parent" );
}
else if ( !getProject().getGroupId().equals( parent.getGroupId() )
|| !getProject().getArtifactId().equals( parent.getArtifactId() ) )
{
getLog().info( "Module: " + modulePath + " does not use "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + " as its parent" );
}
else if ( getProject().getVersion().equals( parent.getVersion() ) )
{
getLog().info( "Module: " + modulePath + " parent is "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
}
else
{
getLog().info( "Module: " + modulePath + " parent was "
+ ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":" + parent.getVersion()
+ ", now " + ArtifactUtils.versionlessKey( getProject().getArtifact() ) + ":"
+ getProject().getVersion() );
process( moduleProjectFile );
}
}
catch ( XMLStreamException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
catch ( IOException e )
{
getLog().debug( "Could not parse " + moduleProjectFile.getPath(), e );
if ( pbe == null )
{
// save this until we get to the end.
pbe = new MojoExecutionException( "Could not parse " + moduleProjectFile.getPath(), e );
}
}
}
if ( pbe != null )
{
// ok, now throw the first one to blow up.
throw pbe;
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public boolean isDependencyUpdateAvailable()
{
for ( Iterator i = dependencyVersions.values().iterator(); i.hasNext(); )
{
ArtifactVersions versions = (ArtifactVersions) i.next();
if ( versions.getAllUpdates( UpdateScope.ANY, includeSnapshots ).length > 0 )
{
return true;
}
}
return false;
}
#location 6
#vulnerability type NULL_DEREFERENCE | #fixed code
public boolean isDependencyUpdateAvailable()
{
for ( Iterator i = dependencyVersions.values().iterator(); i.hasNext(); )
{
ArtifactVersions versions = (ArtifactVersions) i.next();
ArtifactVersion[] dependencyUpdates = versions.getAllUpdates( UpdateScope.ANY, includeSnapshots );
if ( dependencyUpdates != null && dependencyUpdates.length > 0 )
{
return true;
}
}
return false;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void useNextReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
Artifact artifact = this.findArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
try
{
ArtifactVersion[] newer = versions.getNewerVersions( artifact.getSelectedVersion(), false );
if ( newer.length > 0 )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newer[0].toString() ) )
{
getLog().debug( "Version set to " + newer[0].toString() + " for dependnecy: " + dep );
}
}
}
catch ( OverConstrainedVersionException e )
{
getLog().warn( "This should never happen as your build should not work at all if this is thrown",
e );
}
}
}
}
#location 25
#vulnerability type NULL_DEREFERENCE | #fixed code
private void useNextReleases( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException
{
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
getLog().debug( "Looking for newer versions of " + toString( dep ) );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
ArtifactVersion[] newer = versions.getNewerVersions( version, false );
if ( newer.length > 0 )
{
String newVersion = newer[0].toString();
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + newVersion );
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void useNextSnapshots( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException, ArtifactMetadataRetrievalException
{
int segment = determineUnchangedSegment(allowMajorUpdates, allowMinorUpdates,
allowIncrementalUpdates);
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
getLog().debug( "Looking for next snapshot of " + toString( dep ) );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
final VersionComparator versionComparator = versions.getVersionComparator();
final DefaultArtifactVersion lowerBound = new DefaultArtifactVersion( version );
if ( segment + 1 > versionComparator.getSegmentCount( lowerBound ) )
{
getLog().info( "Ignoring " + toString( dep ) + " as the version number is too short" );
continue;
}
ArtifactVersion upperBound = segment >= 0 ? versionComparator.incrementSegment( lowerBound, segment ) : null;
getLog().info("Upper bound: " + upperBound.toString());
ArtifactVersion[] newer = versions.getVersions( lowerBound, upperBound, true, false, false );
getLog().debug( "Candidate versions " + Arrays.asList(newer));
for ( int j = 0; j < newer.length; j++ )
{
String newVersion = newer[j].toString();
if ( matchSnapshotRegex.matcher( newVersion ).matches() )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + newVersion );
}
break;
}
}
}
}
}
#location 39
#vulnerability type NULL_DEREFERENCE | #fixed code
private void useNextSnapshots( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException, ArtifactMetadataRetrievalException
{
int segment = determineUnchangedSegment(allowMajorUpdates, allowMinorUpdates,
allowIncrementalUpdates);
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
getLog().debug( "Looking for next snapshot of " + toString( dep ) );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
final VersionComparator versionComparator = versions.getVersionComparator();
final DefaultArtifactVersion lowerBound = new DefaultArtifactVersion( version );
if ( segment + 1 > versionComparator.getSegmentCount( lowerBound ) )
{
getLog().info( "Ignoring " + toString( dep ) + " as the version number is too short" );
continue;
}
ArtifactVersion upperBound = segment >= 0 ? versionComparator.incrementSegment( lowerBound, segment ) : null;
getLog().info("Upper bound: " + ( upperBound == null ? "none" : upperBound.toString() ) );
ArtifactVersion[] newer = versions.getVersions( lowerBound, upperBound, true, false, false );
getLog().debug( "Candidate versions " + Arrays.asList(newer));
for ( int j = 0; j < newer.length; j++ )
{
String newVersion = newer[j].toString();
if ( matchSnapshotRegex.matcher( newVersion ).matches() )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
newVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + newVersion );
}
break;
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static boolean setProjectVersion( final ModifiedPomXMLEventReader pom, final String value )
throws XMLStreamException
{
Stack<String> stack = new Stack<String>();
String path = "";
final Pattern matchScopeRegex;
boolean madeReplacement = false;
matchScopeRegex = Pattern.compile( "/project/version" );
pom.rewind();
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartElement() )
{
stack.push( path );
path = path + "/" + event.asStartElement().getName().getLocalPart();
if ( matchScopeRegex.matcher( path ).matches() )
{
pom.mark( 0 );
}
}
if ( event.isEndElement() )
{
if ( matchScopeRegex.matcher( path ).matches() )
{
pom.mark( 1 );
if ( pom.hasMark( 0 ) && pom.hasMark( 1 ) )
{
pom.replaceBetween( 0, 1, value );
madeReplacement = true;
}
pom.clearMark( 0 );
pom.clearMark( 1 );
}
path = stack.pop();
}
}
return madeReplacement;
}
#location 15
#vulnerability type NULL_DEREFERENCE | #fixed code
public static boolean setProjectVersion( final ModifiedPomXMLEventReader pom, final String value )
throws XMLStreamException
{
return setProjectValue( pom, "/project/version", value );
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected void renderDependencySummaryTableRow( Dependency dependency, ArtifactVersions details,
boolean includeScope, boolean includeClassifier,
boolean includeType )
{
sink.tableRow();
sink.tableCell();
if ( details.getAllUpdates( UpdateScope.ANY ).length == 0 )
{
renderSuccessIcon();
}
else
{
renderWarningIcon();
}
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getVersion() );
sink.tableCell_();
if ( includeScope )
{
sink.tableCell();
sink.text( dependency.getScope() );
sink.tableCell_();
}
if ( includeClassifier )
{
sink.tableCell();
sink.text( dependency.getClassifier() );
sink.tableCell_();
}
if ( includeType )
{
sink.tableCell();
sink.text( dependency.getType() );
sink.tableCell_();
}
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.INCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MINOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MAJOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableRow_();
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
protected void renderDependencySummaryTableRow( Dependency dependency, ArtifactVersions details,
boolean includeScope, boolean includeClassifier,
boolean includeType )
{
sink.tableRow();
sink.tableCell();
ArtifactVersion[] allUpdates = details.getAllUpdates( UpdateScope.ANY );
if ( allUpdates == null || allUpdates.length == 0 )
{
renderSuccessIcon();
}
else
{
renderWarningIcon();
}
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableCell();
sink.text( dependency.getVersion() );
sink.tableCell_();
if ( includeScope )
{
sink.tableCell();
sink.text( dependency.getScope() );
sink.tableCell_();
}
if ( includeClassifier )
{
sink.tableCell();
sink.text( dependency.getClassifier() );
sink.tableCell_();
}
if ( includeType )
{
sink.tableCell();
sink.text( dependency.getType() );
sink.tableCell_();
}
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.INCREMENTAL ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MINOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableCell();
if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
safeBold();
sink.text( details.getOldestUpdate( UpdateScope.MAJOR ).toString() );
safeBold_();
}
sink.tableCell_();
sink.tableRow_();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void useLatestSnapshots( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException, ArtifactMetadataRetrievalException
{
int segment = determineUnchangedSegment(allowMajorUpdates, allowMinorUpdates,
allowIncrementalUpdates);
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
getLog().debug( "Looking for latest snapshot of " + toString( dep ) );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
final VersionComparator versionComparator = versions.getVersionComparator();
final DefaultArtifactVersion lowerBound = new DefaultArtifactVersion( version );
if ( segment + 1 > versionComparator.getSegmentCount( lowerBound ) )
{
getLog().info( "Ignoring " + toString( dep ) + " as the version number is too short" );
continue;
}
ArtifactVersion upperBound = segment >= 0 ? versionComparator.incrementSegment( lowerBound, segment ) : null;
getLog().info("Upper bound: " + upperBound.toString());
ArtifactVersion[] newer = versions.getVersions( lowerBound, upperBound, true, false, false );
getLog().debug( "Candidate versions " + Arrays.asList(newer));
String latestVersion = null;
for ( int j = 0; j < newer.length; j++ )
{
String newVersion = newer[j].toString();
if ( matchSnapshotRegex.matcher( newVersion ).matches() )
{
latestVersion = newVersion;
}
}
if ( latestVersion != null )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
latestVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + latestVersion );
}
}
}
}
}
#location 39
#vulnerability type NULL_DEREFERENCE | #fixed code
private void useLatestSnapshots( ModifiedPomXMLEventReader pom, Collection dependencies )
throws XMLStreamException, MojoExecutionException, ArtifactMetadataRetrievalException
{
int segment = determineUnchangedSegment(allowMajorUpdates, allowMinorUpdates,
allowIncrementalUpdates);
Iterator i = dependencies.iterator();
while ( i.hasNext() )
{
Dependency dep = (Dependency) i.next();
if ( isExcludeReactor() && isProducedByReactor( dep ) )
{
getLog().info( "Ignoring reactor dependency: " + toString( dep ) );
continue;
}
String version = dep.getVersion();
Matcher versionMatcher = matchSnapshotRegex.matcher( version );
if ( !versionMatcher.matches() )
{
getLog().debug( "Looking for latest snapshot of " + toString( dep ) );
Artifact artifact = this.toArtifact( dep );
if ( !isIncluded( artifact ) )
{
continue;
}
ArtifactVersions versions = getHelper().lookupArtifactVersions( artifact, false );
final VersionComparator versionComparator = versions.getVersionComparator();
final DefaultArtifactVersion lowerBound = new DefaultArtifactVersion( version );
if ( segment + 1 > versionComparator.getSegmentCount( lowerBound ) )
{
getLog().info( "Ignoring " + toString( dep ) + " as the version number is too short" );
continue;
}
ArtifactVersion upperBound = segment >= 0 ? versionComparator.incrementSegment( lowerBound, segment ) : null;
getLog().info("Upper bound: " + ( upperBound == null ? "none" : upperBound.toString() ) );
ArtifactVersion[] newer = versions.getVersions( lowerBound, upperBound, true, false, false );
getLog().debug( "Candidate versions " + Arrays.asList(newer));
String latestVersion = null;
for ( int j = 0; j < newer.length; j++ )
{
String newVersion = newer[j].toString();
if ( matchSnapshotRegex.matcher( newVersion ).matches() )
{
latestVersion = newVersion;
}
}
if ( latestVersion != null )
{
if ( PomHelper.setDependencyVersion( pom, dep.getGroupId(), dep.getArtifactId(), version,
latestVersion ) )
{
getLog().info( "Updated " + toString( dep ) + " to version " + latestVersion );
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected final void writeFile( File outFile, StringBuffer input )
throws IOException
{
OutputStream out = new BufferedOutputStream( new FileOutputStream( outFile ) );
out.write( input.toString().getBytes( PomHelper.POM_ENCODING ) );
out.close();
}
#location 7
#vulnerability type RESOURCE_LEAK | #fixed code
protected final void writeFile( File outFile, StringBuffer input )
throws IOException
{
Writer writer = WriterFactory.newXmlWriter( outFile );
try
{
IOUtil.copy( input.toString(), writer );
}
finally
{
IOUtil.close( writer );
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected void renderDependencyDetailTable( Dependency dependency, ArtifactVersions details, boolean includeScope,
boolean includeClassifier, boolean includeType )
{
final String cellWidth = "80%";
final String headerWidth = "20%";
sink.table();
sink.tableRows( new int[]{Parser.JUSTIFY_RIGHT, Parser.JUSTIFY_LEFT}, false );
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.status" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
ArtifactVersion[] versions = details.getAllUpdates( UpdateScope.ANY );
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.otherUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.incrementalUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.minorUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.majorUpdatesAvailable" ) );
}
else
{
renderSuccessIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.noUpdatesAvailable" ) );
}
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.groupId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.artifactId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.currentVersion" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getVersion() );
sink.tableCell_();
sink.tableRow_();
if ( includeScope )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.scope" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getScope() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeClassifier )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.classifier" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getClassifier() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeType )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.type" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getType() );
sink.tableCell_();
sink.tableRow_();
}
if ( versions.length > 0 )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.updateVersions" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
for ( int i = 0; i < versions.length; i++ )
{
if ( i > 0 )
{
sink.lineBreak();
}
boolean bold = equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) || equals( versions[i],
details.getNewestUpdate(
UpdateScope.MAJOR ) );
if ( bold )
{
safeBold();
}
sink.text( versions[i].toString() );
if ( bold )
{
safeBold_();
sink.nonBreakingSpace();
safeItalic();
if ( equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) ) )
{
sink.text( getText( "report.nextVersion" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.nextIncremental" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.latestIncremental" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.nextMinor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.latestMinor" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.nextMajor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.latestMajor" ) );
}
safeItalic_();
}
}
sink.tableCell_();
sink.tableRow_();
}
sink.tableRows_();
sink.table_();
}
#location 103
#vulnerability type NULL_DEREFERENCE | #fixed code
protected void renderDependencyDetailTable( Dependency dependency, ArtifactVersions details, boolean includeScope,
boolean includeClassifier, boolean includeType )
{
final String cellWidth = "80%";
final String headerWidth = "20%";
sink.table();
sink.tableRows( new int[]{Parser.JUSTIFY_RIGHT, Parser.JUSTIFY_LEFT}, false );
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.status" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
ArtifactVersion[] versions = details.getAllUpdates( UpdateScope.ANY );
if ( details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.otherUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.INCREMENTAL ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.incrementalUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MINOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.minorUpdatesAvailable" ) );
}
else if ( details.getOldestUpdate( UpdateScope.MAJOR ) != null )
{
renderWarningIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.majorUpdatesAvailable" ) );
}
else
{
renderSuccessIcon();
sink.nonBreakingSpace();
sink.text( getText( "report.noUpdatesAvailable" ) );
}
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.groupId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getGroupId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.artifactId" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getArtifactId() );
sink.tableCell_();
sink.tableRow_();
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.currentVersion" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getVersion() );
sink.tableCell_();
sink.tableRow_();
if ( includeScope )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.scope" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getScope() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeClassifier )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.classifier" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getClassifier() );
sink.tableCell_();
sink.tableRow_();
}
if ( includeType )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.type" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
sink.text( dependency.getType() );
sink.tableCell_();
sink.tableRow_();
}
if ( versions != null && versions.length > 0 )
{
sink.tableRow();
sink.tableHeaderCell( headerWidth );
sink.text( getText( "report.updateVersions" ) );
sink.tableHeaderCell_();
sink.tableCell( cellWidth );
for ( int i = 0; i < versions.length; i++ )
{
if ( i > 0 )
{
sink.lineBreak();
}
boolean bold = equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) )
|| equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) || equals( versions[i],
details.getNewestUpdate(
UpdateScope.MAJOR ) );
if ( bold )
{
safeBold();
}
sink.text( versions[i].toString() );
if ( bold )
{
safeBold_();
sink.nonBreakingSpace();
safeItalic();
if ( equals( versions[i], details.getOldestUpdate( UpdateScope.SUBINCREMENTAL ) ) )
{
sink.text( getText( "report.nextVersion" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.nextIncremental" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.INCREMENTAL ) ) )
{
sink.text( getText( "report.latestIncremental" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.nextMinor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MINOR ) ) )
{
sink.text( getText( "report.latestMinor" ) );
}
else if ( equals( versions[i], details.getOldestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.nextMajor" ) );
}
else if ( equals( versions[i], details.getNewestUpdate( UpdateScope.MAJOR ) ) )
{
sink.text( getText( "report.latestMajor" ) );
}
safeItalic_();
}
}
sink.tableCell_();
sink.tableRow_();
}
sink.tableRows_();
sink.table_();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected void update( ModifiedPomXMLEventReader pom )
throws MojoExecutionException, MojoFailureException, XMLStreamException
{
getLog().debug( "Updating parent to " + getProject().getVersion() );
Stack stack = new Stack();
String path = "";
while ( pom.hasNext() )
{
XMLEvent event = pom.nextEvent();
if ( event.isStartElement() )
{
stack.push( path );
path = new StringBuffer()
.append( path )
.append( "/" )
.append( event.asStartElement().getName().getLocalPart() )
.toString();
if ( "/project/parent/version".equals( path ) )
{
pom.mark( 0 );
}
}
if ( event.isEndElement() )
{
if ( "/project/parent/version".equals( path ) )
{
pom.mark( 1 );
if ( pom.hasMark( 0 ) )
{
pom.replaceBetween( 0, 1, getProject().getVersion() );
getLog().debug( "Made an update to " + getProject().getVersion() );
return;
}
}
path = (String) stack.pop();
}
}
}
#location 12
#vulnerability type NULL_DEREFERENCE | #fixed code
protected void update( ModifiedPomXMLEventReader pom )
throws MojoExecutionException, MojoFailureException, XMLStreamException
{
getLog().debug( "Updating parent to " + getProject().getVersion() );
if ( PomHelper.setProjectParentVersion( pom, getProject().getVersion() ) )
{
getLog().debug( "Made an update to " + getProject().getVersion() );
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void writeObjectData(ObjectOutputStream out) throws IOException {
try (ObjectOutputStream outputStream = new ObjectOutputStream(out)) {
if (this.principal instanceof Serializable) {
outputStream.writeBoolean(true);
outputStream.writeObject(this.principal);
} else {
outputStream.writeBoolean(false);
}
outputStream.flush();
}
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void writeObjectData(ObjectOutputStream out) throws IOException {
try (ObjectOutputStream outputStream = new ObjectOutputStream(out)) {
outputStream.writeObject(this);
outputStream.flush();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static void runExperiment(MultilabelClassifier h, String options[]) throws Exception {
// Help
if(Utils.getOptionPos('h',options) >= 0) {
System.out.println("\nHelp requested");
Evaluation.printOptions(h.listOptions());
return;
}
h.setOptions(options);
//Load Instances
Instances allInstances = null;
try {
String filename = Utils.getOption('t', options);
allInstances = new Instances(new BufferedReader(new FileReader(filename)));
} catch(IOException e) {
throw new Exception("[Error] Failed to Load Instances from file");
}
//Get the Options in the @relation name (in format 'dataset-name: <options>')
String doptions[] = null;
try {
doptions = MLUtils.getDatasetOptions(allInstances);
} catch(Exception e) {
throw new Exception("[Error] Failed to Get Options from @Relation Name");
}
//Concatenate the Options in the @relation name to the cmd line options
String full = "";
for(String s : options) {
if (s.length() > 0)
full += (s + " ");
}
for(String s : doptions) {
if (s.length() > 0)
full += (s + " ");
}
options = Utils.splitOptions(full);
//Set Options from the command line, any leftover options will most likely be used in the code that follows
try {
int c = (Utils.getOptionPos('C', options) >= 0) ? Integer.parseInt(Utils.getOption('C',options)) : Integer.parseInt(Utils.getOption('c',options));
// if negative, then invert ...
if ( c < 0) {
c = -c;
allInstances = MLUtils.switchAttributes(allInstances,c);
}
// end
allInstances.setClassIndex(c);
} catch(Exception e) {
System.err.println("[Error] Failed to Set Options from Command Line -- Check\n\t the spelling of the base classifier;\n \t that options are specified in the correct order (respective to the '--' divider); and\n\t that the class index is set properly.");
System.exit(1);
}
//Check for the essential -C option. If still nothing set, we can't continue
if(allInstances.classIndex() < 0)
throw new Exception("You must supply the number of labels either in the @Relation Name of the dataset or on the command line using the option: -C <num. labels>");
//Set Range
if(Utils.getOptionPos('p',options) >= 0) {
// Randomize
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random());
}
try {
String range = Utils.getOption('p',options);
System.out.println("Selecting Range "+range+"");
RemoveRange remove = new RemoveRange();
remove.setInstancesIndices(range);
remove.setInvertSelection(true);
remove.setInputFormat(allInstances);
allInstances = Filter.useFilter(allInstances, remove);
} catch(Exception e) {
System.out.println(""+e);
e.printStackTrace();
throw new Exception("Failed to Remove Range");
}
}
int seed = (Utils.getOptionPos('s',options) >= 0) ? Integer.parseInt(Utils.getOption('s',options)) : 0;
// Randomize (Instances)
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random(seed));
}
// Randomize (Method)
if (h instanceof Randomizable) {
((Randomizable)h).setSeed(seed + 1); // (@NOTE because previously we were using seed '1' as the default in BaggingML, we want to maintain reproducibility of older results with the same seed).
}
try {
Result r = null;
// Get Split
if(Utils.getOptionPos('x',options) >= 0) {
// CROSS-FOLD-VALIDATION
int numFolds = MLUtils.getIntegerOption(Utils.getOption('x',options),10); // default 10
r = new Result();
Result fold[] = Evaluation.cvModel(h,allInstances,numFolds,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.info = fold[0].info;
for(String v : fold[0].vals.keySet()) {
r.info.put(v,Result.getValues(v,fold));
}
HashMap<String,double[]> o = Result.getStats(fold);
for(String s : o.keySet()) {
double values[] = o.get(s);
r.info.put(s,Utils.doubleToString(Utils.mean(values),5,3)+" +/- "+Utils.doubleToString(Math.sqrt(Utils.variance(values)),5,3));
}
r.setInfo("Type","CV");
System.out.println(r.toString());
}
else {
// TRAIN/TEST SPLIT
int TRAIN = (int)(allInstances.numInstances() * 0.60), TEST;
if(Utils.getOptionPos("split-percentage",options) >= 0) {
double percentTrain = Double.parseDouble(Utils.getOption("split-percentage",options));
TRAIN = (int)Math.round((allInstances.numInstances() * (percentTrain/100.0)));
}
else if(Utils.getOptionPos("split-number",options) >= 0) {
TRAIN = Integer.parseInt(Utils.getOption("split-number",options));
}
TEST = allInstances.numInstances() - TRAIN;
Instances train = new Instances(allInstances,0,TRAIN);
train.setClassIndex(allInstances.classIndex());
Instances test = new Instances(allInstances,TRAIN,TEST);
test.setClassIndex(allInstances.classIndex());
// Invert the split?
if(Utils.getFlag('i',options)) { //boolean INVERT = Utils.getFlag('i',options);
//Get Debug/Verbosity/Output Level
Instances holder = test;
test = train;
train = holder;
}
// We're going to do parameter tuning
if(Utils.getOptionPos('u',options) >= 0) {
double percentageSplit = Double.parseDouble(Utils.getOption('u',options));
TRAIN = (int)(train.numInstances() * percentageSplit);
TEST = train.numInstances() - TRAIN;
train = new Instances(train,0,TRAIN);
test = new Instances(train,TRAIN,TEST);
}
if (h.getDebug()) System.out.println(":- Dataset -: "+MLUtils.getDatasetName(allInstances)+"\tL="+allInstances.classIndex()+"\tD(t:T)=("+train.numInstances()+":"+test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(train,allInstances.classIndex()),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(test,allInstances.classIndex()),2)+")");
r = evaluateModel(h,train,test,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.output = Result.getStats(r);
System.out.println(r.toString());
}
// Save ranking data?
if (Utils.getOptionPos('f',options) >= 0) {
Result.writeResultToFile(r,Utils.getOption('f',options));
}
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
System.exit(0);
}
#location 16
#vulnerability type RESOURCE_LEAK | #fixed code
public static void runExperiment(MultilabelClassifier h, String options[]) throws Exception {
// Help
if(Utils.getOptionPos('h',options) >= 0) {
System.out.println("\nHelp requested");
Evaluation.printOptions(h.listOptions());
return;
}
h.setOptions(options);
//Load Instances
Instances allInstances = null;
String filename = null;
try {
filename = Utils.getOption('t', options);
allInstances = DataSource.read(filename);
} catch(Exception e) {
throw new Exception("[Error] Failed to Load Instances from file '" + filename + "'", e);
}
//Get the Options in the @relation name (in format 'dataset-name: <options>')
String doptions[] = null;
try {
doptions = MLUtils.getDatasetOptions(allInstances);
} catch(Exception e) {
throw new Exception("[Error] Failed to Get Options from @Relation Name", e);
}
//Concatenate the Options in the @relation name to the cmd line options
String full = "";
for(String s : options) {
if (s.length() > 0)
full += (s + " ");
}
for(String s : doptions) {
if (s.length() > 0)
full += (s + " ");
}
options = Utils.splitOptions(full);
//Set Options from the command line, any leftover options will most likely be used in the code that follows
try {
int c = (Utils.getOptionPos('C', options) >= 0) ? Integer.parseInt(Utils.getOption('C',options)) : Integer.parseInt(Utils.getOption('c',options));
// if negative, then invert ...
if ( c < 0) {
c = -c;
allInstances = MLUtils.switchAttributes(allInstances,c);
}
// end
allInstances.setClassIndex(c);
} catch(Exception e) {
System.err.println("[Error] Failed to Set Options from Command Line -- Check\n\t the spelling of the base classifier;\n \t that options are specified in the correct order (respective to the '--' divider); and\n\t that the class index is set properly.");
System.exit(1);
}
//Check for the essential -C option. If still nothing set, we can't continue
if(allInstances.classIndex() < 0)
throw new Exception("You must supply the number of labels either in the @Relation Name of the dataset or on the command line using the option: -C <num. labels>");
//Set Range
if(Utils.getOptionPos('p',options) >= 0) {
// Randomize
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random());
}
try {
String range = Utils.getOption('p',options);
System.out.println("Selecting Range "+range+"");
RemoveRange remove = new RemoveRange();
remove.setInstancesIndices(range);
remove.setInvertSelection(true);
remove.setInputFormat(allInstances);
allInstances = Filter.useFilter(allInstances, remove);
} catch(Exception e) {
System.out.println(""+e);
e.printStackTrace();
throw new Exception("Failed to Remove Range", e);
}
}
int seed = (Utils.getOptionPos('s',options) >= 0) ? Integer.parseInt(Utils.getOption('s',options)) : 0;
// Randomize (Instances)
if(Utils.getOptionPos('R',options) >= 0) {
allInstances.randomize(new Random(seed));
}
// Randomize (Method)
if (h instanceof Randomizable) {
((Randomizable)h).setSeed(seed + 1); // (@NOTE because previously we were using seed '1' as the default in BaggingML, we want to maintain reproducibility of older results with the same seed).
}
try {
Result r = null;
// Get Split
if(Utils.getOptionPos('x',options) >= 0) {
// CROSS-FOLD-VALIDATION
int numFolds = MLUtils.getIntegerOption(Utils.getOption('x',options),10); // default 10
r = new Result();
Result fold[] = Evaluation.cvModel(h,allInstances,numFolds,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.info = fold[0].info;
for(String v : fold[0].vals.keySet()) {
r.info.put(v,Result.getValues(v,fold));
}
HashMap<String,double[]> o = Result.getStats(fold);
for(String s : o.keySet()) {
double values[] = o.get(s);
r.info.put(s,Utils.doubleToString(Utils.mean(values),5,3)+" +/- "+Utils.doubleToString(Math.sqrt(Utils.variance(values)),5,3));
}
r.setInfo("Type","CV");
System.out.println(r.toString());
}
else {
// TRAIN/TEST SPLIT
int TRAIN = (int)(allInstances.numInstances() * 0.60), TEST;
if(Utils.getOptionPos("split-percentage",options) >= 0) {
double percentTrain = Double.parseDouble(Utils.getOption("split-percentage",options));
TRAIN = (int)Math.round((allInstances.numInstances() * (percentTrain/100.0)));
}
else if(Utils.getOptionPos("split-number",options) >= 0) {
TRAIN = Integer.parseInt(Utils.getOption("split-number",options));
}
TEST = allInstances.numInstances() - TRAIN;
Instances train = new Instances(allInstances,0,TRAIN);
train.setClassIndex(allInstances.classIndex());
Instances test = new Instances(allInstances,TRAIN,TEST);
test.setClassIndex(allInstances.classIndex());
// Invert the split?
if(Utils.getFlag('i',options)) { //boolean INVERT = Utils.getFlag('i',options);
//Get Debug/Verbosity/Output Level
Instances holder = test;
test = train;
train = holder;
}
// We're going to do parameter tuning
if(Utils.getOptionPos('u',options) >= 0) {
double percentageSplit = Double.parseDouble(Utils.getOption('u',options));
TRAIN = (int)(train.numInstances() * percentageSplit);
TEST = train.numInstances() - TRAIN;
train = new Instances(train,0,TRAIN);
test = new Instances(train,TRAIN,TEST);
}
if (h.getDebug()) System.out.println(":- Dataset -: "+MLUtils.getDatasetName(allInstances)+"\tL="+allInstances.classIndex()+"\tD(t:T)=("+train.numInstances()+":"+test.numInstances()+")\tLC(t:T)="+Utils.roundDouble(MLUtils.labelCardinality(train,allInstances.classIndex()),2)+":"+Utils.roundDouble(MLUtils.labelCardinality(test,allInstances.classIndex()),2)+")");
r = evaluateModel(h,train,test,(Utils.getOptionPos('T',options) >= 0) ? Utils.getOption('T',options) : "c");
r.output = Result.getStats(r);
System.out.println(r.toString());
}
// Save ranking data?
if (Utils.getOptionPos('f',options) >= 0) {
Result.writeResultToFile(r,Utils.getOption('f',options));
}
} catch(Exception e) {
e.printStackTrace();
System.exit(1);
}
System.exit(0);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected boolean save(int index) {
boolean result;
int retVal;
Result res;
File file;
BufferedWriter writer;
retVal = m_FileChooser.showSaveDialog(this);
if (retVal != JFileChooser.APPROVE_OPTION)
return false;
file = m_FileChooser.getSelectedFile();
res = getResultAt(index);
try {
writer = new BufferedWriter(new FileWriter(file));
writer.append(res.toString());
writer.flush();
writer.close();
result = true;
}
catch (Exception e) {
result = false;
System.err.println("Failed to write result to file '" + file + "':");
e.printStackTrace();
JOptionPane.showMessageDialog(
this,
"Failed to write result to file '" + file + "':\n" + e,
"Error saving",
JOptionPane.ERROR_MESSAGE);
}
return result;
}
#location 21
#vulnerability type RESOURCE_LEAK | #fixed code
protected boolean save(int index) {
boolean result;
int retVal;
Result res;
File file;
retVal = m_FileChooser.showSaveDialog(this);
if (retVal != JFileChooser.APPROVE_OPTION)
return false;
file = m_FileChooser.getSelectedFile();
res = getResultAt(index);
try {
Result.writeResultToFile(res, file.getAbsolutePath());
result = true;
}
catch (Exception e) {
result = false;
System.err.println("Failed to write result to file '" + file + "':");
e.printStackTrace();
JOptionPane.showMessageDialog(
this,
"Failed to write result to file '" + file + "':\n" + e,
"Error saving",
JOptionPane.ERROR_MESSAGE);
}
return result;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public JsonParser createParser(File f) throws IOException {
return _createParser(new FileInputStream(f), _createContext(f, true));
}
#location 3
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public JsonParser createParser(File f) throws IOException {
IOContext ctxt = _createContext(f, true);
return _createParser(_decorate(new FileInputStream(f), ctxt), ctxt);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static void compile(String projectPath) throws IOException {
Runtime runtime = Runtime.getRuntime();
Process exec = runtime.exec("mvn clean package -f " + projectPath);
try {
exec.waitFor();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
#location 5
#vulnerability type RESOURCE_LEAK | #fixed code
public static void compile(String projectPath) throws IOException, TimeoutException, InterruptedException {
Runtime runtime = Runtime.getRuntime();
Process exec = runtime.exec("mvn clean package -f " + projectPath);
Worker worker = new Worker(exec);
worker.start();
ProcessStatus ps = worker.getProcessStatus();
try {
worker.join(30000);
if (ps.exitCode == ProcessStatus.CODE_STARTED) {
// not finished
worker.interrupt();
throw new TimeoutException();
}
} catch (InterruptedException e) {
// canceled by other thread.
worker.interrupt();
throw e;
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void run() {
CodegenConfigurator configurator = new CodegenConfigurator();
CodeGenerator codegenerator = GeneratorFactory.getGenerator(CodeGenerator.class, "default");
// add additional property
Optional.ofNullable(properties).ifPresent(properties ->
Arrays.stream(properties.split(",")).forEach(property -> {
String[] split = property.split("=");
if (split != null && split.length == 2) {
configurator.addAdditionalProperty(split[0], split[1]);
}
})
);
configurator.setOutputDir(output)
.setGroupId(groupId)
.setArtifactId(artifactId)
.setArtifactVersion(artifactVersion)
.setLibrary(programmingModel)
.setGeneratorName(framework)
.setApiPackage(apiPackage)
.setModelPackage(modelPackage);
configurator.addAdditionalProperty(ProjectMetaConstant.SERVICE_TYPE, serviceType);
if (isNotEmpty(specFile)) {
File contractFile = new File(specFile);
if (contractFile.isDirectory()) {
try {
Files.walkFileTree(Paths.get(contractFile.toURI()), new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
configurator.setInputSpec(file.toFile().getCanonicalPath())
.addAdditionalProperty("apiName", file.toFile().getName().split("\\.")[0]);
try {
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
} catch (RuntimeException e) {
throw new RuntimeException("Failed to generate code base on file " + file.toFile().getName());
}
return super.visitFile(file, attrs);
}
});
} catch (RuntimeException | IOException e) {
LOGGER.error(e.getMessage());
return;
}
} else {
configurator.setInputSpec(specFile);
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
}
LOGGER.info("Success to generate code, the directory is: {}", output);
}
}
#location 43
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void run() {
CodeGenerator codegenerator = GeneratorFactory.getGenerator(CodeGenerator.class, "default");
if (codegenerator == null) {
LOGGER.warn("Not CodeGenerator found");
return;
}
CodegenConfigurator configurator = new CodegenConfigurator();
// add additional property
Optional.ofNullable(properties).ifPresent(properties ->
Arrays.stream(properties.split(",")).forEach(property -> {
String[] split = property.split("=");
if (split != null && split.length == 2) {
configurator.addAdditionalProperty(split[0], split[1]);
}
})
);
configurator.setOutputDir(output)
.setGroupId(groupId)
.setArtifactId(artifactId)
.setArtifactVersion(artifactVersion)
.setLibrary(programmingModel)
.setGeneratorName(framework)
.setApiPackage(apiPackage)
.setModelPackage(modelPackage);
configurator.addAdditionalProperty(ProjectMetaConstant.SERVICE_TYPE, serviceType);
if (isNotEmpty(specFile)) {
File contractFile = new File(specFile);
if (contractFile.isDirectory()) {
try {
Files.walkFileTree(Paths.get(contractFile.toURI()), new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
configurator.setInputSpec(file.toFile().getCanonicalPath())
.addAdditionalProperty("apiName", file.toFile().getName().split("\\.")[0]);
try {
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
} catch (RuntimeException e) {
throw new RuntimeException("Failed to generate code base on file " + file.toFile().getName());
}
return super.visitFile(file, attrs);
}
});
} catch (RuntimeException | IOException e) {
LOGGER.error(e.getMessage());
return;
}
} else {
configurator.setInputSpec(specFile).addAdditionalProperty("apiName", contractFile.getName().split("\\.")[0]);
codegenerator.configure(Collections.singletonMap("configurator", configurator));
codegenerator.generate();
}
LOGGER.info("Success to generate code, the directory is: {}", output);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
#location 118
#vulnerability type NULL_DEREFERENCE | #fixed code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
if (!anonymousIdentity) {
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (
authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
#location 135
#vulnerability type NULL_DEREFERENCE | #fixed code
public final void doHandle(HttpServletRequest baseRequest,
HttpServletRequest request, HttpServletResponse response,
InputStream is) throws IOException, S3Exception {
String method = request.getMethod();
String uri = request.getRequestURI();
if (!this.servicePath.isEmpty()) {
if (uri.length() > this.servicePath.length()) {
uri = uri.substring(this.servicePath.length());
}
}
logger.debug("request: {}", request);
String hostHeader = request.getHeader(HttpHeaders.HOST);
if (hostHeader != null && virtualHost.isPresent()) {
hostHeader = HostAndPort.fromString(hostHeader).getHostText();
String virtualHostSuffix = "." + virtualHost.get();
if (!hostHeader.equals(virtualHost.get())) {
if (hostHeader.endsWith(virtualHostSuffix)) {
String bucket = hostHeader.substring(0,
hostHeader.length() - virtualHostSuffix.length());
uri = "/" + bucket + uri;
} else {
String bucket = hostHeader.toLowerCase();
uri = "/" + bucket + uri;
}
}
}
boolean hasDateHeader = false;
boolean hasXAmzDateHeader = false;
for (String headerName : Collections.list(request.getHeaderNames())) {
for (String headerValue : Collections.list(request.getHeaders(
headerName))) {
logger.debug("header: {}: {}", headerName,
Strings.nullToEmpty(headerValue));
}
if (headerName.equalsIgnoreCase(HttpHeaders.DATE)) {
hasDateHeader = true;
} else if (headerName.equalsIgnoreCase("x-amz-date")) {
logger.debug("have the x-amz-date heaer {}", headerName);
// why x-amz-date name exist,but value is null?
if ("".equals(request.getHeader("x-amz-date")) ||
request.getHeader("x-amz-date") == null) {
logger.debug("have empty x-amz-date");
} else {
hasXAmzDateHeader = true;
}
}
}
boolean haveBothDateHeader = false;
if (hasDateHeader && hasXAmzDateHeader) {
haveBothDateHeader = true;
}
// when access information is not provided in request header,
// treat it as anonymous, return all public accessible information
if (!anonymousIdentity &&
(method.equals("GET") || method.equals("HEAD") ||
method.equals("POST")) &&
request.getHeader(HttpHeaders.AUTHORIZATION) == null &&
// v2 or /v4
request.getParameter("X-Amz-Algorithm") == null && // v4 query
request.getParameter("AWSAccessKeyId") == null && // v2 query
defaultBlobStore != null) {
doHandleAnonymous(request, response, is, uri, defaultBlobStore);
return;
}
// should according the AWSAccessKeyId= Signature or auth header nil
if (!anonymousIdentity && !hasDateHeader && !hasXAmzDateHeader &&
request.getParameter("X-Amz-Date") == null &&
request.getParameter("Expires") == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"AWS authentication requires a valid Date or" +
" x-amz-date header");
}
BlobStore blobStore;
String requestIdentity = null;
String headerAuthorization = request.getHeader(
HttpHeaders.AUTHORIZATION);
S3AuthorizationHeader authHeader = null;
boolean presignedUrl = false;
if (!anonymousIdentity) {
if (headerAuthorization == null) {
String algorithm = request.getParameter("X-Amz-Algorithm");
if (algorithm == null) { //v2 query
String identity = request.getParameter("AWSAccessKeyId");
String signature = request.getParameter("Signature");
if (identity == null || signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS " + identity + ":" + signature;
presignedUrl = true;
} else if (algorithm.equals("AWS4-HMAC-SHA256")) { //v4 query
String credential = request.getParameter(
"X-Amz-Credential");
String signedHeaders = request.getParameter(
"X-Amz-SignedHeaders");
String signature = request.getParameter(
"X-Amz-Signature");
if (credential == null || signedHeaders == null ||
signature == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
headerAuthorization = "AWS4-HMAC-SHA256" +
" Credential=" + credential +
", requestSignedHeaders=" + signedHeaders +
", Signature=" + signature;
presignedUrl = true;
}
}
try {
authHeader = new S3AuthorizationHeader(headerAuthorization);
//whether v2 or v4 (normal header and query)
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, iae);
}
requestIdentity = authHeader.identity;
}
long dateSkew = 0; //date for timeskew check
//v2 GET /s3proxy-1080747708/foo?AWSAccessKeyId=local-identity&Expires=
//1510322602&Signature=UTyfHY1b1Wgr5BFEn9dpPlWdtFE%3D)
//have no date
if (!anonymousIdentity) {
boolean haveDate = true;
AuthenticationType finalAuthType = null;
if (authHeader.authenticationType == AuthenticationType.AWS_V2 &&
(authenticationType == AuthenticationType.AWS_V2 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V2;
} else if (
authHeader.authenticationType == AuthenticationType.AWS_V4 &&
(authenticationType == AuthenticationType.AWS_V4 ||
authenticationType == AuthenticationType.AWS_V2_OR_V4)) {
finalAuthType = AuthenticationType.AWS_V4;
} else if (authenticationType != AuthenticationType.NONE) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
if (hasXAmzDateHeader) { //format diff between v2 and v4
if (finalAuthType == AuthenticationType.AWS_V2) {
dateSkew = request.getDateHeader("x-amz-date");
dateSkew /= 1000;
//case sensetive?
} else if (finalAuthType == AuthenticationType.AWS_V4) {
logger.debug("into process v4 {}",
request.getHeader("x-amz-date"));
dateSkew = parseIso8601(request.getHeader("x-amz-date"));
}
} else if (request.getParameter("X-Amz-Date") != null) { // v4 query
String dateString = request.getParameter("X-Amz-Date");
dateSkew = parseIso8601(dateString);
} else if (hasDateHeader) {
try {
dateSkew = request.getDateHeader(HttpHeaders.DATE);
logger.debug("dateheader {}", dateSkew);
} catch (IllegalArgumentException iae) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED, iae);
}
dateSkew /= 1000;
logger.debug("dateheader {}", dateSkew);
} else {
haveDate = false;
}
logger.debug("dateSkew {}", dateSkew);
if (haveDate) {
isTimeSkewed(dateSkew);
}
}
String[] path = uri.split("/", 3);
for (int i = 0; i < path.length; i++) {
path[i] = URLDecoder.decode(path[i], "UTF-8");
}
Map.Entry<String, BlobStore> provider =
blobStoreLocator.locateBlobStore(
requestIdentity, path.length > 1 ? path[1] : null,
path.length > 2 ? path[2] : null);
if (anonymousIdentity) {
blobStore = provider.getValue();
String contentSha256 = request.getHeader("x-amz-content-sha256");
if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(contentSha256)) {
is = new ChunkedInputStream(is);
}
} else if (requestIdentity == null) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
} else {
if (provider == null) {
throw new S3Exception(S3ErrorCode.INVALID_ACCESS_KEY_ID);
}
String credential = provider.getKey();
blobStore = provider.getValue();
String expiresString = request.getParameter("Expires");
if (expiresString != null) { // v2 query
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
}
String dateString = request.getParameter("X-Amz-Date");
//from para v4 query
expiresString = request.getParameter("X-Amz-Expires");
if (dateString != null && expiresString != null) { //v4 query
long date = parseIso8601(dateString);
long expires = Long.parseLong(expiresString);
long nowSeconds = System.currentTimeMillis() / 1000;
if (nowSeconds >= date + expires) {
throw new S3Exception(S3ErrorCode.ACCESS_DENIED,
"Request has expired");
}
}
// The aim ?
switch (authHeader.authenticationType) {
case AWS_V2:
switch (authenticationType) {
case AWS_V2:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case AWS_V4:
switch (authenticationType) {
case AWS_V4:
case AWS_V2_OR_V4:
case NONE:
break;
default:
throw new S3Exception(S3ErrorCode.ACCESS_DENIED);
}
break;
case NONE:
break;
default:
throw new IllegalArgumentException("Unhandled type: " +
authHeader.authenticationType);
}
String expectedSignature = null;
// When presigned url is generated, it doesn't consider service path
String uriForSigning = presignedUrl ? uri : this.servicePath + uri;
if (authHeader.hmacAlgorithm == null) { //v2
expectedSignature = AwsSignature.createAuthorizationSignature(
request, uriForSigning, credential, presignedUrl,
haveBothDateHeader);
} else {
String contentSha256 = request.getHeader(
"x-amz-content-sha256");
try {
byte[] payload;
if (request.getParameter("X-Amz-Algorithm") != null) {
payload = new byte[0];
} else if ("STREAMING-AWS4-HMAC-SHA256-PAYLOAD".equals(
contentSha256)) {
payload = new byte[0];
is = new ChunkedInputStream(is);
} else if ("UNSIGNED-PAYLOAD".equals(contentSha256)) {
payload = new byte[0];
} else {
// buffer the entire stream to calculate digest
// why input stream read contentlength of header?
payload = ByteStreams.toByteArray(ByteStreams.limit(
is, v4MaxNonChunkedRequestSize + 1));
if (payload.length == v4MaxNonChunkedRequestSize + 1) {
throw new S3Exception(
S3ErrorCode.MAX_MESSAGE_LENGTH_EXCEEDED);
}
// maybe we should check this when signing,
// a lot of dup code with aws sign code.
MessageDigest md = MessageDigest.getInstance(
authHeader.hashAlgorithm);
byte[] hash = md.digest(payload);
if (!contentSha256.equals(
BaseEncoding.base16().lowerCase()
.encode(hash))) {
throw new S3Exception(
S3ErrorCode
.X_AMZ_CONTENT_S_H_A_256_MISMATCH);
}
is = new ByteArrayInputStream(payload);
}
expectedSignature = AwsSignature
.createAuthorizationSignatureV4(// v4 sign
baseRequest, authHeader, payload, uriForSigning,
credential);
} catch (InvalidKeyException | NoSuchAlgorithmException e) {
throw new S3Exception(S3ErrorCode.INVALID_ARGUMENT, e);
}
}
if (!expectedSignature.equals(authHeader.signature)) {
logger.debug("fail to validate signature");
throw new S3Exception(S3ErrorCode.SIGNATURE_DOES_NOT_MATCH);
}
}
for (String parameter : Collections.list(
request.getParameterNames())) {
if (UNSUPPORTED_PARAMETERS.contains(parameter)) {
logger.error("Unknown parameters {} with URI {}",
parameter, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// emit NotImplemented for unknown x-amz- headers
for (String headerName : Collections.list(request.getHeaderNames())) {
if (ignoreUnknownHeaders) {
continue;
}
if (!headerName.startsWith("x-amz-")) {
continue;
}
if (headerName.startsWith("x-amz-meta-")) {
continue;
}
if (headerName.equals("x-amz-storage-class") &&
request.getHeader(headerName).equals("STANDARD")) {
continue;
}
if (!SUPPORTED_X_AMZ_HEADERS.contains(headerName.toLowerCase())) {
logger.error("Unknown header {} with URI {}",
headerName, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
}
}
// Validate container name
if (!uri.equals("/") && !isValidContainer(path[1])) {
if (method.equals("PUT") &&
(path.length <= 2 || path[2].isEmpty()) &&
!("".equals(request.getParameter("acl")))) {
throw new S3Exception(S3ErrorCode.INVALID_BUCKET_NAME);
} else {
throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
}
}
String uploadId = request.getParameter("uploadId");
switch (method) {
case "DELETE":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerDelete(response, blobStore, path[1]);
return;
} else if (uploadId != null) {
handleAbortMultipartUpload(response, blobStore, path[1],
path[2], uploadId);
return;
} else {
handleBlobRemove(response, blobStore, path[1], path[2]);
return;
}
case "GET":
if (uri.equals("/")) {
handleContainerList(response, blobStore);
return;
} else if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleGetContainerAcl(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("location"))) {
handleContainerLocation(response, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleListMultipartUploads(request, response, blobStore,
path[1]);
return;
}
handleBlobList(request, response, blobStore, path[1]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleGetBlobAcl(response, blobStore, path[1],
path[2]);
return;
} else if (uploadId != null) {
handleListParts(request, response, blobStore, path[1],
path[2], uploadId);
return;
}
handleGetBlob(request, response, blobStore, path[1],
path[2]);
return;
}
case "HEAD":
if (path.length <= 2 || path[2].isEmpty()) {
handleContainerExists(blobStore, path[1]);
return;
} else {
handleBlobMetadata(request, response, blobStore, path[1],
path[2]);
return;
}
case "POST":
if ("".equals(request.getParameter("delete"))) {
handleMultiBlobRemove(response, is, blobStore, path[1]);
return;
} else if ("".equals(request.getParameter("uploads"))) {
handleInitiateMultipartUpload(request, response, blobStore,
path[1], path[2]);
return;
} else if (uploadId != null &&
request.getParameter("partNumber") == null) {
handleCompleteMultipartUpload(response, is, blobStore, path[1],
path[2], uploadId);
return;
}
break;
case "PUT":
if (path.length <= 2 || path[2].isEmpty()) {
if ("".equals(request.getParameter("acl"))) {
handleSetContainerAcl(request, response, is, blobStore,
path[1]);
return;
}
handleContainerCreate(request, response, is, blobStore,
path[1]);
return;
} else if (uploadId != null) {
if (request.getHeader("x-amz-copy-source") != null) {
handleCopyPart(request, response, blobStore, path[1],
path[2], uploadId);
} else {
handleUploadPart(request, response, is, blobStore, path[1],
path[2], uploadId);
}
return;
} else if (request.getHeader("x-amz-copy-source") != null) {
handleCopyBlob(request, response, is, blobStore, path[1],
path[2]);
return;
} else {
if ("".equals(request.getParameter("acl"))) {
handleSetBlobAcl(request, response, is, blobStore, path[1],
path[2]);
return;
}
handlePutBlob(request, response, is, blobStore, path[1],
path[2]);
return;
}
default:
break;
}
logger.error("Unknown method {} with URI {}",
method, request.getRequestURI());
throw new S3Exception(S3ErrorCode.NOT_IMPLEMENTED);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private ModelAttr getAndPutModelAttr(Map<String, Map<String, Object>> swaggerMap, Map<String, ModelAttr> resMap, String modeName) {
if (resMap.get(modeName) != null) {
return resMap.get("#/definitions/" + modeName);
}
Map<String, Object> modeProperties = (Map<String, Object>) swaggerMap.get(modeName).get("properties");
if (modeProperties == null) {
return null;
}
Iterator<Entry<String, Object>> mIt = modeProperties.entrySet().iterator();
List<ModelAttr> attrList = new ArrayList<>();
//解析属性
while (mIt.hasNext()) {
Entry<String, Object> mEntry = mIt.next();
Map<String, Object> attrInfoMap = (Map<String, Object>) mEntry.getValue();
ModelAttr modeAttr = new ModelAttr();
modeAttr.setName(mEntry.getKey());
modeAttr.setType((String) attrInfoMap.get("type"));
if (attrInfoMap.get("format") != null) {
modeAttr.setType(modeAttr.getType() + "(" + attrInfoMap.get("format") + ")");
}
Object ref = attrInfoMap.get("$ref");
Object items = attrInfoMap.get("items");
if (ref != null || (items != null && (ref = ((Map)items).get("$ref")) != null)) {
String refName = ref.toString();
//截取 #/definitions/ 后面的
String clsName = refName.substring(14);
ModelAttr refModel = getAndPutModelAttr(swaggerMap, resMap, clsName);
modeAttr.setProperties(refModel.getProperties());
}
modeAttr.setType(StringUtils.defaultIfBlank(modeAttr.getType(), "object"));
modeAttr.setDescription((String) attrInfoMap.get("description"));
attrList.add(modeAttr);
}
//BaseResult«PageData«DopAppRecordVo»»
ModelAttr modeAttr = new ModelAttr();
Object title = swaggerMap.get(modeName).get("title");
Object description = swaggerMap.get(modeName).get("description");
modeAttr.setClassName(title == null ? "" : title.toString());
modeAttr.setDescription(description == null ? "" : description.toString());
modeAttr.setProperties(attrList);
resMap.put("#/definitions/" + modeName, modeAttr);
return modeAttr;
}
#location 5
#vulnerability type NULL_DEREFERENCE | #fixed code
private ModelAttr getAndPutModelAttr(Map<String, Map<String, Object>> swaggerMap, Map<String, ModelAttr> resMap, String modeName) {
ModelAttr modeAttr;
if ((modeAttr = resMap.get("#/definitions/" + modeName)) == null) {
modeAttr = new ModelAttr();
resMap.put("#/definitions/" + modeName, modeAttr);
} else if (modeAttr.isCompleted()) {
return resMap.get("#/definitions/" + modeName);
}
Map<String, Object> modeProperties = (Map<String, Object>) swaggerMap.get(modeName).get("properties");
if (modeProperties == null) {
return null;
}
Iterator<Entry<String, Object>> mIt = modeProperties.entrySet().iterator();
List<ModelAttr> attrList = new ArrayList<>();
//解析属性
while (mIt.hasNext()) {
Entry<String, Object> mEntry = mIt.next();
Map<String, Object> attrInfoMap = (Map<String, Object>) mEntry.getValue();
ModelAttr child = new ModelAttr();
child.setName(mEntry.getKey());
child.setType((String) attrInfoMap.get("type"));
if (attrInfoMap.get("format") != null) {
child.setType(child.getType() + "(" + attrInfoMap.get("format") + ")");
}
child.setType(StringUtils.defaultIfBlank(child.getType(), "object"));
Object ref = attrInfoMap.get("$ref");
Object items = attrInfoMap.get("items");
if (ref != null || (items != null && (ref = ((Map) items).get("$ref")) != null)) {
String refName = ref.toString();
//截取 #/definitions/ 后面的
String clsName = refName.substring(14);
modeAttr.setCompleted(true);
ModelAttr refModel = getAndPutModelAttr(swaggerMap, resMap, clsName);
child.setProperties(refModel.getProperties());
child.setType(child.getType() + ":" + clsName);
}
child.setDescription((String) attrInfoMap.get("description"));
attrList.add(child);
}
Object title = swaggerMap.get(modeName).get("title");
Object description = swaggerMap.get(modeName).get("description");
modeAttr.setClassName(title == null ? "" : title.toString());
modeAttr.setDescription(description == null ? "" : description.toString());
modeAttr.setProperties(attrList);
return modeAttr;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public SynonymMap reloadSynonymMap() {
try {
Reader rulesReader = getReader();
SynonymMap.Builder parser = null;
if ("wordnet".equalsIgnoreCase(format)) {
parser = new WordnetSynonymParser(true, expand, analyzer);
((WordnetSynonymParser) parser).parse(rulesReader);
} else {
parser = new SolrSynonymParser(true, expand, analyzer);
((SolrSynonymParser) parser).parse(rulesReader);
}
return parser.build();
} catch (Exception e) {
logger.error("reload remote synonym {} error!", e, location);
throw new ElasticsearchIllegalArgumentException(
"could not reload remote synonyms file", e);
}
}
#location 9
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public SynonymMap reloadSynonymMap() {
try {
logger.info("start reload remote synonym from {}.", location);
Reader rulesReader = getReader();
SynonymMap.Builder parser = null;
if ("wordnet".equalsIgnoreCase(format)) {
parser = new WordnetSynonymParser(true, expand, analyzer);
((WordnetSynonymParser) parser).parse(rulesReader);
} else {
parser = new SolrSynonymParser(true, expand, analyzer);
((SolrSynonymParser) parser).parse(rulesReader);
}
return parser.build();
} catch (Exception e) {
logger.error("reload remote synonym {} error!", e, location);
throw new ElasticsearchIllegalArgumentException(
"could not reload remote synonyms file to build synonyms",
e);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public SynonymMap reloadSynonymMap() {
try {
Reader rulesReader = getReader();
SynonymMap.Builder parser = null;
if ("wordnet".equalsIgnoreCase(format)) {
parser = new WordnetSynonymParser(true, expand, analyzer);
((WordnetSynonymParser) parser).parse(rulesReader);
} else {
parser = new SolrSynonymParser(true, expand, analyzer);
((SolrSynonymParser) parser).parse(rulesReader);
}
return parser.build();
} catch (Exception e) {
logger.error("reload remote synonym {} error!", e, location);
throw new ElasticsearchIllegalArgumentException(
"could not reload remote synonyms file", e);
}
}
#location 12
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public SynonymMap reloadSynonymMap() {
try {
logger.info("start reload remote synonym from {}.", location);
Reader rulesReader = getReader();
SynonymMap.Builder parser = null;
if ("wordnet".equalsIgnoreCase(format)) {
parser = new WordnetSynonymParser(true, expand, analyzer);
((WordnetSynonymParser) parser).parse(rulesReader);
} else {
parser = new SolrSynonymParser(true, expand, analyzer);
((SolrSynonymParser) parser).parse(rulesReader);
}
return parser.build();
} catch (Exception e) {
logger.error("reload remote synonym {} error!", e, location);
throw new ElasticsearchIllegalArgumentException(
"could not reload remote synonyms file to build synonyms",
e);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected Rectangle computeBoundingBox(Collection<? extends Shape> shapes, SpatialContext ctx) {
if (shapes.isEmpty())
return ctx.makeRectangle(Double.NaN, Double.NaN, Double.NaN, Double.NaN);
Range xRange = null;
double minY = Double.POSITIVE_INFINITY;
double maxY = Double.NEGATIVE_INFINITY;
for (Shape geom : shapes) {
Rectangle r = geom.getBoundingBox();
Range xRange2 = Range.xRange(r, ctx);
if (xRange == null) {
xRange = xRange2;
} else {
xRange = xRange.expandTo(xRange2);
}
minY = Math.min(minY, r.getMinY());
maxY = Math.max(maxY, r.getMaxY());
}
return ctx.makeRectangle(xRange.getMin(), xRange.getMax(), minY, maxY);
}
#location 19
#vulnerability type NULL_DEREFERENCE | #fixed code
protected Rectangle computeBoundingBox(Collection<? extends Shape> shapes, SpatialContext ctx) {
if (shapes.isEmpty())
return ctx.makeRectangle(Double.NaN, Double.NaN, Double.NaN, Double.NaN);
BBoxCalculator bboxCalc = new BBoxCalculator(ctx);
for (Shape geom : shapes) {
bboxCalc.expandRange(geom.getBoundingBox());
}
return bboxCalc.getBoundary();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
new JbootServiceInterfaceGenerator(basePackage, modelPacket).generate(tableMetaList);
new JbootServiceImplGenerator(basePackage + ".impl", modelPacket).generate(tableMetaList);
System.out.println("service generate finished !!!");
}
#location 5
#vulnerability type RESOURCE_LEAK | #fixed code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
List<TableMeta> tableMetaList = CodeGenHelpler.createMetaBuilder().build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
new JbootServiceInterfaceGenerator(basePackage, modelPacket).generate(tableMetaList);
new JbootServiceImplGenerator(basePackage + ".impl", modelPacket).generate(tableMetaList);
System.out.println("service generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void set(String key, String value) {
Jboot.getJbootCache().put(cache_name, key, value);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void set(String key, String value) {
Jboot.getCache().put(cache_name, key, value);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public M getCache(Object key) {
return Jboot.getJbootCache().get(tableName(), key);
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
public M getCache(Object key) {
return Jboot.getCache().get(tableName(), key);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public List<M> getListCache(Object key, IDataLoader dataloader) {
return Jboot.getJbootCache().get(tableName(), key, dataloader);
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
public List<M> getListCache(Object key, IDataLoader dataloader) {
return Jboot.getCache().get(tableName(), key, dataloader);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void copyConnStreamToResponse(HttpURLConnection conn, HttpServletResponse resp) throws IOException {
InputStream inStream = null;
InputStreamReader reader = null;
try {
if (!resp.isCommitted()) {
PrintWriter writer = resp.getWriter();
inStream = getInputStream(conn);
reader = new InputStreamReader(inStream);
int len;
char[] buffer = new char[1024];
while ((len = reader.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream, reader);
}
}
#location 16
#vulnerability type RESOURCE_LEAK | #fixed code
private void copyConnStreamToResponse(HttpURLConnection conn, HttpServletResponse resp) throws IOException {
if (resp.isCommitted()) {
return;
}
InputStream inStream = null;
OutputStream outStream = null;
try {
inStream = getInputStream(conn);
outStream = resp.getOutputStream();
byte[] buffer = new byte[1024];
for (int len = -1; (len = inStream.read(buffer)) != -1; ) {
outStream.write(buffer, 0, len);
}
outStream.flush();
} finally {
quetlyClose(inStream, outStream);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void removeCache(Object key) {
if (key == null) return;
Jboot.getJbootCache().remove(tableName(), key);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
public void removeCache(Object key) {
if (key == null) return;
Jboot.getCache().remove(tableName(), key);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public String get(String key) {
return Jboot.getJbootCache().get(cache_name, key);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public String get(String key) {
return Jboot.getCache().get(cache_name, key);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void handle(String target, HttpServletRequest request, HttpServletResponse response, boolean[] isHandled) {
if (target.indexOf('.') != -1 || JbootWebsocketManager.me().containsEndPoint(target)) {
return;
}
/**
* 初始化 当前线程的 Hystrix
*/
HystrixRequestContext context = HystrixRequestContext.initializeContext();
/**
* 通过 JbootRequestContext 去保存 request,然后可以在当前线程的任何地方
* 通过 JbootRequestContext.getRequest() 去获取。
*/
JbootRequestContext.handle(request, response);
/**
* 初始化 异常记录器,用于记录异常信息,然后在页面输出
*/
JbootExceptionHolder.init();
try {
/**
* 执行请求逻辑
*/
doHandle(target, new JbootServletRequestWrapper(request), response, isHandled);
} finally {
context.shutdown();
JbootRequestContext.release();
JbootExceptionHolder.release();
}
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void handle(String target, HttpServletRequest request, HttpServletResponse response, boolean[] isHandled) {
if (target.indexOf('.') != -1 || JbootWebsocketManager.me().isWebsokcetEndPoint(target)) {
return;
}
/**
* 初始化 当前线程的 Hystrix
*/
HystrixRequestContext context = HystrixRequestContext.initializeContext();
/**
* 通过 JbootRequestContext 去保存 request,然后可以在当前线程的任何地方
* 通过 JbootRequestContext.getRequest() 去获取。
*/
JbootRequestContext.handle(request, response);
/**
* 初始化 异常记录器,用于记录异常信息,然后在页面输出
*/
JbootExceptionHolder.init();
try {
/**
* 执行请求逻辑
*/
doHandle(target, new JbootServletRequestWrapper(request), response, isHandled);
} finally {
context.shutdown();
JbootRequestContext.release();
JbootExceptionHolder.release();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static void putDataToCache(int liveSeconds, String cacheName, String cacheKey, Object data) {
liveSeconds = liveSeconds > 0
? liveSeconds
: CONFIG.getAopCacheLiveSeconds();
if (liveSeconds > 0) {
getAopCache().put(cacheName, cacheKey, data, liveSeconds);
} else {
getAopCache().put(cacheName, cacheKey, data);
}
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
static void putDataToCache(int liveSeconds, String cacheName, String cacheKey, Object data) {
liveSeconds = liveSeconds > 0
? liveSeconds
: CONFIG.getAopCacheLiveSeconds();
if (liveSeconds > 0) {
AopCache.put(cacheName, cacheKey, data, liveSeconds);
} else {
AopCache.put(cacheName, cacheKey, data);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void run() {
Long result = null;
for (int i = 0; i < 6; i++) {
result = redis.setnx(key, "locked");
//error
if (result == null) {
quietSleep();
}
//setnx fail
else if (result == 0) {
Long ttl = redis.ttl(key);
if (ttl == null || ttl <= 0 || ttl > expire) {
//防止死锁
reset();
} else {
// 休息 2 秒钟,重新去抢,因为可能别的设置好后,但是却执行失败了
quietSleep();
}
}
//set success
else if (result == 1) {
break;
}
}
//抢了5次都抢不到,证明已经被别的应用抢走了
if (result == null || result == 0) {
return;
}
//抢到了,但是设置超时时间设置失败,删除后,让分布式的其他app去抢
Long expireResult = redis.expire(key, 50);
if (expireResult == null && expireResult <= 0) {
reset();
return;
}
try {
boolean runSuccess = execute();
//run()执行失败,让别的分布式应用APP去执行
//如果run()执行的时间很长(超过30秒),那么别的分布式应用可能也抢不到了,只能等待下次轮休
//作用:故障转移
if (!runSuccess) {
reset();
}
}
// 如果 run() 执行异常,让别的分布式应用APP去执行
// 作用:故障转移
catch (Throwable ex) {
LOG.error(ex.toString(), ex);
reset();
}
}
#location 40
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void run() {
if (redis == null) {
return;
}
Long result = null;
for (int i = 0; i < 5; i++) {
Long setTimeMillis = System.currentTimeMillis();
result = redis.setnx(key, setTimeMillis);
//error
if (result == null) {
quietSleep();
}
//setnx fail
else if (result == 0) {
Long saveTimeMillis = redis.get(key);
if (saveTimeMillis == null) {
reset();
}
long ttl = System.currentTimeMillis() - saveTimeMillis;
if (ttl > expire) {
//防止死锁
reset();
}
// 休息 2 秒钟,重新去抢,因为可能别的应用执行失败了
quietSleep();
}
//set success
else if (result == 1) {
break;
}
}
//抢了5次都抢不到,证明已经被别的应用抢走了
if (result == null || result == 0) {
return;
}
try {
if (runnable != null) {
runnable.run();
} else {
boolean runSuccess = execute();
//run()执行失败,让别的分布式应用APP去执行
//如果run()执行的时间很长(超过30秒),那么别的分布式应用可能也抢不到了,只能等待下次轮休
//作用:故障转移
if (!runSuccess) {
reset();
}
}
}
// 如果 run() 执行异常,让别的分布式应用APP去执行
// 作用:故障转移
catch (Throwable ex) {
LOG.error(ex.toString(), ex);
reset();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void intercept(FixedInvocation inv) {
if (!config.isConfigOK()) {
inv.invoke();
return;
}
SsoShiroBridge ssoShiroBridge = JbootShiroManager.me().getSsoShiroBridge();
if (ssoShiroBridge != null && ssoShiroBridge.isSsoCallBackRequest(inv.getController())) {
ssoShiroBridge.subjectLogin(inv.getController());
}
AuthorizeResult result = JbootShiroManager.me().invoke(inv.getActionKey());
if (result == null || result.isOk()) {
inv.invoke();
return;
}
int errorCode = result.getErrorCode();
ShiroErrorProcess shiroErrorProcess = JbootShiroManager.me().getShiroErrorProcess();
switch (errorCode) {
case AuthorizeResult.ERROR_CODE_UNAUTHENTICATED:
shiroErrorProcess.doProcessUnauthenticated(inv.getController());
break;
case AuthorizeResult.ERROR_CODE_UNAUTHORIZATION:
shiroErrorProcess.doProcessuUnauthorization(inv.getController());
break;
default:
inv.getController().renderError(404);
}
}
#location 25
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void intercept(FixedInvocation inv) {
if (!config.isConfigOK()) {
inv.invoke();
return;
}
SsoShiroBridge ssoShiroBridge = JbootShiroManager.me().getSsoShiroBridge();
if (ssoShiroBridge != null && ssoShiroBridge.isSsoCallBackRequest(inv.getController())) {
ssoShiroBridge.subjectLogin(inv.getController());
}
AuthorizeResult result = JbootShiroManager.me().invoke(inv.getActionKey());
if (result == null || result.isOk()) {
inv.invoke();
return;
}
ShiroErrorProcess shiroErrorProcess = JbootShiroManager.me().getShiroErrorProcess();
shiroErrorProcess.doProcessError(inv, result.getErrorCode());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
int len;
char[] buffer = new char[1024];
InputStreamReader r = new InputStreamReader(inStream);
while ((len = r.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream);
}
}
#location 15
#vulnerability type RESOURCE_LEAK | #fixed code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
InputStreamReader reader = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
reader = new InputStreamReader(inStream);
int len;
char[] buffer = new char[1024];
while ((len = reader.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream, reader);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public M getCache(Object key, IDataLoader dataloader) {
return Jboot.getJbootCache().get(tableName(), key, dataloader);
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
public M getCache(Object key, IDataLoader dataloader) {
return Jboot.getCache().get(tableName(), key, dataloader);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void removeAttribute(String name) {
Jboot.getJbootCache().remove(SESSION_CACHE_NAME, buildKey(name));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void removeAttribute(String name) {
Jboot.getCache().remove(SESSION_CACHE_NAME, buildKey(name));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public byte[] serialize(Object obj) throws IOException {
return Jboot.me().getSerializer().serialize(obj);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public byte[] serialize(Object obj) throws IOException {
return SerializerManager.me().getSerializer(config.getSerializer()).serialize(obj);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testHttp() {
JbootHttpResponse response = Jboot.me().getHttp().handle(JbootHttpRequest.create("https://www.baidu.com"));
System.out.println(response.getContentType());
System.out.println(response.getHeaders());
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testHttp() {
// JbootHttpResponse response = Jboot.me().getHttp().handle(JbootHttpRequest.create("https://www.baidu.com"));
//
// System.out.println(response.getContentType());
// System.out.println(response.getHeaders());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void remove(String key) {
Jboot.getJbootCache().remove(cache_name, key);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void remove(String key) {
Jboot.getCache().remove(cache_name, key);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public int getErrorCode() {
return (int) get("errorCode");
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
public int getErrorCode() {
return errorCode;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void doProcess(JbootHttpRequest request, JbootHttpResponse response) {
HttpURLConnection connection = null;
InputStream stream = null;
try {
connection = getConnection(request);
configConnection(connection, request);
if (request.isGetRquest()) {
connection.setInstanceFollowRedirects(true);
connection.connect();
if (connection.getResponseCode() >= 400) {
stream = connection.getErrorStream();
} else {
stream = connection.getInputStream();
}
}
/**
* 处理 post请求
*/
else if (request.isPostRquest()) {
connection.setRequestMethod("POST");
connection.setDoOutput(true);
if (!request.isMultipartFormData()) {
String postContent = buildParams(request);
if (StringUtils.isNotEmpty(postContent)) {
DataOutputStream dos = new DataOutputStream(connection.getOutputStream());
dos.write(postContent.getBytes(request.getCharset()));
dos.flush();
dos.close();
}
stream = connection.getInputStream();
}
/**
* 处理文件上传
*/
else {
if (request.getParams() != null && request.getParams().size() > 0) {
String endFlag = "\r\n";
String boundary = "---------" + StringUtils.uuid();
connection.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + boundary);
DataOutputStream dos = new DataOutputStream(connection.getOutputStream());
for (Map.Entry entry : request.getParams().entrySet()) {
if (entry.getValue() instanceof File) {
File file = (File) entry.getValue();
checkFileNormal(file);
dos.writeBytes(boundary + endFlag);
dos.writeBytes(String.format("Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"", entry.getKey(), file.getName()) + endFlag);
dos.writeBytes(endFlag);
FileInputStream fStream = new FileInputStream(file);
byte[] buffer = new byte[2028];
for (int len = 0; (len = fStream.read(buffer)) > 0; ) {
dos.write(buffer, 0, len);
}
dos.writeBytes(endFlag);
} else {
dos.writeBytes("Content-Disposition: form-data; name=\"" + entry.getKey() + "\"");
dos.writeBytes(endFlag);
dos.writeBytes(endFlag);
dos.writeBytes(String.valueOf(entry.getValue()));
dos.writeBytes(endFlag);
}
}
dos.writeBytes("--" + boundary + "--" + endFlag);
}
}
}
response.setContentType(connection.getContentType());
response.setResponseCode(connection.getResponseCode());
response.setHeaders(connection.getHeaderFields());
response.pipe(stream);
response.finish();
} catch (Throwable ex) {
response.setError(ex);
} finally {
if (connection != null) {
connection.disconnect();
}
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
#location 81
#vulnerability type NULL_DEREFERENCE | #fixed code
private void doProcess(JbootHttpRequest request, JbootHttpResponse response) {
HttpURLConnection connection = null;
InputStream stream = null;
try {
connection = getConnection(request);
configConnection(connection, request);
if (request.isGetRquest()) {
if (Jboot.me().isDevMode()) {
LOG.debug("do get request:" + request.getRequestUrl());
}
connection.setInstanceFollowRedirects(true);
connection.connect();
if (connection.getResponseCode() >= 400) {
stream = connection.getErrorStream();
} else {
stream = connection.getInputStream();
}
}
/**
* 处理 post请求
*/
else if (request.isPostRquest()) {
if (Jboot.me().isDevMode()) {
LOG.debug("do post request:" + request.getRequestUrl());
}
connection.setRequestMethod("POST");
connection.setDoOutput(true);
if (!request.isMultipartFormData()) {
String postContent = buildParams(request);
if (StringUtils.isNotEmpty(postContent)) {
DataOutputStream dos = new DataOutputStream(connection.getOutputStream());
dos.write(postContent.getBytes(request.getCharset()));
dos.flush();
dos.close();
}
stream = connection.getInputStream();
}
/**
* 处理文件上传
*/
else {
if (request.getParams() != null && request.getParams().size() > 0) {
String endFlag = "\r\n";
String boundary = "---------" + StringUtils.uuid();
connection.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + boundary);
DataOutputStream dos = new DataOutputStream(connection.getOutputStream());
for (Map.Entry entry : request.getParams().entrySet()) {
if (entry.getValue() instanceof File) {
File file = (File) entry.getValue();
checkFileNormal(file);
dos.writeBytes(boundary + endFlag);
dos.writeBytes(String.format("Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"", entry.getKey(), file.getName()) + endFlag);
dos.writeBytes(endFlag);
FileInputStream fStream = new FileInputStream(file);
byte[] buffer = new byte[2028];
for (int len = 0; (len = fStream.read(buffer)) > 0; ) {
dos.write(buffer, 0, len);
}
dos.writeBytes(endFlag);
} else {
dos.writeBytes("Content-Disposition: form-data; name=\"" + entry.getKey() + "\"");
dos.writeBytes(endFlag);
dos.writeBytes(endFlag);
dos.writeBytes(String.valueOf(entry.getValue()));
dos.writeBytes(endFlag);
}
}
dos.writeBytes("--" + boundary + "--" + endFlag);
}
}
}
response.setContentType(connection.getContentType());
response.setResponseCode(connection.getResponseCode());
response.setHeaders(connection.getHeaderFields());
response.pipe(stream);
response.finish();
} catch (Throwable ex) {
LOG.warn(ex.toString(), ex);
response.setError(ex);
} finally {
if (connection != null) {
connection.disconnect();
}
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
generate(tableMetaList);
System.out.println("generate finished !!!");
}
#location 5
#vulnerability type RESOURCE_LEAK | #fixed code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
List<TableMeta> tableMetaList = CodeGenHelpler.createMetaBuilder().build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
generate(tableMetaList);
System.out.println("generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
int len;
char[] buffer = new char[1024];
InputStreamReader r = new InputStreamReader(inStream);
while ((len = r.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream);
}
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
InputStreamReader reader = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
reader = new InputStreamReader(inStream);
int len;
char[] buffer = new char[1024];
while ((len = reader.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream, reader);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static void unzip(String zipFilePath, String targetPath) throws IOException {
ZipFile zipFile = new ZipFile(zipFilePath);
try{
Enumeration<?> entryEnum = zipFile.entries();
if (null != entryEnum) {
while (entryEnum.hasMoreElements()) {
OutputStream os = null;
InputStream is = null;
try {
ZipEntry zipEntry = (ZipEntry) entryEnum.nextElement();
if (!zipEntry.isDirectory()) {
File targetFile = new File(targetPath + File.separator + zipEntry.getName());
if (!targetFile.getParentFile().exists()) {
targetFile.getParentFile().mkdirs();
}
os = new BufferedOutputStream(new FileOutputStream(targetFile));
is = zipFile.getInputStream(zipEntry);
byte[] buffer = new byte[4096];
int readLen = 0;
while ((readLen = is.read(buffer, 0, 4096)) > 0) {
os.write(buffer, 0, readLen);
}
}
} finally {
if (is != null)
is.close();
if (os != null)
os.close();
}
}
}
}finally{
zipFile.close();
}
}
#location 33
#vulnerability type RESOURCE_LEAK | #fixed code
public static void unzip(String zipFilePath, String targetPath) throws IOException {
unzip(zipFilePath, targetPath, true);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DatasourceConfig datasourceConfig = JbootProperties.get("jboot.datasource", DatasourceConfig.class);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(datasourceConfig.getUrl());
config.setUsername(datasourceConfig.getUser());
config.setPassword(datasourceConfig.getPassword());
config.addDataSourceProperty("cachePrepStmts", "true");
config.addDataSourceProperty("prepStmtCacheSize", "250");
config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
config.setDriverClassName("com.mysql.jdbc.Driver");
HikariDataSource dataSource = new HikariDataSource(config);
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
if (StringUtils.isNotBlank(excludeTables)) {
List<TableMeta> newTableMetaList = new ArrayList<>();
Set<String> excludeTableSet = StringUtils.splitToSet(excludeTables.toLowerCase(), ",");
for (TableMeta tableMeta : tableMetaList) {
if (excludeTableSet.contains(tableMeta.name.toLowerCase())) {
System.out.println("exclude table : " + tableMeta.name);
continue;
}
newTableMetaList.add(tableMeta);
}
tableMetaList = newTableMetaList;
}
new JbootServiceInterfaceGenerator(basePackage, modelPacket).generate(tableMetaList);
new JbootServiceImplGenerator(basePackage + ".impl", modelPacket).generate(tableMetaList);
System.out.println("service generate finished !!!");
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
new JbootServiceInterfaceGenerator(basePackage, modelPacket).generate(tableMetaList);
new JbootServiceImplGenerator(basePackage + ".impl", modelPacket).generate(tableMetaList);
System.out.println("service generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public Object getValue(String name) {
return Jboot.getJbootCache().get(SESSION_CACHE_NAME, buildKey(name));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public Object getValue(String name) {
return Jboot.getCache().get(SESSION_CACHE_NAME, buildKey(name));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void put(String cacheName, Object key, Object value) {
try {
ehcache.put(cacheName, key, value);
redisCache.put(cacheName, key, value);
} finally {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, JbootEhredisMessage.ACTION_PUT, cacheName, key), channel);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void put(String cacheName, Object key, Object value) {
try {
ehcache.put(cacheName, key, value);
redisCache.put(cacheName, key, value);
} finally {
publishMessage(JbootEhredisMessage.ACTION_PUT, cacheName, key);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public boolean isConfigOk() {
if (configOk != null) {
return configOk;
}
synchronized (this) {
if (configOk == null) {
configOk = StrUtil.isNotBlank(uri);
if (configOk) {
ensureUriConfigCorrect();
}
}
}
return configOk;
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public boolean isConfigOk() {
if (configOk != null) {
return configOk;
}
synchronized (this) {
if (configOk == null) {
configOk = uri != null && uri.length > 0;
if (configOk) {
ensureUriConfigCorrect();
}
}
}
return configOk;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void init() {
getJbootrpc().onInitBefore();
if (config.isAutoExportEnable()) {
autoExportRPCBean();
}
getJbootrpc().onInited();
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
public void init() {
if (!defaultConfig.isConfigOk()) {
return;
}
Jbootrpc jbootrpc = getJbootrpc();
jbootrpc.onInitBefore();
if (defaultConfig.isAutoExportEnable()) {
exportRPCBean(jbootrpc);
}
jbootrpc.onInited();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void doGenerate(String excludeTables) {
String modelPackage = basePackage;
String baseModelPackage = basePackage + ".base";
String modelDir = PathKit.getWebRootPath() + "/src/main/java/" + modelPackage.replace(".", "/");
String baseModelDir = PathKit.getWebRootPath() + "/src/main/java/" + baseModelPackage.replace(".", "/");
System.out.println("start generate...");
System.out.println("generate dir:" + modelDir);
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
new JbootBaseModelGenerator(baseModelPackage, baseModelDir).generate(tableMetaList);
new JbootModelnfoGenerator(modelPackage, baseModelPackage, modelDir).generate(tableMetaList);
System.out.println("model generate finished !!!");
}
#location 13
#vulnerability type RESOURCE_LEAK | #fixed code
public void doGenerate(String excludeTables) {
String modelPackage = basePackage;
String baseModelPackage = basePackage + ".base";
String modelDir = PathKit.getWebRootPath() + "/src/main/java/" + modelPackage.replace(".", "/");
String baseModelDir = PathKit.getWebRootPath() + "/src/main/java/" + baseModelPackage.replace(".", "/");
System.out.println("start generate...");
System.out.println("generate dir:" + modelDir);
List<TableMeta> tableMetaList = CodeGenHelpler.createMetaBuilder().build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
new JbootBaseModelGenerator(baseModelPackage, baseModelDir).generate(tableMetaList);
new JbootModelnfoGenerator(modelPackage, baseModelPackage, modelDir).generate(tableMetaList);
System.out.println("model generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void index() {
List<Record> records = Db.find("select * from `user`");
System.out.println("index .... ");
LogKit.error("xxxxxxx");
Jboot.getCache().put("test","test","valueeeeeeeeee");
String value = Jboot.getCache().get("test","test");
System.out.println("value:"+value);
renderText("hello " + serviceTest.getName());
// render();
}
#location 11
#vulnerability type NULL_DEREFERENCE | #fixed code
public void index() {
renderText("hello " + serviceTest.getName("aaa"));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void remove(String cacheName, Object key) {
try {
ehcache.remove(cacheName, key);
redisCache.remove(cacheName, key);
} finally {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, JbootEhredisMessage.ACTION_REMOVE, cacheName, key), channel);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void remove(String cacheName, Object key) {
try {
ehcache.remove(cacheName, key);
redisCache.remove(cacheName, key);
} finally {
publishMessage(JbootEhredisMessage.ACTION_REMOVE, cacheName, key);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void inject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
injectByJFinalInject(targetObject, field, inject, injectDepth);
continue;
}
javax.inject.Inject javaxInject = field.getAnnotation(javax.inject.Inject.class);
if (javaxInject != null) {
injectByJavaxInject(targetObject, field, javaxInject, injectDepth);
continue;
}
ConfigInject configInject = field.getAnnotation(ConfigInject.class);
if (configInject != null) {
injectByConfig(targetObject, field, configInject);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
injectByRPC(targetObject, field, rpcInject);
continue;
}
}
}
#location 29
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void inject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
injectByJFinalInject(targetObject, field, inject, injectDepth);
continue;
}
ConfigInject configInject = field.getAnnotation(ConfigInject.class);
if (configInject != null) {
injectByConfig(targetObject, field, configInject);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
injectByRPC(targetObject, field, rpcInject);
continue;
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private static void findClassPathsAndJarsByClassPath(Set<String> jarPaths, Set<String> classPaths) {
String[] classPathArray = System.getProperty("java.class.path").split(File.pathSeparator);
for (String path : classPathArray) {
path = path.trim();
if (path.startsWith("./")) {
path = path.substring(2);
}
if (path.startsWith("/") && path.indexOf(":") == 2) {
path = path.substring(1);
}
if (!path.toLowerCase().endsWith(".jar") && !jarPaths.contains(path)) {
try {
classPaths.add(new File(path).getCanonicalPath());
} catch (IOException e) {
}
} else {
jarPaths.add(path);
}
}
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
private static void findClassPathsAndJarsByClassPath(Set<String> jarPaths, Set<String> classPaths) {
String classPath = System.getProperty("java.class.path");
if (classPath == null || classPath.trim().length() == 0) {
return;
}
String[] classPathArray = classPath.split(File.pathSeparator);
if (classPathArray == null || classPathArray.length == 0) {
return;
}
for (String path : classPathArray) {
path = path.trim();
if (path.startsWith("./")) {
path = path.substring(2);
}
if (path.startsWith("/") && path.indexOf(":") == 2) {
path = path.substring(1);
}
if (!path.toLowerCase().endsWith(".jar") && !jarPaths.contains(path)) {
try {
classPaths.add(new File(path).getCanonicalPath().replace('\\', '/'));
} catch (IOException e) {
}
} else {
jarPaths.add(path.replace('\\', '/'));
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DatasourceConfig datasourceConfig = JbootProperties.get("jboot.datasource", DatasourceConfig.class);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(datasourceConfig.getUrl());
config.setUsername(datasourceConfig.getUser());
config.setPassword(datasourceConfig.getPassword());
config.addDataSourceProperty("cachePrepStmts", "true");
config.addDataSourceProperty("prepStmtCacheSize", "250");
config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
config.setDriverClassName("com.mysql.jdbc.Driver");
HikariDataSource dataSource = new HikariDataSource(config);
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
if (StringUtils.isNotBlank(excludeTables)) {
List<TableMeta> newTableMetaList = new ArrayList<>();
Set<String> excludeTableSet = StringUtils.splitToSet(excludeTables.toLowerCase(), ",");
for (TableMeta tableMeta : tableMetaList) {
if (excludeTableSet.contains(tableMeta.name.toLowerCase())) {
System.out.println("exclude table : " + tableMeta.name);
continue;
}
newTableMetaList.add(tableMeta);
}
tableMetaList = newTableMetaList;
}
generate(tableMetaList);
System.out.println("generate finished !!!");
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
generate(tableMetaList);
System.out.println("generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void putValue(String name, Object value) {
Jboot.getJbootCache().put(SESSION_CACHE_NAME, buildKey(name), value);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void putValue(String name, Object value) {
Jboot.getCache().put(SESSION_CACHE_NAME, buildKey(name), value);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void removeAll(String cacheName) {
try {
ehcache.removeAll(cacheName);
redisCache.removeAll(cacheName);
} finally {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, JbootEhredisMessage.ACTION_REMOVE_ALL, cacheName, null), channel);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void removeAll(String cacheName) {
try {
ehcache.removeAll(cacheName);
redisCache.removeAll(cacheName);
} finally {
publishMessage(JbootEhredisMessage.ACTION_REMOVE_ALL, cacheName, null);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public boolean acquire() {
long timeout = timeoutMsecs;
do {
long expires = System.currentTimeMillis() + expireMsecs + 1;
Long result = Jboot.me().getRedis().setnx(lockName, expires);
if (result != null && result == 1) {
// lock acquired
locked = true;
return true;
}
Long currentValue = Jboot.me().getRedis().get(lockName);
if (currentValue != null && currentValue < System.currentTimeMillis()) {
//判断是否为空,不为空的情况下,如果被其他线程设置了值,则第二个条件判断是过不去的
// lock is expired
Long oldValue = Jboot.me().getRedis().getSet(lockName, expires);
//获取上一个锁到期时间,并设置现在的锁到期时间,
//只有一个线程才能获取上一个线上的设置时间,因为jedis.getSet是同步的
if (oldValue != null && oldValue.equals(currentValue)) {
//如果这个时候,多个线程恰好都到了这里
//只有一个线程的设置值和当前值相同,他才有权利获取锁
//lock acquired
locked = true;
return true;
}
}
if (timeout > 0) {
timeout -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (timeout > 0);
return false;
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
public boolean acquire() {
long timeout = timeoutMsecs;
do {
long expires = System.currentTimeMillis() + expireMsecs + 1;
Long result = redis.setnx(lockName, expires);
if (result != null && result == 1) {
// lock acquired
locked = true;
return true;
}
Long currentValue = redis.get(lockName);
if (currentValue != null && currentValue < System.currentTimeMillis()) {
//判断是否为空,不为空的情况下,如果被其他线程设置了值,则第二个条件判断是过不去的
// lock is expired
Long oldValue = redis.getSet(lockName, expires);
//获取上一个锁到期时间,并设置现在的锁到期时间,
//只有一个线程才能获取上一个线上的设置时间,因为jedis.getSet是同步的
if (oldValue != null && oldValue.equals(currentValue)) {
//如果这个时候,多个线程恰好都到了这里
//只有一个线程的设置值和当前值相同,他才有权利获取锁
//lock acquired
locked = true;
return true;
}
}
if (timeout > 0) {
timeout -= 100;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} while (timeout > 0);
return false;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void publishMessage(int action, String cacheName, Object key) {
Jboot.me().getMq().publish(new JbootEhredisMessage(clientId, action, cacheName, key), channel);
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
private void publishMessage(int action, String cacheName, Object key) {
JbootEhredisMessage message = new JbootEhredisMessage(clientId, action, cacheName, key);
redis.publish(serializer.serialize(channel), serializer.serialize(message));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public Object getAttribute(String name) {
return Jboot.getJbootCache().get(SESSION_CACHE_NAME, buildKey(name));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public Object getAttribute(String name) {
return Jboot.getCache().get(SESSION_CACHE_NAME, buildKey(name));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static <T> T service(Class<T> clazz, String group, String version) {
return me().getRpc().serviceObtain(clazz, group, version);
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
public static <T> T service(Class<T> clazz, String group, String version) {
return jboot.getRpc().serviceObtain(clazz, group, version);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void doGenerate(String excludeTables) {
String modelPackage = basePackage ;
String baseModelPackage = basePackage + ".base";
String modelDir = PathKit.getWebRootPath() + "/src/main/java/" + modelPackage.replace(".", "/");
String baseModelDir = PathKit.getWebRootPath() + "/src/main/java/" + baseModelPackage.replace(".", "/");
System.out.println("start generate...");
System.out.println("generate dir:" + modelDir);
DatasourceConfig datasourceConfig = JbootProperties.get("jboot.datasource", DatasourceConfig.class);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(datasourceConfig.getUrl());
config.setUsername(datasourceConfig.getUser());
config.setPassword(datasourceConfig.getPassword());
config.addDataSourceProperty("cachePrepStmts", "true");
config.addDataSourceProperty("prepStmtCacheSize", "250");
config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
config.setDriverClassName("com.mysql.jdbc.Driver");
HikariDataSource dataSource = new HikariDataSource(config);
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
if (StringUtils.isNotBlank(excludeTables)) {
List<TableMeta> newTableMetaList = new ArrayList<>();
Set<String> excludeTableSet = StringUtils.splitToSet(excludeTables.toLowerCase(), ",");
for (TableMeta tableMeta : tableMetaList) {
if (excludeTableSet.contains(tableMeta.name.toLowerCase())) {
System.out.println("exclude table : " + tableMeta.name);
continue;
}
newTableMetaList.add(tableMeta);
}
tableMetaList = newTableMetaList;
}
new JbootBaseModelGenerator(baseModelPackage, baseModelDir).generate(tableMetaList);
new JbootModelnfoGenerator(modelPackage, baseModelPackage, modelDir).generate(tableMetaList);
System.out.println("model generate finished !!!");
}
#location 24
#vulnerability type RESOURCE_LEAK | #fixed code
public void doGenerate(String excludeTables) {
String modelPackage = basePackage;
String baseModelPackage = basePackage + ".base";
String modelDir = PathKit.getWebRootPath() + "/src/main/java/" + modelPackage.replace(".", "/");
String baseModelDir = PathKit.getWebRootPath() + "/src/main/java/" + baseModelPackage.replace(".", "/");
System.out.println("start generate...");
System.out.println("generate dir:" + modelDir);
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
new JbootBaseModelGenerator(baseModelPackage, baseModelDir).generate(tableMetaList);
new JbootModelnfoGenerator(modelPackage, baseModelPackage, modelDir).generate(tableMetaList);
System.out.println("model generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void setAttribute(String name, Object value) {
Jboot.getJbootCache().put(SESSION_CACHE_NAME, buildKey(name), value);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public void setAttribute(String name, Object value) {
Jboot.getCache().put(SESSION_CACHE_NAME, buildKey(name), value);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public <T> T get(Class<T> clazz, String prefix, String file) {
/**
* 开发模式下,热加载会导致由于Config是不同的 ClassLoader,
* 如果走缓存会Class转化异常
*/
if (JbootApplication.isDevMode()) {
return createConfigObject(clazz, prefix, file);
}
Object configObject = configCache.get(clazz.getName() + prefix);
if (configObject == null) {
synchronized (clazz) {
if (configObject == null) {
configObject = createConfigObject(clazz, prefix, file);
configCache.put(clazz.getName() + prefix, configObject);
}
}
}
return (T) configObject;
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public <T> T get(Class<T> clazz, String prefix, String file) {
/**
* 开发模式下,热加载会导致由于Config是不同的 ClassLoader,
* 如果走缓存会Class转化异常
*/
if (isDevMode()) {
return createConfigObject(clazz, prefix, file);
}
Object configObject = configCache.get(clazz.getName() + prefix);
if (configObject == null) {
synchronized (clazz) {
if (configObject == null) {
configObject = createConfigObject(clazz, prefix, file);
configCache.put(clazz.getName() + prefix, configObject);
}
}
}
return (T) configObject;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public <T> T get(Class<T> clazz, String prefix, String file) {
/**
* 开发模式下,热加载会导致由于Config是不同的 ClassLoader,
* 如果走缓存会Class转化异常
*/
if (isDevMode()) {
return createConfigObject(clazz, prefix, file);
}
Object configObject = configCache.get(clazz.getName() + prefix);
if (configObject == null) {
synchronized (clazz) {
if (configObject == null) {
configObject = createConfigObject(clazz, prefix, file);
configCache.put(clazz.getName() + prefix, configObject);
}
}
}
return (T) configObject;
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public <T> T get(Class<T> clazz, String prefix, String file) {
/**
* 开发模式下,热加载会导致由于 Config 是不同的 ClassLoader 而导致异常,
* 如果走缓存会Class转化异常
*/
if (isDevMode()) {
return createConfigObject(clazz, prefix, file);
}
Object configObject = configCache.get(clazz.getName() + prefix);
if (configObject == null) {
synchronized (clazz) {
if (configObject == null) {
configObject = createConfigObject(clazz, prefix, file);
configCache.put(clazz.getName() + prefix, configObject);
}
}
}
return (T) configObject;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected void doInject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
// Inject inject = field.getAnnotation(Inject.class);
// if (inject == null) {
// continue ;
// }
//
// Class<?> fieldInjectedClass = inject.value();
// if (fieldInjectedClass == Void.class) {
// fieldInjectedClass = field.getType();
// }
//
// Object fieldInjectedObject = doGet(fieldInjectedClass, injectDepth);
// field.setAccessible(true);
// field.set(targetObject, fieldInjectedObject);
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
injectByJFinalInject(targetObject, field, inject, injectDepth);
continue;
}
InjectConfigValue injectConfigValue = field.getAnnotation(InjectConfigValue.class);
if (injectConfigValue != null) {
doInjectConfigValue(targetObject, field, injectConfigValue);
continue;
}
InjectConfigModel injectConfigModel = field.getAnnotation(InjectConfigModel.class);
if (injectConfigModel != null) {
doInjectConfigModel(targetObject, field, injectConfigModel);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
doInjectRPC(targetObject, field, rpcInject);
continue;
}
}
}
#location 42
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
protected void doInject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
// Inject inject = field.getAnnotation(Inject.class);
// if (inject == null) {
// continue ;
// }
//
// Class<?> fieldInjectedClass = inject.value();
// if (fieldInjectedClass == Void.class) {
// fieldInjectedClass = field.getType();
// }
//
// Object fieldInjectedObject = doGet(fieldInjectedClass, injectDepth);
// field.setAccessible(true);
// field.set(targetObject, fieldInjectedObject);
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
doInjectJFinalOrginal(targetObject, field, inject, injectDepth);
continue;
}
ConfigValue configValue = field.getAnnotation(ConfigValue.class);
if (configValue != null) {
doInjectConfigValue(targetObject, field, configValue);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
doInjectRPC(targetObject, field, rpcInject);
continue;
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 19
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) 可以用 Aop.inject(obj),所以注掉下一行代码
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
//只有在循环依赖的时候,这个context才会有值
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
//保存到本次初始化的上下文
context.get().put(targetClass, ret);
//循环注入
doInject(targetClass, ret, injectDepth);
//保存到缓存、并清除上下文数据
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testRedis() {
Jboot.setBootArg("jboot.redis.host", "127.0.0.1");
// Jboot.setBootArg("jboot.redis.password", "123456");
JbootRedis redis = Jboot.me().getRedis();
redis.set("mykey", "myvalue");
redis.lpush("list", 1,2,3,4,5);
System.out.println(redis.get("mykey").toString());
System.out.println(redis.lrange("list", 0, -1));
System.out.println(redis.blpop(10000, "list"));
}
#location 8
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testRedis() {
// Jboot.setBootArg("jboot.redis.host", "127.0.0.1");
//// Jboot.setBootArg("jboot.redis.password", "123456");
//
// JbootRedis redis = Jboot.me().getRedis();
// redis.set("mykey", "myvalue");
//
// redis.lpush("list", 1,2,3,4,5);
//
// System.out.println(redis.get("mykey").toString());
// System.out.println(redis.lrange("list", 0, -1));
//
// System.out.println(redis.blpop(10000, "list"));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void inject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
injectByJFinalInject(targetObject, field, inject, injectDepth);
continue;
}
javax.inject.Inject javaxInject = field.getAnnotation(javax.inject.Inject.class);
if (javaxInject != null) {
injectByJavaxInject(targetObject, field, javaxInject, injectDepth);
continue;
}
ConfigInject configInject = field.getAnnotation(ConfigInject.class);
if (configInject != null) {
injectByConfig(targetObject, field, configInject);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
injectByRPC(targetObject, field, rpcInject);
continue;
}
}
}
#location 35
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void inject(Class<?> targetClass, Object targetObject, int injectDepth) throws ReflectiveOperationException {
if ((injectDepth--) <= 0) {
return;
}
targetClass = getUsefulClass(targetClass);
Field[] fields = targetClass.getDeclaredFields();
if (fields.length == 0) {
return;
}
for (Field field : fields) {
Inject inject = field.getAnnotation(Inject.class);
if (inject != null) {
injectByJFinalInject(targetObject, field, inject, injectDepth);
continue;
}
ConfigInject configInject = field.getAnnotation(ConfigInject.class);
if (configInject != null) {
injectByConfig(targetObject, field, configInject);
continue;
}
RPCInject rpcInject = field.getAnnotation(RPCInject.class);
if (rpcInject != null) {
injectByRPC(targetObject, field, rpcInject);
continue;
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public <T> T deserialize(byte[] bytes, Class<T> clz) throws IOException {
return (T) Jboot.me().getSerializer().deserialize(bytes);
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public <T> T deserialize(byte[] bytes, Class<T> clz) throws IOException {
return (T) SerializerManager.me().getSerializer(config.getSerializer()).deserialize(bytes);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testHttpDownload() {
String url = "http://www.xxx.com/abc.zip";
File downloadToFile = new File("/xxx/abc.zip");
JbootHttpRequest request = JbootHttpRequest.create(url, null, JbootHttpRequest.METHOD_GET);
request.setDownloadFile(downloadToFile);
JbootHttpResponse response = Jboot.me().getHttp().handle(request);
if (response.isError()){
downloadToFile.delete();
}
System.out.println(downloadToFile.length());
}
#location 13
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testHttpDownload() {
// String url = "http://www.xxx.com/abc.zip";
//
// File downloadToFile = new File("/xxx/abc.zip");
//
//
// JbootHttpRequest request = JbootHttpRequest.create(url, null, JbootHttpRequest.METHOD_GET);
// request.setDownloadFile(downloadToFile);
//
//
// JbootHttpResponse response = Jboot.me().getHttp().handle(request);
//
// if (response.isError()){
// downloadToFile.delete();
// }
//
// System.out.println(downloadToFile.length());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void putCache(Object key, Object value) {
Jboot.getJbootCache().put(tableName(), key, value);
}
#location 2
#vulnerability type NULL_DEREFERENCE | #fixed code
public void putCache(Object key, Object value) {
Jboot.getCache().put(tableName(), key, value);
} | Below is the vulnerable code, please generate the patch based on the following information. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.