output | instruction | input |
---|---|---|
#fixed code
@EventHandler
public void onStructureGrow(StructureGrowEvent event) {
Location location = event.getLocation();
GWorld world = gw.getTableManager().getWorldTable().getWorld(location.getWorld().getName());
TreeTable treeTable = gw.getTableManager().getTreeTable();
GPlayer planter;
// TODO: Add TreeType species to reduction model
if (treeTable.getLocationMap().containsKey(location)) {
Tree tree = treeTable.getTreeMap().get(treeTable.getLocationMap().get(location));
planter = tree.getOwner();
Reduction reduction = ClimateEngine.getInstance().getClimateEngine(world.getWorldName()).treeGrow(planter, event.getSpecies(), event.getBlocks());
int carbonScore = planter.getCarbonScore();
planter.setCarbonScore((int) (carbonScore - reduction.getReductionValue()));
tree.setSapling(false);
tree.setSize(event.getBlocks().size()); // TODO: Only consider core species blocks as tree size
// Queue tree update query
TreeUpdateQuery treeUpdateQuery = new TreeUpdateQuery(tree);
AsyncDBQueue.getInstance().queueUpdateQuery(treeUpdateQuery);
} else {
PlayerTable playerTable = GlobalWarming.getInstance().getTableManager().getPlayerTable();
planter = playerTable.getOrCreatePlayer(untrackedUUID, true);
// First create a new tree object and store it
Long uniqueId = GlobalWarming.getInstance().getRandom().nextLong();
// TODO: Only consider core species blocks as tree size
Tree tree = new Tree(uniqueId, planter, location, false, event.getBlocks().size());
TreeInsertQuery insertQuery = new TreeInsertQuery(tree);
AsyncDBQueue.getInstance().queueInsertQuery(insertQuery);
gw.getLogger().warning("Untracked structure grow occured:");
gw.getLogger().warning("@ " + location.toString());
}
// Create a new reduction object using the world's climate engine
Reduction reduction = ClimateEngine.getInstance().getClimateEngine(world.getWorldName()).treeGrow(planter, event.getSpecies(), event.getBlocks());
int carbonScore = planter.getCarbonScore();
planter.setCarbonScore((int) (carbonScore - reduction.getReductionValue()));
// Queue player update query
PlayerUpdateQuery playerUpdateQuery = new PlayerUpdateQuery(planter);
AsyncDBQueue.getInstance().queueUpdateQuery(playerUpdateQuery);
// Queue reduction insert query
ReductionInsertQuery insertQuery = new ReductionInsertQuery(reduction);
AsyncDBQueue.getInstance().queueInsertQuery(insertQuery);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@EventHandler
public void onStructureGrow(StructureGrowEvent event) {
Location location = event.getLocation();
GWorld world = gw.getTableManager().getWorldTable().getWorld(location.getWorld().getName());
TreeTable treeTable = gw.getTableManager().getTreeTable();
if (treeTable.getLocationMap().containsKey(location)) {
Long uuid = treeTable.getLocationMap().get(location);
Tree tree = treeTable.getTreeMap().get(uuid);
UUID ownerUUID = tree.getOwner().getUuid();
GPlayer planter = gw.getTableManager().getPlayerTable().getPlayers().get(ownerUUID);
Reduction reduction = ClimateEngine.getInstance().getClimateEngine(world.getWorldName()).treeGrow(planter, event.getSpecies(), event.getBlocks());
int carbonScore = planter.getCarbonScore();
planter.setCarbonScore((int) (carbonScore - reduction.getReductionValue()));
tree.setSapling(false);
// TODO: Queue tree DB update
// TODO: Queue planter score DB update
// TODO: Queue new reduction DB insert
} else {
gw.getLogger().severe("Untracked structure grow occured:");
gw.getLogger().severe("@ " + location.toString());
}
}
#location 12
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void verifyValidateClusterStartupWarnsIfNoServersInCluster() {
WlsClusterConfig wlsClusterConfig = new WlsClusterConfig("cluster1");
wlsClusterConfig.validateCluster(1, null);
assertThat(logRecords, containsWarning(NO_WLS_SERVER_IN_CLUSTER, "cluster1"));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void verifyValidateClusterStartupWarnsIfNoServersInCluster() {
WlsClusterConfig wlsClusterConfig = new WlsClusterConfig("cluster1");
ClusterStartup cs = new ClusterStartup().withClusterName("cluster1").withReplicas(1);
wlsClusterConfig.validateClusterStartup(cs, null);
assertThat(logRecords, containsWarning(NO_WLS_SERVER_IN_CLUSTER, "cluster1"));
assertThat(logRecords, containsWarning(REPLICA_MORE_THAN_WLS_SERVERS));
}
#location 5
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void whenDesiredStateIsAdmin_serverStartupCreatesJavaOptionsEnvironment() {
configureServer("wls1").withDesiredState(ADMIN_STATE);
addWlsServer("wls1");
invokeStep();
assertThat(
getServerStartupInfo("wls1").getEnvironment(),
hasItem(envVar("JAVA_OPTIONS", "-Dweblogic.management.startupMode=ADMIN")));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void whenDesiredStateIsAdmin_serverStartupCreatesJavaOptionsEnvironment() {
configureServer("ms1").withDesiredState(ADMIN_STATE);
addWlsServer("ms1");
invokeStep();
assertThat(
getServerStartupInfo("ms1").getEnvironment(),
hasItem(envVar("JAVA_OPTIONS", "-Dweblogic.management.startupMode=ADMIN")));
}
#location 9
#vulnerability type NULL_DEREFERENCE |
#fixed code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
int numClasses = classificationModel.numClasses();
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
SchemaUtil.checkSize(numClasses, categoricalLabel);
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1){
SchemaUtil.checkSize(numFeatures, features);
}
}
Schema result = new Schema(label, features);
return result;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public Schema encodeSchema(SparkMLEncoder encoder){
T model = getTransformer();
Label label = null;
if(model instanceof HasLabelCol){
HasLabelCol hasLabelCol = (HasLabelCol)model;
String labelCol = hasLabelCol.getLabelCol();
Feature feature = encoder.getOnlyFeature(labelCol);
MiningFunction miningFunction = getMiningFunction();
switch(miningFunction){
case CLASSIFICATION:
{
if(feature instanceof BooleanFeature){
BooleanFeature booleanFeature = (BooleanFeature)feature;
label = new CategoricalLabel(booleanFeature.getName(), booleanFeature.getDataType(), booleanFeature.getValues());
} else
if(feature instanceof CategoricalFeature){
CategoricalFeature categoricalFeature = (CategoricalFeature)feature;
DataField dataField = (DataField)categoricalFeature.getField();
label = new CategoricalLabel(dataField);
} else
if(feature instanceof ContinuousFeature){
ContinuousFeature continuousFeature = (ContinuousFeature)feature;
int numClasses = 2;
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
numClasses = classificationModel.numClasses();
}
List<String> categories = new ArrayList<>();
for(int i = 0; i < numClasses; i++){
categories.add(String.valueOf(i));
}
Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);
encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));
label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
} else
{
throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
}
}
break;
case REGRESSION:
{
Field<?> field = encoder.toContinuous(feature.getName());
field.setDataType(DataType.DOUBLE);
label = new ContinuousLabel(field.getName(), field.getDataType());
}
break;
default:
throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
}
}
if(model instanceof ClassificationModel){
ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>)model;
CategoricalLabel categoricalLabel = (CategoricalLabel)label;
int numClasses = classificationModel.numClasses();
if(numClasses != categoricalLabel.size()){
throw new IllegalArgumentException("Expected " + numClasses + " target categories, got " + categoricalLabel.size() + " target categories");
}
}
String featuresCol = model.getFeaturesCol();
List<Feature> features = encoder.getFeatures(featuresCol);
if(model instanceof PredictionModel){
PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>)model;
int numFeatures = predictionModel.numFeatures();
if(numFeatures != -1 && features.size() != numFeatures){
throw new IllegalArgumentException("Expected " + numFeatures + " features, got " + features.size() + " features");
}
}
Schema result = new Schema(label, features);
return result;
}
#location 80
#vulnerability type NULL_DEREFERENCE |
#fixed code
public static void compile(String projectPath) throws IOException, TimeoutException, InterruptedException {
Runtime runtime = Runtime.getRuntime();
Process exec = runtime.exec("mvn clean package -f " + projectPath);
Worker worker = new Worker(exec);
worker.start();
ProcessStatus ps = worker.getProcessStatus();
try {
worker.join(30000);
if (ps.exitCode == ProcessStatus.CODE_STARTED) {
// not finished
worker.interrupt();
throw new TimeoutException();
}
} catch (InterruptedException e) {
// canceled by other thread.
worker.interrupt();
throw e;
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public static void compile(String projectPath) throws IOException {
Runtime runtime = Runtime.getRuntime();
Process exec = runtime.exec("mvn clean package -f " + projectPath);
try {
exec.waitFor();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
#location 5
#vulnerability type RESOURCE_LEAK |
#fixed code
@Override
public String get(String key) {
return Jboot.getCache().get(cache_name, key);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public String get(String key) {
return Jboot.getJbootCache().get(cache_name, key);
}
#location 3
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) can be replaced with Aop.inject(obj), so the next line is commented out
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
// This context only holds a value when there is a circular dependency
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
// Save into the context of the current initialization
context.get().put(targetClass, ret);
// Recursive injection
doInject(targetClass, ret, injectDepth);
// Save to the cache and clear the context data
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
InputStreamReader reader = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
reader = new InputStreamReader(inStream);
int len;
char[] buffer = new char[1024];
while ((len = reader.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream, reader);
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
private static void copyStreamToResponse(HttpURLConnection connection, HttpServletResponse response) throws IOException {
InputStream inStream = null;
try {
if (!response.isCommitted()) {
PrintWriter writer = response.getWriter();
inStream = getInputStream(connection);
int len;
char[] buffer = new char[1024];
InputStreamReader r = new InputStreamReader(inStream);
while ((len = r.read(buffer)) != -1) {
writer.write(buffer, 0, len);
}
}
} finally {
quetlyClose(inStream);
}
}
#location 15
#vulnerability type RESOURCE_LEAK |
#fixed code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
return doGet(targetClass);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
protected <T> T doGet(Class<T> targetClass, int injectDepth) throws ReflectiveOperationException {
// Aop.get(obj.getClass()) can be replaced with Aop.inject(obj), so the next line is commented out
// targetClass = (Class<T>)getUsefulClass(targetClass);
targetClass = (Class<T>) getMappingClass(targetClass);
Singleton si = targetClass.getAnnotation(Singleton.class);
boolean singleton = (si != null ? si.value() : this.singleton);
Object ret;
if (!singleton) {
ret = createObject(targetClass);
doInject(targetClass, ret, injectDepth);
return (T) ret;
}
ret = singletonCache.get(targetClass);
if (ret != null) {
return (T) ret;
}
// This context only holds a value when there is a circular dependency
ret = context.get().get(targetClass);
if (ret != null) {
return (T) ret;
}
synchronized (this) {
ret = singletonCache.get(targetClass);
if (ret == null) {
// ret = createObject(targetClass);
// doInject(targetClass, ret, injectDepth);
// singletonCache.put(targetClass, ret);
ret = createObject(targetClass);
// Save into the context of the current initialization
context.get().put(targetClass, ret);
// Recursive injection
doInject(targetClass, ret, injectDepth);
// Save to the cache and clear the context data
singletonCache.put(targetClass, ret);
context.get().clear();
context.remove();
}
}
return (T) ret;
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
List<TableMeta> tableMetaList = CodeGenHelpler.createMetaBuilder().build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
generate(tableMetaList);
System.out.println("generate finished !!!");
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void doGenerate(String excludeTables) {
System.out.println("start generate...");
DataSource dataSource = CodeGenHelpler.getDatasource();
List<TableMeta> tableMetaList = new MetaBuilder(dataSource).build();
CodeGenHelpler.excludeTables(tableMetaList, excludeTables);
generate(tableMetaList);
System.out.println("generate finished !!!");
}
#location 5
#vulnerability type RESOURCE_LEAK |
#fixed code
public void index() {
renderText("hello " + serviceTest.getName("aaa"));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void index() {
List<Record> records = Db.find("select * from `user`");
System.out.println("index .... ");
LogKit.error("xxxxxxx");
Jboot.getCache().put("test","test","valueeeeeeeeee");
String value = Jboot.getCache().get("test","test");
System.out.println("value:"+value);
renderText("hello " + serviceTest.getName());
// render();
}
#location 11
#vulnerability type NULL_DEREFERENCE |
#fixed code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
AopCache.removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
AopCache.remove(cacheName, cacheKey);
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
static void doCacheEvict(Object[] arguments, Class targetClass, Method method, CacheEvict evict) {
String unless = AnnotationUtil.get(evict.unless());
if (Utils.isUnless(unless, method, arguments)) {
return;
}
String cacheName = AnnotationUtil.get(evict.name());
if (StrUtil.isBlank(cacheName)) {
throw new JbootException(String.format("CacheEvict.name() must not empty in method [%s].",
ClassUtil.buildMethodString(method)));
}
String cacheKey = AnnotationUtil.get(evict.key());
if (StrUtil.isBlank(cacheKey) || "*".equals(cacheKey)) {
getAopCache().removeAll(cacheName);
} else {
cacheKey = Utils.buildCacheKey(cacheKey, targetClass, method, arguments);
getAopCache().remove(cacheName, cacheKey);
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Override
public Object getAttribute(String name) {
return Jboot.getCache().get(SESSION_CACHE_NAME, buildKey(name));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public Object getAttribute(String name) {
return Jboot.getJbootCache().get(SESSION_CACHE_NAME, buildKey(name));
}
#location 3
#vulnerability type NULL_DEREFERENCE |
#fixed code
@RequiresPermissions("adminSystemVariable")
@RequestMapping(value="variableSave${url.suffix}", method = RequestMethod.POST)
@ResponseBody
public BaseVO variableSave(System sys, Model model, HttpServletRequest request){
System system = sqlService.findAloneByProperty(System.class, "name", sys.getName());
if(system == null){
// Create a new entry
system = new System();
system.setName(sys.getName());
}else{
// Edit the existing entry
}
system.setDescription(sys.getDescription());
system.setLasttime(DateUtil.timeForUnix10());
system.setValue(sys.getValue());
sqlService.save(system);
/*** Update the in-memory data ***/
systemService.refreshSystemCache();
ActionLogCache.insert(request, system.getId(), "保存系统变量", system.getName()+"="+system.getValue());
return success();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@RequiresPermissions("adminSystemVariable")
@RequestMapping(value="variableSave${url.suffix}", method = RequestMethod.POST)
@ResponseBody
public BaseVO variableSave(System sys, Model model, HttpServletRequest request){
System system;
if(Global.system.get(sys.getName()) == null){
system = new System();
system.setName(sys.getName());
}else{
// Already exists, just edit it
system = sqlService.findAloneByProperty(System.class, "name", sys.getName());
}
system.setDescription(sys.getDescription());
system.setLasttime(DateUtil.timeForUnix10());
system.setValue(sys.getValue());
sqlService.save(system);
/*** Update the in-memory data ***/
systemService.refreshSystemCache();
ActionLogCache.insert(request, system.getId(), "保存系统变量", system.getName()+"="+system.getValue());
return success();
}
#location 23
#vulnerability type NULL_DEREFERENCE |
#fixed code
@SuppressWarnings( "unchecked" )
private static List<Principal> getPrincipals(HttpServletRequest request) {
List<Principal> principals = null;
Session session = (Session) ThreadContext.get( ThreadContext.SESSION_KEY );
if( session != null ) {
principals = (List<Principal>) session.getAttribute( PRINCIPALS_SESSION_KEY );
} else {
HttpSession httpSession = request.getSession( false );
if( httpSession != null ) {
principals = (List<Principal>) httpSession.getAttribute( PRINCIPALS_SESSION_KEY );
}
}
return principals;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@SuppressWarnings( "unchecked" )
private static List<Principal> getPrincipals(HttpServletRequest request) {
List<Principal> principals = null;
Session session = ThreadLocalSecurityContext.current().getSession( false );
if( session != null ) {
principals = (List<Principal>) session.getAttribute( PRINCIPALS_SESSION_KEY );
} else {
HttpSession httpSession = request.getSession( false );
if( httpSession != null ) {
principals = (List<Principal>) httpSession.getAttribute( PRINCIPALS_SESSION_KEY );
}
}
return principals;
}
#location 5
#vulnerability type NULL_DEREFERENCE |
#fixed code
public void testXvideosRipper() throws IOException {
if (!DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
contentURLs.add(new URL("http://www.xvideos.com/video1428195/stephanie_first_time_anal"));
contentURLs.add(new URL("http://www.xvideos.com/video7136868/vid-20140205-wa0011"));
for (URL url : contentURLs) {
try {
XvideosRipper ripper = new XvideosRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void testXvideosRipper() throws IOException {
if (false && !DOWNLOAD_CONTENT) {
return;
}
List<URL> contentURLs = new ArrayList<URL>();
contentURLs.add(new URL("http://www.xvideos.com/video1428195/stephanie_first_time_anal"));
contentURLs.add(new URL("http://www.xvideos.com/video7136868/vid-20140205-wa0011"));
for (URL url : contentURLs) {
try {
XvideosRipper ripper = new XvideosRipper(url);
ripper.rip();
assert(ripper.getWorkingDir().listFiles().length > 1);
deleteDir(ripper.getWorkingDir());
} catch (Exception e) {
e.printStackTrace();
fail("Error while ripping URL " + url + ": " + e.getMessage());
}
}
}
#location 12
#vulnerability type NULL_DEREFERENCE |
#fixed code
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
}
try {
String path = Utils.removeCWD(saveAs);
RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
itemsPending.remove(url);
itemsCompleted.put(url, saveAs);
observer.update(this, msg);
checkIfComplete();
} catch (Exception e) {
logger.error("Exception while updating observer: ", e);
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
}
try {
String path = Utils.removeCWD(saveAs);
RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
synchronized(observer) {
itemsPending.remove(url);
itemsCompleted.put(url, saveAs);
observer.update(this, msg);
observer.notifyAll();
checkIfComplete();
}
} catch (Exception e) {
logger.error("Exception while updating observer: ", e);
}
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
public void setFactory(KeyedPoolableObjectFactory factory) throws IllegalStateException {
Map toDestroy = new HashMap();
final KeyedPoolableObjectFactory oldFactory = _factory;
synchronized (this) {
assertOpen();
if (0 < getNumActive()) {
throw new IllegalStateException("Objects are already active");
} else {
for (Iterator it = _poolMap.keySet().iterator(); it.hasNext();) {
Object key = it.next();
ObjectQueue pool = (ObjectQueue)_poolMap.get(key);
if (pool != null) {
// Copy objects to new list so pool.queue can be cleared
// inside the sync
List objects = new ArrayList();
objects.addAll(pool.queue);
toDestroy.put(key, objects);
it.remove();
_poolList.remove(key);
_totalIdle = _totalIdle - pool.queue.size();
_totalInternalProcessing =
_totalInternalProcessing + pool.queue.size();
pool.queue.clear();
}
}
_factory = factory;
}
}
destroy(toDestroy, oldFactory);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void setFactory(KeyedPoolableObjectFactory factory) throws IllegalStateException {
Map toDestroy = new HashMap();
synchronized (this) {
assertOpen();
if (0 < getNumActive()) {
throw new IllegalStateException("Objects are already active");
} else {
for (Iterator it = _poolMap.keySet().iterator(); it.hasNext();) {
Object key = it.next();
ObjectQueue pool = (ObjectQueue)_poolMap.get(key);
if (pool != null) {
// Copy objects to new list so pool.queue can be cleared
// inside the sync
List objects = new ArrayList();
objects.addAll(pool.queue);
toDestroy.put(key, objects);
it.remove();
_poolList.remove(key);
_totalIdle = _totalIdle - pool.queue.size();
_totalInternalProcessing =
_totalInternalProcessing + pool.queue.size();
pool.queue.clear();
}
}
_factory = factory;
}
}
destroy(toDestroy);
}
#location 28
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
public T borrowObject(K key, long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = getBlockWhenExhausted();
boolean create;
long waitTime = 0;
ObjectDeque<T> objectDeque = register(key);
try {
while (p == null) {
create = false;
if (blockWhenExhausted) {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null && objectDeque != null) {
if (borrowMaxWait < 0) {
p = objectDeque.getIdleObjects().takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = objectDeque.getIdleObjects().pollFirst(
borrowMaxWait, TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
factory.activateObject(key, p.getObject());
} catch (Exception e) {
try {
destroy(key, p, true);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = factory.validateObject(key, p.getObject());
} catch (Throwable t) {
PoolUtils.checkRethrow(t);
}
if (!validate) {
try {
destroy(key, p, true);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
} finally {
deregister(key);
}
borrowedCount.incrementAndGet();
synchronized (idleTimes) {
idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
idleTimes.poll();
}
synchronized (waitTimes) {
waitTimes.add(Long.valueOf(waitTime));
waitTimes.poll();
}
synchronized (maxBorrowWaitTimeMillisLock) {
if (waitTime > maxBorrowWaitTimeMillis) {
maxBorrowWaitTimeMillis = waitTime;
}
}
return p.getObject();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public T borrowObject(K key, long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = getBlockWhenExhausted();
boolean create;
long waitTime = 0;
ObjectDeque<T> objectDeque = register(key);
try {
while (p == null) {
create = false;
if (blockWhenExhausted) {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null && objectDeque != null) {
if (borrowMaxWait < 0) {
p = objectDeque.getIdleObjects().takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = objectDeque.getIdleObjects().pollFirst(
borrowMaxWait, TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
_factory.activateObject(key, p.getObject());
} catch (Exception e) {
try {
destroy(key, p, true);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = _factory.validateObject(key, p.getObject());
} catch (Throwable t) {
PoolUtils.checkRethrow(t);
}
if (!validate) {
try {
destroy(key, p, true);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
} finally {
deregister(key);
}
borrowedCount.incrementAndGet();
synchronized (idleTimes) {
idleTimes.add(Long.valueOf(p.getIdleTimeMillis()));
idleTimes.poll();
}
synchronized (waitTimes) {
waitTimes.add(Long.valueOf(waitTime));
waitTimes.poll();
}
synchronized (maxBorrowWaitTimeMillisLock) {
if (waitTime > maxBorrowWaitTimeMillis) {
maxBorrowWaitTimeMillis = waitTime;
}
}
return p.getObject();
}
#location 24
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Override
public void printStackTrace(PrintWriter writer) {
Exception borrowedBy = this.borrowedBy;
if (borrowedBy != null) {
borrowedBy.printStackTrace(writer);
}
Exception usedBy = this.usedBy;
if (usedBy != null) {
usedBy.printStackTrace(writer);
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public void printStackTrace(PrintWriter writer) {
if (borrowedBy != null) {
borrowedBy.printStackTrace(writer);
}
if (usedBy != null) {
usedBy.printStackTrace(writer);
}
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Test
public void testMissingSlotMillis() throws IOException {
final String JOB_HISTORY_FILE_NAME =
"src/test/resources/job_1329348432999_0003-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
File jobHistoryfile = new File(JOB_HISTORY_FILE_NAME);
byte[] contents = Files.toByteArray(jobHistoryfile);
final String JOB_CONF_FILE_NAME =
"src/test/resources/job_1329348432655_0001_conf.xml";
Configuration jobConf = new Configuration();
jobConf.addResource(new Path(JOB_CONF_FILE_NAME));
JobHistoryFileParserHadoop2 historyFileParser =
new JobHistoryFileParserHadoop2(jobConf);
assertNotNull(historyFileParser);
JobKey jobKey = new JobKey("cluster1", "user", "Sleep", 1, "job_1329348432655_0001");
historyFileParser.parse(contents, jobKey);
// this history file has only map slot millis no reduce millis
Long mapMbMillis = historyFileParser.getMapMbMillis();
assertNotNull(mapMbMillis);
assertEquals(mapMbMillis, new Long(178169856L));
Long reduceMbMillis = historyFileParser.getReduceMbMillis();
assertNotNull(reduceMbMillis);
assertEquals(reduceMbMillis, Constants.NOTFOUND_VALUE);
Long mbMillis = historyFileParser.getMegaByteMillis();
assertNotNull(mbMillis);
Long expValue = 188559872L;
assertEquals(expValue, mbMillis);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testMissingSlotMillis() throws IOException {
final String JOB_HISTORY_FILE_NAME =
"src/test/resources/job_1329348432999_0003-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist";
File jobHistoryfile = new File(JOB_HISTORY_FILE_NAME);
byte[] contents = Files.toByteArray(jobHistoryfile);
final String JOB_CONF_FILE_NAME =
"src/test/resources/job_1329348432655_0001_conf.xml";
Configuration jobConf = new Configuration();
jobConf.addResource(new Path(JOB_CONF_FILE_NAME));
JobHistoryFileParser historyFileParser =
JobHistoryFileParserFactory.createJobHistoryFileParser(contents, jobConf);
assertNotNull(historyFileParser);
// confirm that we get back an object that can parse hadoop 2.0 files
assertTrue(historyFileParser instanceof JobHistoryFileParserHadoop2);
JobKey jobKey = new JobKey("cluster1", "user", "Sleep", 1, "job_1329348432655_0001");
historyFileParser.parse(contents, jobKey);
// this history file has only map slot millis no reduce millis
Long mbMillis = historyFileParser.getMegaByteMillis();
assertNotNull(mbMillis);
Long expValue = 10402816L;
assertEquals(expValue, mbMillis);
}
#location 23
#vulnerability type NULL_DEREFERENCE |
#fixed code
public int getQSize() {
int res = 0;
final Actor actors[] = this.actors;
for (int i = 0; i < actors.length; i++) {
Actor a = actors[i];
res+=a.__mailbox.size();
res+=a.__cbQueue.size();
}
return res;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public int getQSize() {
int res = 0;
for (int i = 0; i < queues.length; i++) {
Queue queue = queues[i];
res+=queue.size();
}
for (int i = 0; i < queues.length; i++) {
Queue queue = cbQueues[i];
res+=queue.size();
}
return res;
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
public boolean isEmpty() {
for (int i = 0; i < actors.length; i++) {
Actor act = actors[i];
if ( ! act.__mailbox.isEmpty() || ! act.__cbQueue.isEmpty() )
return false;
}
return true;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public boolean isEmpty() {
for (int i = 0; i < queues.length; i++) {
Queue queue = queues[i];
if ( ! queue.isEmpty() )
return false;
}
for (int i = 0; i < cbQueues.length; i++) {
Queue queue = cbQueues[i];
if ( ! queue.isEmpty() )
return false;
}
return true;
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
public boolean pollQs() {
CallEntry poll = pollQueues(actors); // first callback actors
if (poll != null) {
try {
Actor.sender.set(poll.getTargetActor());
Object invoke = null;
profileCounter++;
if ( profileCounter > nextProfile && poll.getTarget() instanceof Actor ) {
profileCounter = 0;
invoke = profiledCall(poll);
} else {
invoke = poll.getMethod().invoke(poll.getTarget(), poll.getArgs());
}
if (poll.getFutureCB() != null) {
final Future futureCB = poll.getFutureCB(); // the future of caller side
final Promise invokeResult = (Promise) invoke; // the future returned sync from call
invokeResult.then(
new Callback() {
@Override
public void receiveResult(Object result, Object error) {
futureCB.receiveResult(result, error );
}
}
);
}
return true;
} catch ( Exception e) {
if ( e instanceof InvocationTargetException && ((InvocationTargetException) e).getTargetException() == ActorStoppedException.Instance ) {
Actor actor = (Actor) poll.getTarget();
actor.__stopped = true;
removeActorImmediate(actor);
return true;
}
if (poll.getFutureCB() != null)
poll.getFutureCB().receiveResult(null, e);
if (e.getCause() != null)
e.getCause().printStackTrace();
else
e.printStackTrace();
}
}
return false;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public boolean pollQs() {
CallEntry poll = pollQueues(cbQueues, queues); // first callback queues
if (poll != null) {
try {
Actor.sender.set(poll.getTargetActor());
Object invoke = null;
profileCounter++;
if ( profileCounter > nextProfile && queueList.size() > 1 && poll.getTarget() instanceof Actor ) {
profileCounter = 0;
invoke = profiledCall(poll);
} else {
invoke = poll.getMethod().invoke(poll.getTarget(), poll.getArgs());
}
if (poll.getFutureCB() != null) {
final Future futureCB = poll.getFutureCB(); // the future of caller side
final Promise invokeResult = (Promise) invoke; // the future returned sync from call
invokeResult.then(
new Callback() {
@Override
public void receiveResult(Object result, Object error) {
futureCB.receiveResult(result, error );
}
}
);
}
return true;
} catch ( Exception e) {
if ( e instanceof InvocationTargetException && ((InvocationTargetException) e).getTargetException() == ActorStoppedException.Instance ) {
Actor actor = (Actor) poll.getTarget();
actor.__stopped = true;
removeActor(actor);
applyQueueList();
return true;
}
if (poll.getFutureCB() != null)
poll.getFutureCB().receiveResult(null, e);
if (e.getCause() != null)
e.getCause().printStackTrace();
else
e.printStackTrace();
}
}
return false;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Override
public boolean compareAndSwapValue(long expected, long value) {
BytesStore bytes = this.bytes;
return bytes != null && bytes.compareAndSwapLong(offset, expected, value);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public boolean compareAndSwapValue(long expected, long value) {
if (value == LONG_NOT_COMPLETE && binaryLongReferences != null)
binaryLongReferences.add(new WeakReference<>(this));
return bytes.compareAndSwapLong(offset, expected, value);
}
#location 5
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void shouldBeReadOnly() throws Exception {
final File tempFile = File.createTempFile("mapped", "bytes");
tempFile.deleteOnExit();
try (RandomAccessFile raf = new RandomAccessFile(tempFile, "rw")) {
raf.setLength(4096);
assertTrue(tempFile.setWritable(false));
try (MappedBytes mappedBytes = MappedBytes.readOnly(tempFile)) {
assertTrue(mappedBytes.isBackingFileReadOnly());
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void shouldBeReadOnly() throws Exception {
final File tempFile = File.createTempFile("mapped", "bytes");
final RandomAccessFile raf = new RandomAccessFile(tempFile, "rw");
raf.setLength(4096);
assertTrue(tempFile.setWritable(false));
final MappedBytes mappedBytes = MappedBytes.readOnly(tempFile);
assertTrue(mappedBytes.
isBackingFileReadOnly());
mappedBytes.release();
}
#location 5
#vulnerability type RESOURCE_LEAK |
#fixed code
protected void handleConnect(IoSession session, ConnectMessage msg) {
LOG.info("handleConnect invoked");
m_messaging.connect(session, msg);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected void handleConnect(IoSession session, ConnectMessage msg) {
LOG.info("handleConnect invoked");
if (msg.getProcotolVersion() != 0x03) {
ConnAckMessage badProto = new ConnAckMessage();
badProto.setReturnCode(ConnAckMessage.UNNACEPTABLE_PROTOCOL_VERSION);
session.write(badProto);
session.close(false);
return;
}
if (msg.getClientID() == null || msg.getClientID().length() > 23) {
ConnAckMessage okResp = new ConnAckMessage();
okResp.setReturnCode(ConnAckMessage.IDENTIFIER_REJECTED);
session.write(okResp);
return;
}
m_clientIDsLock.lock();
try {
//if an old client with the same ID already exists close its session.
if (m_clientIDs.containsKey(msg.getClientID())) {
//clean the subscriptions if the old used a cleanSession = true
IoSession oldSession = m_clientIDs.get(msg.getClientID()).getSession();
boolean cleanSession = (Boolean) oldSession.getAttribute(Constants.CLEAN_SESSION);
if (cleanSession) {
//cleanup topic subscriptions
m_messaging.removeSubscriptions(msg.getClientID());
}
m_clientIDs.get(msg.getClientID()).getSession().close(false);
}
ConnectionDescriptor connDescr = new ConnectionDescriptor(msg.getClientID(), session, msg.isCleanSession());
m_clientIDs.put(msg.getClientID(), connDescr);
} finally {
m_clientIDsLock.unlock();
}
int keepAlive = msg.getKeepAlive();
session.setAttribute("keepAlive", keepAlive);
session.setAttribute(Constants.CLEAN_SESSION, msg.isCleanSession());
//used to track the client in the subscription and publishing phases.
session.setAttribute(Constants.ATTR_CLIENTID, msg.getClientID());
session.getConfig().setIdleTime(IdleStatus.READER_IDLE, Math.round(keepAlive * 1.5f));
//Handle will flag
if (msg.isWillFlag()) {
QOSType willQos = QOSType.values()[msg.getWillQos()];
m_messaging.publish(msg.getWillTopic(), msg.getWillMessage().getBytes(),
willQos, msg.isWillRetain(), msg.getClientID(), session);
}
//handle user authentication
if (msg.isUserFlag()) {
String pwd = null;
if (msg.isPasswordFlag()) {
pwd = msg.getPassword();
}
if (!m_authenticator.checkValid(msg.getUsername(), pwd)) {
ConnAckMessage okResp = new ConnAckMessage();
okResp.setReturnCode(ConnAckMessage.BAD_USERNAME_OR_PASSWORD);
session.write(okResp);
return;
}
}
//handle clean session flag
if (msg.isCleanSession()) {
//remove all prev subscriptions
//cleanup topic subscriptions
m_messaging.removeSubscriptions(msg.getClientID());
} else {
//force the republish of stored QoS1 and QoS2
m_messaging.republishStored(msg.getClientID());
}
ConnAckMessage okResp = new ConnAckMessage();
okResp.setReturnCode(ConnAckMessage.CONNECTION_ACCEPTED);
session.write(okResp);
}
#location 75
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
public static GitRepositoryState getGitRepositoryState() throws IOException {
Properties properties = new Properties();
try {
InputStream inputStream = new FileInputStream("config/git.properties");
BufferedReader bf = new BufferedReader(new InputStreamReader(inputStream));
properties.load(bf);
} catch (IOException e) {
}
GitRepositoryState gitRepositoryState = new GitRepositoryState(properties);
return gitRepositoryState;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public static GitRepositoryState getGitRepositoryState() throws IOException {
Properties properties = new Properties();
try {
properties.load(new FileInputStream("config/git.properties"));
} catch (IOException e) {
}
GitRepositoryState gitRepositoryState = new GitRepositoryState(properties);
return gitRepositoryState;
}
#location 4
#vulnerability type RESOURCE_LEAK |
#fixed code
@Test
public void testNewAuthorization() throws AcmeException {
Authorization auth = new Authorization();
auth.setDomain("example.org");
Connection connection = new DummyConnection() {
@Override
public int sendSignedRequest(URI uri, ClaimBuilder claims, Session session, Account account) throws AcmeException {
assertThat(uri, is(resourceUri));
assertThat(claims.toString(), sameJSONAs(getJson("newAuthorizationRequest")));
assertThat(session, is(notNullValue()));
assertThat(account, is(sameInstance(testAccount)));
return HttpURLConnection.HTTP_CREATED;
}
@Override
public Map<String, Object> readJsonResponse() throws AcmeException {
return getJsonAsMap("newAuthorizationResponse");
}
@Override
public URI getLocation() throws AcmeException {
return locationUri;
}
};
HttpChallenge httpChallenge = new HttpChallenge();
DnsChallenge dnsChallenge = new DnsChallenge();
TestableAbstractAcmeClient client = new TestableAbstractAcmeClient(connection);
client.putTestResource(Resource.NEW_AUTHZ, resourceUri);
client.putTestChallenge("http-01", httpChallenge);
client.putTestChallenge("dns-01", dnsChallenge);
client.newAuthorization(testAccount, auth);
assertThat(auth.getDomain(), is("example.org"));
assertThat(auth.getStatus(), is("pending"));
assertThat(auth.getExpires(), is(nullValue()));
assertThat(auth.getLocation(), is(locationUri));
assertThat(auth.getChallenges(), containsInAnyOrder(
(Challenge) httpChallenge, (Challenge) dnsChallenge));
assertThat(auth.getCombinations(), hasSize(2));
assertThat(auth.getCombinations().get(0), containsInAnyOrder(
(Challenge) httpChallenge));
assertThat(auth.getCombinations().get(1), containsInAnyOrder(
(Challenge) httpChallenge, (Challenge) dnsChallenge));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testNewAuthorization() throws AcmeException {
Authorization auth = new Authorization();
auth.setDomain("example.org");
Connection connection = new DummyConnection() {
@Override
public int sendSignedRequest(URI uri, ClaimBuilder claims, Session session, Account account) throws AcmeException {
assertThat(uri, is(resourceUri));
assertThat(claims.toString(), sameJSONAs(getJson("newAuthorizationRequest")));
assertThat(session, is(notNullValue()));
assertThat(account, is(sameInstance(testAccount)));
return HttpURLConnection.HTTP_CREATED;
}
@Override
public Map<String, Object> readJsonResponse() throws AcmeException {
return getJsonAsMap("newAuthorizationResponse");
}
};
HttpChallenge httpChallenge = new HttpChallenge();
DnsChallenge dnsChallenge = new DnsChallenge();
TestableAbstractAcmeClient client = new TestableAbstractAcmeClient(connection);
client.putTestResource(Resource.NEW_AUTHZ, resourceUri);
client.putTestChallenge("http-01", httpChallenge);
client.putTestChallenge("dns-01", dnsChallenge);
client.newAuthorization(testAccount, auth);
assertThat(auth.getDomain(), is("example.org"));
assertThat(auth.getStatus(), is("pending"));
assertThat(auth.getExpires(), is(nullValue()));
assertThat(auth.getChallenges(), containsInAnyOrder(
(Challenge) httpChallenge, (Challenge) dnsChallenge));
assertThat(auth.getCombinations(), hasSize(2));
assertThat(auth.getCombinations().get(0), containsInAnyOrder(
(Challenge) httpChallenge));
assertThat(auth.getCombinations().get(1), containsInAnyOrder(
(Challenge) httpChallenge, (Challenge) dnsChallenge));
}
#location 30
#vulnerability type RESOURCE_LEAK |
#fixed code
@Test
public void testUpdateRetryAfter() throws Exception {
final Instant retryAfter = Instant.now().plus(Duration.ofSeconds(30));
TestableConnectionProvider provider = new TestableConnectionProvider() {
@Override
public void sendRequest(URL url, Session session) {
assertThat(url, is(locationUrl));
}
@Override
public JSON readJsonResponse() {
return getJSON("updateAuthorizationResponse");
}
@Override
public void handleRetryAfter(String message) throws AcmeException {
throw new AcmeRetryAfterException(message, retryAfter);
}
};
Login login = provider.createLogin();
provider.putTestChallenge("http-01", Http01Challenge::new);
provider.putTestChallenge("dns-01", Dns01Challenge::new);
Authorization auth = new Authorization(login, locationUrl);
try {
auth.update();
fail("Expected AcmeRetryAfterException");
} catch (AcmeRetryAfterException ex) {
assertThat(ex.getRetryAfter(), is(retryAfter));
}
assertThat(auth.getDomain(), is("example.org"));
assertThat(auth.getStatus(), is(Status.VALID));
assertThat(auth.isWildcard(), is(false));
assertThat(auth.getExpires(), is(parseTimestamp("2016-01-02T17:12:40Z")));
assertThat(auth.getLocation(), is(locationUrl));
assertThat(auth.getChallenges(), containsInAnyOrder(
provider.getChallenge(Http01Challenge.TYPE),
provider.getChallenge(Dns01Challenge.TYPE)));
provider.close();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testUpdateRetryAfter() throws Exception {
final Instant retryAfter = Instant.now().plus(Duration.ofSeconds(30));
TestableConnectionProvider provider = new TestableConnectionProvider() {
@Override
public void sendRequest(URL url, Session session) {
assertThat(url, is(locationUrl));
}
@Override
public JSON readJsonResponse() {
return getJSON("updateAuthorizationResponse");
}
@Override
public void handleRetryAfter(String message) throws AcmeException {
throw new AcmeRetryAfterException(message, retryAfter);
}
};
Login login = provider.createLogin();
provider.putTestChallenge("http-01", Http01Challenge::new);
provider.putTestChallenge("dns-01", Dns01Challenge::new);
Authorization auth = new Authorization(login, locationUrl);
try {
auth.update();
fail("Expected AcmeRetryAfterException");
} catch (AcmeRetryAfterException ex) {
assertThat(ex.getRetryAfter(), is(retryAfter));
}
assertThat(auth.getDomain(), is("example.org"));
assertThat(auth.getStatus(), is(Status.VALID));
assertThat(auth.getExpires(), is(parseTimestamp("2016-01-02T17:12:40Z")));
assertThat(auth.getLocation(), is(locationUrl));
assertThat(auth.getChallenges(), containsInAnyOrder(
provider.getChallenge(Http01Challenge.TYPE),
provider.getChallenge(Dns01Challenge.TYPE)));
provider.close();
}
#location 9
#vulnerability type RESOURCE_LEAK |
#fixed code
@Test
public void testGet() {
assertEquals(100, genericTrie.get("杨尚川").intValue());
assertEquals(99, genericTrie.get("杨尚喜").intValue());
assertEquals(98, genericTrie.get("杨尚丽").intValue());
assertEquals(1, genericTrie.get("中华人民共和国").intValue());
assertEquals(null, genericTrie.get("杨"));
assertEquals(null, genericTrie.get("杨尚"));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testGet() {
Assert.assertEquals(100, trie.get("杨尚川"), 0);
Assert.assertEquals(99, trie.get("杨尚喜"), 0);
Assert.assertEquals(98, trie.get("杨尚丽"), 0);
Assert.assertEquals(1, trie.get("中华人民共和国"), 0);
Assert.assertEquals(null, trie.get("杨"));
Assert.assertEquals(null, trie.get("杨尚"));
}
#location 3
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
public final boolean incrementToken() throws IOException {
String token = getToken();
if (token != null) {
charTermAttribute.setEmpty().append(token);
return true;
}
return false;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public final boolean incrementToken() throws IOException {
Word word = getWord();
if (word != null) {
int positionIncrement = 1;
// Skip stop words
while(StopWord.is(word.getText())){
positionIncrement++;
startOffset += word.getText().length();
LOGGER.debug("忽略停用词:"+word.getText());
word = getWord();
if(word == null){
return false;
}
}
charTermAttribute.setEmpty().append(word.getText());
offsetAttribute.setOffset(startOffset, startOffset+word.getText().length());
positionIncrementAttribute.setPositionIncrement(positionIncrement);
startOffset += word.getText().length();
// Part-of-speech tagging
if(POS){
PartOfSpeechTagging.process(Arrays.asList(word));
partOfSpeechAttribute.setEmpty().append(word.getPartOfSpeech().getPos());
}
// Pinyin tagging
if(PINYIN){
PinyinTagging.process(Arrays.asList(word));
acronymPinyinAttribute.setEmpty().append(word.getAcronymPinYin());
fullPinyinAttribute.setEmpty().append(word.getFullPinYin());
}
// Synonym tagging
if(SYNONYM){
SynonymTagging.process(Arrays.asList(word));
StringBuilder synonym = new StringBuilder();
word.getSynonym().forEach(w -> synonym.append(w.getText()).append(" "));
synonymAttribute.setEmpty().append(synonym.toString().trim());
}
// Antonym tagging
if(ANTONYM){
AntonymTagging.process(Arrays.asList(word));
StringBuilder antonym = new StringBuilder();
word.getAntonym().forEach(w -> antonym.append(w.getText()).append(" "));
antonymAttribute.setEmpty().append(antonym.toString().trim());
}
return true;
}
return false;
}
#location 17
#vulnerability type NULL_DEREFERENCE |
#fixed code
public void clear() {
if (staticResourceHandlers != null) {
staticResourceHandlers.clear();
staticResourceHandlers = null;
}
staticResourcesSet = false;
externalStaticResourcesSet = false;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void clear() {
if (staticResourceHandlers != null) {
staticResourceHandlers.clear();
staticResourceHandlers = null;
}
if (jarResourceHandlers != null) {
jarResourceHandlers.clear();
jarResourceHandlers = null;
}
staticResourcesSet = false;
externalStaticResourcesSet = false;
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent me){
if(!(me.getMessage() instanceof CoapMessage)){
ctx.sendUpstream(me);
return;
}
CoapMessage coapMessage = (CoapMessage) me.getMessage();
if(coapMessage.getCode().isError() || coapMessage.getMessageType().equals(MsgType.RST)){
errorMessageReceived(ctx, me);
return;
}
if(me.getMessage() instanceof CoapResponse){
CoapResponse response = (CoapResponse) me.getMessage();
final byte[] token = response.getToken();
BlockwiseTransfer transfer;
//Add latest received payload to already received payload
synchronized (incompleteResponseMonitor){
transfer = incompleteResponsePayload.get(new ByteArrayWrapper(token));
if(transfer != null){
try {
if(response.getBlockNumber(BLOCK_2) == transfer.getNextBlockNumber()){
log.debug("Received response (Token: " + (new ByteArrayWrapper(token).toHexString()) +
" , Block: " + response.getBlockNumber(BLOCK_2) + "), ");
if (log.isDebugEnabled()){
//Copy Payload
ChannelBuffer payloadCopy = ChannelBuffers.copiedBuffer(response.getPayload());
byte[] bytes = new byte[payloadCopy.readableBytes()];
payloadCopy.getBytes(0, bytes);
log.debug("Payload Hex: " + new ByteArrayWrapper(bytes).toHexString());
}
transfer.getPartialPayload()
.writeBytes(response.getPayload(), 0, response.getPayload().readableBytes());
transfer.setNextBlockNumber(transfer.getNextBlockNumber() + 1);
}
else{
log.debug("Received unexpected response (Token: " + (new ByteArrayWrapper(token).toHexString()) +
" , Block: " + response.getBlockNumber(BLOCK_2) + "). IGNORE!");
me.getFuture().setSuccess();
return;
}
}
catch (InvalidOptionException e) {
log.error("This should never happen!", e);
}
}
}
//Check whether payload of the response is complete
if(transfer != null){
try {
if(response.isLastBlock(BLOCK_2)){
//Send response with complete payload to application
log.debug("Block " + response.getBlockNumber(BLOCK_2) + " for response with token " +
new ByteArrayWrapper(token).toHexString() +
" received. Payload complete. Forward to client application.");
response.getOptionList().removeAllOptions(BLOCK_2);
response.setPayload(transfer.getPartialPayload());
MessageEvent event = new UpstreamMessageEvent(me.getChannel(), response, me.getRemoteAddress());
ctx.sendUpstream(event);
synchronized (incompleteResponseMonitor){
if(incompleteResponsePayload.remove(new ByteArrayWrapper(token)) == null){
log.error("This should never happen! No incomplete payload found for token " +
new ByteArrayWrapper(token).toHexString());
}
else{
log.debug("Deleted not anymore incomplete payload for token " +
new ByteArrayWrapper(token).toHexString() + " from list");
}
}
return;
}
else{
final long receivedBlockNumber = response.getBlockNumber(BLOCK_2);
log.debug("Block " + receivedBlockNumber + " for response with token " +
new ByteArrayWrapper(token).toHexString() +
" received. Payload (still) incomplete.");
CoapRequest nextCoapRequest = (CoapRequest) transfer.getCoapMessage();
nextCoapRequest.setMessageID(-1);
nextCoapRequest.setBlockOption(BLOCK_2, receivedBlockNumber + 1,
false, response.getMaxBlocksizeForResponse());
ChannelFuture future = Channels.future(me.getChannel());
future.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
log.debug("Request for block " + (receivedBlockNumber + 1) + " for token " +
new ByteArrayWrapper(token).toHexString() + " sent successfully.");
}
});
MessageEvent event = new DownstreamMessageEvent(me.getChannel(),
future, nextCoapRequest, me.getRemoteAddress());
log.debug("Send request for block " + (receivedBlockNumber + 1) + " for token " +
new ByteArrayWrapper(token).toHexString() + ".");
ctx.sendDownstream(event);
return;
}
}
catch (InvalidOptionException e) {
log.error("This should never happen!", e);
}
catch (MessageDoesNotAllowPayloadException e) {
log.error("This should never happen!", e);
}
catch (ToManyOptionsException e){
log.error("This should never happen!", e);
}
catch (InvalidHeaderException e) {
log.error("This should never happen!", e);
}
}
}
ctx.sendUpstream(me);
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent me){
if(!(me.getMessage() instanceof CoapMessage)){
ctx.sendUpstream(me);
return;
}
if(me.getMessage() instanceof CoapResponse){
CoapResponse response = (CoapResponse) me.getMessage();
if(response.getMaxBlocksizeForResponse() != null){
final byte[] token = response.getToken();
//Add latest received payload to already received payload
BlockwiseTransfer transfer;
synchronized (incompleteResponseMonitor){
transfer = incompleteResponsePayload.get(new ByteArrayWrapper(token));
try {
if(response.getBlockNumber(BLOCK_2) == transfer.getNextBlockNumber()){
log.debug("Received response (Token: " + (new ByteArrayWrapper(token).toHexString()) +
" , Block: " + response.getBlockNumber(BLOCK_2) + "), ");
//Copy Payload
ChannelBuffer payloadCopy = ChannelBuffers.copiedBuffer(response.getPayload());
byte[] bytes = new byte[payloadCopy.readableBytes()];
payloadCopy.getBytes(0, bytes);
log.debug("Payload Hex: " + new ByteArrayWrapper(bytes).toHexString());
//**********************************************
transfer.getPartialPayload()
.writeBytes(response.getPayload(), 0, response.getPayload().readableBytes());
transfer.setNextBlockNumber(transfer.getNextBlockNumber() + 1);
}
else{
log.debug("Received duplicate response (Token: " + (new ByteArrayWrapper(token).toHexString()) +
" , Block: " + response.getBlockNumber(BLOCK_2) + "). IGNORE!");
me.getFuture().setSuccess();
return;
}
}
catch (InvalidOptionException e) {
log.error("This should never happen!", e);
}
}
//Check whether payload of the response is complete
try {
if(response.isLastBlock(BLOCK_2)){
//Send response with complete payload to application
log.debug("Block " + response.getBlockNumber(BLOCK_2) + " for response with token " +
new ByteArrayWrapper(token).toHexString() +
" received. Payload complete. Forward to client application.");
response.getOptionList().removeAllOptions(BLOCK_2);
response.setPayload(transfer.getPartialPayload());
MessageEvent event = new UpstreamMessageEvent(me.getChannel(), response, me.getRemoteAddress());
ctx.sendUpstream(event);
synchronized (incompleteResponseMonitor){
if(incompleteResponsePayload.remove(new ByteArrayWrapper(token)) == null){
log.error("This should never happen! No incomplete payload found for token " +
new ByteArrayWrapper(token).toHexString());
}
else{
log.debug("Deleted not anymore incomplete payload for token " +
new ByteArrayWrapper(token).toHexString() + " from list");
}
}
//End the original ChannelFuture
me.getFuture().isSuccess();
return;
}
else{
final long receivedBlockNumber = response.getBlockNumber(BLOCK_2);
log.debug("Block " + receivedBlockNumber + " for response with token " +
new ByteArrayWrapper(token).toHexString() +
" received. Payload (still) incomplete.");
CoapRequest nextCoapRequest = (CoapRequest) transfer.getCoapMessage();
nextCoapRequest.setMessageID(-1);
nextCoapRequest.setBlockOption(BLOCK_2, receivedBlockNumber + 1,
false, response.getMaxBlocksizeForResponse());
ChannelFuture future = Channels.future(me.getChannel());
future.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
log.debug("Request for block " + (receivedBlockNumber + 1) + " for token " +
new ByteArrayWrapper(token).toHexString() + " sent successfully.");
}
});
MessageEvent event = new DownstreamMessageEvent(me.getChannel(),
future, nextCoapRequest, me.getRemoteAddress());
log.debug("Send request for block " + (receivedBlockNumber + 1) + " for token " +
new ByteArrayWrapper(token).toHexString() + ".");
ctx.sendDownstream(event);
return;
}
}
catch (InvalidOptionException e) {
log.error("This should never happen!", e);
}
catch (MessageDoesNotAllowPayloadException e) {
log.error("This should never happen!", e);
}
catch (ToManyOptionsException e) {
log.error("This should never happen!", e);
} catch (InvalidHeaderException e) {
log.error("This should never happen!", e);
}
}
ctx.sendUpstream(me);
}
}
#location 20
#vulnerability type NULL_DEREFERENCE
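
The NULL_DEREFERENCE in this pair comes from calling methods on the result of a Map.get(...) lookup before checking it; the fixed version fetches the value once and branches on null. A generic sketch of that pattern, with hypothetical types rather than the CoAP classes above:

// Illustrative sketch only -- the map and Transfer type are stand-ins.
// Map.get returns null for unknown keys, so the result must be guarded
// (fetched once, checked, then used) before any method call.
import java.util.HashMap;
import java.util.Map;

public class TransferLookup {
    static class Transfer { int nextBlock; }

    private final Map<String, Transfer> transfers = new HashMap<>();

    public int nextBlockFor(String token) {
        Transfer transfer = transfers.get(token);   // may be null
        if (transfer == null) {
            return -1;                              // explicit "not found" path
        }
        return transfer.nextBlock;                  // safe: null handled above
    }
}
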
#fixed code
@Override
public void receiveEmptyACK(){
if(!emptyAckNotificationTimes.add(System.currentTimeMillis())){
log.error("Could not add notification time for empty ACK.");
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public void receiveEmptyACK(){
if(receiveEnabled && !emptyAckNotificationTimes.add(System.currentTimeMillis())){
log.error("Could not add notification time for empty ACK.");
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
public void stopRecording() throws IOException {
Integer processId = androidScreenRecordProcess.get(Thread.currentThread().getId());
stopRunningProcess(processId);
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public void stopRecording() throws IOException {
Integer processId = androidScreenRecordProcess.get(Thread.currentThread().getId());
if (processId != -1) {
String process = "pgrep -P " + processId;
System.out.println(process);
Process p2 = Runtime.getRuntime().exec(process);
BufferedReader r = new BufferedReader(new InputStreamReader(p2.getInputStream()));
String command = "kill " + processId;
System.out.println("Stopping Video Recording");
System.out.println("******************" + command);
try {
runCommandThruProcess(command);
Thread.sleep(10000);
System.out.println("Killed video recording with exit code :" + command);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
#location 7
#vulnerability type RESOURCE_LEAK
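
RESOURCE_LEAK here points at a reader opened on a spawned process's output stream and never closed. A small, generic sketch of the try-with-resources shape that avoids it (the command string and class name are placeholders, not part of the entry above):

// Illustrative sketch only. The reader (and its underlying stream) is closed
// automatically, and the child process is reclaimed in the finally block.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class ProcessOutput {
    public static String firstLine(String command) throws IOException {
        Process process = Runtime.getRuntime().exec(command);
        try (BufferedReader reader =
                 new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            return reader.readLine();
        } finally {
            process.destroy();
        }
    }
}
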
#fixed code
public String getAppiumServerPath(String host) throws Exception {
return appiumServerProp(host, "appiumServerPath");
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public String getAppiumServerPath(String host) throws Exception {
JSONArray hostMachineObject = CapabilityManager.getInstance().getHostMachineObject();
List<Object> objects = hostMachineObject.toList();
Object o = objects.stream().filter(object -> ((HashMap) object).get("machineIP")
.toString().equalsIgnoreCase(host)
&& ((HashMap) object).get("appiumServerPath") != null)
.findFirst().orElse(null);
if (o != null) {
return ((HashMap) o).get("appiumServerPath").toString();
}
return null;
}
#location 9
#vulnerability type NULL_DEREFERENCE
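
This entry's NULL_DEREFERENCE sits in the branch that unwraps a stream result with orElse(null) and then chains map lookups on it. One hedged alternative, sketched with simplified stand-in types rather than the capability classes above, keeps the lookup as an Optional end to end:

// Illustrative sketch only -- the host records are plain String maps here.
// Returning Optional lets the caller express the missing-value case without
// ever dereferencing a raw null.
import java.util.List;
import java.util.Map;
import java.util.Optional;

public class HostConfig {
    public static Optional<String> serverPath(List<Map<String, String>> hosts, String ip) {
        return hosts.stream()
                .filter(h -> ip.equalsIgnoreCase(h.get("machineIP")))
                .map(h -> h.get("appiumServerPath"))
                .filter(path -> path != null)
                .findFirst();            // caller decides what "absent" means
    }
}
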
#fixed code
public AppiumServiceBuilder getAppiumServiceBuilder(String methodName) throws Exception {
String webKitPort = iosDevice.startIOSWebKit(device_udid);
return appiumMan.appiumServerForIOS(device_udid, methodName, webKitPort);
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public AppiumServiceBuilder getAppiumServiceBuilder(String methodName) throws Exception {
if (iosDevice.checkiOSDevice(device_udid)) {
String webKitPort = iosDevice.startIOSWebKit(device_udid);
return appiumMan.appiumServerForIOS(device_udid, methodName, webKitPort);
} else if (!iosDevice.checkiOSDevice(device_udid)) {
return appiumMan.appiumServerForAndroid(device_udid, methodName);
}
return null;
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Override
public void startAppiumServer(String host) throws Exception {
System.out.println(
"**************************************************************************\n");
System.out.println("Starting Appium Server on host " + host);
System.out.println(
"**************************************************************************\n");
String serverPath = CapabilityManager.getInstance().getAppiumServerPath(host);
String serverPort = CapabilityManager.getInstance().getAppiumServerPort(host);
if (serverPath == null
&& serverPort == null) {
System.out.println("Picking Default Path for AppiumServiceBuilder");
new Api().getResponse("http://" + host + ":4567"
+ "/appium/start").body().string();
} else if (serverPath != null && serverPort != null ) {
System.out.println("Picking UserSpecified Path & Port for AppiumServiceBuilder");
new Api().getResponse("http://" + host + ":4567"
+ "/appium/start?URL=" + serverPath
+ "&PORT=" + serverPort).body().string();
} else if (serverPath != null) {
System.out.println("Picking UserSpecified Path & Using default Port for AppiumServiceBuilder");
new Api().getResponse("http://" + host + ":4567"
+ "/appium/start?URL=" + serverPath).body().string();
} else if (serverPort != null) {
System.out.println("Picking Default Path & User Port for AppiumServiceBuilder");
new Api().getResponse("http://" + host + ":4567"
+ "/appium/start?PORT=" + serverPort).body().string();
}
boolean status = Boolean.getBoolean(new JSONObject(new Api().getResponse("http://" + host + ":4567"
+ "/appium/isRunning").body().string()).get("status").toString());
if (status) {
System.out.println(
"***************************************************************\n");
System.out.println("Appium Server started successfully on " + host);
System.out.println(
"****************************************************************\n");
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public void startAppiumServer(String host) throws Exception {
System.out.println(
"**************************************************************************\n");
System.out.println("Starting Appium Server on host " + host);
System.out.println(
"**************************************************************************\n");
if (CapabilityManager.getInstance().getAppiumServerPath(host) == null) {
System.out.println("Picking Default Path for AppiumServiceBuilder");
new Api().getResponse("http://" + host + ":4567"
+ "/appium/start").body().string();
} else {
System.out.println("Picking UserSpecified Path for AppiumServiceBuilder");
String appiumServerPath = CapabilityManager.getInstance().getAppiumServerPath(host);
new Api().getResponse("http://" + host + ":4567"
+ "/appium/start?URL=" + appiumServerPath).body().string();
}
boolean status = Boolean.getBoolean(new JSONObject(new Api().getResponse("http://" + host + ":4567"
+ "/appium/isRunning").body().string()).get("status").toString());
if (status) {
System.out.println(
"***************************************************************\n");
System.out.println("Appium Server started successfully on " + host);
System.out.println(
"****************************************************************\n");
}
}
#location 11
#vulnerability type NULL_DEREFERENCE
#fixed code
public XmlSuite constructXmlSuiteForDistribution(String pack, List<String> tests,
Map<String, List<Method>> methods, int deviceCount) {
include(listeners, "LISTENERS");
include(groupsInclude, "INCLUDE_GROUPS");
XmlSuite suite = new XmlSuite();
suite.setName("TestNG Forum");
suite.setThreadCount(deviceCount);
suite.setParallel(ParallelMode.CLASSES);
suite.setVerbose(2);
listeners.add("com.appium.manager.AppiumParallelTest");
listeners.add("com.appium.utils.RetryListener");
suite.setListeners(listeners);
if (prop.getProperty("LISTENERS") != null) {
suite.setListeners(listeners);
}
XmlTest test = new XmlTest(suite);
test.setName("TestNG Test");
test.addParameter("device", "");
include(groupsExclude, "EXCLUDE_GROUPS");
test.setIncludedGroups(groupsInclude);
test.setExcludedGroups(groupsExclude);
List<XmlClass> xmlClasses = new ArrayList<>();
writeXmlClass(tests, methods, xmlClasses);
test.setXmlClasses(xmlClasses);
return suite;
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public XmlSuite constructXmlSuiteForDistribution(String pack, List<String> tests,
Map<String, List<Method>> methods, int deviceCount) {
try {
prop.load(new FileInputStream("config.properties"));
} catch (IOException e) {
e.printStackTrace();
}
include(listeners, "LISTENERS");
include(groupsInclude, "INCLUDE_GROUPS");
XmlSuite suite = new XmlSuite();
suite.setName("TestNG Forum");
suite.setThreadCount(deviceCount);
suite.setParallel(ParallelMode.CLASSES);
suite.setVerbose(2);
listeners.add("com.appium.manager.AppiumParallelTest");
listeners.add("com.appium.utils.RetryListener");
suite.setListeners(listeners);
if (prop.getProperty("LISTENERS") != null) {
suite.setListeners(listeners);
}
XmlTest test = new XmlTest(suite);
test.setName("TestNG Test");
test.addParameter("device", "");
include(groupsExclude, "EXCLUDE_GROUPS");
test.setIncludedGroups(groupsInclude);
test.setExcludedGroups(groupsExclude);
List<XmlClass> xmlClasses = new ArrayList<>();
writeXmlClass(tests, methods, xmlClasses);
test.setXmlClasses(xmlClasses);
return suite;
}
#location 4
#vulnerability type RESOURCE_LEAK
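
The leaked resource in this pair is a FileInputStream handed to Properties.load and never closed. A minimal sketch of the conventional try-with-resources form (the file path is a placeholder):

// Illustrative sketch only. Properties.load does not close the stream it is
// given, so the stream is opened in a try-with-resources block.
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class ConfigLoader {
    public static Properties load(String path) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            props.load(in);              // stream closed even if load() throws
        }
        return props;
    }
}
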
#fixed code
public void captureScreenShot(String screenShotName) throws IOException, InterruptedException {
File framePath = new File(System.getProperty("user.dir")+"/src/main/resources/");
File scrFile = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
if(driver.toString().split(":")[0].trim().equals("AndroidDriver")){
String androidModel = androidDevice.deviceModel(device_udid);
try {
FileUtils.copyFile(scrFile, new File(System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + ".png"));
File [] files1 = framePath.listFiles();
for (int i = 0; i < files1.length; i++){
if (files1[i].isFile()){ //this line weeds out other directories/folders
System.out.println(files1[i]);
Path p = Paths.get(files1[i].toString());
String fileName=p.getFileName().toString().toLowerCase();
if(androidModel.toString().toLowerCase().contains(fileName.split(".png")[0].toLowerCase())){
try {
imageUtils.wrapDeviceFrames(files1[i].toString(),System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + ".png",System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + "_framed.png");
ExtentTestManager.logOutPut(System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + "_framed.png",screenShotName.toUpperCase());
break;
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IM4JavaException e) {
e.printStackTrace();
}
}
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}else if(driver.toString().split(":")[0].trim().equals("IOSDriver"))
{
String iosModel=iosDevice.getIOSDeviceProductTypeAndVersion(device_udid);
try {
FileUtils.copyFile(scrFile, new File(System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + ".png"));
File [] files1 = framePath.listFiles();
for (int i = 0; i < files1.length; i++){
if (files1[i].isFile()){ //this line weeds out other directories/folders
System.out.println(files1[i]);
Path p = Paths.get(files1[i].toString());
String fileName=p.getFileName().toString().toLowerCase();
if(iosModel.toString().toLowerCase().contains(fileName.split(".png")[0].toLowerCase())){
try {
imageUtils.wrapDeviceFrames(files1[i].toString(),System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + ".png",System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + "_framed.png");
ExtentTestManager.logOutPut(System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + "_framed.png",screenShotName.toUpperCase());
break;
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IM4JavaException e) {
e.printStackTrace();
}
}
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public void captureScreenShot(String screenShotName) throws IOException, InterruptedException {
File framePath = new File(System.getProperty("user.dir")+"/src/main/resources/");
File scrFile = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
if(driver.toString().split(":")[0].trim().equals("AndroidDriver")){
String androidModel = androidDevice.deviceModel(device_udid);
try {
FileUtils.copyFile(scrFile, new File(System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + ".png"));
File [] files1 = framePath.listFiles();
for (int i = 0; i < files1.length; i++){
if (files1[i].isFile()){ //this line weeds out other directories/folders
System.out.println(files1[i]);
Path p = Paths.get(files1[i].toString());
String fileName=p.getFileName().toString().toLowerCase();
if(androidModel.toString().toLowerCase().contains(fileName.split(".png")[0].toLowerCase())){
try {
imageUtils.wrapDeviceFrames(files1[i].toString(),System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + ".png",System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + "_framed.png");
ExtentTestManager.logOutPut(System.getProperty("user.dir") + "/target/screenshot/android/" + device_udid.replaceAll("\\W", "_") + "/"
+ androidModel + "/" + screenShotName + "_framed.png",screenShotName.toUpperCase());
break;
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IM4JavaException e) {
e.printStackTrace();
}
}
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}else if(driver.toString().split(":")[0].trim().equals("IOSDriver"))
{
String iosModel=iosDevice.getDeviceName(device_udid);
try {
FileUtils.copyFile(scrFile, new File(System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + ".png"));
File [] files1 = framePath.listFiles();
for (int i = 0; i < files1.length; i++){
if (files1[i].isFile()){ //this line weeds out other directories/folders
System.out.println(files1[i]);
Path p = Paths.get(files1[i].toString());
String fileName=p.getFileName().toString().toLowerCase();
if(iosModel.toString().toLowerCase().contains(fileName.split(".png")[0].toLowerCase())){
try {
imageUtils.wrapDeviceFrames(files1[i].toString(),System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + ".png",System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + "_framed.png");
ExtentTestManager.logOutPut(System.getProperty("user.dir") + "/target/screenshot/iPhone/" + device_udid.replaceAll("\\W", "_") + "/"
+ iosModel + "/" + screenShotName + "_framed.png",screenShotName.toUpperCase());
break;
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IM4JavaException e) {
e.printStackTrace();
}
}
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
#location 10
#vulnerability type NULL_DEREFERENCE
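
The NULL_DEREFERENCE label on this entry points at iterating over File.listFiles(), which returns null rather than an empty array when the directory cannot be read. A small stand-alone sketch of the guard (directory handling simplified, names hypothetical):

// Illustrative sketch only -- frame lookup reduced to a suffix filter.
import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class FrameFinder {
    public static List<File> pngsIn(File directory) {
        List<File> result = new ArrayList<>();
        File[] entries = directory.listFiles();    // may be null
        if (entries == null) {
            return result;                         // treat as "no frames found"
        }
        for (File entry : entries) {
            if (entry.isFile() && entry.getName().endsWith(".png")) {
                result.add(entry);
            }
        }
        return result;
    }
}
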
#fixed code
public Map<String, String> getDevices() throws Exception {
startADB(); // start adb service
String output = cmd.runCommand("adb devices");
String[] lines = output.split("\n");
if (lines.length <= 1) {
System.out.println("No Android Device Connected");
stopADB();
return null;
} else {
for (int i = 1; i < lines.length; i++) {
lines[i] = lines[i].replaceAll("\\s+", "");
if (lines[i].contains("device")) {
lines[i] = lines[i].replaceAll("device", "");
String deviceID = lines[i];
if (validDeviceIds == null
|| (validDeviceIds != null && validDeviceIds.contains(deviceID))) {
String model =
cmd.runCommand("adb -s " + deviceID
+ " shell getprop ro.product.model")
.replaceAll("\\s+", "");
String brand =
cmd.runCommand("adb -s " + deviceID
+ " shell getprop ro.product.brand")
.replaceAll("\\s+", "");
String osVersion = cmd.runCommand(
"adb -s " + deviceID + " shell getprop ro.build.version.release")
.replaceAll("\\s+", "");
String deviceName = brand + " " + model;
String apiLevel =
cmd.runCommand("adb -s " + deviceID
+ " shell getprop ro.build.version.sdk")
.replaceAll("\n", "");
devices.put("deviceID" + i, deviceID);
devices.put("deviceName" + i, deviceName);
devices.put("osVersion" + i, osVersion);
devices.put(deviceID, apiLevel);
deviceSerial.add(deviceID);
}
} else if (lines[i].contains("unauthorized")) {
lines[i] = lines[i].replaceAll("unauthorized", "");
String deviceID = lines[i];
} else if (lines[i].contains("offline")) {
lines[i] = lines[i].replaceAll("offline", "");
String deviceID = lines[i];
}
}
return devices;
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public Map<String, String> getDevices() throws Exception {
startADB(); // start adb service
String output = cmd.runCommand("adb devices");
String[] lines = output.split("\n");
if (lines.length <= 1) {
System.out.println("No Device Connected");
stopADB();
return null;
} else {
for (int i = 1; i < lines.length; i++) {
lines[i] = lines[i].replaceAll("\\s+", "");
if (lines[i].contains("device")) {
lines[i] = lines[i].replaceAll("device", "");
String deviceID = lines[i];
String model =
cmd.runCommand("adb -s " + deviceID + " shell getprop ro.product.model")
.replaceAll("\\s+", "");
String brand =
cmd.runCommand("adb -s " + deviceID + " shell getprop ro.product.brand")
.replaceAll("\\s+", "");
String osVersion = cmd.runCommand(
"adb -s " + deviceID + " shell getprop ro.build.version.release")
.replaceAll("\\s+", "");
String deviceName = brand + " " + model;
String apiLevel =
cmd.runCommand("adb -s " + deviceID + " shell getprop ro.build.version.sdk")
.replaceAll("\n", "");
devices.put("deviceID" + i, deviceID);
devices.put("deviceName" + i, deviceName);
devices.put("osVersion" + i, osVersion);
devices.put(deviceID, apiLevel);
} else if (lines[i].contains("unauthorized")) {
lines[i] = lines[i].replaceAll("unauthorized", "");
String deviceID = lines[i];
} else if (lines[i].contains("offline")) {
lines[i] = lines[i].replaceAll("offline", "");
String deviceID = lines[i];
}
}
return devices;
}
}
#location 24
#vulnerability type RESOURCE_LEAK
#fixed code
public Map<String, String> getDevices() throws Exception {
startADB(); // start adb service
String output = cmd.runCommand("adb devices");
String[] lines = output.split("\n");
if (lines.length <= 1) {
System.out.println("No Android Device Connected");
stopADB();
return null;
} else {
for (int i = 1; i < lines.length; i++) {
lines[i] = lines[i].replaceAll("\\s+", "");
if (lines[i].contains("device")) {
lines[i] = lines[i].replaceAll("device", "");
String deviceID = lines[i];
if (validDeviceIds == null
|| (validDeviceIds != null && validDeviceIds.contains(deviceID))) {
String model =
cmd.runCommand("adb -s " + deviceID
+ " shell getprop ro.product.model")
.replaceAll("\\s+", "");
String brand =
cmd.runCommand("adb -s " + deviceID
+ " shell getprop ro.product.brand")
.replaceAll("\\s+", "");
String osVersion = cmd.runCommand(
"adb -s " + deviceID + " shell getprop ro.build.version.release")
.replaceAll("\\s+", "");
String deviceName = brand + " " + model;
String apiLevel =
cmd.runCommand("adb -s " + deviceID
+ " shell getprop ro.build.version.sdk")
.replaceAll("\n", "");
devices.put("deviceID" + i, deviceID);
devices.put("deviceName" + i, deviceName);
devices.put("osVersion" + i, osVersion);
devices.put(deviceID, apiLevel);
deviceSerial.add(deviceID);
}
} else if (lines[i].contains("unauthorized")) {
lines[i] = lines[i].replaceAll("unauthorized", "");
String deviceID = lines[i];
} else if (lines[i].contains("offline")) {
lines[i] = lines[i].replaceAll("offline", "");
String deviceID = lines[i];
}
}
return devices;
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public Map<String, String> getDevices() throws Exception {
startADB(); // start adb service
String output = cmd.runCommand("adb devices");
String[] lines = output.split("\n");
if (lines.length <= 1) {
System.out.println("No Device Connected");
stopADB();
return null;
} else {
for (int i = 1; i < lines.length; i++) {
lines[i] = lines[i].replaceAll("\\s+", "");
if (lines[i].contains("device")) {
lines[i] = lines[i].replaceAll("device", "");
String deviceID = lines[i];
String model =
cmd.runCommand("adb -s " + deviceID + " shell getprop ro.product.model")
.replaceAll("\\s+", "");
String brand =
cmd.runCommand("adb -s " + deviceID + " shell getprop ro.product.brand")
.replaceAll("\\s+", "");
String osVersion = cmd.runCommand(
"adb -s " + deviceID + " shell getprop ro.build.version.release")
.replaceAll("\\s+", "");
String deviceName = brand + " " + model;
String apiLevel =
cmd.runCommand("adb -s " + deviceID + " shell getprop ro.build.version.sdk")
.replaceAll("\n", "");
devices.put("deviceID" + i, deviceID);
devices.put("deviceName" + i, deviceName);
devices.put("osVersion" + i, osVersion);
devices.put(deviceID, apiLevel);
} else if (lines[i].contains("unauthorized")) {
lines[i] = lines[i].replaceAll("unauthorized", "");
String deviceID = lines[i];
} else if (lines[i].contains("offline")) {
lines[i] = lines[i].replaceAll("offline", "");
String deviceID = lines[i];
}
}
return devices;
}
}
#location 4
#vulnerability type RESOURCE_LEAK
#fixed code
@Override
public Environment setUpEnvironment(@SuppressWarnings("rawtypes") AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException, Run.RunnerAbortedException {
GhprbTrigger trigger = Ghprb.extractTrigger(build);
if (trigger != null && trigger.getBuilds() != null) {
trigger.getBuilds().onEnvironmentSetup(build, launcher, listener);
}
return new hudson.model.Environment(){};
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public Environment setUpEnvironment(@SuppressWarnings("rawtypes") AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException, Run.RunnerAbortedException {
GhprbTrigger trigger = Ghprb.extractTrigger(build);
if (trigger != null) {
trigger.getBuilds().onEnvironmentSetup(build, launcher, listener);
}
return new hudson.model.Environment(){};
}
#location 5
#vulnerability type NULL_DEREFERENCE
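
This NULL_DEREFERENCE comes from dereferencing the second link of a getter chain (trigger.getBuilds()) when only the first link was checked; the fix simply extends the guard. A stripped-down sketch with stand-in interfaces, not the Jenkins types above:

// Illustrative sketch only. Every link of the chain that can be null gets its
// own short-circuit check before the final call.
public class ChainGuard {
    interface Builds { void onSetup(); }
    interface Trigger { Builds getBuilds(); }

    public static void setUp(Trigger trigger) {
        if (trigger != null && trigger.getBuilds() != null) {   // both links checked
            trigger.getBuilds().onSetup();
        }
    }
}
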
#fixed code
public void check(GHIssueComment comment) {
if (helper.isProjectDisabled()) {
logger.log(Level.FINE, "Project is disabled, ignoring comment");
return;
}
synchronized (this) {
try {
checkComment(comment);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Couldn't check comment #" + comment.getId(), ex);
return;
}
try {
GHUser user = null;
try {
user = comment.getUser();
} catch (IOException e) {
logger.log(Level.SEVERE, "Couldn't get the user that made the comment", e);
}
updatePR(getPullRequest(true), user);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to get a new copy of the pull request!");
}
tryBuild();
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public void check(GHIssueComment comment) {
if (helper.isProjectDisabled()) {
logger.log(Level.FINE, "Project is disabled, ignoring comment");
return;
}
synchronized (this) {
try {
checkComment(comment);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Couldn't check comment #" + comment.getId(), ex);
return;
}
try {
GHUser user = null;
try {
user = comment.getUser();
} catch (IOException e) {
logger.log(Level.SEVERE, "Couldn't get the user that made the comment", e);
}
updatePR(getPullRequest(true), user);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to get a new copy of the pull request!");
}
checkSkipBuild(comment.getParent());
tryBuild();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
public QueueTaskFuture<?> startJob(GhprbCause cause, GhprbRepository repo) {
for (GhprbExtension ext : Ghprb.getJobExtensions(this, GhprbBuildStep.class)) {
if (ext instanceof GhprbBuildStep) {
((GhprbBuildStep)ext).onStartBuild(super.job, cause);
}
}
ArrayList<ParameterValue> values = getDefaultParameters();
final String commitSha = cause.isMerged() ? "origin/pr/" + cause.getPullID() + "/merge" : cause.getCommit();
values.add(new StringParameterValue("sha1", commitSha));
values.add(new StringParameterValue("ghprbActualCommit", cause.getCommit()));
String triggerAuthor = "";
String triggerAuthorEmail = "";
String triggerAuthorLogin = "";
GhprbPullRequest pr = getRepository().getPullRequest(cause.getPullID());
String lastBuildId = pr.getLastBuildId();
BuildData buildData = null;
if (!(job instanceof MatrixProject) && !StringUtils.isEmpty(lastBuildId)) {
AbstractBuild<?, ?> lastBuild = job.getBuild(lastBuildId);
buildData = lastBuild.getAction(BuildData.class);
}
try {
triggerAuthor = getString(cause.getTriggerSender().getName(), "");
} catch (Exception e) {}
try {
triggerAuthorEmail = getString(cause.getTriggerSender().getEmail(), "");
} catch (Exception e) {}
try {
triggerAuthorLogin = getString(cause.getTriggerSender().getLogin(), "");
} catch (Exception e) {}
setCommitAuthor(cause, values);
values.add(new StringParameterValue("ghprbAuthorRepoGitUrl", getString(cause.getAuthorRepoGitUrl(), "")));
values.add(new StringParameterValue("ghprbTriggerAuthor", triggerAuthor));
values.add(new StringParameterValue("ghprbTriggerAuthorEmail", triggerAuthorEmail));
values.add(new StringParameterValue("ghprbTriggerAuthorLogin", triggerAuthorLogin));
values.add(new StringParameterValue("ghprbTriggerAuthorLoginMention", !triggerAuthorLogin.isEmpty() ? "@"
+ triggerAuthorLogin : ""));
final StringParameterValue pullIdPv = new StringParameterValue("ghprbPullId", String.valueOf(cause.getPullID()));
values.add(pullIdPv);
values.add(new StringParameterValue("ghprbTargetBranch", String.valueOf(cause.getTargetBranch())));
values.add(new StringParameterValue("ghprbSourceBranch", String.valueOf(cause.getSourceBranch())));
values.add(new StringParameterValue("GIT_BRANCH", String.valueOf(cause.getSourceBranch())));
// it's possible the GHUser doesn't have an associated email address
values.add(new StringParameterValue("ghprbPullAuthorEmail", getString(cause.getAuthorEmail(), "")));
values.add(new StringParameterValue("ghprbPullAuthorLogin", String.valueOf(cause.getPullRequestAuthor().getLogin())));
values.add(new StringParameterValue("ghprbPullAuthorLoginMention", "@" + cause.getPullRequestAuthor().getLogin()));
values.add(new StringParameterValue("ghprbPullDescription", escapeText(String.valueOf(cause.getShortDescription()))));
values.add(new StringParameterValue("ghprbPullTitle", String.valueOf(cause.getTitle())));
values.add(new StringParameterValue("ghprbPullLink", String.valueOf(cause.getUrl())));
values.add(new StringParameterValue("ghprbPullLongDescription", escapeText(String.valueOf(cause.getDescription()))));
values.add(new StringParameterValue("ghprbCommentBody", escapeText(String.valueOf(cause.getCommentBody()))));
values.add(new StringParameterValue("ghprbGhRepository", repo.getName()));
values.add(new StringParameterValue("ghprbCredentialsId", getString(getGitHubApiAuth().getCredentialsId(), "")));
// add the previous pr BuildData as an action so that the correct change log is generated by the GitSCM plugin
// note that this will be removed from the Actions list after the job is completed so that the old (and incorrect)
// one isn't there
return this.job.scheduleBuild2(job.getQuietPeriod(), cause, new ParametersAction(values), buildData);
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
public QueueTaskFuture<?> startJob(GhprbCause cause, GhprbRepository repo) {
ArrayList<ParameterValue> values = getDefaultParameters();
final String commitSha = cause.isMerged() ? "origin/pr/" + cause.getPullID() + "/merge" : cause.getCommit();
values.add(new StringParameterValue("sha1", commitSha));
values.add(new StringParameterValue("ghprbActualCommit", cause.getCommit()));
String triggerAuthor = "";
String triggerAuthorEmail = "";
String triggerAuthorLogin = "";
GhprbPullRequest pr = getRepository().getPullRequest(cause.getPullID());
String lastBuildId = pr.getLastBuildId();
BuildData buildData = null;
if (!(job instanceof MatrixProject) && !StringUtils.isEmpty(lastBuildId)) {
AbstractBuild<?, ?> lastBuild = job.getBuild(lastBuildId);
buildData = lastBuild.getAction(BuildData.class);
}
try {
triggerAuthor = getString(cause.getTriggerSender().getName(), "");
} catch (Exception e) {}
try {
triggerAuthorEmail = getString(cause.getTriggerSender().getEmail(), "");
} catch (Exception e) {}
try {
triggerAuthorLogin = getString(cause.getTriggerSender().getLogin(), "");
} catch (Exception e) {}
setCommitAuthor(cause, values);
values.add(new StringParameterValue("ghprbAuthorRepoGitUrl", getString(cause.getAuthorRepoGitUrl(), "")));
values.add(new StringParameterValue("ghprbTriggerAuthor", triggerAuthor));
values.add(new StringParameterValue("ghprbTriggerAuthorEmail", triggerAuthorEmail));
values.add(new StringParameterValue("ghprbTriggerAuthorLogin", triggerAuthorLogin));
values.add(new StringParameterValue("ghprbTriggerAuthorLoginMention", !triggerAuthorLogin.isEmpty() ? "@"
+ triggerAuthorLogin : ""));
final StringParameterValue pullIdPv = new StringParameterValue("ghprbPullId", String.valueOf(cause.getPullID()));
values.add(pullIdPv);
values.add(new StringParameterValue("ghprbTargetBranch", String.valueOf(cause.getTargetBranch())));
values.add(new StringParameterValue("ghprbSourceBranch", String.valueOf(cause.getSourceBranch())));
values.add(new StringParameterValue("GIT_BRANCH", String.valueOf(cause.getSourceBranch())));
// it's possible the GHUser doesn't have an associated email address
values.add(new StringParameterValue("ghprbPullAuthorEmail", getString(cause.getAuthorEmail(), "")));
values.add(new StringParameterValue("ghprbPullAuthorLogin", String.valueOf(cause.getPullRequestAuthor().getLogin())));
values.add(new StringParameterValue("ghprbPullAuthorLoginMention", "@" + cause.getPullRequestAuthor().getLogin()));
values.add(new StringParameterValue("ghprbPullDescription", escapeText(String.valueOf(cause.getShortDescription()))));
values.add(new StringParameterValue("ghprbPullTitle", String.valueOf(cause.getTitle())));
values.add(new StringParameterValue("ghprbPullLink", String.valueOf(cause.getUrl())));
values.add(new StringParameterValue("ghprbPullLongDescription", escapeText(String.valueOf(cause.getDescription()))));
values.add(new StringParameterValue("ghprbCommentBody", escapeText(String.valueOf(cause.getCommentBody()))));
values.add(new StringParameterValue("ghprbGhRepository", repo.getName()));
values.add(new StringParameterValue("ghprbCredentialsId", getString(getGitHubApiAuth().getCredentialsId(), "")));
// add the previous pr BuildData as an action so that the correct change log is generated by the GitSCM plugin
// note that this will be removed from the Actions list after the job is completed so that the old (and incorrect)
// one isn't there
return this.job.scheduleBuild2(job.getQuietPeriod(), cause, new ParametersAction(values), buildData);
}
#location 10
#vulnerability type NULL_DEREFERENCE
#fixed code
@Override
public void makeBuildVariables(@SuppressWarnings("rawtypes") AbstractBuild build, Map<String,String> variables){
variables.put("ghprbUpstreamStatus", "true");
variables.put("ghprbCommitStatusContext", commitStatusContext);
variables.put("ghprbTriggeredStatus", triggeredStatus);
variables.put("ghprbStartedStatus", startedStatus);
variables.put("ghprbStatusUrl", statusUrl);
Map<GHCommitState, StringBuilder> statusMessages = new HashMap<GHCommitState, StringBuilder>(5);
for (GhprbBuildResultMessage message : completedStatus) {
GHCommitState state = message.getResult();
StringBuilder sb;
if (!statusMessages.containsKey(state)) {
sb = new StringBuilder();
statusMessages.put(state, sb);
} else {
sb = statusMessages.get(state);
sb.append("\n");
}
sb.append(message.getMessage());
}
for (Entry<GHCommitState, StringBuilder> next : statusMessages.entrySet()) {
String key = String.format("ghprb%sMessage", next.getKey().name());
variables.put(key, next.getValue().toString());
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public void makeBuildVariables(@SuppressWarnings("rawtypes") AbstractBuild build, Map<String,String> variables){
variables.put("ghprbUpstreamStatus", "true");
variables.put("ghprbCommitStatusContext", commitStatusContext);
variables.put("ghprbTriggeredStatus", triggeredStatus);
variables.put("ghprbStartedStatus", startedStatus);
variables.put("ghprbStatusUrl", statusUrl);
Map<GHCommitState, StringBuilder> statusMessages = new HashMap<GHCommitState, StringBuilder>(5);
for (GhprbBuildResultMessage message : completedStatus) {
GHCommitState state = message.getResult();
StringBuilder sb;
if (statusMessages.containsKey(state)){
sb = new StringBuilder();
statusMessages.put(state, sb);
} else {
sb = statusMessages.get(state);
sb.append("\n");
}
sb.append(message.getMessage());
}
for (Entry<GHCommitState, StringBuilder> next : statusMessages.entrySet()) {
String key = String.format("ghprb%sMessage", next.getKey().name());
variables.put(key, next.getValue().toString());
}
}
#location 19
#vulnerability type NULL_DEREFERENCE
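
The defect in this pair is an inverted containsKey branch, so the get(...) arm runs exactly when the key is missing and returns null. Map.computeIfAbsent removes that branch entirely; a small sketch with generic key and value types (not the GHCommitState map above):

// Illustrative sketch only. The builder is created on first use, and the value
// returned by computeIfAbsent is never null.
import java.util.HashMap;
import java.util.Map;

public class MessageCollector {
    private final Map<String, StringBuilder> messages = new HashMap<>();

    public void append(String key, String message) {
        StringBuilder sb = messages.computeIfAbsent(key, k -> new StringBuilder());
        if (sb.length() > 0) {
            sb.append('\n');
        }
        sb.append(message);
    }
}
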
#fixed code
@SuppressWarnings("unchecked")
private static Map<Class<?>, BiConsumer<ValueBinder<?>, Iterable>> createIterableTypeMapping() {
Map<Class<?>, BiConsumer<ValueBinder<?>, Iterable>> map = new LinkedHashMap<>();
map.put(com.google.cloud.Date.class, ValueBinder::toDateArray);
map.put(Boolean.class, ValueBinder::toBoolArray);
map.put(Long.class, ValueBinder::toInt64Array);
map.put(String.class, ValueBinder::toStringArray);
map.put(Double.class, ValueBinder::toFloat64Array);
map.put(Timestamp.class, ValueBinder::toTimestampArray);
map.put(ByteArray.class, ValueBinder::toBytesArray);
return Collections.unmodifiableMap(map);
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@SuppressWarnings("unchecked")
private static Map<Class<?>, BiConsumer<ValueBinder<?>, Iterable>> createIterableTypeMapping() {
// Java 8 has compile errors when using the builder extension methods
// @formatter:off
ImmutableMap.Builder<Class<?>, BiConsumer<ValueBinder<?>, Iterable>> builder =
new ImmutableMap.Builder<>();
// @formatter:on
builder.put(com.google.cloud.Date.class, ValueBinder::toDateArray);
builder.put(Boolean.class, ValueBinder::toBoolArray);
builder.put(Long.class, ValueBinder::toInt64Array);
builder.put(String.class, ValueBinder::toStringArray);
builder.put(Double.class, ValueBinder::toFloat64Array);
builder.put(Timestamp.class, ValueBinder::toTimestampArray);
builder.put(ByteArray.class, ValueBinder::toBytesArray);
return builder.build();
}
#location 2
#vulnerability type CHECKERS_IMMUTABLE_CAST
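
CHECKERS_IMMUTABLE_CAST is Infer's warning for exposing a Guava immutable collection through its mutable interface type. The fixed version above sidesteps it by building a LinkedHashMap and wrapping it in Collections.unmodifiableMap, so the declared Map type and the runtime behaviour agree; a trimmed-down sketch of that shape (the mapping contents are placeholders):

// Illustrative sketch only -- a read-only view over a plain LinkedHashMap.
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class TypeMappings {
    public static Map<Class<?>, String> createMapping() {
        Map<Class<?>, String> map = new LinkedHashMap<>();
        map.put(Boolean.class, "BOOL");
        map.put(Long.class, "INT64");
        map.put(String.class, "STRING");
        return Collections.unmodifiableMap(map);   // unmodifiable view, declared as Map
    }
}
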
#fixed code
@Override
public BrowserManager useTaobaoMirror() {
return useTaobaoMirror("wdm.chromeDriverTaobaoUrl");
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public BrowserManager useTaobaoMirror() {
String taobaoUrl = null;
try {
taobaoUrl = WdmConfig.getString(
WdmConfig.getString("wdm.chromeDriverTaobaoUrl"));
driverUrl = new URL(taobaoUrl);
} catch (MalformedURLException e) {
String errorMessage = "Malformed URL " + taobaoUrl;
log.error(errorMessage, e);
throw new WebDriverManagerException(errorMessage, e);
}
return instance;
}
#location 13
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
protected void exportDriver(String variableValue) {
downloadedDriverVersion = driverVersionToDownload;
binaryPath = variableValue;
Optional<String> exportParameter = getExportParameter();
if (!config.isAvoidExport() && exportParameter.isPresent()) {
String variableName = exportParameter.get();
log.info("Exporting {} as {}", variableName, variableValue);
System.setProperty(variableName, variableValue);
} else {
log.info("Resulting binary {}", variableValue);
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
protected void exportDriver(String variableValue) {
downloadedVersion = versionToDownload;
binaryPath = variableValue;
Optional<String> exportParameter = getExportParameter();
if (!config.isAvoidExport() && exportParameter.isPresent()) {
String variableName = exportParameter.get();
log.info("Exporting {} as {}", variableName, variableValue);
System.setProperty(variableName, variableValue);
} else {
log.info("Resulting binary {}", variableValue);
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Override
public BrowserManager useTaobaoMirror() {
return useTaobaoMirror("wdm.phantomjsDriverTaobaoUrl");
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Override
public BrowserManager useTaobaoMirror() {
String taobaoUrl = null;
try {
taobaoUrl = getString(getString("wdm.phantomjsDriverTaobaoUrl"));
driverUrl = new URL(taobaoUrl);
} catch (MalformedURLException e) {
String errorMessage = "Malformed URL " + taobaoUrl;
log.error(errorMessage, e);
throw new WebDriverManagerException(errorMessage, e);
}
return instance;
}
#location 12
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
protected List<URL> filterCandidateUrls(Architecture arch, String version,
boolean getLatest) throws IOException {
List<URL> urls = getDrivers();
List<URL> candidateUrls;
log.trace("All URLs: {}", urls);
boolean continueSearchingVersion;
do {
// Get the latest or concrete version
String filterName = getDriverName().equalsIgnoreCase("msedgedriver")
? "edgedriver"
: getDriverName();
candidateUrls = getLatest ? checkLatest(urls, filterName)
: getVersion(urls, filterName, version);
log.trace("Candidate URLs: {}", candidateUrls);
if (versionToDownload == null) {
break;
}
// Filter by OS
if (!getDriverName().equalsIgnoreCase("IEDriverServer")
&& !getDriverName()
.equalsIgnoreCase("selenium-server-standalone")) {
candidateUrls = urlFilter.filterByOs(candidateUrls,
config().getOs());
}
// Filter by architecture
candidateUrls = urlFilter.filterByArch(candidateUrls, arch,
forcedArch);
// Filter by distro
candidateUrls = filterByDistro(candidateUrls);
// Filter by ignored versions
candidateUrls = filterByIgnoredVersions(candidateUrls);
// Find out if driver version has been found or not
continueSearchingVersion = candidateUrls.isEmpty() && getLatest;
if (continueSearchingVersion) {
log.info(
"No binary found for {} {} ... seeking another version",
getDriverName(), versionToDownload);
urls = removeFromList(urls, versionToDownload);
versionToDownload = null;
}
} while (continueSearchingVersion);
return candidateUrls;
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
protected List<URL> filterCandidateUrls(Architecture arch, String version,
boolean getLatest) throws IOException {
List<URL> urls = getDrivers();
List<URL> candidateUrls;
log.trace("All URLs: {}", urls);
boolean continueSearchingVersion;
do {
// Get the latest or concrete version
candidateUrls = getLatest ? checkLatest(urls, getDriverName())
: getVersion(urls, getDriverName(), version);
log.trace("Candidate URLs: {}", candidateUrls);
if (versionToDownload == null
|| this.getClass().equals(EdgeDriverManager.class)) {
break;
}
// Filter by OS
if (!getDriverName().equalsIgnoreCase("IEDriverServer")
&& !getDriverName()
.equalsIgnoreCase("selenium-server-standalone")) {
candidateUrls = urlFilter.filterByOs(candidateUrls,
config().getOs());
}
// Filter by architecture
candidateUrls = urlFilter.filterByArch(candidateUrls, arch,
forcedArch);
// Filter by distro
candidateUrls = filterByDistro(candidateUrls);
// Filter by ignored versions
candidateUrls = filterByIgnoredVersions(candidateUrls);
// Find out if driver version has been found or not
continueSearchingVersion = candidateUrls.isEmpty() && getLatest;
if (continueSearchingVersion) {
log.info(
"No binary found for {} {} ... seeking another version",
getDriverName(), versionToDownload);
urls = removeFromList(urls, versionToDownload);
versionToDownload = null;
}
} while (continueSearchingVersion);
return candidateUrls;
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
@Test
public void testCache() throws Exception {
BrowserManager browserManager = WebDriverManager
.getInstance(driverClass);
browserManager.architecture(architecture).version(driverVersion)
.setup();
Downloader downloader = new Downloader(browserManager);
Method method = BrowserManager.class.getDeclaredMethod(
"existsDriverInCache", String.class, String.class,
Architecture.class);
method.setAccessible(true);
String driverInChachePath = (String) method.invoke(browserManager,
downloader.getTargetPath(), driverVersion, architecture);
assertThat(driverInChachePath, notNullValue());
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
@Test
public void testCache() throws Exception {
BrowserManager browserManager = null;
if (browserManagerClass.equals(ChromeDriverManager.class)) {
browserManager = ChromeDriverManager.getInstance();
} else if (browserManagerClass.equals(OperaDriverManager.class)) {
browserManager = OperaDriverManager.getInstance();
} else if (browserManagerClass.equals(PhantomJsDriverManager.class)) {
browserManager = PhantomJsDriverManager.getInstance();
} else if (browserManagerClass.equals(FirefoxDriverManager.class)) {
browserManager = FirefoxDriverManager.getInstance();
}
browserManager.architecture(architecture).version(driverVersion)
.setup();
Downloader downloader = new Downloader(browserManager);
Method method = BrowserManager.class.getDeclaredMethod(
"existsDriverInCache", String.class, String.class,
Architecture.class);
method.setAccessible(true);
String driverInChachePath = (String) method.invoke(browserManager,
downloader.getTargetPath(), driverVersion, architecture);
assertThat(driverInChachePath, notNullValue());
}
#location 14
#vulnerability type NULL_DEREFERENCE
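
The NULL_DEREFERENCE in this test comes from an if/else-if chain that can match nothing and leave the manager variable null; the fixed version routes construction through a single factory call. A generic sketch of the safer shape, with a hypothetical factory and a stand-in interface, ends the chain with an explicit failure instead:

// Illustrative sketch only. The unmatched case throws, so the caller can never
// dereference a null manager.
public class ManagerFactory {
    interface DriverManager { void setup(); }

    public static DriverManager forName(String name) {
        switch (name) {
            case "chrome":  return () -> System.out.println("chrome setup");
            case "firefox": return () -> System.out.println("firefox setup");
            default:
                throw new IllegalArgumentException("Unsupported driver: " + name);
        }
    }
}
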
#fixed code
protected void manage(String driverVersion) {
httpClient = new HttpClient(config());
try (HttpClient wdmHttpClient = httpClient) {
downloader = new Downloader(getDriverManagerType());
urlFilter = new UrlFilter();
if (isUnknown(driverVersion)) {
driverVersion = resolveDriverVersion(driverVersion);
}
downloadAndExport(driverVersion);
} catch (Exception e) {
handleException(e, driverVersion);
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
protected void manage(String driverVersion) {
httpClient = new HttpClient(config());
try (HttpClient wdmHttpClient = httpClient) {
downloader = new Downloader(getDriverManagerType());
urlFilter = new UrlFilter();
if (isUnknown(driverVersion)) {
preferenceKey = getDriverManagerType().getNameInLowerCase();
Optional<String> browserVersion = empty();
if (usePreferences()
&& preferences.checkKeyInPreferences(preferenceKey)) {
browserVersion = Optional.of(
preferences.getValueFromPreferences(preferenceKey));
}
if (!browserVersion.isPresent()) {
browserVersion = detectBrowserVersion();
}
if (browserVersion.isPresent()) {
// Calculate driverVersion using browserVersion
preferenceKey = getDriverManagerType().getNameInLowerCase()
+ browserVersion.get();
if (usePreferences() && preferences
.checkKeyInPreferences(preferenceKey)) {
driverVersion = preferences
.getValueFromPreferences(preferenceKey);
}
if (isUnknown(driverVersion)) {
Optional<String> driverVersionFromRepository = getDriverVersionFromRepository(
browserVersion);
if (driverVersionFromRepository.isPresent()) {
driverVersion = driverVersionFromRepository.get();
}
}
if (isUnknown(driverVersion)) {
Optional<String> driverVersionFromProperties = getDriverVersionFromProperties(
preferenceKey);
if (driverVersionFromProperties.isPresent()) {
driverVersion = driverVersionFromProperties.get();
}
} else {
log.info(
"Using {} {} (since {} {} is installed in your machine)",
getDriverName(), driverVersion,
getDriverManagerType(), browserVersion.get());
}
if (usePreferences()) {
preferences.putValueInPreferencesIfEmpty(
getDriverManagerType().getNameInLowerCase(),
browserVersion.get());
preferences.putValueInPreferencesIfEmpty(preferenceKey,
driverVersion);
}
if (isUnknown(driverVersion)) {
log.debug(
"The driver version for {} {} is unknown ... trying with latest",
getDriverManagerType(), browserVersion.get());
}
}
// if driverVersion is still unknown, try with latest
if (isUnknown(driverVersion)) {
Optional<String> latestDriverVersionFromRepository = getLatestDriverVersionFromRepository();
if (latestDriverVersionFromRepository.isPresent()) {
driverVersion = latestDriverVersionFromRepository.get();
}
}
}
downloadAndExport(driverVersion);
} catch (Exception e) {
handleException(e, driverVersion);
}
}
#location 36
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
protected void manage(Architecture arch, String version) {
httpClient = new HttpClient(config().getTimeout());
try (HttpClient wdmHttpClient = httpClient) {
downloader = new Downloader(driverManagerType);
urlFilter = new UrlFilter();
boolean getLatest = isVersionLatest(version);
boolean cache = config().isForceCache();
if (getLatest && !config().isAvoidAutoVersion()) {
version = getVersionForInstalledBrowser(driverManagerType);
getLatest = version.isEmpty();
}
if (version.equals("insiders")) {
String systemRoot = System.getenv("SystemRoot");
File microsoftWebDriverFile = new File(
systemRoot + File.separator + "System32"
+ File.separator + "MicrosoftWebDriver.exe");
if (microsoftWebDriverFile.exists()) {
exportDriver(microsoftWebDriverFile.toString());
return;
} else {
retry = false;
throw new WebDriverManagerException(
"MicrosoftWebDriver.exe should be installed in an elevated command prompt executing: "
+ "dism /Online /Add-Capability /CapabilityName:Microsoft.WebDriver~~~~0.0.1.0");
}
}
String os = config().getOs();
log.trace("Managing {} arch={} version={} getLatest={} cache={}",
driverName, arch, version, getLatest, cache);
if (getLatest && latestVersion != null) {
log.debug("Latest version of {} is {} (recently resolved)",
driverName, latestVersion);
version = latestVersion;
cache = true;
}
Optional<String> driverInCache = handleCache(arch, version, os,
getLatest, cache);
String versionStr = getLatest ? "(latest version)" : version;
if (driverInCache.isPresent() && !config().isOverride()) {
storeVersionToDownload(version);
downloadedVersion = version;
log.debug("Driver {} {} found in cache", driverName,
versionStr);
exportDriver(driverInCache.get());
} else {
List<URL> candidateUrls = filterCandidateUrls(arch, version,
getLatest);
if (candidateUrls.isEmpty()) {
String errorMessage = driverName + " " + versionStr
+ " for " + os + arch.toString() + " not found in "
+ config().getDriverUrl(driverUrlKey);
log.error(errorMessage);
throw new WebDriverManagerException(errorMessage);
}
downloadCandidateUrls(candidateUrls);
}
} catch (Exception e) {
handleException(e, arch, version);
}
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
protected void manage(Architecture arch, String version) {
httpClient = new HttpClient(config().getTimeout());
try (HttpClient wdmHttpClient = httpClient) {
downloader = new Downloader(driverManagerType);
urlFilter = new UrlFilter();
boolean getLatest = isVersionLatest(version);
boolean cache = config().isForceCache();
if (getLatest && !config().isAvoidAutoVersion()) {
version = getVersionForInstalledBrowser(driverManagerType);
getLatest = version.isEmpty();
}
if (version.equals("insiders")) {
String systemRoot = System.getenv("SystemRoot");
File microsoftWebDriverFile = new File(
systemRoot + File.separator + "System32"
+ File.separator + "MicrosoftWebDriver.exe");
if (microsoftWebDriverFile.exists()) {
exportDriver(microsoftWebDriverFile.toString());
return;
} else {
retry = false;
throw new WebDriverManagerException(
"MicrosoftWebDriver.exe should be installed in an elevated command prompt executing: "
+ "dism /Online /Add-Capability /CapabilityName:Microsoft.WebDriver~~~~0.0.1.0");
}
}
String os = config().getOs();
log.trace("Managing {} arch={} version={} getLatest={} cache={}",
driverName, arch, version, getLatest, cache);
if (getLatest && latestVersion != null) {
log.debug("Latest version of {} is {} (recently resolved)",
driverName, latestVersion);
version = latestVersion;
cache = true;
}
Optional<String> driverInCache = handleCache(arch, version, os,
getLatest, cache);
String versionStr = getLatest ? "(latest version)" : version;
if (driverInCache.isPresent() && !config().isOverride()) {
versionToDownload = version;
downloadedVersion = version;
log.debug("Driver {} {} found in cache", driverName,
versionStr);
exportDriver(driverInCache.get());
} else {
List<URL> candidateUrls = filterCandidateUrls(arch, version,
getLatest);
if (candidateUrls.isEmpty()) {
String errorMessage = driverName + " " + versionStr
+ " for " + os + arch.toString() + " not found in "
+ config().getDriverUrl(driverUrlKey);
log.error(errorMessage);
throw new WebDriverManagerException(errorMessage);
}
downloadCandidateUrls(candidateUrls);
}
} catch (Exception e) {
handleException(e, arch, version);
}
}
#location 41
#vulnerability type THREAD_SAFETY_VIOLATION
#fixed code
protected String resolveDriverVersion(String driverVersion) {
String preferenceKey = getKeyForPreferences();
Optional<String> browserVersion = empty();
browserVersion = getValueFromPreferences(preferenceKey, browserVersion);
if (!browserVersion.isPresent()) {
browserVersion = detectBrowserVersion();
}
if (browserVersion.isPresent()) {
preferenceKey = getKeyForPreferences() + browserVersion.get();
driverVersion = preferences.getValueFromPreferences(preferenceKey);
Optional<String> optionalDriverVersion = empty();
if (isUnknown(driverVersion)) {
optionalDriverVersion = getDriverVersionFromRepository(
browserVersion);
}
if (isUnknown(driverVersion)) {
optionalDriverVersion = getDriverVersionFromProperties(
preferenceKey);
}
if (optionalDriverVersion.isPresent()) {
driverVersion = optionalDriverVersion.get();
}
if (isUnknown(driverVersion)) {
log.debug(
"The driver version for {} {} is unknown ... trying with latest",
getDriverManagerType(), browserVersion.get());
} else if (!isUnknown(driverVersion)) {
log.info(
"Using {} {} (since {} {} is installed in your machine)",
getDriverName(), driverVersion, getDriverManagerType(),
browserVersion.get());
storeInPreferences(preferenceKey, driverVersion,
browserVersion.get());
}
}
// if driverVersion is still unknown, try with latest
if (isUnknown(driverVersion)) {
Optional<String> latestDriverVersionFromRepository = getLatestDriverVersionFromRepository();
if (latestDriverVersionFromRepository.isPresent()) {
driverVersion = latestDriverVersionFromRepository.get();
}
}
return driverVersion;
}

Below is the vulnerable code, please generate the patch based on the following information.

#vulnerable code
protected String resolveDriverVersion(String driverVersion) {
preferenceKey = getKeyForPreferences();
Optional<String> browserVersion = empty();
browserVersion = getValueFromPreferences(browserVersion);
if (!browserVersion.isPresent()) {
browserVersion = detectBrowserVersion();
}
if (browserVersion.isPresent()) {
preferenceKey = getKeyForPreferences() + browserVersion.get();
driverVersion = preferences.getValueFromPreferences(preferenceKey);
Optional<String> optionalDriverVersion = empty();
if (isUnknown(driverVersion)) {
optionalDriverVersion = getDriverVersionFromRepository(
browserVersion);
}
if (isUnknown(driverVersion)) {
optionalDriverVersion = getDriverVersionFromProperties(
preferenceKey);
}
if (optionalDriverVersion.isPresent()) {
driverVersion = optionalDriverVersion.get();
}
if (isUnknown(driverVersion)) {
log.debug(
"The driver version for {} {} is unknown ... trying with latest",
getDriverManagerType(), browserVersion.get());
} else if (!isUnknown(driverVersion)) {
log.info(
"Using {} {} (since {} {} is installed in your machine)",
getDriverName(), driverVersion, getDriverManagerType(),
browserVersion.get());
storeInPreferences(driverVersion, browserVersion.get());
}
}
// if driverVersion is still unknown, try with latest
if (isUnknown(driverVersion)) {
Optional<String> latestDriverVersionFromRepository = getLatestDriverVersionFromRepository();
if (latestDriverVersionFromRepository.isPresent()) {
driverVersion = latestDriverVersionFromRepository.get();
}
}
return driverVersion;
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
protected void manage(Architecture arch, String version) {
httpClient = new HttpClient(config());
try (HttpClient wdmHttpClient = httpClient) {
downloader = new Downloader(getDriverManagerType());
urlFilter = new UrlFilter();
boolean getLatest = isVersionLatest(version);
boolean cache = config().isForceCache();
if (getLatest) {
version = detectDriverVersionFromBrowser();
}
// Special case for Chromium snap packages
if (getDriverManagerType() == CHROMIUM && isSnap
&& ((ChromiumDriverManager) this).snapDriverExists()) {
return;
}
getLatest = isNullOrEmpty(version);
// Check latest version
if (getLatest && !config().isUseBetaVersions()) {
Optional<String> lastVersion = getLatestVersion();
getLatest = !lastVersion.isPresent();
if (!getLatest) {
version = lastVersion.get();
}
}
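            // Unlike the variant below, no Edge pre-installed-version check is performed at this point.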
String os = config().getOs();
log.trace("Managing {} arch={} version={} getLatest={} cache={}",
getDriverName(), arch, version, getLatest, cache);
if (getLatest && latestVersion != null) {
log.debug("Latest version of {} is {} (recently resolved)",
getDriverName(), latestVersion);
version = latestVersion;
cache = true;
}
// Manage driver
downloadAndExport(arch, version, getLatest, cache, os);
} catch (Exception e) {
handleException(e, arch, version);
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected void manage(Architecture arch, String version) {
httpClient = new HttpClient(config());
try (HttpClient wdmHttpClient = httpClient) {
downloader = new Downloader(getDriverManagerType());
urlFilter = new UrlFilter();
boolean getLatest = isVersionLatest(version);
boolean cache = config().isForceCache();
if (getLatest) {
version = detectDriverVersionFromBrowser();
}
// Special case for Chromium snap packages
if (getDriverManagerType() == CHROMIUM && isSnap
&& ((ChromiumDriverManager) this).snapDriverExists()) {
return;
}
getLatest = isNullOrEmpty(version);
// Check latest version
if (getLatest && !config().isUseBetaVersions()) {
Optional<String> lastVersion = getLatestVersion();
getLatest = !lastVersion.isPresent();
if (!getLatest) {
version = lastVersion.get();
}
}
// Special case for Edge
if (checkPreInstalledVersion(version)) {
return;
}
String os = config().getOs();
log.trace("Managing {} arch={} version={} getLatest={} cache={}",
getDriverName(), arch, version, getLatest, cache);
if (getLatest && latestVersion != null) {
log.debug("Latest version of {} is {} (recently resolved)",
getDriverName(), latestVersion);
version = latestVersion;
cache = true;
}
// Manage driver
downloadAndExport(arch, version, getLatest, cache, os);
} catch (Exception e) {
handleException(e, arch, version);
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
protected void reset() {
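        // Unlike the variant below (flagged THREAD_SAFETY_VIOLATION), this version does not touch
        // the shared versionToDownload and binaryPath fields.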
useBetaVersions = false;
mirrorLog = false;
isForcingCache = false;
isForcingDownload = false;
listVersions = null;
architecture = null;
driverUrl = null;
version = null;
proxyValue = null;
proxyUser = null;
proxyPass = null;
ignoredVersions = null;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected void reset() {
useBetaVersions = getBoolean("wdm.useBetaVersions");
mirrorLog = false;
isForcingCache = false;
isForcingDownload = false;
listVersions = null;
architecture = null;
driverUrl = null;
versionToDownload = null;
version = null;
proxyValue = null;
binaryPath = null;
proxyUser = null;
proxyPass = null;
ignoredVersions = null;
}
#location 12
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
protected void reset() {
config().reset();
mirrorLog = false;
driverVersionToDownload = null;
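        // driverVersionToDownload appears to replace the versionToDownload field flagged at #location 4 below.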
forcedArch = false;
forcedOs = false;
retryCount = 0;
isSnap = false;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected void reset() {
config().reset();
mirrorLog = false;
versionToDownload = null;
forcedArch = false;
forcedOs = false;
retryCount = 0;
isSnap = false;
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@SuppressWarnings("Duplicates")
private void checkIconResource(KapuaTicon icon) {
ConsoleSetting config = ConsoleSetting.getInstance();
String iconResource = icon.getResource();
//
// Check if the resource is an HTTP URL or not
if (iconResource != null &&
(iconResource.toLowerCase().startsWith("http://") ||
iconResource.toLowerCase().startsWith("https://"))) {
File tmpFile = null;
try {
logger.info("Got configuration component icon from URL: {}", iconResource);
//
// Tmp file name creation
String systemTmpDir = System.getProperty("java.io.tmpdir");
String iconResourcesTmpDir = config.getString(ConsoleSettingKeys.DEVICE_CONFIGURATION_ICON_FOLDER);
String tmpFileName = Base64.encodeBase64String(MessageDigest.getInstance("MD5").digest(iconResource.getBytes(StandardCharsets.UTF_8)));
                // Conversions needed for security reasons!
// On the file servlet we use the regex [0-9A-Za-z]{1,} to validate the given file id.
// This validation prevents the caller of the file servlet to try to move out of the directory where the icons are stored.
tmpFileName = tmpFileName.replaceAll("/", "a");
tmpFileName = tmpFileName.replaceAll("\\+", "m");
tmpFileName = tmpFileName.replaceAll("=", "z");
//
// Tmp dir check and creation
StringBuilder tmpDirPathSb = new StringBuilder().append(systemTmpDir);
if (!systemTmpDir.endsWith("/")) {
tmpDirPathSb.append("/");
}
tmpDirPathSb.append(iconResourcesTmpDir);
File tmpDir = new File(tmpDirPathSb.toString());
if (!tmpDir.exists()) {
logger.info("Creating tmp dir on path: {}", tmpDir.toString());
tmpDir.mkdir();
}
//
// Tmp file check and creation
tmpDirPathSb.append("/")
.append(tmpFileName);
tmpFile = new File(tmpDirPathSb.toString());
// Check date of modification to avoid caching forever
if (tmpFile.exists()) {
long lastModifiedDate = tmpFile.lastModified();
long maxCacheTime = config.getLong(ConsoleSettingKeys.DEVICE_CONFIGURATION_ICON_CACHE_TIME);
if (System.currentTimeMillis() - lastModifiedDate > maxCacheTime) {
logger.info("Deleting old cached file: {}", tmpFile.toString());
tmpFile.delete();
}
}
// If file is not cached, download it.
if (!tmpFile.exists()) {
// Url connection
URL iconUrl = new URL(iconResource);
URLConnection urlConnection = iconUrl.openConnection();
urlConnection.setConnectTimeout(2000);
urlConnection.setReadTimeout(2000);
// Length check
String contentLengthString = urlConnection.getHeaderField("Content-Length");
long maxLength = config.getLong(ConsoleSettingKeys.DEVICE_CONFIGURATION_ICON_SIZE_MAX);
try {
Long contentLength = Long.parseLong(contentLengthString);
if (contentLength > maxLength) {
logger.warn("Content lenght exceeded ({}/{}) for URL: {}", contentLength, maxLength, iconResource);
throw new IOException("Content-Length reported a length of " + contentLength + " which exceeds the maximum allowed size of " + maxLength);
}
} catch (NumberFormatException nfe) {
logger.warn("Cannot get Content-Length header!");
}
logger.info("Creating file: {}", tmpFile.toString());
tmpFile.createNewFile();
// Icon download
final InputStream is = urlConnection.getInputStream();
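                    // The stream is closed via the nested try/finally blocks below; the variant at the end
                    // of this entry never closes it (hence its RESOURCE_LEAK label).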
try {
byte[] buffer = new byte[4096];
final OutputStream os = new FileOutputStream(tmpFile);
try {
int len;
while ((len = is.read(buffer)) > 0) {
os.write(buffer, 0, len);
maxLength -= len;
if (maxLength < 0) {
logger.warn("Maximum content lenght exceeded ({}) for URL: {}", maxLength, iconResource);
throw new IOException("Maximum content lenght exceeded (" + maxLength + ") for URL: " + iconResource);
}
}
} finally {
os.close();
}
} finally {
is.close();
}
logger.info("Downloaded file: {}", tmpFile.toString());
// Image metadata content checks
ImageFormat imgFormat = Sanselan.guessFormat(tmpFile);
if (imgFormat.equals(ImageFormat.IMAGE_FORMAT_BMP) ||
imgFormat.equals(ImageFormat.IMAGE_FORMAT_GIF) ||
imgFormat.equals(ImageFormat.IMAGE_FORMAT_JPEG) ||
imgFormat.equals(ImageFormat.IMAGE_FORMAT_PNG)) {
logger.info("Detected image format: {}", imgFormat.name);
} else if (imgFormat.equals(ImageFormat.IMAGE_FORMAT_UNKNOWN)) {
logger.error("Unknown file format for URL: {}", iconResource);
throw new IOException("Unknown file format for URL: " + iconResource);
} else {
logger.error("Usupported file format ({}) for URL: {}", imgFormat, iconResource);
throw new IOException("Unknown file format for URL: {}" + iconResource);
}
logger.info("Image validation passed for URL: {}", iconResource);
} else {
logger.info("Using cached file: {}", tmpFile.toString());
}
//
// Injecting new URL for the icon resource
String newResourceURL = "img://console/file/icons?id=" +
tmpFileName;
logger.info("Injecting configuration component icon: {}", newResourceURL);
icon.setResource(newResourceURL);
} catch (Exception e) {
if (tmpFile != null &&
tmpFile.exists()) {
tmpFile.delete();
}
icon.setResource("Default");
logger.error("Error while checking component configuration icon. Using the default plugin icon.", e);
}
}
//
// If not, all is fine.
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@SuppressWarnings("Duplicates")
private void checkIconResource(KapuaTicon icon) {
ConsoleSetting config = ConsoleSetting.getInstance();
String iconResource = icon.getResource();
//
// Check if the resource is an HTTP URL or not
if (iconResource != null &&
(iconResource.toLowerCase().startsWith("http://") ||
iconResource.toLowerCase().startsWith("https://"))) {
File tmpFile = null;
try {
s_logger.info("Got configuration component icon from URL: {}", iconResource);
//
// Tmp file name creation
String systemTmpDir = System.getProperty("java.io.tmpdir");
String iconResourcesTmpDir = config.getString(ConsoleSettingKeys.DEVICE_CONFIGURATION_ICON_FOLDER);
String tmpFileName = Base64.encodeBase64String(MessageDigest.getInstance("MD5").digest(iconResource.getBytes(StandardCharsets.UTF_8)));
                // Conversions needed for security reasons!
// On the file servlet we use the regex [0-9A-Za-z]{1,} to validate the given file id.
// This validation prevents the caller of the file servlet to try to move out of the directory where the icons are stored.
tmpFileName = tmpFileName.replaceAll("/", "a");
tmpFileName = tmpFileName.replaceAll("\\+", "m");
tmpFileName = tmpFileName.replaceAll("=", "z");
//
// Tmp dir check and creation
StringBuilder tmpDirPathSb = new StringBuilder().append(systemTmpDir);
if (!systemTmpDir.endsWith("/")) {
tmpDirPathSb.append("/");
}
tmpDirPathSb.append(iconResourcesTmpDir);
File tmpDir = new File(tmpDirPathSb.toString());
if (!tmpDir.exists()) {
s_logger.info("Creating tmp dir on path: {}", tmpDir.toString());
tmpDir.mkdir();
}
//
// Tmp file check and creation
tmpDirPathSb.append("/")
.append(tmpFileName);
tmpFile = new File(tmpDirPathSb.toString());
// Check date of modification to avoid caching forever
if (tmpFile.exists()) {
long lastModifiedDate = tmpFile.lastModified();
long maxCacheTime = config.getLong(ConsoleSettingKeys.DEVICE_CONFIGURATION_ICON_CACHE_TIME);
if (System.currentTimeMillis() - lastModifiedDate > maxCacheTime) {
s_logger.info("Deleting old cached file: {}", tmpFile.toString());
tmpFile.delete();
}
}
// If file is not cached, download it.
if (!tmpFile.exists()) {
// Url connection
URL iconUrl = new URL(iconResource);
URLConnection urlConnection = iconUrl.openConnection();
urlConnection.setConnectTimeout(2000);
urlConnection.setReadTimeout(2000);
// Length check
String contentLengthString = urlConnection.getHeaderField("Content-Length");
long maxLength = config.getLong(ConsoleSettingKeys.DEVICE_CONFIGURATION_ICON_SIZE_MAX);
try {
Long contentLength = Long.parseLong(contentLengthString);
if (contentLength > maxLength) {
s_logger.warn("Content lenght exceeded ({}/{}) for URL: {}",
contentLength, maxLength, iconResource);
throw new IOException("Content-Length reported a length of " + contentLength + " which exceeds the maximum allowed size of " + maxLength);
}
} catch (NumberFormatException nfe) {
s_logger.warn("Cannot get Content-Length header!");
}
s_logger.info("Creating file: {}", tmpFile.toString());
tmpFile.createNewFile();
// Icon download
InputStream is = urlConnection.getInputStream();
byte[] buffer = new byte[4096];
try (OutputStream os = new FileOutputStream(tmpFile)) {
int len;
while ((len = is.read(buffer)) > 0) {
os.write(buffer, 0, len);
maxLength -= len;
if (maxLength < 0) {
s_logger.warn("Maximum content lenght exceeded ({}) for URL: {}",
new Object[] { maxLength, iconResource });
throw new IOException("Maximum content lenght exceeded (" + maxLength + ") for URL: " + iconResource);
}
}
}
s_logger.info("Downloaded file: {}", tmpFile.toString());
// Image metadata content checks
ImageFormat imgFormat = Sanselan.guessFormat(tmpFile);
if (imgFormat.equals(ImageFormat.IMAGE_FORMAT_BMP) ||
imgFormat.equals(ImageFormat.IMAGE_FORMAT_GIF) ||
imgFormat.equals(ImageFormat.IMAGE_FORMAT_JPEG) ||
imgFormat.equals(ImageFormat.IMAGE_FORMAT_PNG)) {
s_logger.info("Detected image format: {}", imgFormat.name);
} else if (imgFormat.equals(ImageFormat.IMAGE_FORMAT_UNKNOWN)) {
s_logger.error("Unknown file format for URL: {}", iconResource);
throw new IOException("Unknown file format for URL: " + iconResource);
} else {
s_logger.error("Usupported file format ({}) for URL: {}", imgFormat, iconResource);
throw new IOException("Unknown file format for URL: {}" + iconResource);
}
s_logger.info("Image validation passed for URL: {}", iconResource);
} else {
s_logger.info("Using cached file: {}", tmpFile.toString());
}
//
// Injecting new URL for the icon resource
String newResourceURL = "img://console/file/icons?id=" +
tmpFileName;
s_logger.info("Injecting configuration component icon: {}", newResourceURL);
icon.setResource(newResourceURL);
} catch (Exception e) {
if (tmpFile != null &&
tmpFile.exists()) {
tmpFile.delete();
}
icon.setResource("Default");
s_logger.error("Error while checking component configuration icon. Using the default plugin icon.", e);
}
}
//
// If not, all is fine.
}
#location 137
#vulnerability type RESOURCE_LEAK |
#fixed code
public void commitAndRelease(boolean modifiesInedges, boolean modifiesOutedges) throws IOException {
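    // All file writes go through CompressedIO.writeCompressed, so no streams are opened directly
    // in this method; compare the variant below, which is flagged RESOURCE_LEAK.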
int nblocks = blockIds.length;
if (modifiesInedges) {
int startStreamBlock = rangeStartEdgePtr / blocksize;
for(int i=0; i < nblocks; i++) {
String blockFilename = ChiFilenames.getFilenameShardEdataBlock(edgeDataFilename, i, blocksize);
if (i >= startStreamBlock) {
// Synchronous write
CompressedIO.writeCompressed(new File(blockFilename),
dataBlockManager.getRawBlock(blockIds[i]),
blockSizes[i]);
} else {
                // Asynchronous write (not implemented yet, so it is the same as synchronous)
CompressedIO.writeCompressed(new File(blockFilename),
dataBlockManager.getRawBlock(blockIds[i]),
blockSizes[i]);
}
}
} else if (modifiesOutedges) {
int last = streamingOffsetEdgePtr;
if (last == 0) {
last = edataFilesize;
}
int startblock = (int) (rangeStartEdgePtr / blocksize);
int endblock = (int) (last / blocksize);
for(int i=startblock; i <= endblock; i++) {
String blockFilename = ChiFilenames.getFilenameShardEdataBlock(edgeDataFilename, i, blocksize);
CompressedIO.writeCompressed(new File(blockFilename),
dataBlockManager.getRawBlock(blockIds[i]),
blockSizes[i]);
}
}
/* Release all blocks */
for(Integer blockId : blockIds) {
dataBlockManager.release(blockId);
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void commitAndRelease(boolean modifiesInedges, boolean modifiesOutedges) throws IOException {
byte[] data = dataBlockManager.getRawBlock(blockId);
if (modifiesInedges) {
FileOutputStream fos = new FileOutputStream(new File(edgeDataFilename));
fos.write(data);
fos.close();
} else if (modifiesOutedges) {
ucar.unidata.io.RandomAccessFile rFile =
new ucar.unidata.io.RandomAccessFile(edgeDataFilename, "rwd");
rFile.seek(rangeStartEdgePtr);
int last = streamingOffsetEdgePtr;
if (last == 0) last = edataFilesize;
rFile.write(data, rangeStartEdgePtr, last - rangeStartEdgePtr);
rFile.close();
}
dataBlockManager.release(blockId);
}
#location 16
#vulnerability type RESOURCE_LEAK |
#fixed code
@Override
public Collection<FileAbstractModel> getFiles(String dir) {
logger.debug("Listing local files from {}", dir);
File[] files = new File(dir).listFiles();
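        // listFiles() can return null (e.g. for symlinked directories on Windows), hence the guard below;
        // the variant at the end of this entry uses files.length unguarded (NULL_DEREFERENCE).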
Collection<FileAbstractModel> result;
if (files != null) {
result = new ArrayList<>(files.length);
// Iterate other files
for (File file : files) {
result.add(toFileAbstractModel(dir, file));
}
} else {
logger.debug("Symlink on windows gives null for listFiles(). Skipping [{}]", dir);
result = Collections.EMPTY_LIST;
}
logger.debug("{} local files found", result.size());
return result;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public Collection<FileAbstractModel> getFiles(String dir) {
if (logger.isDebugEnabled()) logger.debug("Listing local files from {}", dir);
File[] files = new File(dir).listFiles();
Collection<FileAbstractModel> result = new ArrayList<>(files.length);
// Iterate other files
for (File file : files) {
result.add(toFileAbstractModel(dir, file));
}
if (logger.isDebugEnabled()) logger.debug("{} local files found", result.size());
return result;
}
#location 6
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testConfigure2() throws Exception {
double epsilon = 0.000000000000001 ;
Settings smsemoaSettings = new SMSEMOASettings("Fonseca");
SMSEMOA algorithm = (SMSEMOA) smsemoaSettings.configure(configuration) ;
Problem problem = new Fonseca("Real") ;
SBXCrossover crossover = (SBXCrossover) algorithm.getCrossoverOperator() ;
double pc = crossover.getCrossoverProbability() ;
double dic = crossover.getDistributionIndex() ;
PolynomialMutation mutation = (PolynomialMutation)algorithm.getMutationOperator() ;
double pm = mutation.getMutationProbability() ;
double dim = mutation.getDistributionIndex() ;
double offset = algorithm.getOffset() ;
assertEquals(100, algorithm.getPopulationSize());
assertEquals(25000, algorithm.getMaxEvaluations());
assertEquals(0.9, pc, epsilon);
assertEquals(20.0, dic, epsilon);
assertEquals(1.0/problem.getNumberOfVariables(), pm, epsilon);
assertEquals(20.0, dim, epsilon);
assertEquals(100.0, offset, epsilon);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testConfigure2() throws Exception {
double epsilon = 0.000000000000001 ;
Settings smsemoaSettings = new SMSEMOASettings("Fonseca");
Algorithm algorithm = smsemoaSettings.configure(configuration_) ;
Problem problem = new Fonseca("Real") ;
SBXCrossover crossover = (SBXCrossover)algorithm.getOperator("crossover") ;
double pc = (Double)crossover.getParameter("probability") ;
double dic = (Double)crossover.getParameter("distributionIndex") ;
PolynomialMutation mutation = (PolynomialMutation)algorithm.getOperator("mutation") ;
double pm = (Double)mutation.getParameter("probability") ;
double dim = (Double)mutation.getParameter("distributionIndex") ;
assertEquals("SMSEMOA_SettingsTest", 100, ((Integer)algorithm.getInputParameter("populationSize")).intValue());
assertEquals("SMSEMOA_SettingsTest", 25000, ((Integer)algorithm.getInputParameter("maxEvaluations")).intValue());
assertEquals("SMSEMOA_SettingsTest", 0.9, pc, epsilon);
assertEquals("SMSEMOA_SettingsTest", 20.0, dic, epsilon);
assertEquals("SMSEMOA_SettingsTest", 1.0/problem.getNumberOfVariables(), pm, epsilon);
assertEquals("SMSEMOA_SettingsTest", 20.0, dim, epsilon);
assertEquals("SMSEMOA_SettingsTest", 100.0, ((Double)algorithm.getInputParameter("offset")).doubleValue(), epsilon);
}
#location 7
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
public int compare(Solution o1, Solution o2) {
if (o1 == null) {
return 1;
} else if (o2 == null) {
return -1;
}
int flagComparatorRank = RANK_COMPARATOR.compare(o1, o2);
if (flagComparatorRank != 0) {
return flagComparatorRank;
}
    /* Their ranks are equal, so compare by crowding distance */
double distance1 = NSGAIIAttr.getAttributes(o1).getCrowdingDistance() ;
double distance2 = NSGAIIAttr.getAttributes(o2).getCrowdingDistance() ;
//double distance1 = (double)o1.getAlgorithmAttributes().getAttribute("CrowdingDistance");
//double distance2 = (double)o2.getAlgorithmAttributes().getAttribute("CrowdingDistance");
if (distance1 > distance2) {
return -1;
}
if (distance1 < distance2) {
return 1;
}
return 0;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public int compare(Solution o1, Solution o2) {
if (o1 == null) {
return 1;
} else if (o2 == null) {
return -1;
}
int flagComparatorRank = RANK_COMPARATOR.compare(o1, o2);
if (flagComparatorRank != 0) {
return flagComparatorRank;
}
    /* Their ranks are equal, so compare by crowding distance */
double distance1 = (double)o1.getAlgorithmAttributes().getAttribute("CrowdingDistance");
double distance2 = (double)o2.getAlgorithmAttributes().getAttribute("CrowdingDistance");
if (distance1 > distance2) {
return -1;
}
if (distance1 < distance2) {
return 1;
}
return 0;
}
#location 15
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void test2() throws JMException {
double epsilon = 0.000000000000001;
Settings GDE3Settings = new GDE3_Settings("Fonseca");
GDE3 algorithm = (GDE3) GDE3Settings.configure(configuration_);
DifferentialEvolutionCrossover crossover =
(DifferentialEvolutionCrossover) algorithm.getCrossoverOperator();
Assert.assertEquals("GDE3_SettingsTest", 100, algorithm.getPopulationSize());
Assert.assertEquals("GDE3_SettingsTest", 250, algorithm.getMaxIterations());
Assert.assertEquals("GDE3_SettingsTest", 0.5, crossover.getCr(), epsilon);
Assert.assertEquals("GDE3_SettingsTest", 0.5, crossover.getF(), epsilon);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void test2() throws JMException {
double epsilon = 0.000000000000001;
Settings GDE3Settings = new GDE3_Settings("Fonseca");
Algorithm algorithm = GDE3Settings.configure(configuration_);
//Problem problem = new Fonseca("Real");
DifferentialEvolutionCrossover crossover = (DifferentialEvolutionCrossover)algorithm.getOperator("crossover") ;
double CR = (Double)crossover.getParameter("CR") ;
double F = (Double)crossover.getParameter("F") ;
Assert.assertEquals("GDE3_SettingsTest", 100, ((Integer)algorithm.getInputParameter("populationSize")).intValue());
Assert.assertEquals("GDE3_SettingsTest", 250, ((Integer)algorithm.getInputParameter("maxIterations")).intValue());
Assert.assertEquals("GDE3_SettingsTest", 0.5, CR, epsilon);
Assert.assertEquals("GDE3_SettingsTest", 0.5, F, epsilon);
}
#location 8
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
public long getSwapUsed() {
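        // Swap usage is derived from the Pagefile % Usage PDH counter here; the GlobalMemoryStatusEx-based
        // variant below is the one flagged NULL_DEREFERENCE.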
PdhFmtCounterValue phPagefileCounterValue = new PdhFmtCounterValue();
int ret = Pdh.INSTANCE.PdhGetFormattedCounterValue(pPagefile.getValue(), Pdh.PDH_FMT_LARGE | Pdh.PDH_FMT_1000,
null, phPagefileCounterValue);
if (ret != 0) {
LOG.warn("Failed to get Pagefile % Usage counter. Error code: {}", String.format("0x%08X", ret));
return 0L;
}
// Returns results in 1000's of percent, e.g. 5% is 5000
// Multiply by page file size and Divide by 100 * 1000
// Putting division at end avoids need to cast division to double
return getSwapTotal() * phPagefileCounterValue.value.largeValue / 100000;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public long getSwapUsed() {
long total;
long available;
if (!Kernel32.INSTANCE.GlobalMemoryStatusEx(this._memory)) {
LOG.error("Failed to Initialize MemoryStatusEx. Error code: {}", Kernel32.INSTANCE.GetLastError());
this._memory = null;
return 0L;
}
total = this.getSwapTotal();
available = this._memory.ullAvailPageFile.longValue() - this._memory.ullAvailPhys.longValue();
return total - available;
}
#location 12
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
public HWDiskStore[] getDisks() {
List<HWDiskStore> result;
result = new ArrayList<>();
readMap.clear();
writeMap.clear();
populateReadWriteMaps();
Map<String, List<Object>> vals = WmiUtil.selectObjectsFrom(null, "Win32_DiskDrive",
"Name,Manufacturer,Model,SerialNumber,Size,Index", null, DRIVE_TYPES);
for (int i = 0; i < vals.get("Name").size(); i++) {
HWDiskStore ds = new HWDiskStore();
ds.setName((String) vals.get("Name").get(i));
ds.setModel(String.format("%s %s", vals.get("Model").get(i), vals.get("Manufacturer").get(i)).trim());
// Most vendors store serial # as a hex string; convert
ds.setSerial(ParseUtil.hexStringToString((String) vals.get("SerialNumber").get(i)));
String index = vals.get("Index").get(i).toString();
if (readMap.containsKey(index)) {
ds.setReads(readMap.get(index));
}
if (writeMap.containsKey(index)) {
ds.setWrites(writeMap.get(index));
}
// If successful this line is the desired value
try {
ds.setSize(Long.parseLong((String) vals.get("Size").get(i)));
} catch (NumberFormatException e) {
// If we failed to parse, give up
// This is expected for an empty string on some drives
ds.setSize(0L);
}
result.add(ds);
}
return result.toArray(new HWDiskStore[result.size()]);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public HWDiskStore[] getDisks() {
List<HWDiskStore> result;
result = new ArrayList<>();
Map<String, List<String>> vals = WmiUtil.selectStringsFrom(null, "Win32_DiskDrive",
"Name,Manufacturer,Model,SerialNumber,Size", null);
for (int i = 0; i < vals.get("Name").size(); i++) {
HWDiskStore ds = new HWDiskStore();
ds.setName(vals.get("Name").get(i));
ds.setModel(String.format("%s %s", vals.get("Model").get(i), vals.get("Manufacturer").get(i)).trim());
// Most vendors store serial # as a hex string; convert
ds.setSerial(ParseUtil.hexStringToString(vals.get("SerialNumber").get(i)));
// If successful this line is the desired value
try {
ds.setSize(Long.parseLong(vals.get("Size").get(i)));
} catch (NumberFormatException e) {
// If we failed to parse, give up
// This is expected for an empty string on some drives
ds.setSize(0L);
}
result.add(ds);
}
return result.toArray(new HWDiskStore[result.size()]);
}
#location 16
#vulnerability type NULL_DEREFERENCE |
#fixed code
public OSFileStore[] getFileStores() {
// Use getfsstat to map filesystem paths to types
Map<String, String> fstype = new HashMap<>();
// Query with null to get total # required
int numfs = SystemB.INSTANCE.getfsstat64(null, 0, 0);
if (numfs > 0) {
// Create array to hold results
Statfs[] fs = new Statfs[numfs];
// Fill array with results
numfs = SystemB.INSTANCE.getfsstat64(fs, numfs * (new Statfs()).size(), SystemB.MNT_NOWAIT);
for (int f = 0; f < numfs; f++) {
// Mount to name will match canonical path.
// Byte arrays are null-terminated strings
fstype.put(new String(fs[f].f_mntonname).trim(), new String(fs[f].f_fstypename).trim());
}
}
// Now list file systems
List<OSFileStore> fsList = new ArrayList<>();
FileSystemView fsv = FileSystemView.getFileSystemView();
// Mac file systems are mounted in /Volumes
File volumes = new File("/Volumes");
if (volumes != null && volumes.listFiles() != null) {
for (File f : volumes.listFiles()) {
// Everyone hates DS Store
if (f.getName().endsWith(".DS_Store")) {
continue;
}
String name = fsv.getSystemDisplayName(f);
String description = "Volume";
String type = "unknown";
try {
String cp = f.getCanonicalPath();
if (cp.equals("/"))
name = name + " (/)";
FileStore fs = Files.getFileStore(f.toPath());
if (localDisk.matcher(fs.name()).matches()) {
description = "Local Disk";
}
if (fs.name().startsWith("localhost:") || fs.name().startsWith("//")) {
description = "Network Drive";
}
if (fstype.containsKey(cp)) {
type = fstype.get(cp);
}
} catch (IOException e) {
LOG.trace("", e);
continue;
}
fsList.add(new OSFileStore(name, description, type, f.getUsableSpace(), f.getTotalSpace()));
}
}
return fsList.toArray(new OSFileStore[fsList.size()]);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public OSFileStore[] getFileStores() {
// Use getfsstat to map filesystem paths to types
Map<String, String> fstype = new HashMap<>();
// Query with null to get total # required
int numfs = SystemB.INSTANCE.getfsstat64(null, 0, 0);
if (numfs > 0) {
// Create array to hold results
Statfs[] fs = new Statfs[numfs];
// Fill array with results
numfs = SystemB.INSTANCE.getfsstat64(fs, numfs * (new Statfs()).size(), SystemB.MNT_NOWAIT);
for (int f = 0; f < numfs; f++) {
// Mount to name will match canonical path.
// Byte arrays are null-terminated strings
fstype.put(new String(fs[f].f_mntonname).trim(), new String(fs[f].f_fstypename).trim());
}
}
// Now list file systems
List<OSFileStore> fsList = new ArrayList<>();
FileSystemView fsv = FileSystemView.getFileSystemView();
// Mac file systems are mounted in /Volumes
File volumes = new File("/Volumes");
if (volumes != null) {
for (File f : volumes.listFiles()) {
// Everyone hates DS Store
if (f.getName().endsWith(".DS_Store")) {
continue;
}
String name = fsv.getSystemDisplayName(f);
String description = "Volume";
String type = "unknown";
try {
String cp = f.getCanonicalPath();
if (cp.equals("/"))
name = name + " (/)";
FileStore fs = Files.getFileStore(f.toPath());
if (localDisk.matcher(fs.name()).matches()) {
description = "Local Disk";
}
if (fs.name().startsWith("localhost:") || fs.name().startsWith("//")) {
description = "Network Drive";
}
if (fstype.containsKey(cp)) {
type = fstype.get(cp);
}
} catch (IOException e) {
LOG.trace("", e);
continue;
}
fsList.add(new OSFileStore(name, description, type, f.getUsableSpace(), f.getTotalSpace()));
}
}
return fsList.toArray(new OSFileStore[fsList.size()]);
}
#location 23
#vulnerability type NULL_DEREFERENCE |
#fixed code
public static Memory sysctl(String name) {
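        // Native calls go through FreeBsdLibc.INSTANCE here rather than SystemB.INSTANCE as in the
        // variant below (labeled INTERFACE_NOT_THREAD_SAFE).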
IntByReference size = new IntByReference();
if (0 != FreeBsdLibc.INSTANCE.sysctlbyname(name, null, size, null, 0)) {
LOG.error(SYSCTL_FAIL, name, Native.getLastError());
return null;
}
Memory m = new Memory(size.getValue());
if (0 != FreeBsdLibc.INSTANCE.sysctlbyname(name, m, size, null, 0)) {
LOG.error(SYSCTL_FAIL, name, Native.getLastError());
return null;
}
return m;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public static Memory sysctl(String name) {
IntByReference size = new IntByReference();
if (0 != SystemB.INSTANCE.sysctlbyname(name, null, size, null, 0)) {
LOG.error(SYSCTL_FAIL, name, Native.getLastError());
return null;
}
Memory m = new Memory(size.getValue());
if (0 != SystemB.INSTANCE.sysctlbyname(name, m, size, null, 0)) {
LOG.error(SYSCTL_FAIL, name, Native.getLastError());
return null;
}
return m;
}
#location 8
#vulnerability type INTERFACE_NOT_THREAD_SAFE |
#fixed code
@Override
public HWDiskStore[] getDisks() {
List<HWDiskStore> result;
result = new ArrayList<>();
readMap.clear();
writeMap.clear();
populateReadWriteMaps();
Map<String, List<Object>> vals = WmiUtil.selectObjectsFrom(null, "Win32_DiskDrive",
"Name,Manufacturer,Model,SerialNumber,Size,Index", null, DRIVE_TYPES);
for (int i = 0; i < vals.get("Name").size(); i++) {
HWDiskStore ds = new HWDiskStore();
ds.setName((String) vals.get("Name").get(i));
ds.setModel(String.format("%s %s", vals.get("Model").get(i), vals.get("Manufacturer").get(i)).trim());
// Most vendors store serial # as a hex string; convert
ds.setSerial(ParseUtil.hexStringToString((String) vals.get("SerialNumber").get(i)));
String index = vals.get("Index").get(i).toString();
if (readMap.containsKey(index)) {
ds.setReads(readMap.get(index));
}
if (writeMap.containsKey(index)) {
ds.setWrites(writeMap.get(index));
}
// If successful this line is the desired value
try {
ds.setSize(Long.parseLong((String) vals.get("Size").get(i)));
} catch (NumberFormatException e) {
// If we failed to parse, give up
// This is expected for an empty string on some drives
ds.setSize(0L);
}
result.add(ds);
}
return result.toArray(new HWDiskStore[result.size()]);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public HWDiskStore[] getDisks() {
List<HWDiskStore> result;
result = new ArrayList<>();
Map<String, List<String>> vals = WmiUtil.selectStringsFrom(null, "Win32_DiskDrive",
"Name,Manufacturer,Model,SerialNumber,Size", null);
for (int i = 0; i < vals.get("Name").size(); i++) {
HWDiskStore ds = new HWDiskStore();
ds.setName(vals.get("Name").get(i));
ds.setModel(String.format("%s %s", vals.get("Model").get(i), vals.get("Manufacturer").get(i)).trim());
// Most vendors store serial # as a hex string; convert
ds.setSerial(ParseUtil.hexStringToString(vals.get("SerialNumber").get(i)));
// If successful this line is the desired value
try {
ds.setSize(Long.parseLong(vals.get("Size").get(i)));
} catch (NumberFormatException e) {
// If we failed to parse, give up
// This is expected for an empty string on some drives
ds.setSize(0L);
}
result.add(ds);
}
return result.toArray(new HWDiskStore[result.size()]);
}
#location 13
#vulnerability type NULL_DEREFERENCE |
#fixed code
private static void enumerateProperties(Map<String, List<Object>> values, EnumWbemClassObject enumerator,
String[] properties, ValueType[] propertyTypes) {
if (propertyTypes.length > 1 && properties.length != propertyTypes.length) {
throw new IllegalArgumentException("Property type array size must be 1 or equal to properties array size.");
}
// Step 7: -------------------------------------------------
// Get the data from the query in step 6 -------------------
PointerByReference pclsObj = new PointerByReference();
LongByReference uReturn = new LongByReference(0L);
while (enumerator.getPointer() != Pointer.NULL) {
HRESULT hres = enumerator.Next(new NativeLong(EnumWbemClassObject.WBEM_INFINITE), new NativeLong(1),
pclsObj, uReturn);
// Requested 1; if 0 objects returned, we're done
if (0L == uReturn.getValue() || COMUtils.FAILED(hres)) {
// Enumerator will be released by calling method so no need to
// release it here.
return;
}
VARIANT.ByReference vtProp = new VARIANT.ByReference();
// Get the value of the properties
WbemClassObject clsObj = new WbemClassObject(pclsObj.getValue());
for (int p = 0; p < properties.length; p++) {
String property = properties[p];
hres = clsObj.Get(new BSTR(property), new NativeLong(0L), vtProp, null, null);
ValueType propertyType = propertyTypes.length > 1 ? propertyTypes[p] : propertyTypes[0];
switch (propertyType) {
// WMI Longs will return as strings
case STRING:
values.get(property).add(vtProp.getValue() == null ? "unknown" : vtProp.stringValue());
break;
// WMI Uint32s will return as longs
case UINT32: // WinDef.LONG TODO improve in JNA 4.3
values.get(property)
.add(vtProp.getValue() == null ? 0L : vtProp._variant.__variant.lVal.longValue());
break;
case FLOAT:
values.get(property).add(vtProp.getValue() == null ? 0f : vtProp.floatValue());
break;
case DATETIME:
// Read a string in format 20160513072950.782000-420 and
                // parse to a long representing ms since epoch
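                // vtProp.getValue() is null-checked here; the variant below passes stringValue()
                // straight to cimDateTimeToMillis, which is what its NULL_DEREFERENCE label points at.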
values.get(property)
.add(vtProp.getValue() == null ? 0L : ParseUtil.cimDateTimeToMillis(vtProp.stringValue()));
break;
default:
// Should never get here! If you get this exception you've
// added something to the enum without adding it here. Tsk.
throw new IllegalArgumentException("Unimplemented enum type: " + propertyType.toString());
}
OleAuto.INSTANCE.VariantClear(vtProp.getPointer());
}
clsObj.Release();
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
private static void enumerateProperties(Map<String, List<Object>> values, EnumWbemClassObject enumerator,
String[] properties, ValueType[] propertyTypes) {
if (propertyTypes.length > 1 && properties.length != propertyTypes.length) {
throw new IllegalArgumentException("Property type array size must be 1 or equal to properties array size.");
}
// Step 7: -------------------------------------------------
// Get the data from the query in step 6 -------------------
PointerByReference pclsObj = new PointerByReference();
LongByReference uReturn = new LongByReference(0L);
while (enumerator.getPointer() != Pointer.NULL) {
HRESULT hres = enumerator.Next(new NativeLong(EnumWbemClassObject.WBEM_INFINITE), new NativeLong(1),
pclsObj, uReturn);
// Requested 1; if 0 objects returned, we're done
if (0L == uReturn.getValue() || COMUtils.FAILED(hres)) {
// Enumerator will be released by calling method so no need to
// release it here.
return;
}
VARIANT.ByReference vtProp = new VARIANT.ByReference();
// Get the value of the properties
WbemClassObject clsObj = new WbemClassObject(pclsObj.getValue());
for (int p = 0; p < properties.length; p++) {
String property = properties[p];
hres = clsObj.Get(new BSTR(property), new NativeLong(0L), vtProp, null, null);
ValueType propertyType = propertyTypes.length > 1 ? propertyTypes[p] : propertyTypes[0];
switch (propertyType) {
// WMI Longs will return as strings
case STRING:
values.get(property).add(vtProp.getValue() == null ? "unknown" : vtProp.stringValue());
break;
// WMI Uint32s will return as longs
case UINT32: // WinDef.LONG TODO improve in JNA 4.3
values.get(property)
.add(vtProp.getValue() == null ? 0L : vtProp._variant.__variant.lVal.longValue());
break;
case FLOAT:
values.get(property).add(vtProp.getValue() == null ? 0f : vtProp.floatValue());
break;
case DATETIME:
// Read a string in format 20160513072950.782000-420 and
                // parse to a long representing ms since epoch
values.get(property).add(ParseUtil.cimDateTimeToMillis(vtProp.stringValue()));
break;
default:
// Should never get here! If you get this exception you've
// added something to the enum without adding it here. Tsk.
throw new IllegalArgumentException("Unimplemented enum type: " + propertyType.toString());
}
OleAuto.INSTANCE.VariantClear(vtProp.getPointer());
}
clsObj.Release();
}
}
#location 36
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testBatchUpdatePersistentVertices() {
Vertex v1 = this.sqlgGraph.addVertex(T.label, "Person", "name", "a");
Vertex v2 = this.sqlgGraph.addVertex(T.label, "Person", "surname", "b");
this.sqlgGraph.tx().commit();
assertEquals("a", this.sqlgGraph.traversal().V(v1.id()).next().value("name"));
assertEquals("b", this.sqlgGraph.traversal().V(v2.id()).next().value("surname"));
this.sqlgGraph.tx().rollback();
this.sqlgGraph.tx().normalBatchModeOn();
v1.property("name", "aa");
v2.property("surname", "bb");
this.sqlgGraph.tx().commit();
assertEquals("aa", this.sqlgGraph.traversal().V(v1.id()).next().value("name"));
assertEquals("bb", this.sqlgGraph.traversal().V(v2.id()).next().value("surname"));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testBatchUpdatePersistentVertices() {
Vertex v1 = this.sqlgGraph.addVertex(T.label, "Person", "name", "a");
Vertex v2 = this.sqlgGraph.addVertex(T.label, "Person", "surname", "b");
this.sqlgGraph.tx().commit();
assertEquals("a", this.sqlgGraph.v(v1.id()).value("name"));
assertEquals("b", this.sqlgGraph.v(v2.id()).value("surname"));
this.sqlgGraph.tx().rollback();
this.sqlgGraph.tx().normalBatchModeOn();
v1.property("name", "aa");
v2.property("surname", "bb");
this.sqlgGraph.tx().commit();
assertEquals("aa", this.sqlgGraph.v(v1.id()).value("name"));
assertEquals("bb", this.sqlgGraph.v(v2.id()).value("surname"));
}
#location 6
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testLoadVertexProperties() {
Vertex marko = this.sqlgGraph.addVertex(T.label, "Person", "name", "marko");
this.sqlgGraph.tx().commit();
marko = this.sqlgGraph.traversal().V(marko.id()).next();
Assert.assertEquals("marko", marko.property("name").value());
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testLoadVertexProperties() {
Vertex marko = this.sqlgGraph.addVertex(T.label, "Person", "name", "marko");
this.sqlgGraph.tx().commit();
marko = this.sqlgGraph.v(marko.id());
Assert.assertEquals("marko", marko.property("name").value());
}
#location 6
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testPropertiesNotBeingCachedOnVertexOut() {
Vertex v1 = this.sqlgGraph.addVertex(T.label, "Person");
Vertex v2 = this.sqlgGraph.addVertex(T.label, "Car", "name", "a");
Vertex v3 = this.sqlgGraph.addVertex(T.label, "Car", "name", "b");
Vertex v4 = this.sqlgGraph.addVertex(T.label, "Car", "name", "c");
v1.addEdge("car", v2);
v1.addEdge("car", v3);
v1.addEdge("car", v4);
this.sqlgGraph.tx().commit();
v1 = this.sqlgGraph.traversal().V(v1.id()).next();
List<Vertex> cars = vertexTraversal(v1).out("car").toList();
Assert.assertEquals(3, cars.size());
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testPropertiesNotBeingCachedOnVertexOut() {
Vertex v1 = this.sqlgGraph.addVertex(T.label, "Person");
Vertex v2 = this.sqlgGraph.addVertex(T.label, "Car", "name", "a");
Vertex v3 = this.sqlgGraph.addVertex(T.label, "Car", "name", "b");
Vertex v4 = this.sqlgGraph.addVertex(T.label, "Car", "name", "c");
v1.addEdge("car", v2);
v1.addEdge("car", v3);
v1.addEdge("car", v4);
this.sqlgGraph.tx().commit();
v1 = this.sqlgGraph.v(v1.id());
List<Vertex> cars = vertexTraversal(v1).out("car").toList();
Assert.assertEquals(3, cars.size());
}
#location 16
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testIdNotLoadedAsProperty() throws Exception {
Vertex v = this.sqlgGraph.addVertex(T.label, "Person", "name", "a");
this.sqlgGraph.tx().commit();
this.sqlgGraph.close();
try (SqlgGraph sqlgGraph1 = SqlgGraph.open(configuration)) {
Vertex vv = sqlgGraph1.traversal().V(v.id()).next();
Assert.assertFalse(vv.property("ID").isPresent());
Map<String, PropertyType> propertyTypeMap = sqlgGraph1.getTopology().getAllTables().get(SchemaTable.of(
sqlgGraph1.getSqlDialect().getPublicSchema(), "V_Person").toString());
Assert.assertFalse(propertyTypeMap.containsKey("ID"));
sqlgGraph1.tx().rollback();
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testIdNotLoadedAsProperty() throws Exception {
Vertex v = this.sqlgGraph.addVertex(T.label, "Person", "name", "a");
this.sqlgGraph.tx().commit();
this.sqlgGraph.close();
try (SqlgGraph sqlgGraph1 = SqlgGraph.open(configuration)) {
Vertex vv = sqlgGraph1.traversal().V(v.id()).next();
assertFalse(vv.property("ID").isPresent());
Map<String, PropertyType> propertyTypeMap = sqlgGraph1.getTopology().getAllTables().get(SchemaTable.of(
sqlgGraph1.getSqlDialect().getPublicSchema(), "V_Person").toString());
assertFalse(propertyTypeMap.containsKey("ID"));
sqlgGraph1.tx().rollback();
}
}
#location 9
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testAddingVmargsInBeforeContainerCreated() throws IOException, InterruptedException {
// Given
final String vmargs = "-Dhttp.proxyPort=8080";
final DockerComputerJNLPConnector connector = new DockerComputerJNLPConnector(new JNLPLauncher(null, vmargs));
final CreateContainerCmd createCmd = mock(CreateContainerCmd.class);
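        // A mocked CreateContainerCmd replaces the real CreateContainerCmdImpl constructed in the
        // variant below (which is flagged RESOURCE_LEAK).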
final Map<String, String> containerLabels = new TreeMap<>();
when(createCmd.getLabels()).thenReturn(containerLabels);
DockerTemplate.setNodeNameInContainerConfig(createCmd, "nodeName");
// When
connector.beforeContainerCreated(null, null, createCmd);
// Then
verify(createCmd, times(1)).withEnv(new String[]{
"JAVA_OPT=" + vmargs
});
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testAddingVmargsInBeforeContainerCreated() throws IOException, InterruptedException {
String vmargs = "-Dhttp.proxyPort=8080";
DockerComputerJNLPConnector connector = new DockerComputerJNLPConnector(new JNLPLauncher(null, vmargs));
CreateContainerCmd createCmd = new CreateContainerCmdImpl(createContainerCmd -> null, "hello-world");
createCmd.withName("container-name");
connector.beforeContainerCreated(null, null, createCmd);
String[] env = createCmd.getEnv();
assertNotNull("Environment variable is expected", env);
assertEquals("Environment variable is expected", 1, env.length);
assertTrue("Original environment variable is not found", env[0].endsWith(vmargs));
}
#location 11
#vulnerability type RESOURCE_LEAK |
#fixed code
@Override
public synchronized Collection<NodeProvisioner.PlannedNode> provision(Label label, int excessWorkload) {
try {
LOGGER.log(Level.INFO, "Asked to provision {0} slave(s) for: {1}", new Object[]{excessWorkload,label});
List<NodeProvisioner.PlannedNode> r = new ArrayList<NodeProvisioner.PlannedNode>();
final List<DockerTemplate> templates = getTemplates(label);
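            // Works over a list of candidate templates; the variant below relies on a single
            // getTemplate(label) result, which can be null (NULL_DEREFERENCE).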
while (excessWorkload > 0 && !templates.isEmpty()) {
final DockerTemplate t = templates.get(0); // get first
LOGGER.log(Level.INFO, "Will provision \"{0}\" for: {1}", new Object[]{t.image,label});
try {
if (!addProvisionedSlave(t)) {
templates.remove(t);
continue;
}
} catch (Exception e) {
LOGGER.log(Level.WARNING, "Bad template {0}: {1}. Trying next template...",
new Object[]{t.image, e.getMessage()});
templates.remove(t);
continue;
}
r.add(new NodeProvisioner.PlannedNode(t.getDisplayName(),
Computer.threadPoolForRemoting.submit(new Callable<Node>() {
public Node call() throws Exception {
// TODO: record the output somewhere
DockerSlave slave = null;
try {
slave = t.provision(new StreamTaskListener(System.out));
final Jenkins jenkins = Jenkins.getInstance();
// TODO once the baseline is 1.592+ switch to Queue.withLock
synchronized (jenkins.getQueue()) {
jenkins.addNode(slave);
}
// Docker instances may have a long init script. If we declare
// the provisioning complete by returning without the connect
// operation, NodeProvisioner may decide that it still wants
// one more instance, because it sees that (1) all the slaves
// are offline (because it's still being launched) and
// (2) there's no capacity provisioned yet.
//
// deferring the completion of provisioning until the launch
// goes successful prevents this problem.
slave.toComputer().connect(false).get();
return slave;
}
catch(Exception ex) {
LOGGER.log(Level.SEVERE, "Error in provisioning; slave=" + slave + ", template=" + t);
ex.printStackTrace();
throw Throwables.propagate(ex);
}
finally {
decrementAmiSlaveProvision(t.image);
}
}
})
,t.getNumExecutors()));
excessWorkload -= t.getNumExecutors();
}
return r;
} catch (Exception e) {
LOGGER.log(Level.SEVERE,"Exception while provisioning for: " + label,e);
return Collections.emptyList();
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public synchronized Collection<NodeProvisioner.PlannedNode> provision(Label label, int excessWorkload) {
try {
LOGGER.log(Level.INFO, "Asked to provision {0} slave(s) for: {1}", new Object[]{excessWorkload,label});
List<NodeProvisioner.PlannedNode> r = new ArrayList<NodeProvisioner.PlannedNode>();
final DockerTemplate t = getTemplate(label);
LOGGER.log(Level.INFO, "Will provision \"{0}\" for: {1}", new Object[]{t.image,label});
while (excessWorkload>0) {
if (!addProvisionedSlave(t.image, t.instanceCap)) {
break;
}
r.add(new NodeProvisioner.PlannedNode(t.getDisplayName(),
Computer.threadPoolForRemoting.submit(new Callable<Node>() {
public Node call() throws Exception {
// TODO: record the output somewhere
DockerSlave slave = null;
try {
slave = t.provision(new StreamTaskListener(System.out));
final Jenkins jenkins = Jenkins.getInstance();
// TODO once the baseline is 1.592+ switch to Queue.withLock
synchronized (jenkins.getQueue()) {
jenkins.addNode(slave);
}
// Docker instances may have a long init script. If we declare
// the provisioning complete by returning without the connect
// operation, NodeProvisioner may decide that it still wants
// one more instance, because it sees that (1) all the slaves
// are offline (because it's still being launched) and
// (2) there's no capacity provisioned yet.
//
// deferring the completion of provisioning until the launch
// goes successful prevents this problem.
slave.toComputer().connect(false).get();
return slave;
}
catch(Exception ex) {
LOGGER.log(Level.SEVERE, "Error in provisioning; slave=" + slave + ", template=" + t);
ex.printStackTrace();
throw Throwables.propagate(ex);
}
finally {
decrementAmiSlaveProvision(t.image);
}
}
})
,t.getNumExecutors()));
excessWorkload -= t.getNumExecutors();
}
return r;
} catch (Exception e) {
LOGGER.log(Level.SEVERE,"Exception while provisioning for: " + label,e);
return Collections.emptyList();
}
}
#location 11
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
public void close() throws SQLException {
if (closed) {
return;
}
synchronized (this) {
if (closed) {
return;
}
closed = true;
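            // The double-checked 'closed' flag above, together with the null checks on 'connection' below,
            // guards duplicate or concurrent close calls; the variant at the end of this entry has neither
            // (THREAD_SAFETY_VIOLATION).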
SQLException sqlE = null;
try {
// Attempt to return any unused sequences.
if (connection != null) returnAllSequences(this.sequenceMap);
} catch (SQLException e) {
sqlE = e;
} finally {
try {
// Clear any client-side caches.
statsManager.clearStats();
} catch (SQLException e) {
if (sqlE == null) {
sqlE = e;
} else {
sqlE.setNextException(e);
}
} finally {
try {
childServices.clear();
latestMetaData = null;
if (connection != null) connection.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
try {
super.close();
} catch (SQLException e) {
if (sqlE == null) {
sqlE = e;
} else {
sqlE.setNextException(e);
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public void close() throws SQLException {
SQLException sqlE = null;
try {
// Attempt to return any unused sequences.
returnAllSequences(this.sequenceMap);
} catch (SQLException e) {
sqlE = e;
} finally {
try {
// Clear any client-side caches.
statsManager.clearStats();
} catch (SQLException e) {
if (sqlE == null) {
sqlE = e;
} else {
sqlE.setNextException(e);
}
} finally {
try {
childServices.clear();
latestMetaData = null;
connection.close();
} catch (IOException e) {
if (sqlE == null) {
sqlE = ServerUtil.parseServerException(e);
} else {
sqlE.setNextException(ServerUtil.parseServerException(e));
}
} finally {
try {
super.close();
} catch (SQLException e) {
if (sqlE == null) {
sqlE = e;
} else {
sqlE.setNextException(e);
}
} finally {
if (sqlE != null) {
throw sqlE;
}
}
}
}
}
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Override
public void modifyTable(byte[] tableName, HTableDescriptor newDesc) throws IOException,
InterruptedException, TimeoutException {
try (HBaseAdmin admin = new HBaseAdmin(config)) {
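        // try-with-resources ensures the HBaseAdmin is closed; the variant below never closes it (RESOURCE_LEAK).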
if (!allowOnlineTableSchemaUpdate()) {
admin.disableTable(tableName);
admin.modifyTable(tableName, newDesc);
admin.enableTable(tableName);
} else {
admin.modifyTable(tableName, newDesc);
pollForUpdatedTableDescriptor(admin, newDesc, tableName);
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public void modifyTable(byte[] tableName, HTableDescriptor newDesc) throws IOException,
InterruptedException, TimeoutException {
HBaseAdmin admin = new HBaseAdmin(config);
if (!allowOnlineTableSchemaUpdate()) {
admin.disableTable(tableName);
admin.modifyTable(tableName, newDesc);
admin.enableTable(tableName);
} else {
admin.modifyTable(tableName, newDesc);
pollForUpdatedTableDescriptor(admin, newDesc, tableName);
}
}
#location 11
#vulnerability type RESOURCE_LEAK |
#fixed code
protected static void setupTxManager() throws SQLException, IOException {
TransactionFactory.getTransactionProvider().getTransactionContext().setupTxManager(config, getUrl());
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected static void setupTxManager() throws SQLException, IOException {
TransactionFactory.getTransactionFactory().getTransactionContext().setupTxManager(config, getUrl());
}
#location 2
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void writeMetrics() throws Exception {
Connection conn = getConnectionWithoutTracing();
latch = new CountDownLatch(1);
testTraceWriter.start();
// create a simple metrics record
long traceid = 987654;
String description = "Some generic trace";
long spanid = 10;
long parentid = 11;
long startTime = 12;
long endTime = 13;
String processid = "Some process";
String annotation = "test annotation for a span";
Span span = createNewSpan(traceid, parentid, spanid, description, startTime, endTime,
processid, annotation);
Tracer.getInstance().deliver(span);
assertTrue("Span never committed to table", latch.await(30, TimeUnit.SECONDS));
        // make sure we only get the expected stat entry (matching the trace id), otherwise we could get the
// stats for the update as well
TraceReader reader = new TraceReader(conn, tracingTableName);
Collection<TraceHolder> traces = reader.readAll(10);
assertEquals("Wrong number of traces in the tracing table", 1, traces.size());
// validate trace
TraceHolder trace = traces.iterator().next();
// we are just going to get an orphan span b/c we don't send in a parent
assertEquals("Didn't get expected orphaned spans!" + trace.orphans, 1, trace.orphans.size());
assertEquals(traceid, trace.traceid);
SpanInfo spanInfo = trace.orphans.get(0);
assertEquals(description, spanInfo.description);
assertEquals(parentid, spanInfo.getParentIdForTesting());
assertEquals(startTime, spanInfo.start);
assertEquals(endTime, spanInfo.end);
assertEquals("Wrong number of tags", 0, spanInfo.tagCount);
assertEquals("Wrong number of annotations", 1, spanInfo.annotationCount);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void writeMetrics() throws Exception {
Connection conn = getConnectionWithoutTracing();
String tableName = generateUniqueName();
TraceSpanReceiver traceSpanReceiver = new TraceSpanReceiver();
latch = new CountDownLatch(1);
testTraceWriter = new TestTraceWriter(tableName, defaultTracingThreadPoolForTest, defaultTracingBatchSizeForTest);
// create a simple metrics record
long traceid = 987654;
String description = "Some generic trace";
long spanid = 10;
long parentid = 11;
long startTime = 12;
long endTime = 13;
String processid = "Some process";
String annotation = "test annotation for a span";
Span span = createNewSpan(traceid, parentid, spanid, description, startTime, endTime,
processid, annotation);
traceSpanReceiver.getSpanQueue().add(span);
assertTrue("Span never committed to table", latch.await(30, TimeUnit.SECONDS));
        // make sure we only get the expected stat entry (matching the trace id), otherwise we could get the
        // stats for the update as well
TraceReader reader = new TraceReader(conn, tableName);
Collection<TraceHolder> traces = reader.readAll(10);
assertEquals("Wrong number of traces in the tracing table", 1, traces.size());
// validate trace
TraceHolder trace = traces.iterator().next();
// we are just going to get an orphan span b/c we don't send in a parent
assertEquals("Didn't get expected orphaned spans!" + trace.orphans, 1, trace.orphans.size());
assertEquals(traceid, trace.traceid);
SpanInfo spanInfo = trace.orphans.get(0);
assertEquals(description, spanInfo.description);
assertEquals(parentid, spanInfo.getParentIdForTesting());
assertEquals(startTime, spanInfo.start);
assertEquals(endTime, spanInfo.end);
assertEquals("Wrong number of tags", 0, spanInfo.tagCount);
assertEquals("Wrong number of annotations", 1, spanInfo.annotationCount);
}
#location 23
#vulnerability type RESOURCE_LEAK |
#fixed code
@Test
public void testSelectUpsertWithOldClient() throws Exception {
// Insert data with new client and read with old client
executeQueriesWithCurrentVersion(CREATE_ADD);
executeQueryWithClientVersion(compatibleClientVersion, QUERY);
assertExpectedOutput(CREATE_ADD, QUERY);
// Insert more data with old client and read with new client
executeQueryWithClientVersion(compatibleClientVersion, ADD_DATA);
executeQueriesWithCurrentVersion(QUERY_MORE);
assertExpectedOutput(ADD_DATA, QUERY_MORE);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testSelectUpsertWithOldClient() throws Exception {
checkForPreConditions();
// Insert data with new client and read with old client
executeQueriesWithCurrentVersion(CREATE_ADD);
executeQueryWithClientVersion(compatibleClientVersion, QUERY);
assertTrue(compareOutput(CREATE_ADD, QUERY));
// Insert more data with old client and read with new client
executeQueryWithClientVersion(compatibleClientVersion, ADD_DATA);
executeQueriesWithCurrentVersion(QUERY_MORE);
assertTrue(compareOutput(ADD_DATA, QUERY_MORE));
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION |
#fixed code
@Override
protected RegionScanner doPostScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c, final Scan scan, final RegionScanner s) throws IOException, SQLException {
RegionCoprocessorEnvironment env = c.getEnvironment();
Region region = env.getRegion();
long ts = scan.getTimeRange().getMax();
boolean localIndexScan = ScanUtil.isLocalIndex(scan);
if (ScanUtil.isAnalyzeTable(scan)) {
byte[] gp_width_bytes =
scan.getAttribute(BaseScannerRegionObserver.GUIDEPOST_WIDTH_BYTES);
byte[] gp_per_region_bytes =
scan.getAttribute(BaseScannerRegionObserver.GUIDEPOST_PER_REGION);
// Let this throw, as this scan is being done for the sole purpose of collecting stats
StatisticsCollector statsCollector = StatisticsCollectorFactory.createStatisticsCollector(
env, region.getRegionInfo().getTable().getNameAsString(), ts,
gp_width_bytes, gp_per_region_bytes);
return collectStats(s, statsCollector, region, scan, env.getConfiguration());
} else if (ScanUtil.isIndexRebuild(scan)) { return rebuildIndices(s, region, scan, env.getConfiguration()); }
int offsetToBe = 0;
if (localIndexScan) {
/*
* For local indexes, we need to set an offset on row key expressions to skip
* the region start key.
*/
offsetToBe = region.getRegionInfo().getStartKey().length != 0 ? region.getRegionInfo().getStartKey().length :
region.getRegionInfo().getEndKey().length;
ScanUtil.setRowKeyOffset(scan, offsetToBe);
}
final int offset = offsetToBe;
PTable projectedTable = null;
PTable writeToTable = null;
byte[][] values = null;
byte[] descRowKeyTableBytes = scan.getAttribute(UPGRADE_DESC_ROW_KEY);
boolean isDescRowKeyOrderUpgrade = descRowKeyTableBytes != null;
if (isDescRowKeyOrderUpgrade) {
logger.debug("Upgrading row key for " + region.getRegionInfo().getTable().getNameAsString());
projectedTable = deserializeTable(descRowKeyTableBytes);
try {
writeToTable = PTableImpl.makePTable(projectedTable, true);
} catch (SQLException e) {
ServerUtil.throwIOException("Upgrade failed", e); // Impossible
}
values = new byte[projectedTable.getPKColumns().size()][];
}
byte[] localIndexBytes = scan.getAttribute(LOCAL_INDEX_BUILD);
List<IndexMaintainer> indexMaintainers = localIndexBytes == null ? null : IndexMaintainer.deserialize(localIndexBytes);
List<Mutation> indexMutations = localIndexBytes == null ? Collections.<Mutation>emptyList() : Lists.<Mutation>newArrayListWithExpectedSize(1024);
RegionScanner theScanner = s;
byte[] indexUUID = scan.getAttribute(PhoenixIndexCodec.INDEX_UUID);
byte[] txState = scan.getAttribute(BaseScannerRegionObserver.TX_STATE);
List<Expression> selectExpressions = null;
byte[] upsertSelectTable = scan.getAttribute(BaseScannerRegionObserver.UPSERT_SELECT_TABLE);
boolean isUpsert = false;
boolean isDelete = false;
byte[] deleteCQ = null;
byte[] deleteCF = null;
byte[] emptyCF = null;
HTable targetHTable = null;
boolean areMutationInSameRegion = true;
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
if (upsertSelectTable != null) {
isUpsert = true;
projectedTable = deserializeTable(upsertSelectTable);
targetHTable = new HTable(env.getConfiguration(), projectedTable.getPhysicalName().getBytes());
selectExpressions = deserializeExpressions(scan.getAttribute(BaseScannerRegionObserver.UPSERT_SELECT_EXPRS));
values = new byte[projectedTable.getPKColumns().size()][];
areMutationInSameRegion = Bytes.compareTo(targetHTable.getTableName(),
region.getTableDesc().getTableName().getName()) == 0
&& !isPkPositionChanging(new TableRef(projectedTable), selectExpressions);
} else {
byte[] isDeleteAgg = scan.getAttribute(BaseScannerRegionObserver.DELETE_AGG);
isDelete = isDeleteAgg != null && Bytes.compareTo(PDataType.TRUE_BYTES, isDeleteAgg) == 0;
if (!isDelete) {
deleteCF = scan.getAttribute(BaseScannerRegionObserver.DELETE_CF);
deleteCQ = scan.getAttribute(BaseScannerRegionObserver.DELETE_CQ);
}
emptyCF = scan.getAttribute(BaseScannerRegionObserver.EMPTY_CF);
}
TupleProjector tupleProjector = null;
byte[][] viewConstants = null;
ColumnReference[] dataColumns = IndexUtil.deserializeDataTableColumnsToJoin(scan);
final TupleProjector p = TupleProjector.deserializeProjectorFromScan(scan);
final HashJoinInfo j = HashJoinInfo.deserializeHashJoinFromScan(scan);
if ((localIndexScan && !isDelete && !isDescRowKeyOrderUpgrade) || (j == null && p != null)) {
if (dataColumns != null) {
tupleProjector = IndexUtil.getTupleProjector(scan, dataColumns);
viewConstants = IndexUtil.deserializeViewConstantsFromScan(scan);
}
ImmutableBytesWritable tempPtr = new ImmutableBytesWritable();
theScanner =
getWrappedScanner(c, theScanner, offset, scan, dataColumns, tupleProjector,
region, indexMaintainers == null ? null : indexMaintainers.get(0), viewConstants, p, tempPtr);
}
if (j != null) {
theScanner = new HashJoinRegionScanner(theScanner, p, j, ScanUtil.getTenantId(scan), env);
}
int batchSize = 0;
long batchSizeBytes = 0L;
List<Mutation> mutations = Collections.emptyList();
boolean needToWrite = false;
Configuration conf = c.getEnvironment().getConfiguration();
long flushSize = region.getTableDesc().getMemStoreFlushSize();
if (flushSize <= 0) {
flushSize = conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE,
HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE);
}
/**
* Slow down the writes if the memstore size more than
* (hbase.hregion.memstore.block.multiplier - 1) times hbase.hregion.memstore.flush.size
* bytes. This avoids flush storm to hdfs for cases like index building where reads and
* write happen to all the table regions in the server.
*/
final long blockingMemStoreSize = flushSize * (
conf.getLong(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER,
HConstants.DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER)-1) ;
boolean buildLocalIndex = indexMaintainers != null && dataColumns==null && !localIndexScan;
if (isDescRowKeyOrderUpgrade || isDelete || isUpsert || (deleteCQ != null && deleteCF != null) || emptyCF != null || buildLocalIndex) {
needToWrite = true;
// TODO: size better
mutations = Lists.newArrayListWithExpectedSize(1024);
batchSize = env.getConfiguration().getInt(MUTATE_BATCH_SIZE_ATTRIB, QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE);
batchSizeBytes = env.getConfiguration().getLong(MUTATE_BATCH_SIZE_BYTES_ATTRIB,
QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE_BYTES);
}
Aggregators aggregators = ServerAggregators.deserialize(
scan.getAttribute(BaseScannerRegionObserver.AGGREGATORS), env.getConfiguration());
Aggregator[] rowAggregators = aggregators.getAggregators();
boolean hasMore;
boolean hasAny = false;
MultiKeyValueTuple result = new MultiKeyValueTuple();
if (logger.isDebugEnabled()) {
logger.debug(LogUtil.addCustomAnnotations("Starting ungrouped coprocessor scan " + scan + " "+region.getRegionInfo(), ScanUtil.getCustomAnnotations(scan)));
}
long rowCount = 0;
final RegionScanner innerScanner = theScanner;
byte[] indexMaintainersPtr = scan.getAttribute(PhoenixIndexCodec.INDEX_MD);
boolean acquiredLock = false;
try {
if(needToWrite) {
synchronized (lock) {
scansReferenceCount++;
}
}
region.startRegionOperation();
acquiredLock = true;
synchronized (innerScanner) {
do {
List<Cell> results = new ArrayList<Cell>();
// Results are potentially returned even when the return value of s.next is false
// since this is an indication of whether or not there are more values after the
// ones returned
hasMore = innerScanner.nextRaw(results);
if (!results.isEmpty()) {
rowCount++;
result.setKeyValues(results);
if (isDescRowKeyOrderUpgrade) {
Arrays.fill(values, null);
Cell firstKV = results.get(0);
RowKeySchema schema = projectedTable.getRowKeySchema();
int maxOffset = schema.iterator(firstKV.getRowArray(), firstKV.getRowOffset() + offset, firstKV.getRowLength(), ptr);
for (int i = 0; i < schema.getFieldCount(); i++) {
Boolean hasValue = schema.next(ptr, i, maxOffset);
if (hasValue == null) {
break;
}
Field field = schema.getField(i);
if (field.getSortOrder() == SortOrder.DESC) {
// Special case for re-writing DESC ARRAY, as the actual byte value needs to change in this case
if (field.getDataType().isArrayType()) {
field.getDataType().coerceBytes(ptr, null, field.getDataType(),
field.getMaxLength(), field.getScale(), field.getSortOrder(),
field.getMaxLength(), field.getScale(), field.getSortOrder(), true); // force to use correct separator byte
}
// Special case for re-writing DESC CHAR or DESC BINARY, to force the re-writing of trailing space characters
else if (field.getDataType() == PChar.INSTANCE || field.getDataType() == PBinary.INSTANCE) {
int len = ptr.getLength();
while (len > 0 && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
len--;
}
ptr.set(ptr.get(), ptr.getOffset(), len);
// Special case for re-writing DESC FLOAT and DOUBLE, as they're not inverted like they should be (PHOENIX-2171)
} else if (field.getDataType() == PFloat.INSTANCE || field.getDataType() == PDouble.INSTANCE) {
byte[] invertedBytes = SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength());
ptr.set(invertedBytes);
}
} else if (field.getDataType() == PBinary.INSTANCE) {
// Remove trailing space characters so that the setValues call below will replace them
// with the correct zero byte character. Note this is somewhat dangerous as these
// could be legit, but I don't know what the alternative is.
int len = ptr.getLength();
while (len > 0 && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
len--;
}
ptr.set(ptr.get(), ptr.getOffset(), len);
}
values[i] = ptr.copyBytes();
}
writeToTable.newKey(ptr, values);
if (Bytes.compareTo(
firstKV.getRowArray(), firstKV.getRowOffset() + offset, firstKV.getRowLength(),
ptr.get(),ptr.getOffset() + offset,ptr.getLength()) == 0) {
continue;
}
byte[] newRow = ByteUtil.copyKeyBytesIfNecessary(ptr);
if (offset > 0) { // for local indexes (prepend region start key)
byte[] newRowWithOffset = new byte[offset + newRow.length];
System.arraycopy(firstKV.getRowArray(), firstKV.getRowOffset(), newRowWithOffset, 0, offset);;
System.arraycopy(newRow, 0, newRowWithOffset, offset, newRow.length);
newRow = newRowWithOffset;
}
byte[] oldRow = Bytes.copy(firstKV.getRowArray(), firstKV.getRowOffset(), firstKV.getRowLength());
for (Cell cell : results) {
// Copy existing cell but with new row key
Cell newCell = new KeyValue(newRow, 0, newRow.length,
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
cell.getTimestamp(), KeyValue.Type.codeToType(cell.getTypeByte()),
cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
switch (KeyValue.Type.codeToType(cell.getTypeByte())) {
case Put:
// If Put, point delete old Put
Delete del = new Delete(oldRow);
del.addDeleteMarker(new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength(), cell.getTimestamp(), KeyValue.Type.Delete,
ByteUtil.EMPTY_BYTE_ARRAY, 0, 0));
mutations.add(del);
Put put = new Put(newRow);
put.add(newCell);
mutations.add(put);
break;
case Delete:
case DeleteColumn:
case DeleteFamily:
case DeleteFamilyVersion:
Delete delete = new Delete(newRow);
delete.addDeleteMarker(newCell);
mutations.add(delete);
break;
}
}
} else if (buildLocalIndex) {
for (IndexMaintainer maintainer : indexMaintainers) {
if (!results.isEmpty()) {
result.getKey(ptr);
ValueGetter valueGetter =
maintainer.createGetterFromKeyValues(
ImmutableBytesPtr.copyBytesIfNecessary(ptr),
results);
Put put = maintainer.buildUpdateMutation(kvBuilder,
valueGetter, ptr, results.get(0).getTimestamp(),
env.getRegion().getRegionInfo().getStartKey(),
env.getRegion().getRegionInfo().getEndKey());
indexMutations.add(put);
}
}
result.setKeyValues(results);
} else if (isDelete) {
// FIXME: the version of the Delete constructor without the lock
// args was introduced in 0.94.4, thus if we try to use it here
// we can no longer use the 0.94.2 version of the client.
Cell firstKV = results.get(0);
Delete delete = new Delete(firstKV.getRowArray(),
firstKV.getRowOffset(), firstKV.getRowLength(),ts);
mutations.add(delete);
// force tephra to ignore this deletes
delete.setAttribute(TxConstants.TX_ROLLBACK_ATTRIBUTE_KEY, new byte[0]);
} else if (isUpsert) {
Arrays.fill(values, null);
int bucketNumOffset = 0;
if (projectedTable.getBucketNum() != null) {
values[0] = new byte[] { 0 };
bucketNumOffset = 1;
}
int i = bucketNumOffset;
List<PColumn> projectedColumns = projectedTable.getColumns();
for (; i < projectedTable.getPKColumns().size(); i++) {
Expression expression = selectExpressions.get(i - bucketNumOffset);
if (expression.evaluate(result, ptr)) {
values[i] = ptr.copyBytes();
// If SortOrder from expression in SELECT doesn't match the
// column being projected into then invert the bits.
if (expression.getSortOrder() !=
projectedColumns.get(i).getSortOrder()) {
SortOrder.invert(values[i], 0, values[i], 0,
values[i].length);
}
}else{
values[i] = ByteUtil.EMPTY_BYTE_ARRAY;
}
}
projectedTable.newKey(ptr, values);
PRow row = projectedTable.newRow(kvBuilder, ts, ptr, false);
for (; i < projectedColumns.size(); i++) {
Expression expression = selectExpressions.get(i - bucketNumOffset);
if (expression.evaluate(result, ptr)) {
PColumn column = projectedColumns.get(i);
if (!column.getDataType().isSizeCompatible(ptr, null,
expression.getDataType(), expression.getSortOrder(),
expression.getMaxLength(), expression.getScale(),
column.getMaxLength(), column.getScale())) {
throw new DataExceedsCapacityException(
column.getDataType(), column.getMaxLength(),
column.getScale(), column.getName().getString(), ptr);
}
column.getDataType().coerceBytes(ptr, null,
expression.getDataType(), expression.getMaxLength(),
expression.getScale(), expression.getSortOrder(),
column.getMaxLength(), column.getScale(),
column.getSortOrder(), projectedTable.rowKeyOrderOptimizable());
byte[] bytes = ByteUtil.copyKeyBytesIfNecessary(ptr);
row.setValue(column, bytes);
}
}
for (Mutation mutation : row.toRowMutations()) {
mutations.add(mutation);
}
for (i = 0; i < selectExpressions.size(); i++) {
selectExpressions.get(i).reset();
}
} else if (deleteCF != null && deleteCQ != null) {
// No need to search for delete column, since we project only it
// if no empty key value is being set
if (emptyCF == null ||
result.getValue(deleteCF, deleteCQ) != null) {
Delete delete = new Delete(results.get(0).getRowArray(),
results.get(0).getRowOffset(),
results.get(0).getRowLength());
delete.deleteColumns(deleteCF, deleteCQ, ts);
// force tephra to ignore this deletes
delete.setAttribute(TxConstants.TX_ROLLBACK_ATTRIBUTE_KEY, new byte[0]);
mutations.add(delete);
}
}
if (emptyCF != null) {
/*
* If we've specified an emptyCF, then we need to insert an empty
* key value "retroactively" for any key value that is visible at
* the timestamp that the DDL was issued. Key values that are not
* visible at this timestamp will not ever be projected up to
* scans past this timestamp, so don't need to be considered.
* We insert one empty key value per row per timestamp.
*/
Set<Long> timeStamps =
Sets.newHashSetWithExpectedSize(results.size());
for (Cell kv : results) {
long kvts = kv.getTimestamp();
if (!timeStamps.contains(kvts)) {
Put put = new Put(kv.getRowArray(), kv.getRowOffset(),
kv.getRowLength());
put.add(emptyCF, QueryConstants.EMPTY_COLUMN_BYTES, kvts,
ByteUtil.EMPTY_BYTE_ARRAY);
mutations.add(put);
}
}
// Commit in batches based on UPSERT_BATCH_SIZE_BYTES_ATTRIB in config
List<List<Mutation>> batchMutationList =
MutationState.getMutationBatchList(batchSize, batchSizeBytes, mutations);
for (List<Mutation> batchMutations : batchMutationList) {
commit(region, batchMutations, indexUUID, blockingMemStoreSize, indexMaintainersPtr,
txState, areMutationInSameRegion, targetHTable);
batchMutations.clear();
}
mutations.clear();
// Commit in batches based on UPSERT_BATCH_SIZE_BYTES_ATTRIB in config
List<List<Mutation>> batchIndexMutationList =
MutationState.getMutationBatchList(batchSize, batchSizeBytes, indexMutations);
for (List<Mutation> batchIndexMutations : batchIndexMutationList) {
commitBatch(region, batchIndexMutations, null, blockingMemStoreSize, null, txState);
batchIndexMutations.clear();
}
indexMutations.clear();
}
aggregators.aggregate(rowAggregators, result);
hasAny = true;
}
} while (hasMore);
if (!mutations.isEmpty()) {
commit(region, mutations, indexUUID, blockingMemStoreSize, indexMaintainersPtr, txState,
areMutationInSameRegion, targetHTable);
mutations.clear();
}
if (!indexMutations.isEmpty()) {
commitBatch(region, indexMutations, null, blockingMemStoreSize, indexMaintainersPtr, txState);
indexMutations.clear();
}
}
} finally {
if(needToWrite) {
synchronized (lock) {
scansReferenceCount--;
}
}
if (targetHTable != null) {
targetHTable.close();
}
try {
innerScanner.close();
} finally {
if (acquiredLock) region.closeRegionOperation();
}
}
if (logger.isDebugEnabled()) {
logger.debug(LogUtil.addCustomAnnotations("Finished scanning " + rowCount + " rows for ungrouped coprocessor scan " + scan, ScanUtil.getCustomAnnotations(scan)));
}
final boolean hadAny = hasAny;
KeyValue keyValue = null;
if (hadAny) {
byte[] value = aggregators.toBytes(rowAggregators);
keyValue = KeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length);
}
final KeyValue aggKeyValue = keyValue;
RegionScanner scanner = new BaseRegionScanner(innerScanner) {
private boolean done = !hadAny;
@Override
public boolean isFilterDone() {
return done;
}
@Override
public boolean next(List<Cell> results) throws IOException {
if (done) return false;
done = true;
results.add(aggKeyValue);
return false;
}
@Override
public long getMaxResultSize() {
return scan.getMaxResultSize();
}
};
return scanner;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
protected RegionScanner doPostScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c, final Scan scan, final RegionScanner s) throws IOException, SQLException {
RegionCoprocessorEnvironment env = c.getEnvironment();
Region region = env.getRegion();
long ts = scan.getTimeRange().getMax();
boolean localIndexScan = ScanUtil.isLocalIndex(scan);
if (ScanUtil.isAnalyzeTable(scan)) {
byte[] gp_width_bytes =
scan.getAttribute(BaseScannerRegionObserver.GUIDEPOST_WIDTH_BYTES);
byte[] gp_per_region_bytes =
scan.getAttribute(BaseScannerRegionObserver.GUIDEPOST_PER_REGION);
// Let this throw, as this scan is being done for the sole purpose of collecting stats
StatisticsCollector statsCollector = StatisticsCollectorFactory.createStatisticsCollector(
env, region.getRegionInfo().getTable().getNameAsString(), ts,
gp_width_bytes, gp_per_region_bytes);
return collectStats(s, statsCollector, region, scan, env.getConfiguration());
} else if (ScanUtil.isIndexRebuild(scan)) { return rebuildIndices(s, region, scan, env.getConfiguration()); }
int offsetToBe = 0;
if (localIndexScan) {
/*
* For local indexes, we need to set an offset on row key expressions to skip
* the region start key.
*/
offsetToBe = region.getRegionInfo().getStartKey().length != 0 ? region.getRegionInfo().getStartKey().length :
region.getRegionInfo().getEndKey().length;
ScanUtil.setRowKeyOffset(scan, offsetToBe);
}
final int offset = offsetToBe;
PTable projectedTable = null;
PTable writeToTable = null;
byte[][] values = null;
byte[] descRowKeyTableBytes = scan.getAttribute(UPGRADE_DESC_ROW_KEY);
boolean isDescRowKeyOrderUpgrade = descRowKeyTableBytes != null;
if (isDescRowKeyOrderUpgrade) {
logger.debug("Upgrading row key for " + region.getRegionInfo().getTable().getNameAsString());
projectedTable = deserializeTable(descRowKeyTableBytes);
try {
writeToTable = PTableImpl.makePTable(projectedTable, true);
} catch (SQLException e) {
ServerUtil.throwIOException("Upgrade failed", e); // Impossible
}
values = new byte[projectedTable.getPKColumns().size()][];
}
byte[] localIndexBytes = scan.getAttribute(LOCAL_INDEX_BUILD);
List<IndexMaintainer> indexMaintainers = localIndexBytes == null ? null : IndexMaintainer.deserialize(localIndexBytes);
List<Mutation> indexMutations = localIndexBytes == null ? Collections.<Mutation>emptyList() : Lists.<Mutation>newArrayListWithExpectedSize(1024);
RegionScanner theScanner = s;
byte[] indexUUID = scan.getAttribute(PhoenixIndexCodec.INDEX_UUID);
byte[] txState = scan.getAttribute(BaseScannerRegionObserver.TX_STATE);
List<Expression> selectExpressions = null;
byte[] upsertSelectTable = scan.getAttribute(BaseScannerRegionObserver.UPSERT_SELECT_TABLE);
boolean isUpsert = false;
boolean isDelete = false;
byte[] deleteCQ = null;
byte[] deleteCF = null;
byte[] emptyCF = null;
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
if (upsertSelectTable != null) {
isUpsert = true;
projectedTable = deserializeTable(upsertSelectTable);
selectExpressions = deserializeExpressions(scan.getAttribute(BaseScannerRegionObserver.UPSERT_SELECT_EXPRS));
values = new byte[projectedTable.getPKColumns().size()][];
} else {
byte[] isDeleteAgg = scan.getAttribute(BaseScannerRegionObserver.DELETE_AGG);
isDelete = isDeleteAgg != null && Bytes.compareTo(PDataType.TRUE_BYTES, isDeleteAgg) == 0;
if (!isDelete) {
deleteCF = scan.getAttribute(BaseScannerRegionObserver.DELETE_CF);
deleteCQ = scan.getAttribute(BaseScannerRegionObserver.DELETE_CQ);
}
emptyCF = scan.getAttribute(BaseScannerRegionObserver.EMPTY_CF);
}
TupleProjector tupleProjector = null;
byte[][] viewConstants = null;
ColumnReference[] dataColumns = IndexUtil.deserializeDataTableColumnsToJoin(scan);
final TupleProjector p = TupleProjector.deserializeProjectorFromScan(scan);
final HashJoinInfo j = HashJoinInfo.deserializeHashJoinFromScan(scan);
if ((localIndexScan && !isDelete && !isDescRowKeyOrderUpgrade) || (j == null && p != null)) {
if (dataColumns != null) {
tupleProjector = IndexUtil.getTupleProjector(scan, dataColumns);
viewConstants = IndexUtil.deserializeViewConstantsFromScan(scan);
}
ImmutableBytesWritable tempPtr = new ImmutableBytesWritable();
theScanner =
getWrappedScanner(c, theScanner, offset, scan, dataColumns, tupleProjector,
region, indexMaintainers == null ? null : indexMaintainers.get(0), viewConstants, p, tempPtr);
}
if (j != null) {
theScanner = new HashJoinRegionScanner(theScanner, p, j, ScanUtil.getTenantId(scan), env);
}
int batchSize = 0;
long batchSizeBytes = 0L;
List<Mutation> mutations = Collections.emptyList();
boolean needToWrite = false;
Configuration conf = c.getEnvironment().getConfiguration();
long flushSize = region.getTableDesc().getMemStoreFlushSize();
if (flushSize <= 0) {
flushSize = conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE,
HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE);
}
/**
* Slow down the writes if the memstore size more than
* (hbase.hregion.memstore.block.multiplier - 1) times hbase.hregion.memstore.flush.size
* bytes. This avoids flush storm to hdfs for cases like index building where reads and
* write happen to all the table regions in the server.
*/
final long blockingMemStoreSize = flushSize * (
conf.getLong(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER,
HConstants.DEFAULT_HREGION_MEMSTORE_BLOCK_MULTIPLIER)-1) ;
boolean buildLocalIndex = indexMaintainers != null && dataColumns==null && !localIndexScan;
if (isDescRowKeyOrderUpgrade || isDelete || isUpsert || (deleteCQ != null && deleteCF != null) || emptyCF != null || buildLocalIndex) {
needToWrite = true;
// TODO: size better
mutations = Lists.newArrayListWithExpectedSize(1024);
batchSize = env.getConfiguration().getInt(MUTATE_BATCH_SIZE_ATTRIB, QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE);
batchSizeBytes = env.getConfiguration().getLong(MUTATE_BATCH_SIZE_BYTES_ATTRIB,
QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE_BYTES);
}
Aggregators aggregators = ServerAggregators.deserialize(
scan.getAttribute(BaseScannerRegionObserver.AGGREGATORS), env.getConfiguration());
Aggregator[] rowAggregators = aggregators.getAggregators();
boolean hasMore;
boolean hasAny = false;
MultiKeyValueTuple result = new MultiKeyValueTuple();
if (logger.isDebugEnabled()) {
logger.debug(LogUtil.addCustomAnnotations("Starting ungrouped coprocessor scan " + scan + " "+region.getRegionInfo(), ScanUtil.getCustomAnnotations(scan)));
}
long rowCount = 0;
final RegionScanner innerScanner = theScanner;
byte[] indexMaintainersPtr = scan.getAttribute(PhoenixIndexCodec.INDEX_MD);
boolean acquiredLock = false;
try {
if(needToWrite) {
synchronized (lock) {
scansReferenceCount++;
}
}
region.startRegionOperation();
acquiredLock = true;
synchronized (innerScanner) {
do {
List<Cell> results = new ArrayList<Cell>();
// Results are potentially returned even when the return value of s.next is false
// since this is an indication of whether or not there are more values after the
// ones returned
hasMore = innerScanner.nextRaw(results);
if (!results.isEmpty()) {
rowCount++;
result.setKeyValues(results);
if (isDescRowKeyOrderUpgrade) {
Arrays.fill(values, null);
Cell firstKV = results.get(0);
RowKeySchema schema = projectedTable.getRowKeySchema();
int maxOffset = schema.iterator(firstKV.getRowArray(), firstKV.getRowOffset() + offset, firstKV.getRowLength(), ptr);
for (int i = 0; i < schema.getFieldCount(); i++) {
Boolean hasValue = schema.next(ptr, i, maxOffset);
if (hasValue == null) {
break;
}
Field field = schema.getField(i);
if (field.getSortOrder() == SortOrder.DESC) {
// Special case for re-writing DESC ARRAY, as the actual byte value needs to change in this case
if (field.getDataType().isArrayType()) {
field.getDataType().coerceBytes(ptr, null, field.getDataType(),
field.getMaxLength(), field.getScale(), field.getSortOrder(),
field.getMaxLength(), field.getScale(), field.getSortOrder(), true); // force to use correct separator byte
}
// Special case for re-writing DESC CHAR or DESC BINARY, to force the re-writing of trailing space characters
else if (field.getDataType() == PChar.INSTANCE || field.getDataType() == PBinary.INSTANCE) {
int len = ptr.getLength();
while (len > 0 && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
len--;
}
ptr.set(ptr.get(), ptr.getOffset(), len);
// Special case for re-writing DESC FLOAT and DOUBLE, as they're not inverted like they should be (PHOENIX-2171)
} else if (field.getDataType() == PFloat.INSTANCE || field.getDataType() == PDouble.INSTANCE) {
byte[] invertedBytes = SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength());
ptr.set(invertedBytes);
}
} else if (field.getDataType() == PBinary.INSTANCE) {
// Remove trailing space characters so that the setValues call below will replace them
// with the correct zero byte character. Note this is somewhat dangerous as these
// could be legit, but I don't know what the alternative is.
int len = ptr.getLength();
while (len > 0 && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
len--;
}
ptr.set(ptr.get(), ptr.getOffset(), len);
}
values[i] = ptr.copyBytes();
}
writeToTable.newKey(ptr, values);
if (Bytes.compareTo(
firstKV.getRowArray(), firstKV.getRowOffset() + offset, firstKV.getRowLength(),
ptr.get(),ptr.getOffset() + offset,ptr.getLength()) == 0) {
continue;
}
byte[] newRow = ByteUtil.copyKeyBytesIfNecessary(ptr);
if (offset > 0) { // for local indexes (prepend region start key)
byte[] newRowWithOffset = new byte[offset + newRow.length];
System.arraycopy(firstKV.getRowArray(), firstKV.getRowOffset(), newRowWithOffset, 0, offset);;
System.arraycopy(newRow, 0, newRowWithOffset, offset, newRow.length);
newRow = newRowWithOffset;
}
byte[] oldRow = Bytes.copy(firstKV.getRowArray(), firstKV.getRowOffset(), firstKV.getRowLength());
for (Cell cell : results) {
// Copy existing cell but with new row key
Cell newCell = new KeyValue(newRow, 0, newRow.length,
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
cell.getTimestamp(), KeyValue.Type.codeToType(cell.getTypeByte()),
cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
switch (KeyValue.Type.codeToType(cell.getTypeByte())) {
case Put:
// If Put, point delete old Put
Delete del = new Delete(oldRow);
del.addDeleteMarker(new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength(), cell.getTimestamp(), KeyValue.Type.Delete,
ByteUtil.EMPTY_BYTE_ARRAY, 0, 0));
mutations.add(del);
Put put = new Put(newRow);
put.add(newCell);
mutations.add(put);
break;
case Delete:
case DeleteColumn:
case DeleteFamily:
case DeleteFamilyVersion:
Delete delete = new Delete(newRow);
delete.addDeleteMarker(newCell);
mutations.add(delete);
break;
}
}
} else if (buildLocalIndex) {
for (IndexMaintainer maintainer : indexMaintainers) {
if (!results.isEmpty()) {
result.getKey(ptr);
ValueGetter valueGetter =
maintainer.createGetterFromKeyValues(
ImmutableBytesPtr.copyBytesIfNecessary(ptr),
results);
Put put = maintainer.buildUpdateMutation(kvBuilder,
valueGetter, ptr, results.get(0).getTimestamp(),
env.getRegion().getRegionInfo().getStartKey(),
env.getRegion().getRegionInfo().getEndKey());
indexMutations.add(put);
}
}
result.setKeyValues(results);
} else if (isDelete) {
// FIXME: the version of the Delete constructor without the lock
// args was introduced in 0.94.4, thus if we try to use it here
// we can no longer use the 0.94.2 version of the client.
Cell firstKV = results.get(0);
Delete delete = new Delete(firstKV.getRowArray(),
firstKV.getRowOffset(), firstKV.getRowLength(),ts);
mutations.add(delete);
// force tephra to ignore this deletes
delete.setAttribute(TxConstants.TX_ROLLBACK_ATTRIBUTE_KEY, new byte[0]);
} else if (isUpsert) {
Arrays.fill(values, null);
int i = 0;
List<PColumn> projectedColumns = projectedTable.getColumns();
for (; i < projectedTable.getPKColumns().size(); i++) {
Expression expression = selectExpressions.get(i);
if (expression.evaluate(result, ptr)) {
values[i] = ptr.copyBytes();
// If SortOrder from expression in SELECT doesn't match the
// column being projected into then invert the bits.
if (expression.getSortOrder() !=
projectedColumns.get(i).getSortOrder()) {
SortOrder.invert(values[i], 0, values[i], 0,
values[i].length);
}
}
}
projectedTable.newKey(ptr, values);
PRow row = projectedTable.newRow(kvBuilder, ts, ptr, false);
for (; i < projectedColumns.size(); i++) {
Expression expression = selectExpressions.get(i);
if (expression.evaluate(result, ptr)) {
PColumn column = projectedColumns.get(i);
if (!column.getDataType().isSizeCompatible(ptr, null,
expression.getDataType(), expression.getSortOrder(),
expression.getMaxLength(), expression.getScale(),
column.getMaxLength(), column.getScale())) {
throw new DataExceedsCapacityException(
column.getDataType(), column.getMaxLength(),
column.getScale(), column.getName().getString(), ptr);
}
column.getDataType().coerceBytes(ptr, null,
expression.getDataType(), expression.getMaxLength(),
expression.getScale(), expression.getSortOrder(),
column.getMaxLength(), column.getScale(),
column.getSortOrder(), projectedTable.rowKeyOrderOptimizable());
byte[] bytes = ByteUtil.copyKeyBytesIfNecessary(ptr);
row.setValue(column, bytes);
}
}
for (Mutation mutation : row.toRowMutations()) {
mutations.add(mutation);
}
for (i = 0; i < selectExpressions.size(); i++) {
selectExpressions.get(i).reset();
}
} else if (deleteCF != null && deleteCQ != null) {
// No need to search for delete column, since we project only it
// if no empty key value is being set
if (emptyCF == null ||
result.getValue(deleteCF, deleteCQ) != null) {
Delete delete = new Delete(results.get(0).getRowArray(),
results.get(0).getRowOffset(),
results.get(0).getRowLength());
delete.deleteColumns(deleteCF, deleteCQ, ts);
// force tephra to ignore this deletes
delete.setAttribute(TxConstants.TX_ROLLBACK_ATTRIBUTE_KEY, new byte[0]);
mutations.add(delete);
}
}
if (emptyCF != null) {
/*
* If we've specified an emptyCF, then we need to insert an empty
* key value "retroactively" for any key value that is visible at
* the timestamp that the DDL was issued. Key values that are not
* visible at this timestamp will not ever be projected up to
* scans past this timestamp, so don't need to be considered.
* We insert one empty key value per row per timestamp.
*/
Set<Long> timeStamps =
Sets.newHashSetWithExpectedSize(results.size());
for (Cell kv : results) {
long kvts = kv.getTimestamp();
if (!timeStamps.contains(kvts)) {
Put put = new Put(kv.getRowArray(), kv.getRowOffset(),
kv.getRowLength());
put.add(emptyCF, QueryConstants.EMPTY_COLUMN_BYTES, kvts,
ByteUtil.EMPTY_BYTE_ARRAY);
mutations.add(put);
}
}
// Commit in batches based on UPSERT_BATCH_SIZE_BYTES_ATTRIB in config
List<List<Mutation>> batchMutationList =
MutationState.getMutationBatchList(batchSize, batchSizeBytes, mutations);
for (List<Mutation> batchMutations : batchMutationList) {
commitBatch(region, batchMutations, indexUUID, blockingMemStoreSize, indexMaintainersPtr,
txState);
batchMutations.clear();
}
mutations.clear();
// Commit in batches based on UPSERT_BATCH_SIZE_BYTES_ATTRIB in config
List<List<Mutation>> batchIndexMutationList =
MutationState.getMutationBatchList(batchSize, batchSizeBytes, indexMutations);
for (List<Mutation> batchIndexMutations : batchIndexMutationList) {
commitBatch(region, batchIndexMutations, null, blockingMemStoreSize, null, txState);
batchIndexMutations.clear();
}
indexMutations.clear();
}
aggregators.aggregate(rowAggregators, result);
hasAny = true;
}
} while (hasMore);
if (!mutations.isEmpty()) {
commitBatch(region, mutations, indexUUID, blockingMemStoreSize, indexMaintainersPtr, txState);
}
if (!indexMutations.isEmpty()) {
commitBatch(region, indexMutations, null, blockingMemStoreSize, indexMaintainersPtr, txState);
indexMutations.clear();
}
}
} finally {
if(needToWrite) {
synchronized (lock) {
scansReferenceCount--;
}
}
try {
innerScanner.close();
} finally {
if (acquiredLock) region.closeRegionOperation();
}
}
if (logger.isDebugEnabled()) {
logger.debug(LogUtil.addCustomAnnotations("Finished scanning " + rowCount + " rows for ungrouped coprocessor scan " + scan, ScanUtil.getCustomAnnotations(scan)));
}
final boolean hadAny = hasAny;
KeyValue keyValue = null;
if (hadAny) {
byte[] value = aggregators.toBytes(rowAggregators);
keyValue = KeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length);
}
final KeyValue aggKeyValue = keyValue;
RegionScanner scanner = new BaseRegionScanner(innerScanner) {
private boolean done = !hadAny;
@Override
public boolean isFilterDone() {
return done;
}
@Override
public boolean next(List<Cell> results) throws IOException {
if (done) return false;
done = true;
results.add(aggKeyValue);
return false;
}
@Override
public long getMaxResultSize() {
return scan.getMaxResultSize();
}
};
return scanner;
}
#location 200
#vulnerability type THREAD_SAFETY_VIOLATION |
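One difference visible in the patch above is that the upsert-select branch now opens an HTable for the target physical table and the finally block closes it behind a null check, because the table is only created on that branch. A stripped-down sketch of that shape, using a generic Closeable instead of an HTable:

import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.IOException;

public class ConditionalCloseSketch {

    static void process(boolean needsTarget) throws IOException {
        Closeable target = null;            // only created on one branch
        try {
            if (needsTarget) {
                target = new ByteArrayInputStream(new byte[] {1, 2, 3});
            }
            // ... main work that may or may not use the target ...
        } finally {
            if (target != null) {           // close exactly when it was opened
                target.close();
            }
        }
    }

    public static void main(String[] args) throws IOException {
        process(true);
        process(false);
    }
}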
#fixed code
public void run(ClassGroup one, ClassGroup two)
{
eone = new Execution(one);
eone.populateInitialMethods();
List<Method> initial1 = eone.getInitialMethods().stream().sorted((m1, m2) -> m1.getName().compareTo(m2.getName())).collect(Collectors.toList());
eone.run();
etwo = new Execution(two);
etwo.populateInitialMethods();
List<Method> initial2 = etwo.getInitialMethods().stream().sorted((m1, m2) -> m1.getName().compareTo(m2.getName())).collect(Collectors.toList());
etwo.run();
assert initial1.size() == initial2.size();
for (int i = 0; i < initial1.size(); ++i)
{
Method m1 = initial1.get(i), m2 = initial2.get(i);
objMap.put(m1, m2);
}
// process(
// initial1.get(0).getMethod(),
// initial2.get(0).getMethod()
// );
// processed.add(initial1.get(0).getMethod());
process(
one.findClass("class143").findMethod("run"),
two.findClass("class143").findMethod("run")
);
// processed.add(one.findClass("client").findMethod("init"));
// for (;;)
// {
// Optional next = objMap.keySet().stream()
// .filter(m -> !processed.contains(m))
// .findAny();
// if (!next.isPresent())
// break;
//
// Method m = (Method) next.get();
// Method m2 = (Method) objMap.get(m);
//
// System.out.println("Scanning " + m.getName() + " -> " + m2.getName());
// process(m, m2);
// processed.add(m);
// }
for (Entry<Object, Object> e : objMap.entrySet())
{
Method m1 = (Method) e.getKey();
Method m2 = (Method) e.getValue();
System.out.println("FINAL " + m1.getMethods().getClassFile().getName() + "." + m1.getName() + " -> " + m2.getMethods().getClassFile().getName() + "." + m2.getName());
}
System.out.println("done");
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void run(ClassGroup one, ClassGroup two)
{
eone = new Execution(one);
eone.populateInitialMethods();
eone.run();
etwo = new Execution(two);
etwo.populateInitialMethods();
etwo.run();
process(
one.findClass("client").findMethod("init"),
two.findClass("client").findMethod("init")
);
System.out.println("done");
}
#location 12
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Override
public List<net.runelite.deob.Method> getMethods()
{
return myMethods != null ? myMethods : Arrays.asList();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public List<net.runelite.deob.Method> getMethods()
{
ClassGroup group = this.getInstructions().getCode().getAttributes().getClassFile().getGroup();
ClassFile otherClass = group.findClass(method.getClassEntry().getName());
if (otherClass == null)
return new ArrayList<>(); // not our class
// look up this method in this class and anything that inherits from it
List<net.runelite.deob.Method> list = new ArrayList<>();
findMethodFromClass(list, otherClass);
return list;
}
#location 4
#vulnerability type NULL_DEREFERENCE |
#fixed code
private ParallelExecutorMapping mapMethods(ClassGroup one, ClassGroup two)
{
MethodSignatureMapper msm = new MethodSignatureMapper();
msm.map(one, two);
List<ParallelExecutorMapping> pmes = new ArrayList<>();
for (Method m : msm.getMap().keySet())
{
Collection<Method> methods = msm.getMap().get(m);
ExecutionMapper em = new ExecutionMapper(m, methods);
ParallelExecutorMapping mapping = em.run();
if (mapping == null)
continue;
mapping.map(mapping.m1, mapping.m2);
pmes.add(mapping);
}
ParallelExecutorMapping finalm = new ParallelExecutorMapping(one, two);
for (ParallelExecutorMapping pme : pmes)
finalm.merge(pme);
return finalm;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
private ParallelExecutorMapping mapMethods(ClassGroup one, ClassGroup two)
{
MethodSignatureMapper msm = new MethodSignatureMapper();
msm.map(one, two);
List<ParallelExecutorMapping> pmes = new ArrayList<>();
for (Method m : msm.getMap().keySet())
{
Collection<Method> methods = msm.getMap().get(m);
ExecutionMapper em = new ExecutionMapper(m, methods);
ParallelExecutorMapping mapping = em.run();
mapping.map(mapping.m1, mapping.m2);
pmes.add(mapping);
}
ParallelExecutorMapping finalm = new ParallelExecutorMapping(one, two);
for (ParallelExecutorMapping pme : pmes)
finalm.merge(pme);
return finalm;
}
#location 15
#vulnerability type NULL_DEREFERENCE |
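The patch adds a guard after ExecutionMapper.run(), skipping method groups for which no mapping was produced instead of calling map() on a null result. A tiny generic sketch of the same guard-and-continue shape (the lookup map and keys are invented for illustration):

import java.util.List;
import java.util.Map;

public class NullGuardSketch {

    // Sums the lengths of the values found for the given keys, skipping keys
    // that have no entry instead of calling length() on a null result.
    static int totalLength(Map<String, String> lookup, List<String> keys) {
        int total = 0;
        for (String key : keys) {
            String value = lookup.get(key);   // may be null
            if (value == null) {
                continue;                     // same shape as the patched loop
            }
            total += value.length();
        }
        return total;
    }

    public static void main(String[] args) {
        System.out.println(totalLength(Map.of("a", "xx"), List.of("a", "b")));
    }
}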
#fixed code
@Test
public void test1() throws IOException
{
File file = folder.newFile();
Store store = new Store(folder.getRoot());
DataFile df = new DataFile(store, file);
DataFileWriteResult res = df.write(42, 3, ByteBuffer.wrap("test".getBytes()), 0, 0);
DataFileReadResult res2 = df.read(42, 3, res.sector, res.compressedLength);
byte[] buf = res2.data;
String str = new String(buf);
Assert.assertEquals("test", str);
file.delete();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void test1() throws IOException
{
File file = folder.newFile();
Store store = new Store(folder.getRoot());
DataFile df = new DataFile(store, file);
int sector = df.write(42, 3, ByteBuffer.wrap("test".getBytes()));
byte[] buf = df.read(42, 3, sector, 4);
String str = new String(buf);
Assert.assertEquals("test", str);
file.delete();
}
#location 8
#vulnerability type RESOURCE_LEAK |
#fixed code
@Override
public void run() {
logger.info("Start proxy server at port:{}", bindPort);
while (!stop) {
try {
Socket socket = serverSocket.accept();
socket = processSocketBeforeUse(socket);
socket.setSoTimeout(timeout);
Session session = sessionManager.newSession(socket);
logger.info("Create SESSION[{}] for {}", session.getId(), session.getClientAddress());
try {
sessionFilterChain.doFilterChain(session);
} catch (InterruptedException e) {
session.close();
logger.info(e.getMessage());
continue;
}
SocksHandler socksHandler = createSocksHandler();
/* initialize socks handler */
socksHandler.setSession(session);
initializeSocksHandler(socksHandler);
executorService.execute(socksHandler);
} catch (IOException e) {
// Catches the exception that cause by shutdown method.
if (e.getMessage().equals("Socket closed") && stop) {
logger.debug("Server shutdown");
return;
}
logger.debug(e.getMessage(), e);
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public void run() {
logger.info("Start proxy server at port:{}", bindPort);
while (!stop) {
try {
Socket socket = serverSocket.accept();
socket = processSocketBeforeUse(socket);
socket.setSoTimeout(timeout);
Session session = new SocksSession(getNextSessionId(), socket, sessions);
sessions.put(session.getId(), session);
logger.info("Create SESSION[{}] for {}", session.getId(), session.getClientAddress());
try {
sessionFilterChain.doFilterChain(session);
} catch (InterruptedException e) {
session.close();
logger.info(e.getMessage());
continue;
}
SocksHandler socksHandler = createSocksHandler();
/* initialize socks handler */
socksHandler.setSession(session);
initializeSocksHandler(socksHandler);
executorService.execute(socksHandler);
} catch (IOException e) {
// Catches the exception that cause by shutdown method.
if (e.getMessage().equals("Socket closed") && stop) {
logger.debug("Server shutdown");
return;
}
logger.debug(e.getMessage(), e);
}
}
}
#location 24
#vulnerability type RESOURCE_LEAK |
#fixed code
@Test
public void testObjectIdSaved() {
ObjectIdId object = new ObjectIdId();
org.bson.types.ObjectId id = new org.bson.types.ObjectId();
object._id = id;
JacksonDBCollection<ObjectIdId, org.bson.types.ObjectId> coll = getCollection(ObjectIdId.class,
org.bson.types.ObjectId.class);
coll.insert(object);
ObjectIdId result = coll.findOneById(id);
assertThat(result._id, equalTo(id));
assertThat((org.bson.types.ObjectId) coll.getDbCollection().findOne().get("_id"), equalTo(id));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testObjectIdSaved() {
ObjectIdId object = new ObjectIdId();
org.bson.types.ObjectId id = new org.bson.types.ObjectId();
object._id = id;
JacksonDBCollection<ObjectIdId, org.bson.types.ObjectId> coll = getCollection(ObjectIdId.class,
org.bson.types.ObjectId.class);
coll.insert(object);
ObjectIdId result = coll.findOneById(id);
assertThat(result._id, equalTo(id));
}
#location 12
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Test
public void testCreatorGetterObjectIdAnnotated() throws Exception {
CreatorGetterObjectIdAnnotated o = new CreatorGetterObjectIdAnnotated(new org.bson.types.ObjectId().toString());
JacksonDBCollection<CreatorGetterObjectIdAnnotated, String> coll = createCollFor(o, String.class);
coll.insert(o);
CreatorGetterObjectIdAnnotated result = coll.findOneById(o.id);
assertThat(result, notNullValue());
assertThat(result.getId(), equalTo(o.id));
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testCreatorGetterObjectIdAnnotated() throws Exception {
CreatorGetterObjectIdAnnotated o = new CreatorGetterObjectIdAnnotated(null);
JacksonDBCollection<CreatorGetterObjectIdAnnotated, String> coll = createCollFor(o, String.class);
WriteResult<CreatorGetterObjectIdAnnotated, String> writeResult = coll.insert(o);
assertThat(writeResult.getSavedId(), notNullValue());
assertThat(writeResult.getSavedId(), instanceOf(String.class));
assertThat(writeResult.getDbObject().get("id"), nullValue());
assertThat(writeResult.getSavedId(), equalTo(writeResult.getDbObject().get("_id").toString()));
CreatorGetterObjectIdAnnotated result = coll.findOneById(writeResult.getSavedId());
assertThat(result, notNullValue());
assertThat(result.getId(), equalTo(writeResult.getSavedId()));
}
#location 12
#vulnerability type NULL_DEREFERENCE |
#fixed code
@Benchmark
public void jsoniter() throws IOException {
Jsoniter jsoniter = Jsoniter.parseBytes(JsoniterBenchmarkState.inputBytes);
byte[] val = new byte[3];
jsoniter.Read(val);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Benchmark
public void jsoniter() throws IOException {
Jsoniter iter = Jsoniter.parseBytes(JsoniterBenchmarkState.inputBytes);
while (iter.ReadArray()) {
iter.ReadUnsignedInt();
}
}
#location 6
#vulnerability type RESOURCE_LEAK |
#fixed code
public void readFromFile(String path) throws IOException {
readCoreFromFile(path);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public void readFromFile(String path) throws IOException {
FileInputStream fis = new FileInputStream(path);
DataInputStream is = new DataInputStream(fis);
readFromStream(is);
}
#location 4
#vulnerability type RESOURCE_LEAK |
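The patched method delegates to readCoreFromFile, whose body is not part of this entry, while the older version opened a FileInputStream and a DataInputStream and never closed either. The sketch below shows one way such a helper could be written; it is an assumption about shape, not the project's actual implementation, and readInt stands in for whatever parsing the real code does. Closing the outermost stream in a chain also closes the streams it wraps, so one declaration covers all three.

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class StreamChainSketch {

    // One try-with-resources declaration on the outermost stream covers the
    // whole chain: closing the DataInputStream closes the wrapped streams too.
    static int readFirstInt(String path) throws IOException {
        try (DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream(path)))) {
            return in.readInt();
        }
    }

    public static void main(String[] args) {
        String path = args.length > 0 ? args[0] : "data.bin"; // placeholder path
        try {
            System.out.println(readFirstInt(path));
        } catch (IOException e) {
            System.out.println("read failed: " + e.getMessage());
        }
    }
}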
#fixed code
protected List<IptcBlock> parseAllBlocks(byte bytes[], boolean verbose,
boolean strict) throws ImageReadException, IOException {
List<IptcBlock> blocks = new ArrayList<IptcBlock>();
BinaryInputStream bis = null;
try {
bis = new BinaryInputStream(bytes, APP13_BYTE_ORDER);
// Note that these are unsigned quantities. Name is always an even
// number of bytes (including the 1st byte, which is the size.)
byte[] idString = bis.readByteArray(
PHOTOSHOP_IDENTIFICATION_STRING.size(),
"App13 Segment missing identification string");
if (!PHOTOSHOP_IDENTIFICATION_STRING.equals(idString))
throw new ImageReadException("Not a Photoshop App13 Segment");
// int index = PHOTOSHOP_IDENTIFICATION_STRING.length;
while (true) {
byte[] imageResourceBlockSignature = bis
.readByteArray(CONST_8BIM.size(),
"App13 Segment missing identification string",
false, false);
if (null == imageResourceBlockSignature)
break;
if (!CONST_8BIM.equals(imageResourceBlockSignature))
throw new ImageReadException(
"Invalid Image Resource Block Signature");
int blockType = bis
.read2ByteInteger("Image Resource Block missing type");
if (verbose)
Debug.debug("blockType",
blockType + " (0x" + Integer.toHexString(blockType)
+ ")");
int blockNameLength = bis
.read1ByteInteger("Image Resource Block missing name length");
if (verbose && blockNameLength > 0)
Debug.debug("blockNameLength", blockNameLength + " (0x"
+ Integer.toHexString(blockNameLength) + ")");
byte[] blockNameBytes;
if (blockNameLength == 0) {
bis.read1ByteInteger("Image Resource Block has invalid name");
blockNameBytes = new byte[0];
} else {
blockNameBytes = bis.readByteArray(blockNameLength,
"Invalid Image Resource Block name", verbose, strict);
if (null == blockNameBytes)
break;
if (blockNameLength % 2 == 0)
bis.read1ByteInteger("Image Resource Block missing padding byte");
}
int blockSize = bis
.read4ByteInteger("Image Resource Block missing size");
if (verbose)
Debug.debug("blockSize",
blockSize + " (0x" + Integer.toHexString(blockSize)
+ ")");
/*
* doesn't catch cases where blocksize is invalid but is still less
* than bytes.length but will at least prevent OutOfMemory errors
*/
if (blockSize > bytes.length) {
throw new ImageReadException("Invalid Block Size : "
+ blockSize + " > " + bytes.length);
}
byte[] blockData = bis.readByteArray(blockSize,
"Invalid Image Resource Block data", verbose, strict);
if (null == blockData)
break;
blocks.add(new IptcBlock(blockType, blockNameBytes, blockData));
if ((blockSize % 2) != 0)
bis.read1ByteInteger("Image Resource Block missing padding byte");
}
return blocks;
} finally {
if (bis != null) {
bis.close();
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected List<IptcBlock> parseAllBlocks(byte bytes[], boolean verbose,
boolean strict) throws ImageReadException, IOException {
List<IptcBlock> blocks = new ArrayList<IptcBlock>();
BinaryInputStream bis = new BinaryInputStream(bytes, APP13_BYTE_ORDER);
// Note that these are unsigned quantities. Name is always an even
// number of bytes (including the 1st byte, which is the size.)
byte[] idString = bis.readByteArray(
PHOTOSHOP_IDENTIFICATION_STRING.size(),
"App13 Segment missing identification string");
if (!PHOTOSHOP_IDENTIFICATION_STRING.equals(idString))
throw new ImageReadException("Not a Photoshop App13 Segment");
// int index = PHOTOSHOP_IDENTIFICATION_STRING.length;
while (true) {
byte[] imageResourceBlockSignature = bis
.readByteArray(CONST_8BIM.size(),
"App13 Segment missing identification string",
false, false);
if (null == imageResourceBlockSignature)
break;
if (!CONST_8BIM.equals(imageResourceBlockSignature))
throw new ImageReadException(
"Invalid Image Resource Block Signature");
int blockType = bis
.read2ByteInteger("Image Resource Block missing type");
if (verbose)
Debug.debug("blockType",
blockType + " (0x" + Integer.toHexString(blockType)
+ ")");
int blockNameLength = bis
.read1ByteInteger("Image Resource Block missing name length");
if (verbose && blockNameLength > 0)
Debug.debug("blockNameLength", blockNameLength + " (0x"
+ Integer.toHexString(blockNameLength) + ")");
byte[] blockNameBytes;
if (blockNameLength == 0) {
bis.read1ByteInteger("Image Resource Block has invalid name");
blockNameBytes = new byte[0];
} else {
blockNameBytes = bis.readByteArray(blockNameLength,
"Invalid Image Resource Block name", verbose, strict);
if (null == blockNameBytes)
break;
if (blockNameLength % 2 == 0)
bis.read1ByteInteger("Image Resource Block missing padding byte");
}
int blockSize = bis
.read4ByteInteger("Image Resource Block missing size");
if (verbose)
Debug.debug("blockSize",
blockSize + " (0x" + Integer.toHexString(blockSize)
+ ")");
/*
* doesn't catch cases where blocksize is invalid but is still less
* than bytes.length but will at least prevent OutOfMemory errors
*/
if (blockSize > bytes.length) {
throw new ImageReadException("Invalid Block Size : "
+ blockSize + " > " + bytes.length);
}
byte[] blockData = bis.readByteArray(blockSize,
"Invalid Image Resource Block data", verbose, strict);
if (null == blockData)
break;
blocks.add(new IptcBlock(blockType, blockNameBytes, blockData));
if ((blockSize % 2) != 0)
bis.read1ByteInteger("Image Resource Block missing padding byte");
}
return blocks;
}
#location 14
#vulnerability type RESOURCE_LEAK |
#fixed code
@Test
public void testRemove() throws Exception {
final ByteSource byteSource = new ByteSourceFile(imageFile);
final Map<String, Object> params = new HashMap<String, Object>();
final boolean ignoreImageData = isPhilHarveyTestImage(imageFile);
params.put(PARAM_KEY_READ_THUMBNAILS, Boolean.valueOf(!ignoreImageData));
final JpegPhotoshopMetadata metadata = new JpegImageParser()
.getPhotoshopMetadata(byteSource, params);
assertNotNull(metadata);
final File noIptcFile = removeIptc(byteSource);
final JpegPhotoshopMetadata outMetadata = new JpegImageParser()
.getPhotoshopMetadata(new ByteSourceFile(noIptcFile),
params);
// FIXME should either be null or empty
assertTrue(outMetadata == null
|| outMetadata.getItems().size() == 0);
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Test
public void testRemove() throws Exception {
final ByteSource byteSource = new ByteSourceFile(imageFile);
final Map<String, Object> params = new HashMap<String, Object>();
final boolean ignoreImageData = isPhilHarveyTestImage(imageFile);
params.put(PARAM_KEY_READ_THUMBNAILS, new Boolean(!ignoreImageData));
final JpegPhotoshopMetadata metadata = new JpegImageParser()
.getPhotoshopMetadata(byteSource, params);
assertNotNull(metadata);
// metadata.dump();
final File noIptcFile = createTempFile(imageFile.getName()
+ ".iptc.remove.", ".jpg");
{
// test remove
OutputStream os = null;
boolean canThrow = false;
try {
os = new FileOutputStream(noIptcFile);
os = new BufferedOutputStream(os);
new JpegIptcRewriter().removeIPTC(byteSource, os);
canThrow = true;
} finally {
IoUtils.closeQuietly(canThrow, os);
}
final JpegPhotoshopMetadata outMetadata = new JpegImageParser()
.getPhotoshopMetadata(new ByteSourceFile(noIptcFile),
params);
assertTrue(outMetadata == null
|| outMetadata.getItems().size() == 0);
}
}
#location 24
#vulnerability type RESOURCE_LEAK |
#fixed code
@Override
public void performAction() {
AbstractFile targetFile = mainFrame.getActiveTable().getSelectedFile();
if (targetFile == null) {
targetFile = mainFrame.getActiveTable().getFileTableModel().getFileAt(0).getParent();
}
AbstractFile linkPath = mainFrame.getInactivePanel().getCurrentFolder();
new CreateSymLinkDialog(mainFrame, linkPath, targetFile).showDialog();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public void performAction() {
AbstractFile targetFile = mainFrame.getActiveTable().getSelectedFile();
if (targetFile == null) {
targetFile = mainFrame.getInactiveTable().getFileTableModel().getFileAt(0).getParent();
}
AbstractFile linkPath = mainFrame.getActivePanel().getCurrentFolder();
new CreateSymLinkDialog(mainFrame, linkPath, targetFile).showDialog();
}
#location 5
#vulnerability type NULL_DEREFERENCE |
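#note
The fixed action above takes the fallback file from the active table and the link path from the inactive panel, whereas the vulnerable version reads the fallback from the inactive table and the link path from the active panel. The #location 5 marker appears to point at that fallback assignment, where the result of getFileAt(0) is dereferenced without a check. The sketch below uses hypothetical stub interfaces, not muCommander's real API, to show the defensive shape of that fallback.

// Stand-ins for the file-table types; only the call shape matters here.
interface FileLike {
    FileLike getParent();
}

interface TableLike {
    FileLike getSelectedFile();
    FileLike getFileAt(int row);
}

class SelectionSketch {

    // Every intermediate value that may be null is checked before .getParent() is called.
    static FileLike resolveTarget(TableLike table) {
        FileLike selected = table.getSelectedFile();
        if (selected != null) {
            return selected;
        }
        FileLike first = table.getFileAt(0);
        return (first != null) ? first.getParent() : null;
    }
}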
#fixed code
protected LookupResult extractNode(List<LexicalPatternElement> nextElements, TokenDistributor distributor) {
int skip = 0;
for (LexicalPatternElement nextElement : nextElements) {
if (!nextElement.isWildcard()) {
break;
}
skip++;
}
int indexBackup = distributor.getIndex();
for (int i = skip; i < nextElements.size(); i++) {
LexicalPatternElement element = nextElements.get(i);
distributor.setIndex(indexBackup);
// consider exclusion of wildcards here
LookupResult result = elementLookupExtractor.extractNode(nextElements.subList(0, skip), element, distributor);
result.matchedIndex = i;
if (result.getMergedResults().isMatched()) {
return result;
}
if (element.isOptional()) {
continue;
}
break;
}
return new LookupResult();
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
protected LookupResult extractNode(List<LexicalPatternElement> nextElements, TokenDistributor distributor) {
int skip = 0;
for (LexicalPatternElement nextElement : nextElements) {
if (!nextElement.isWildcard()) {
break;
}
skip++;
}
int indexBackup = distributor.getIndex();
for (int i = skip; i < nextElements.size(); i++) {
LexicalPatternElement element = nextElements.get(i);
distributor.setIndex(indexBackup);
LookupResult result = elementLookupExtractor.extractNode(nextElements.subList(0, skip), element, distributor);
result.matchedIndex = i;
if (result.getMergedResults().isMatched()) {
return result;
}
if (element.isOptional()) {
continue;
}
break;
}
return new LookupResult();
}
#location 19
#vulnerability type NULL_DEREFERENCE |
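#note
The only visible difference between the fixed and vulnerable versions of extractNode is the added "consider exclusion of wildcards here" comment, so the recorded patch does not itself guard a null value. The chained call result.getMergedResults().isMatched() is the kind of expression a NULL_DEREFERENCE report usually targets; the sketch below, with a stub Result type rather than the panda-lang LookupResult, shows a guarded variant of that check.

// Hypothetical stub and guard; not the project's classes.
class LookupGuardSketch {

    static class Result {
        Result getMergedResults() {
            return this;
        }

        boolean isMatched() {
            return true;
        }
    }

    // Both the result and its merged results are checked before isMatched() is called.
    static boolean isMatched(Result result) {
        if (result == null) {
            return false;
        }
        Result merged = result.getMergedResults();
        return merged != null && merged.isMatched();
    }
}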
#fixed code
@Override
public void parse(TokenizedSource source, ParserInfo info) {
PandaScript script = info.getComponent(Components.SCRIPT);
TokenReader reader = new PandaTokenReader(source);
Extractor extractor = PATTERN.extractor();
List<TokenizedSource> gaps = extractor.extract(reader);
if (gaps == null) {
throw new PandaParserException("Cannot parse expression::instance");
}
String className = gaps.get(0).asString();
ImportRegistry importRegistry = script.getImportRegistry();
this.returnType = importRegistry.forClass(className);
if (returnType == null) {
throw new PandaParserException("Unknown return type '" + className + "'");
}
ArgumentParser argumentParser = new ArgumentParser();
this.arguments = argumentParser.parse(info, gaps.get(1));
this.constructor = ConstructorUtils.matchConstructor(returnType, arguments);
if (constructor == null) {
throw new PandaParserException("Cannot find " + className + " constructor for the specified arguments");
}
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Override
public void parse(TokenizedSource source, ParserInfo info) {
PandaScript script = info.getComponent(Components.SCRIPT);
TokenReader reader = new PandaTokenReader(source);
Extractor extractor = PATTERN.extractor();
List<TokenizedSource> gaps = extractor.extract(reader);
if (gaps == null) {
throw new PandaParserException("Cannot parse expression::instance");
}
String className = gaps.get(0).asString();
ImportRegistry importRegistry = script.getImportRegistry();
this.returnType = importRegistry.forClass(className);
ArgumentParser argumentParser = new ArgumentParser();
this.arguments = argumentParser.parse(info, gaps.get(1));
this.constructor = ConstructorUtils.matchConstructor(returnType, arguments);
if (constructor == null) {
throw new PandaParserException("Cannot find " + className + " constructor for the specified arguments");
}
}
#location 19
#vulnerability type NULL_DEREFERENCE |
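#note
Here the guard is the patch: the fixed parse throws "Unknown return type" as soon as importRegistry.forClass(className) yields null, instead of passing a null returnType on to ConstructorUtils.matchConstructor. A generic fail-fast lookup in the same spirit — the map and exception type are illustrative, not panda-lang's API — can be written as:

import java.util.Map;

class RegistrySketch {

    // Resolve a name and fail immediately with a descriptive message if it is unknown,
    // so no later call receives a null value.
    static <V> V require(Map<String, V> registry, String name) {
        V value = registry.get(name);
        if (value == null) {
            throw new IllegalStateException("Unknown type '" + name + "'");
        }
        return value;
    }
}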
#fixed code
@Autowired
public void parse(ParserData data, LocalData localData) {
localData.allocateInstance(data.getComponent(PandaComponents.CONTAINER).reserveCell());
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
@Autowired
public void parse(ParserData data, LocalData localData) {
if (localData == null || data.getComponent(PandaComponents.CONTAINER) == null) {
System.out.println("xxx");
}
localData.allocateInstance(data.getComponent(PandaComponents.CONTAINER).reserveCell());
}
#location 7
#vulnerability type NULL_DEREFERENCE |
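#note
The vulnerable version above only prints "xxx" when localData or the container component is null and then dereferences them anyway, so the check never prevents the failure; the fixed version simply drops that leftover debug block. If a missing value really is unexpected, a clearer pattern is to fail fast with a message, sketched generically below (the helper name is illustrative).

import java.util.Objects;

class PreconditionSketch {

    // Throws NullPointerException with a descriptive message instead of printing a
    // marker and continuing into the dereference.
    static <T> T required(T value, String name) {
        return Objects.requireNonNull(value, () -> "Missing component: " + name);
    }
}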
#fixed code
public ClassPrototype generate(Class<?> type) {
Module module = ModuleRegistry.getDefault().getOrCreate(type.getPackage().getName());
ClassPrototype prototype = module.get(type.getSimpleName());
if (prototype != null) {
return prototype;
}
prototype = new ClassPrototype(module, type.getSimpleName());
prototype.getAssociated().add(type);
for (Field field : type.getFields()) {
ClassPrototypeFieldGenerator generator = new ClassPrototypeFieldGenerator(type, prototype, field);
PrototypeField prototypeField = generator.generate();
prototype.getFields().add(prototypeField);
}
for (Constructor<?> constructor : type.getConstructors()) {
ClassPrototypeConstructorGenerator generator = new ClassPrototypeConstructorGenerator(type, prototype, constructor);
PrototypeConstructor prototypeField = generator.generate();
prototype.getConstructors().add(prototypeField);
}
for (Method method : type.getMethods()) {
switch (method.getName()) {
case "finalize":
case "notify":
case "notifyAll":
case "wait":
continue;
}
ClassPrototypeMethodGenerator generator = new ClassPrototypeMethodGenerator(type, prototype, method);
PrototypeMethod prototypeMethod = generator.generate();
prototype.getMethods().registerMethod(prototypeMethod);
}
return prototype;
} | Below is the vulnerable code, please generate the patch based on the following information. | #vulnerable code
public ClassPrototype generate(Class<?> type) {
Module module = ModuleRegistry.getDefault().getOrCreate(type.getPackage().getName());
ClassPrototype prototype = new ClassPrototype(module, type.getSimpleName());
prototype.getAssociated().add(type);
for (Field field : type.getFields()) {
ClassPrototypeFieldGenerator generator = new ClassPrototypeFieldGenerator(type, prototype, field);
PrototypeField prototypeField = generator.generate();
prototype.getFields().add(prototypeField);
}
for (Constructor<?> constructor : type.getConstructors()) {
ClassPrototypeConstructorGenerator generator = new ClassPrototypeConstructorGenerator(type, prototype, constructor);
PrototypeConstructor prototypeField = generator.generate();
prototype.getConstructors().add(prototypeField);
}
for (Method method : type.getMethods()) {
ClassPrototypeMethodGenerator generator = new ClassPrototypeMethodGenerator(type, prototype, method);
PrototypeMethod prototypeMethod = generator.generate();
prototype.getMethods().registerMethod(prototypeMethod);
}
return prototype;
}
#location 22
#vulnerability type NULL_DEREFERENCE |
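#note
Two changes are visible in the fixed generate: it returns an already registered prototype from the module instead of rebuilding it, and it skips java.lang.Object's finalize/notify/notifyAll/wait methods when registering prototype methods. The sketch below reproduces both ideas with plain collections rather than the panda-lang Module and ClassPrototype types. Note also that Class.getPackage() can return null for primitive and array types, so the module lookup line may deserve its own guard; that guard is not part of the recorded patch.

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class PrototypeCacheSketch {

    private static final Set<String> SKIPPED =
            new HashSet<>(Arrays.asList("finalize", "notify", "notifyAll", "wait"));

    private final Map<String, String> cache = new HashMap<>();

    // Reuse the cached entry when present; otherwise build it once, skipping Object's
    // housekeeping methods just like the switch in the fixed version.
    String getOrGenerate(Class<?> type) {
        return cache.computeIfAbsent(type.getSimpleName(), name -> {
            StringBuilder methods = new StringBuilder();
            for (Method method : type.getMethods()) {
                if (SKIPPED.contains(method.getName())) {
                    continue;
                }
                methods.append(method.getName()).append(' ');
            }
            return methods.toString().trim();
        });
    }
}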