input
stringlengths 205
73.3k
| output
stringlengths 64
73.2k
| instruction
stringclasses 1
value |
---|---|---|
#vulnerable code
@Override
public long countEntitiesOfType(Class<?> entity) {
ClassInfo classInfo = session.metaData().classInfo(entity.getName());
if (classInfo == null) {
return 0;
}
RowModelQuery countStatement = new AggregateStatements().countNodesLabelledWith(classInfo.labels());
String url = session.ensureTransaction().url();
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(countStatement, url)) {
RowModel queryResult = response.next();
return queryResult == null ? 0 : ((Number) queryResult.getValues()[0]).longValue();
}
}
#location 9
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public long countEntitiesOfType(Class<?> entity) {
ClassInfo classInfo = session.metaData().classInfo(entity.getName());
if (classInfo == null) {
return 0;
}
RowModelQuery countStatement = new AggregateStatements().countNodesLabelledWith(classInfo.labels());
Transaction tx = session.ensureTransaction();
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(countStatement, tx)) {
RowModel queryResult = response.next();
return queryResult == null ? 0 : ((Number) queryResult.getValues()[0]).longValue();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void close() {
if (graphDatabaseService != null) {
graphDatabaseService.shutdown();
}
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void close() {
if (graphDatabaseService != null) {
logger.debug(" *** Now shutting down embedded database instance: " + this);
graphDatabaseService.shutdown();
//
graphDatabaseService = null;
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
// create and hydrate the new RE
Object relationshipEntity = entityFactory.newObject(getRelationshipEntity(edge));
EntityUtils.setIdentity(relationshipEntity, edge.getId(), metadata);
// REs also have properties
setProperties(edge.getPropertyList(), relationshipEntity);
// register it in the mapping context
mappingContext.addRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
FieldInfo startNodeWriter = relEntityInfo.getStartNodeReader();
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
FieldInfo endNodeWriter = relEntityInfo.getEndNodeReader();
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
}
#location 16
#vulnerability type NULL_DEREFERENCE | #fixed code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
ClassInfo relationClassInfo = getRelationshipEntity(edge);
if (relationClassInfo == null) {
throw new MappingException("Could not find a class to map for relation " + edge);
}
Map<String, Object> allProps = new HashMap<>(toMap(edge.getPropertyList()));
getCompositeProperties(edge.getPropertyList(), relationClassInfo).forEach( (k, v) -> {
allProps.put(k.getName(), v);
});
// also add start and end node as valid constructor values
allProps.put(relationClassInfo.getStartNodeReader().getName(), startEntity);
allProps.put(relationClassInfo.getEndNodeReader().getName(), endEntity);
// create and hydrate the new RE
Object relationshipEntity = entityFactory
.newObject(relationClassInfo.getUnderlyingClass(), allProps);
EntityUtils.setIdentity(relationshipEntity, edge.getId(), metadata);
// REs also have properties
setProperties(edge.getPropertyList(), relationshipEntity);
// register it in the mapping context
mappingContext.addRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
FieldInfo startNodeWriter = relEntityInfo.getStartNodeReader();
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
FieldInfo endNodeWriter = relEntityInfo.getEndNodeReader();
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void mapOneToMany(Object instance, Class<?> valueType, Object values, String relationshipType, String relationshipDirection) {
ClassInfo classInfo = metadata.classInfo(instance);
RelationalWriter writer = entityAccessStrategy.getIterableWriter(classInfo, valueType, relationshipType, relationshipDirection);
if (writer != null) {
if (writer.type().isArray() || Iterable.class.isAssignableFrom(writer.type())) {
RelationalReader reader = entityAccessStrategy.getIterableReader(classInfo, valueType, relationshipType, relationshipDirection);
Object currentValues;
if (reader != null) {
currentValues = reader.read(instance);
if (writer.type().isArray()) {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Object[]) currentValues, valueType);
} else {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Collection) currentValues, valueType);
}
}
}
writer.write(instance, values);
return;
}
// this is not necessarily an error. but we can't tell.
logger.debug("Unable to map iterable of type: {} onto property of {}", valueType, classInfo.name());
}
#location 23
#vulnerability type NULL_DEREFERENCE | #fixed code
private void mapOneToMany(Object instance, Class<?> valueType, Object values, String relationshipType, String relationshipDirection) {
ClassInfo classInfo = metadata.classInfo(instance);
RelationalWriter writer = EntityAccessManager.getIterableWriter(classInfo, valueType, relationshipType, relationshipDirection);
if (writer != null) {
if (writer.type().isArray() || Iterable.class.isAssignableFrom(writer.type())) {
RelationalReader reader = EntityAccessManager.getIterableReader(classInfo, valueType, relationshipType, relationshipDirection);
Object currentValues;
if (reader != null) {
currentValues = reader.read(instance);
if (writer.type().isArray()) {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Object[]) currentValues, valueType);
} else {
values = EntityAccess.merge(writer.type(), (Iterable<?>) values, (Collection) currentValues, valueType);
}
}
}
writer.write(instance, values);
return;
}
// this is not necessarily an error. but we can't tell.
logger.debug("Unable to map iterable of type: {} onto property of {}", valueType, classInfo.name());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public <T> Collection<T> loadAll(Class<T> type, Collection<Long> ids, SortOrder sortOrder, Pagination pagination, int depth) {
String url = session.ensureTransaction().url();
String entityType = session.entityType(type.getName());
QueryStatements queryStatements = session.queryStatementsFor(type);
Query qry = queryStatements.findAllByType(entityType, ids, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadAll(type, response);
}
}
#location 4
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public <T> Collection<T> loadAll(Class<T> type, Collection<Long> ids, SortOrder sortOrder, Pagination pagination, int depth) {
Transaction tx = session.ensureTransaction();
String entityType = session.entityType(type.getName());
QueryStatements queryStatements = session.queryStatementsFor(type);
Query qry = queryStatements.findAllByType(entityType, ids, depth)
.setSortOrder(sortOrder)
.setPagination(pagination);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadAll(type, response);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Before
public void setUp() throws Exception {
new File("target/test-classes/META-INF/services/").mkdirs();
FileWriter out = new FileWriter(PLUGIN_LIFECYCLE);
out.write(TestOgmPluginLifecycle.class.getName());
out.close();
}
#location 7
#vulnerability type RESOURCE_LEAK | #fixed code
@Before
public void setUp() throws Exception {
TestOgmPluginLifecycle.shouldInitialize = true;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void hydrateCourses(Collection<Teacher> teachers) {
session.setDriver(new TeacherRequest());
session.setDriver(new CoursesRequest());
session.loadAll(Course.class);
}
#location 4
#vulnerability type RESOURCE_LEAK | #fixed code
private void hydrateCourses(Collection<Teacher> teachers) {
session.loadAll(Course.class);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public <T> void deleteAll(Class<T> type) {
ClassInfo classInfo = session.metaData().classInfo(type.getName());
if (classInfo != null) {
String url = session.ensureTransaction().url();
ParameterisedStatement request = getDeleteStatementsBasedOnType(type).deleteByType(session.entityType(classInfo.name()));
try (Neo4jResponse<String> response = session.requestHandler().execute(request, url)) {
session.context().clear(type);
}
} else {
session.info(type.getName() + " is not a persistable class");
}
}
#location 5
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public <T> void deleteAll(Class<T> type) {
ClassInfo classInfo = session.metaData().classInfo(type.getName());
if (classInfo != null) {
Transaction tx = session.ensureTransaction();
ParameterisedStatement request = getDeleteStatementsBasedOnType(type).deleteByType(session.entityType(classInfo.name()));
try (Neo4jResponse<String> response = session.requestHandler().execute(request, tx)) {
session.context().clear(type);
}
} else {
session.info(type.getName() + " is not a persistable class");
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public <T> Iterable<T> map(Class<T> type, Response<RestModel> response) {
//TODO refactor to decouple from the REST response format
RestStatisticsModel restStatisticsModel = new RestStatisticsModel();
RestModel model;
Collection<Map<String, Object>> result = new ArrayList<>();
String[] columns = response.columns();
Map<Long, String> relationshipEntityColumns = new HashMap<>();
model = response.next();
restStatisticsModel.setStatistics(model.getStats());
while (model.getValues() != null) {
List<RelationshipModel> relationshipModels = new ArrayList<>();
Map<String, Object> row = new HashMap<>();
for (int i = 0; i < columns.length; i++) {
String column = columns[i];
Object value = model.getValues()[i];
if (value instanceof List) {
List entityList = (List) value;
//If the entities in this list aren't mappable, then just add the entity directly to the resultset
if (isMappable(entityList)) {
List<Object> rowVals = new ArrayList<>();
for (Object entityObj : entityList) {
Object mapped = map(relationshipModels, relationshipEntityColumns, column, value, entityObj);
if (mapped != null) {
rowVals.add(mapped);
}
}
row.put(column, rowVals);
}
else {
Class arrayClass = null;
for (Object element : entityList) {
Class clazz = element.getClass();
if (arrayClass == null) {
arrayClass = clazz;
}
else {
if (arrayClass != clazz) {
arrayClass = null;
break;
}
}
}
if (arrayClass == null) {
row.put(column,entityList.toArray());
}
else {
Object array = Array.newInstance(arrayClass, entityList.size());
for (int j = 0; j < entityList.size(); j++) {
Array.set(array, j, Utils.coerceTypes(arrayClass, entityList.get(j)));
}
row.put(column, array);
}
//row.put(column, value);
}
} else {
Object mapped = map(relationshipModels, relationshipEntityColumns, column, value, value);
row.put(column, mapped);
}
}
//Map all relationships
DefaultGraphModel graphModel = new DefaultGraphModel();
graphModel.setRelationships(relationshipModels.toArray(new RelationshipModel[relationshipModels.size()]));
Map<Long, Object> relationshipEntities = graphEntityMapper.mapRelationships(graphModel);
for (Map.Entry<Long, Object> entry : relationshipEntities.entrySet()) {
if ((row.get(relationshipEntityColumns.get(entry.getKey())) instanceof List)) {
((List) row.get(relationshipEntityColumns.get(entry.getKey()))).add(entry.getValue());
} else {
row.put(relationshipEntityColumns.get(entry.getKey()), entry.getValue());
}
}
result.add(row);
model = response.next();
}
restStatisticsModel.setResult(result);
return (Iterable<T>) Arrays.asList(restStatisticsModel);
}
#location 68
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public <T> Iterable<T> map(Class<T> type, Response<RestModel> response) {
RestStatisticsModel restStatisticsModel = new RestStatisticsModel();
RestModel model = response.next();
Collection<Map<String, Object>> result = new ArrayList<>();
Map<Long, String> relationshipEntityColumns = new HashMap<>(); //Relationship ID to column name
restStatisticsModel.setStatistics(model.getStats());
while (model.getRow().entrySet().size() > 0) {
Map<String,Object> row = model.getRow();
List<RelationshipModel> relationshipModels = new ArrayList<>();
for (Map.Entry<String,Object> entry : row.entrySet()) {
Object value = entry.getValue();
if (value instanceof List) {
List entityList = (List) value;
if (isMappable(entityList)) {
for (int i=0; i< entityList.size(); i++) {
Object mapped = mapEntity(entry.getKey(), entityList.get(i), relationshipModels, relationshipEntityColumns);
if (mapped != null) { //if null, it'll be a relationship, which we're mapping after all nodes
entityList.set(i, mapped);
}
}
}
else {
convertListValueToArray(entityList, entry);
}
}
else {
if (isMappable(Arrays.asList(value))) {
Object mapped = mapEntity(entry.getKey(), value, relationshipModels, relationshipEntityColumns);
if (mapped != null) {
entry.setValue(mapped);
}
}
}
}
//Map all relationships
DefaultGraphModel graphModel = new DefaultGraphModel();
graphModel.setRelationships(relationshipModels.toArray(new RelationshipModel[relationshipModels.size()]));
Map<Long, Object> relationshipEntities = graphEntityMapper.mapRelationships(graphModel);
for (Map.Entry<Long, Object> entry : relationshipEntities.entrySet()) {
Object rels = row.get(relationshipEntityColumns.get(entry.getKey()));
if (rels instanceof List) {
List relsList = (List)rels;
for (int i=0;i<relsList.size(); i++) {
if (relsList.get(i) instanceof RelationshipModel) {
if (((RelationshipModel) relsList.get(i)).getId().equals(entry.getKey())) {
relsList.set(i, entry.getValue());
}
}
}
} else {
row.put(relationshipEntityColumns.get(entry.getKey()), entry.getValue());
}
}
result.add(row);
model = response.next();
}
restStatisticsModel.setResult(result);
return (Iterable<T>) Arrays.asList(restStatisticsModel);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private String newTransactionUrl() {
String url = transactionEndpoint(driverConfig.getURI());
LOGGER.debug( "Thread {}: POST {}", Thread.currentThread().getId(), url );
try (CloseableHttpResponse response = executeHttpRequest(new HttpPost(url))) {
Header location = response.getHeaders("Location")[0];
response.close();
return location.getValue();
} catch (Exception e) {
throw new ResultProcessingException("Could not obtain new Transaction: ", e);
}
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
private String newTransactionUrl() {
String url = transactionEndpoint(driverConfig.getURI());
LOGGER.debug( "Thread: {}, POST {}", Thread.currentThread().getId(), url );
HttpPost request = new HttpPost(url);
try (CloseableHttpResponse response = executeHttpRequest(request)) {
Header location = response.getHeaders("Location")[0];
return location.getValue();
} catch (IOException ioe) {
throw new HttpRequestException(request, ioe);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private RelationshipBuilder getRelationshipBuilder(Compiler cypherBuilder, Object entity, DirectedRelationship directedRelationship, boolean mapBothDirections) {
RelationshipBuilder relationshipBuilder;
if (isRelationshipEntity(entity)) {
Long relId = (Long) metaData.classInfo(entity).identityField().readProperty(entity);
boolean relationshipEndsChanged = haveRelationEndsChanged(entity, relId);
if (relId == null || relationshipEndsChanged) { //if the RE itself is new, or it exists but has one of it's end nodes changed
relationshipBuilder = cypherBuilder.newRelationship(directedRelationship.type());
if (relationshipEndsChanged) {
Field identityField = metaData.classInfo(entity).getField(metaData.classInfo(entity).identityField());
FieldInfo.write(identityField, entity, null); //reset the ID to null
}
} else {
relationshipBuilder = cypherBuilder.existingRelationship(relId, directedRelationship.type());
}
} else {
relationshipBuilder = cypherBuilder.newRelationship(directedRelationship.type(), mapBothDirections);
}
relationshipBuilder.direction(directedRelationship.direction());
if (isRelationshipEntity(entity)) {
relationshipBuilder.setSingleton(false); // indicates that this relationship type can be mapped multiple times between 2 nodes
relationshipBuilder.setReference(EntityUtils.identity(entity, metaData));
relationshipBuilder.setRelationshipEntity(true);
}
return relationshipBuilder;
}
#location 6
#vulnerability type NULL_DEREFERENCE | #fixed code
private RelationshipBuilder getRelationshipBuilder(Compiler cypherBuilder, Object entity, DirectedRelationship directedRelationship, boolean mapBothDirections) {
RelationshipBuilder relationshipBuilder;
if (isRelationshipEntity(entity)) {
Long relId = (Long) EntityUtils.identity(entity, metaData);
boolean relationshipEndsChanged = haveRelationEndsChanged(entity, relId);
if (relId < 0 || relationshipEndsChanged) { //if the RE itself is new, or it exists but has one of it's end nodes changed
relationshipBuilder = cypherBuilder.newRelationship(directedRelationship.type());
if (relationshipEndsChanged) {
Field identityField = metaData.classInfo(entity).getField(metaData.classInfo(entity).identityField());
FieldInfo.write(identityField, entity, null); //reset the ID to null
}
} else {
relationshipBuilder = cypherBuilder.existingRelationship(relId, directedRelationship.type());
}
} else {
relationshipBuilder = cypherBuilder.newRelationship(directedRelationship.type(), mapBothDirections);
}
relationshipBuilder.direction(directedRelationship.direction());
if (isRelationshipEntity(entity)) {
relationshipBuilder.setSingleton(false); // indicates that this relationship type can be mapped multiple times between 2 nodes
relationshipBuilder.setReference(EntityUtils.identity(entity, metaData));
relationshipBuilder.setRelationshipEntity(true);
}
return relationshipBuilder;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
// create and hydrate the new RE
Object relationshipEntity = entityFactory.newObject(getRelationshipEntity(edge));
EntityUtils.setIdentity(relationshipEntity, edge.getId(), metadata);
// REs also have properties
setProperties(edge.getPropertyList(), relationshipEntity);
// register it in the mapping context
mappingContext.addRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
FieldInfo startNodeWriter = relEntityInfo.getStartNodeReader();
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
FieldInfo endNodeWriter = relEntityInfo.getEndNodeReader();
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
ClassInfo relationClassInfo = getRelationshipEntity(edge);
if (relationClassInfo == null) {
throw new MappingException("Could not find a class to map for relation " + edge);
}
Map<String, Object> allProps = new HashMap<>(toMap(edge.getPropertyList()));
getCompositeProperties(edge.getPropertyList(), relationClassInfo).forEach( (k, v) -> {
allProps.put(k.getName(), v);
});
// also add start and end node as valid constructor values
allProps.put(relationClassInfo.getStartNodeReader().getName(), startEntity);
allProps.put(relationClassInfo.getEndNodeReader().getName(), endEntity);
// create and hydrate the new RE
Object relationshipEntity = entityFactory
.newObject(relationClassInfo.getUnderlyingClass(), allProps);
EntityUtils.setIdentity(relationshipEntity, edge.getId(), metadata);
// REs also have properties
setProperties(edge.getPropertyList(), relationshipEntity);
// register it in the mapping context
mappingContext.addRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
FieldInfo startNodeWriter = relEntityInfo.getStartNodeReader();
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
FieldInfo endNodeWriter = relEntityInfo.getEndNodeReader();
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException(
"Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private <T> Iterable<T> executeAndMap(Class<T> type, String cypher, Map<String, ?> parameters, RowModelMapper<T> rowModelMapper) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
String url = session.ensureTransaction().url();
if (type != null && session.metaData().classInfo(type.getSimpleName()) != null) {
Query qry = new GraphModelQuery(cypher, parameters);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadAll(type, response);
}
} else {
RowModelQuery qry = new RowModelQuery(cypher, parameters);
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(qry, url)) {
String[] variables = response.columns();
Collection<T> result = new ArrayList<>();
RowModel rowModel;
while ((rowModel = response.next()) != null) {
rowModelMapper.mapIntoResult(result, rowModel.getValues(), variables);
}
return result;
}
}
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
private <T> Iterable<T> executeAndMap(Class<T> type, String cypher, Map<String, ?> parameters, RowModelMapper<T> rowModelMapper) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
Transaction tx = session.ensureTransaction();
if (type != null && session.metaData().classInfo(type.getSimpleName()) != null) {
Query qry = new GraphModelQuery(cypher, parameters);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadAll(type, response);
}
} else {
RowModelQuery qry = new RowModelQuery(cypher, parameters);
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(qry, tx)) {
String[] variables = response.columns();
Collection<T> result = new ArrayList<>();
RowModel rowModel;
while ((rowModel = response.next()) != null) {
rowModelMapper.mapIntoResult(result, rowModel.getValues(), variables);
}
return result;
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public QueryStatistics execute(String statement) {
if (StringUtils.isEmpty(statement)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
assertNothingReturned(statement);
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(statement, Utils.map());
String url = session.ensureTransaction().url();
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, url)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
}
#location 8
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public QueryStatistics execute(String statement) {
if (StringUtils.isEmpty(statement)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
assertNothingReturned(statement);
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(statement, Utils.map());
Transaction tx = session.ensureTransaction();
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, tx)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void shouldParseDataInRowResponseCorrectly() {
try (Response<DefaultRestModel> rsp = new TestRestHttpResponse((rowResultsAndNoErrors()))) {
DefaultRestModel restModel = rsp.next();
assertNotNull(restModel);
Object[] rows = restModel.getValues();
assertEquals(3,rows.length);
assertEquals(1, rows[0]);
Map data = (Map) rows[1];
assertEquals(1931,((Map)data.get("data")).get("born"));
data = (Map) rows[2];
assertEquals("The Birdcage", ((Map)data.get("data")).get("title"));
assertEquals(395, ((Map)data.get("metadata")).get("id"));
restModel = rsp.next();
rows = restModel.getValues();
assertEquals(3,rows.length);
assertEquals(1, rows[0]);
data = (Map) rows[1];
assertEquals(1931,((Map)data.get("data")).get("born"));
data = (Map) rows[2];
assertEquals(2007, ((Map)data.get("data")).get("released"));
}
}
#location 6
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void shouldParseDataInRowResponseCorrectly() {
try (Response<DefaultRestModel> rsp = new TestRestHttpResponse((rowResultsAndNoErrors()))) {
DefaultRestModel restModel = rsp.next();
assertNotNull(restModel);
Map<String,Object> rows = restModel.getRow();
assertEquals(3,rows.entrySet().size());
assertEquals(1, rows.get("count"));
NodeModel data = (NodeModel) rows.get("director");
assertEquals(1931,data.property("born"));
data = (NodeModel) rows.get("movie");
assertEquals("The Birdcage", data.property("title"));
assertEquals(395L, data.getId().longValue());
restModel = rsp.next();
rows = restModel.getRow();
assertEquals(3,rows.entrySet().size());
assertEquals(1, rows.get("count"));
data = (NodeModel) rows.get("director");
assertEquals(1931,data.property("born"));
data = (NodeModel) rows.get("movie");
assertEquals(2007,data.property("released"));
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void mapOneToMany(Collection<Edge> oneToManyRelationships) {
EntityCollector entityCollector = new EntityCollector();
List<MappedRelationship> relationshipsToRegister = new ArrayList<>();
Set<Edge> registeredEdges = new HashSet<>();
// first, build the full set of related entities of each type and direction for each source entity in the relationship
for (Edge edge : oneToManyRelationships) {
Object instance = mappingContext.getNodeEntity(edge.getStartNode());
Object parameter = mappingContext.getNodeEntity(edge.getEndNode());
// is this a relationship entity we're trying to map?
Object relationshipEntity = mappingContext.getRelationshipEntity(edge.getId());
if (relationshipEntity != null) {
// establish a relationship between
FieldInfo outgoingWriter = findIterableWriter(instance, relationshipEntity, edge.getType(), OUTGOING);
if (outgoingWriter != null) {
entityCollector.recordTypeRelationship(edge.getStartNode(), relationshipEntity, edge.getType(), OUTGOING);
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(outgoingWriter.typeParameterDescriptor())));
}
FieldInfo incomingWriter = findIterableWriter(parameter, relationshipEntity, edge.getType(), Relationship.INCOMING);
if (incomingWriter != null) {
entityCollector.recordTypeRelationship(edge.getEndNode(), relationshipEntity, edge.getType(), Relationship.INCOMING);
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(incomingWriter.typeParameterDescriptor())));
}
if (incomingWriter != null || outgoingWriter != null) {
registeredEdges.add(edge);
}
} else {
FieldInfo outgoingWriter = findIterableWriter(instance, parameter, edge.getType(), OUTGOING);
if (outgoingWriter != null) {
entityCollector.recordTypeRelationship(edge.getStartNode(), parameter, edge.getType(), OUTGOING);
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(outgoingWriter.typeParameterDescriptor())));
}
FieldInfo incomingWriter = findIterableWriter(parameter, instance, edge.getType(), Relationship.INCOMING);
if (incomingWriter != null) {
entityCollector.recordTypeRelationship(edge.getEndNode(), instance, edge.getType(), Relationship.INCOMING);
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(incomingWriter.typeParameterDescriptor())));
}
if (incomingWriter != null || outgoingWriter != null) {
registeredEdges.add(edge);
}
}
}
// then set the entire collection at the same time for each owning type
for (Long instanceId : entityCollector.getOwningTypes()) {
//get all relationship types for which we're trying to set collections of instances
for (String relationshipType : entityCollector.getOwningRelationshipTypes(instanceId)) {
//for each relationship type, get all the directions for which we're trying to set collections of instances
for (String relationshipDirection : entityCollector.getRelationshipDirectionsForOwningTypeAndRelationshipType(instanceId, relationshipType)) {
//for each direction, get all the entity types for which we're trying to set collections of instances
for (Class entityClass : entityCollector.getEntityClassesForOwningTypeAndRelationshipTypeAndRelationshipDirection(instanceId, relationshipType, relationshipDirection)) {
Collection<?> entities = entityCollector.getCollectiblesForOwnerAndRelationship(instanceId, relationshipType, relationshipDirection, entityClass);
//Class entityType = entityCollector.getCollectibleTypeForOwnerAndRelationship(instanceId, relationshipType, relationshipDirection);
mapOneToMany(mappingContext.getNodeEntity(instanceId), entityClass, entities, relationshipType, relationshipDirection);
}
}
}
}
// now register all the relationships we've mapped as iterable types into the mapping context
for (MappedRelationship mappedRelationship : relationshipsToRegister) {
mappingContext.addRelationship(mappedRelationship);
}
// finally, register anything left over. These will be singleton relationships that
// were not mapped during one->one mapping, or one->many mapping.
for (Edge edge : oneToManyRelationships) {
if (!registeredEdges.contains(edge)) {
Object source = mappingContext.getNodeEntity(edge.getStartNode());
Object target = mappingContext.getNodeEntity(edge.getEndNode());
FieldInfo writer = getRelationalWriter(metadata.classInfo(source), edge.getType(), OUTGOING, target);
if (writer == null) {
writer = getRelationalWriter(metadata.classInfo(target), edge.getType(), INCOMING, source);
}
// ensures its tracked in the domain
if (writer != null) {
MappedRelationship mappedRelationship = new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), source.getClass(), ClassUtils.getType(writer.typeParameterDescriptor()));
if (!mappingContext.containsRelationship(mappedRelationship)) {
mappingContext.addRelationship(mappedRelationship);
}
}
}
}
}
#location 31
#vulnerability type NULL_DEREFERENCE | #fixed code
private void mapOneToMany(Collection<Edge> oneToManyRelationships) {
EntityCollector entityCollector = new EntityCollector();
List<MappedRelationship> relationshipsToRegister = new ArrayList<>();
// first, build the full set of related entities of each type and direction for each source entity in the relationship
for (Edge edge : oneToManyRelationships) {
Object instance = mappingContext.getNodeEntity(edge.getStartNode());
Object parameter = mappingContext.getNodeEntity(edge.getEndNode());
// is this a relationship entity we're trying to map?
Object relationshipEntity = mappingContext.getRelationshipEntity(edge.getId());
if (relationshipEntity != null) {
// establish a relationship between
FieldInfo outgoingWriter = findIterableWriter(instance, relationshipEntity, edge.getType(), OUTGOING);
if (outgoingWriter != null) {
entityCollector.recordTypeRelationship(edge.getStartNode(), relationshipEntity, edge.getType(), OUTGOING);
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(outgoingWriter.typeParameterDescriptor())));
}
FieldInfo incomingWriter = findIterableWriter(parameter, relationshipEntity, edge.getType(), INCOMING);
if (incomingWriter != null) {
entityCollector.recordTypeRelationship(edge.getEndNode(), relationshipEntity, edge.getType(), INCOMING);
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(incomingWriter.typeParameterDescriptor())));
}
} else {
// Use getRelationalWriter instead of findIterableWriter
// findIterableWriter will return matching iterable even when there is better matching single field
FieldInfo outgoingWriter = getRelationalWriter(metadata.classInfo(instance), edge.getType(), OUTGOING, parameter);
if (outgoingWriter != null) {
if (!outgoingWriter.forScalar()) {
entityCollector.recordTypeRelationship(edge.getStartNode(), parameter, edge.getType(), OUTGOING);
} else {
outgoingWriter.write(instance, parameter);
}
MappedRelationship mappedRelationship = new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(outgoingWriter.typeParameterDescriptor()));
relationshipsToRegister.add(mappedRelationship);
}
FieldInfo incomingWriter = getRelationalWriter(metadata.classInfo(parameter), edge.getType(), INCOMING, instance);
if (incomingWriter != null) {
if (!incomingWriter.forScalar()) {
entityCollector.recordTypeRelationship(edge.getEndNode(), instance, edge.getType(), INCOMING);
} else {
incomingWriter.write(parameter, instance);
}
relationshipsToRegister.add(new MappedRelationship(edge.getStartNode(), edge.getType(), edge.getEndNode(), edge.getId(), instance.getClass(), ClassUtils.getType(incomingWriter.typeParameterDescriptor())));
}
}
}
// then set the entire collection at the same time for each owning type
for (Long instanceId : entityCollector.getOwningTypes()) {
//get all relationship types for which we're trying to set collections of instances
for (String relationshipType : entityCollector.getOwningRelationshipTypes(instanceId)) {
//for each relationship type, get all the directions for which we're trying to set collections of instances
for (String relationshipDirection : entityCollector.getRelationshipDirectionsForOwningTypeAndRelationshipType(instanceId, relationshipType)) {
//for each direction, get all the entity types for which we're trying to set collections of instances
for (Class entityClass : entityCollector.getEntityClassesForOwningTypeAndRelationshipTypeAndRelationshipDirection(instanceId, relationshipType, relationshipDirection)) {
Collection<?> entities = entityCollector.getCollectiblesForOwnerAndRelationship(instanceId, relationshipType, relationshipDirection, entityClass);
//Class entityType = entityCollector.getCollectibleTypeForOwnerAndRelationship(instanceId, relationshipType, relationshipDirection);
mapOneToMany(mappingContext.getNodeEntity(instanceId), entityClass, entities, relationshipType, relationshipDirection);
}
}
}
}
// now register all the relationships we've mapped as iterable types into the mapping context
for (MappedRelationship mappedRelationship : relationshipsToRegister) {
mappingContext.addRelationship(mappedRelationship);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testRelatedObjectChangeDoesNotAffectNodeMemoisation() {
ClassInfo classInfo = metaData.classInfo(Teacher.class.getName());
Teacher teacher = new Teacher("Miss White");
objectMemo.remember(teacher, classInfo);
teacher.setId(115L); // the id field must not be part of the memoised property list
teacher.setSchool(new School("Roedean")); // a related object does not affect the property list.
assertTrue(objectMemo.remembered(teacher, classInfo));
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testRelatedObjectChangeDoesNotAffectNodeMemoisation() {
Teacher teacher = new Teacher("Miss White");
teacher.setId(115L); // the id field must not be part of the memoised property list
mappingContext.remember(teacher);
teacher.setSchool(new School("Roedean")); // a related object does not affect the property list.
assertFalse(mappingContext.isDirty(teacher));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public RelationalReader getRelationalReader(ClassInfo classInfo, String relationshipType, String relationshipDirection) {
if(relationalReaderCache.get(classInfo) == null) {
relationalReaderCache.put(classInfo, new HashMap<DirectedRelationship, RelationalReader>());
}
DirectedRelationship directedRelationship = new DirectedRelationship(relationshipType,relationshipDirection);
if(relationalReaderCache.get(classInfo).containsKey(directedRelationship)) {
return relationalReaderCache.get(classInfo).get(directedRelationship);
}
// 1st, try to find a method explicitly annotated with the relationship type and direction.
MethodInfo methodInfo = classInfo.relationshipGetter(relationshipType, relationshipDirection, STRICT_MODE);
if (methodInfo != null && !methodInfo.getAnnotations().isEmpty()) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, methodReader);
return methodReader;
}
// 2nd, try to find a field explicitly annotated with the neo4j relationship type and direction
FieldInfo fieldInfo = classInfo.relationshipField(relationshipType, relationshipDirection, STRICT_MODE);
if (fieldInfo != null && !fieldInfo.getAnnotations().isEmpty()) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, fieldReader);
return fieldReader;
}
//If the direction is INCOMING, then the annotation should have been present and we should have found a match already.
//If it's outgoing, then proceed to find other matches
if(!relationshipDirection.equals(Relationship.INCOMING)) {
// 3rd, try to find a method annotated with the relationship type and direction, allowing for implied relationships
methodInfo = classInfo.relationshipGetter(relationshipType, relationshipDirection, INFERRED_MODE);
if (methodInfo != null && !methodInfo.getAnnotations().isEmpty()) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, methodReader);
return methodReader;
}
// 4th, try to find a field annotated with the neo4j relationship type and direction, allowing for implied relationships
fieldInfo = classInfo.relationshipField(relationshipType, relationshipDirection, INFERRED_MODE);
if (fieldInfo != null && !fieldInfo.getAnnotations().isEmpty()) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, fieldReader);
return fieldReader;
}
// 5th, try to find a "getXYZ" method where XYZ is derived from the given relationship type
if (methodInfo != null) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, methodReader);
return methodReader;
}
// 6th, try to find a "XYZ" field name where XYZ is derived from the relationship type
if (fieldInfo != null) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, fieldReader);
return fieldReader;
}
}
relationalReaderCache.get(classInfo).put(directedRelationship, null);
return null;
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public RelationalReader getRelationalReader(ClassInfo classInfo, String relationshipType, String relationshipDirection) {
if(!relationalReaderCache.containsKey(classInfo)) {
relationalReaderCache.put(classInfo, new HashMap<DirectedRelationship, RelationalReader>());
}
DirectedRelationship directedRelationship = new DirectedRelationship(relationshipType,relationshipDirection);
if(relationalReaderCache.get(classInfo).containsKey(directedRelationship)) {
return relationalReaderCache.get(classInfo).get(directedRelationship);
}
// 1st, try to find a method explicitly annotated with the relationship type and direction.
MethodInfo methodInfo = classInfo.relationshipGetter(relationshipType, relationshipDirection, STRICT_MODE);
if (methodInfo != null && !methodInfo.getAnnotations().isEmpty()) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, methodReader);
return methodReader;
}
// 2nd, try to find a field explicitly annotated with the neo4j relationship type and direction
FieldInfo fieldInfo = classInfo.relationshipField(relationshipType, relationshipDirection, STRICT_MODE);
if (fieldInfo != null && !fieldInfo.getAnnotations().isEmpty()) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, fieldReader);
return fieldReader;
}
//If the direction is INCOMING, then the annotation should have been present and we should have found a match already.
//If it's outgoing, then proceed to find other matches
if(!relationshipDirection.equals(Relationship.INCOMING)) {
// 3rd, try to find a method annotated with the relationship type and direction, allowing for implied relationships
methodInfo = classInfo.relationshipGetter(relationshipType, relationshipDirection, INFERRED_MODE);
if (methodInfo != null && !methodInfo.getAnnotations().isEmpty()) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, methodReader);
return methodReader;
}
// 4th, try to find a field annotated with the neo4j relationship type and direction, allowing for implied relationships
fieldInfo = classInfo.relationshipField(relationshipType, relationshipDirection, INFERRED_MODE);
if (fieldInfo != null && !fieldInfo.getAnnotations().isEmpty()) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, fieldReader);
return fieldReader;
}
// 5th, try to find a "getXYZ" method where XYZ is derived from the given relationship type
if (methodInfo != null) {
MethodReader methodReader = new MethodReader(classInfo, methodInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, methodReader);
return methodReader;
}
// 6th, try to find a "XYZ" field name where XYZ is derived from the relationship type
if (fieldInfo != null) {
FieldReader fieldReader = new FieldReader(classInfo, fieldInfo);
relationalReaderCache.get(classInfo).put(directedRelationship, fieldReader);
return fieldReader;
}
}
relationalReaderCache.get(classInfo).put(directedRelationship, null);
return null;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private <T> Iterable<T> executeAndMap(Class<T> type, String cypher, Map<String, ?> parameters, RowModelMapper<T> rowModelMapper) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
String url = session.ensureTransaction().url();
if (type != null && session.metaData().classInfo(type.getSimpleName()) != null) {
Query qry = new GraphModelQuery(cypher, parameters);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, url)) {
return session.responseHandler().loadAll(type, response);
}
} else {
RowModelQuery qry = new RowModelQuery(cypher, parameters);
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(qry, url)) {
String[] variables = response.columns();
Collection<T> result = new ArrayList<>();
RowModel rowModel;
while ((rowModel = response.next()) != null) {
rowModelMapper.mapIntoResult(result, rowModel.getValues(), variables);
}
return result;
}
}
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
private <T> Iterable<T> executeAndMap(Class<T> type, String cypher, Map<String, ?> parameters, RowModelMapper<T> rowModelMapper) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
Transaction tx = session.ensureTransaction();
if (type != null && session.metaData().classInfo(type.getSimpleName()) != null) {
Query qry = new GraphModelQuery(cypher, parameters);
try (Neo4jResponse<GraphModel> response = session.requestHandler().execute(qry, tx)) {
return session.responseHandler().loadAll(type, response);
}
} else {
RowModelQuery qry = new RowModelQuery(cypher, parameters);
try (Neo4jResponse<RowModel> response = session.requestHandler().execute(qry, tx)) {
String[] variables = response.columns();
Collection<T> result = new ArrayList<>();
RowModel rowModel;
while ((rowModel = response.next()) != null) {
rowModelMapper.mapIntoResult(result, rowModel.getValues(), variables);
}
return result;
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public QueryStatistics execute(String statement) {
if (StringUtils.isEmpty(statement)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
assertNothingReturned(statement);
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(statement, Utils.map());
String url = session.ensureTransaction().url();
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, url)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
}
#location 8
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public QueryStatistics execute(String statement) {
if (StringUtils.isEmpty(statement)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
assertNothingReturned(statement);
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(statement, Utils.map());
Transaction tx = session.ensureTransaction();
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, tx)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void purgeDatabase() {
String url = session.ensureTransaction().url();
session.requestHandler().execute(new DeleteNodeStatements().purge(), url).close();
session.context().clear();
}
#location 3
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public void purgeDatabase() {
Transaction tx = session.ensureTransaction();
session.requestHandler().execute(new DeleteNodeStatements().purge(), tx).close();
session.context().clear();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public QueryStatistics execute(String cypher, Map<String, Object> parameters) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
assertNothingReturned(cypher);
String url = session.ensureTransaction().url();
// NOTE: No need to check if domain objects are parameters and flatten them to json as this is done
// for us using the existing execute() method.
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(cypher, parameters);
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, url)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
}
#location 11
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public QueryStatistics execute(String cypher, Map<String, Object> parameters) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
assertNothingReturned(cypher);
Transaction tx = session.ensureTransaction();
// NOTE: No need to check if domain objects are parameters and flatten them to json as this is done
// for us using the existing execute() method.
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(cypher, parameters);
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, tx)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public static void configure(String configurationFileName) {
try (InputStream is = classPathResource(configurationFileName)) {
configure(is);
} catch (Exception e) {
logger.warn("Could not configure OGM from {}", configurationFileName);
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public static void configure(String configurationFileName) {
try (InputStream is = toInputStream(configurationFileName)) {
configure(is);
} catch (Exception e) {
logger.warn("Could not configure OGM from {}", configurationFileName);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public FieldInfo propertyField(String propertyName) {
if (propertyFields == null) {
if (propertyFields == null) {
Collection<FieldInfo> fieldInfos = propertyFields();
propertyFields = new HashMap<>(fieldInfos.size());
for (FieldInfo fieldInfo : fieldInfos) {
propertyFields.put(fieldInfo.property().toLowerCase(), fieldInfo);
}
}
}
return propertyFields.get(propertyName.toLowerCase());
}
#location 9
#vulnerability type NULL_DEREFERENCE | #fixed code
public FieldInfo propertyField(String propertyName) {
if (propertyFields == null) {
initPropertyFields();
}
return propertyFields.get(propertyName.toLowerCase());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testIndexesAreSuccessfullyValidated() {
createLoginConstraint();
Components.getConfiguration().setAutoIndex("validate");
AutoIndexManager indexManager = new AutoIndexManager(metaData, Components.driver());
assertEquals(AutoIndexMode.VALIDATE.getName(), Components.getConfiguration().getAutoIndex());
assertEquals(1, indexManager.getIndexes().size());
indexManager.build();
dropLoginConstraint();
}
#location 9
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testIndexesAreSuccessfullyValidated() {
createLoginConstraint();
baseConfiguration.setAutoIndex("validate");
AutoIndexManager indexManager = new AutoIndexManager(metaData, Components.driver(), baseConfiguration);
assertEquals(AutoIndexMode.VALIDATE.getName(), baseConfiguration.getAutoIndex());
assertEquals(1, indexManager.getIndexes().size());
indexManager.build();
dropLoginConstraint();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
void removeEntity(Object entity) {
Class<?> type = entity.getClass();
ClassInfo classInfo = metaData.classInfo(type.getName());
FieldInfo identityReader = classInfo.identityField();
Long id = (Long) identityReader.readProperty(entity);
purge(entity, identityReader, type);
if (id != null) {
typeRegister.remove(metaData, type, id);
}
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
boolean removeRelationship(MappedRelationship mappedRelationship) {
return relationshipRegister.remove(mappedRelationship);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testIndexDumpMatchesDatabaseIndexes() throws IOException {
createLoginConstraint();
Components.getConfiguration().setAutoIndex("dump");
Components.getConfiguration().setDumpDir(".");
Components.getConfiguration().setDumpFilename("test.cql");
File file = new File("./test.cql");
try {
AutoIndexManager indexManager = new AutoIndexManager(metaData, Components.driver());
assertEquals(AutoIndexMode.DUMP.getName(), Components.getConfiguration().getAutoIndex());
assertEquals(1, indexManager.getIndexes().size());
indexManager.build();
assertTrue(file.exists());
assertTrue(file.length() > 0);
BufferedReader reader = new BufferedReader(new FileReader(file));
String actual = reader.readLine();
assertEquals(CREATE_LOGIN_CONSTRAINT_CYPHER, actual);
reader.close();
} finally {
file.delete();
}
dropLoginConstraint();
}
#location 15
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testIndexDumpMatchesDatabaseIndexes() throws IOException {
createLoginConstraint();
baseConfiguration.setAutoIndex("dump");
baseConfiguration.setDumpDir(".");
baseConfiguration.setDumpFilename("test.cql");
File file = new File("./test.cql");
try {
AutoIndexManager indexManager = new AutoIndexManager(metaData, Components.driver(), baseConfiguration);
assertEquals(AutoIndexMode.DUMP.getName(), baseConfiguration.getAutoIndex());
assertEquals(1, indexManager.getIndexes().size());
indexManager.build();
assertTrue(file.exists());
assertTrue(file.length() > 0);
BufferedReader reader = new BufferedReader(new FileReader(file));
String actual = reader.readLine();
assertEquals(CREATE_LOGIN_CONSTRAINT_CYPHER, actual);
reader.close();
} finally {
file.delete();
}
dropLoginConstraint();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public EntityAccess getPropertyWriter(final ClassInfo classInfo, String propertyName) {
if(propertyWriterCache.get(classInfo) == null) {
propertyWriterCache.put(classInfo,new HashMap<String, EntityAccess>());
}
EntityAccess writer = propertyWriterCache.get(classInfo).get(propertyName);
if(writer != null) {
return writer;
}
MethodInfo setterInfo = classInfo.propertySetter(propertyName);
EntityAccess propertyWriter = determinePropertyAccessor(classInfo, propertyName, setterInfo, new AccessorFactory<EntityAccess>() {
@Override
public EntityAccess makeMethodAccessor(MethodInfo methodInfo) {
return new MethodWriter(classInfo, methodInfo);
}
@Override
public EntityAccess makeFieldAccessor(FieldInfo fieldInfo) {
return new FieldWriter(classInfo, fieldInfo);
}
});
propertyWriterCache.get(classInfo).put(propertyName, propertyWriter);
return propertyWriter;
}
#location 6
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public EntityAccess getPropertyWriter(final ClassInfo classInfo, String propertyName) {
if(!propertyWriterCache.containsKey(classInfo)) {
propertyWriterCache.put(classInfo,new HashMap<String, EntityAccess>());
}
if(propertyWriterCache.get(classInfo).containsKey(propertyName)) {
return propertyWriterCache.get(classInfo).get(propertyName);
}
MethodInfo setterInfo = classInfo.propertySetter(propertyName);
EntityAccess propertyWriter = determinePropertyAccessor(classInfo, propertyName, setterInfo, new AccessorFactory<EntityAccess>() {
@Override
public EntityAccess makeMethodAccessor(MethodInfo methodInfo) {
return new MethodWriter(classInfo, methodInfo);
}
@Override
public EntityAccess makeFieldAccessor(FieldInfo fieldInfo) {
return new FieldWriter(classInfo, fieldInfo);
}
});
propertyWriterCache.get(classInfo).put(propertyName, propertyWriter);
return propertyWriter;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
// create and hydrate the new RE
Object relationshipEntity = entityFactory.newObject(getRelationshipEntity(edge));
setIdentity(relationshipEntity, edge.getId());
// REs also have properties
setProperties(edge, relationshipEntity);
// register it in the mapping context
mappingContext.registerRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
RelationalWriter startNodeWriter = entityAccessStrategy.getRelationalEntityWriter(relEntityInfo, StartNode.class);
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException("Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
RelationalWriter endNodeWriter = entityAccessStrategy.getRelationalEntityWriter(relEntityInfo, EndNode.class);
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException("Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
}
#location 20
#vulnerability type NULL_DEREFERENCE | #fixed code
private Object createRelationshipEntity(Edge edge, Object startEntity, Object endEntity) {
// create and hydrate the new RE
Object relationshipEntity = entityFactory.newObject(getRelationshipEntity(edge));
setIdentity(relationshipEntity, edge.getId());
// REs also have properties
setProperties(edge, relationshipEntity);
// register it in the mapping context
mappingContext.registerRelationshipEntity(relationshipEntity, edge.getId());
// set the start and end entities
ClassInfo relEntityInfo = metadata.classInfo(relationshipEntity);
RelationalWriter startNodeWriter = EntityAccessManager.getRelationalEntityWriter(relEntityInfo, StartNode.class);
if (startNodeWriter != null) {
startNodeWriter.write(relationshipEntity, startEntity);
} else {
throw new RuntimeException("Cannot find a writer for the StartNode of relational entity " + relEntityInfo.name());
}
RelationalWriter endNodeWriter = EntityAccessManager.getRelationalEntityWriter(relEntityInfo, EndNode.class);
if (endNodeWriter != null) {
endNodeWriter.write(relationshipEntity, endEntity);
} else {
throw new RuntimeException("Cannot find a writer for the EndNode of relational entity " + relEntityInfo.name());
}
return relationshipEntity;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
void removeNodeEntity(Object entity, boolean deregisterDependentRelationshipEntity) {
Long id = nativeId(entity);
nodeEntityRegister.remove(id);
final ClassInfo primaryIndexClassInfo = metaData.classInfo(entity);
final FieldInfo primaryIndexField = primaryIndexClassInfo
.primaryIndexField(); // also need to add the class to key to prevent collisions.
if (primaryIndexField != null) {
final Object primaryIndexValue = primaryIndexField.read(entity);
if (primaryIndexValue != null) {
primaryIndexNodeRegister.remove(new LabelPrimaryId(primaryIndexClassInfo, primaryIndexValue));
}
}
if (deregisterDependentRelationshipEntity) {
deregisterDependentRelationshipEntity(entity);
}
}
#location 8
#vulnerability type NULL_DEREFERENCE | #fixed code
boolean removeRelationship(MappedRelationship mappedRelationship) {
return relationshipRegister.remove(mappedRelationship);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public QueryStatistics execute(String cypher, Map<String, Object> parameters) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
assertNothingReturned(cypher);
String url = session.ensureTransaction().url();
// NOTE: No need to check if domain objects are parameters and flatten them to json as this is done
// for us using the existing execute() method.
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(cypher, parameters);
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, url)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
}
#location 11
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public QueryStatistics execute(String cypher, Map<String, Object> parameters) {
if (StringUtils.isEmpty(cypher)) {
throw new RuntimeException("Supplied cypher statement must not be null or empty.");
}
if (parameters == null) {
throw new RuntimeException("Supplied Parameters cannot be null.");
}
assertNothingReturned(cypher);
Transaction tx = session.ensureTransaction();
// NOTE: No need to check if domain objects are parameters and flatten them to json as this is done
// for us using the existing execute() method.
RowModelQueryWithStatistics parameterisedStatement = new RowModelQueryWithStatistics(cypher, parameters);
try (Neo4jResponse<RowQueryStatisticsResult> response = session.requestHandler().execute(parameterisedStatement, tx)) {
RowQueryStatisticsResult result = response.next();
return result == null ? null : result.getStats();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Test
public void testUnchangedObjectDetected() {
ClassInfo classInfo = metaData.classInfo(Teacher.class.getName());
Teacher mrsJones = new Teacher();
objectMemo.remember(mrsJones, classInfo);
mrsJones.setId(115L); // the id field must not be part of the memoised property list
assertTrue(objectMemo.remembered(mrsJones, classInfo));
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
@Test
public void testUnchangedObjectDetected() {
Teacher mrsJones = new Teacher();
mrsJones.setId(115L); // the id field must not be part of the memoised property list
mappingContext.remember(mrsJones);
assertFalse(mappingContext.isDirty(mrsJones));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public <T> void delete(T object) {
if (object.getClass().isArray() || Iterable.class.isAssignableFrom(object.getClass())) {
deleteAll(object);
} else {
ClassInfo classInfo = session.metaData().classInfo(object);
if (classInfo != null) {
Field identityField = classInfo.getField(classInfo.identityField());
Long identity = (Long) FieldWriter.read(identityField, object);
if (identity != null) {
String url = session.ensureTransaction().url();
ParameterisedStatement request = getDeleteStatementsBasedOnType(object.getClass()).delete(identity);
try (Neo4jResponse<String> response = session.requestHandler().execute(request, url)) {
session.context().clear(object);
}
}
} else {
session.info(object.getClass().getName() + " is not an instance of a persistable class");
}
}
}
#location 11
#vulnerability type RESOURCE_LEAK | #fixed code
@Override
public <T> void delete(T object) {
if (object.getClass().isArray() || Iterable.class.isAssignableFrom(object.getClass())) {
deleteAll(object);
} else {
ClassInfo classInfo = session.metaData().classInfo(object);
if (classInfo != null) {
Field identityField = classInfo.getField(classInfo.identityField());
Long identity = (Long) FieldWriter.read(identityField, object);
if (identity != null) {
Transaction tx = session.ensureTransaction();
ParameterisedStatement request = getDeleteStatementsBasedOnType(object.getClass()).delete(identity);
try (Neo4jResponse<String> response = session.requestHandler().execute(request, tx)) {
session.context().clear(object);
}
}
} else {
session.info(object.getClass().getName() + " is not an instance of a persistable class");
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private String createTemporaryEphemeralFileStore() {
try {
System.out.format("java tmpdir root: %s\n", System.getProperty("java.io.tmpdir"));
Path path = Files.createTempDirectory("neo4j.db");
System.out.format("Check temporary directory %s\n", path.toString());
File f = path.toFile();
System.out.format("Checking directory actually exists as a file %s\n", f.exists());
f.deleteOnExit();
URI uri = f.toURI();
System.out.format("Checking URI object is not null: %s\n", uri != null);
System.out.format("Checking URI as String %s\n", uri.toString());
String fileStoreUri = uri.toString();
return fileStoreUri;
} catch (Exception e) {
System.out.println("Caught an exception:");
e.printStackTrace();
throw new RuntimeException(e);
}
}
#location 16
#vulnerability type NULL_DEREFERENCE | #fixed code
private String createTemporaryEphemeralFileStore() {
try {
Path path = Files.createTempDirectory("neo4j.db");
File f = path.toFile();
f.deleteOnExit();
URI uri = f.toURI();
String fileStoreUri = uri.toString();
return fileStoreUri;
} catch (Exception e) {
throw new RuntimeException(e);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public PropertyReader getPropertyReader(final ClassInfo classInfo, String propertyName) {
if(propertyReaderCache.get(classInfo) == null) {
propertyReaderCache.put(classInfo, new HashMap<String, PropertyReader>());
}
PropertyReader reader = propertyReaderCache.get(classInfo).get(propertyName);
if(reader != null) {
return reader;
}
MethodInfo getterInfo = classInfo.propertyGetter(propertyName);
PropertyReader propertyReader = determinePropertyAccessor(classInfo, propertyName, getterInfo, new AccessorFactory<PropertyReader>() {
@Override
public PropertyReader makeMethodAccessor(MethodInfo methodInfo) {
return new MethodReader(classInfo, methodInfo);
}
@Override
public PropertyReader makeFieldAccessor(FieldInfo fieldInfo) {
return new FieldReader(classInfo, fieldInfo);
}
});
propertyReaderCache.get(classInfo).put(propertyName, propertyReader);
return propertyReader;
}
#location 6
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public PropertyReader getPropertyReader(final ClassInfo classInfo, String propertyName) {
if(!propertyReaderCache.containsKey(classInfo)) {
propertyReaderCache.put(classInfo, new HashMap<String, PropertyReader>());
}
if(propertyReaderCache.get(classInfo).containsKey(propertyName)) {
return propertyReaderCache.get(classInfo).get(propertyName);
}
MethodInfo getterInfo = classInfo.propertyGetter(propertyName);
PropertyReader propertyReader = determinePropertyAccessor(classInfo, propertyName, getterInfo, new AccessorFactory<PropertyReader>() {
@Override
public PropertyReader makeMethodAccessor(MethodInfo methodInfo) {
return new MethodReader(classInfo, methodInfo);
}
@Override
public PropertyReader makeFieldAccessor(FieldInfo fieldInfo) {
return new FieldReader(classInfo, fieldInfo);
}
});
propertyReaderCache.get(classInfo).put(propertyName, propertyReader);
return propertyReader;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void setup() {
// Might have to manually load a checkpoint.
// In that case, the input splits are not set, they will be faked by
// the checkpoint files. Each checkpoint file will be an input split
// and the input split
superstepCounter = getContext().getCounter(
"Giraph Stats", "Superstep");
vertexCounter = getContext().getCounter(
"Giraph Stats", "Aggregate vertices");
finishedVertexCounter = getContext().getCounter(
"Giraph Stats", "Aggregate finished vertices");
edgeCounter = getContext().getCounter(
"Giraph Stats", "Aggregate edges");
sentMessagesCounter = getContext().getCounter(
"Giraph Stats", "Sent messages");
currentWorkersCounter = getContext().getCounter(
"Giraph Stats", "Current workers");
if (getRestartedSuperstep() == -1) {
return;
}
else if (getRestartedSuperstep() < -1) {
LOG.fatal("setup: Impossible to restart superstep " +
getRestartedSuperstep());
setJobState(BspService.State.FAILED, -1, -1);
} else {
superstepCounter.increment(getRestartedSuperstep());
}
}
#location 24
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void setup() {
// Might have to manually load a checkpoint.
// In that case, the input splits are not set, they will be faked by
// the checkpoint files. Each checkpoint file will be an input split
// and the input split
superstepCounter = getContext().getCounter(
"Giraph Stats", "Superstep");
vertexCounter = getContext().getCounter(
"Giraph Stats", "Aggregate vertices");
finishedVertexCounter = getContext().getCounter(
"Giraph Stats", "Aggregate finished vertices");
edgeCounter = getContext().getCounter(
"Giraph Stats", "Aggregate edges");
sentMessagesCounter = getContext().getCounter(
"Giraph Stats", "Sent messages");
currentWorkersCounter = getContext().getCounter(
"Giraph Stats", "Current workers");
if (getRestartedSuperstep() != UNSET_SUPERSTEP) {
superstepCounter.increment(getRestartedSuperstep());
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected boolean processEvent(WatchedEvent event) {
boolean foundEvent = false;
if (event.getPath().startsWith(MASTER_JOB_STATE_PATH) &&
(event.getType() == EventType.NodeChildrenChanged)) {
if (LOG.isInfoEnabled()) {
LOG.info("processEvent: Job state changed, checking " +
"to see if it needs to restart");
}
JSONObject jsonObj = getJobState();
try {
if ((State.valueOf(jsonObj.getString(JSONOBJ_STATE_KEY)) ==
State.START_SUPERSTEP) &&
jsonObj.getLong(JSONOBJ_APPLICATION_ATTEMPT_KEY) !=
getApplicationAttempt() &&
getSuperstep() > 0) {
LOG.fatal("processEvent: Worker will restart " +
"from command - " + jsonObj.toString());
System.exit(-1);
}
} catch (JSONException e) {
throw new RuntimeException(
"processEvent: Couldn't properly get job state from " +
jsonObj.toString());
}
return true;
}
return foundEvent;
}
#location 15
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
protected boolean processEvent(WatchedEvent event) {
boolean foundEvent = false;
if (event.getPath().startsWith(MASTER_JOB_STATE_PATH) &&
(event.getType() == EventType.NodeChildrenChanged)) {
if (LOG.isInfoEnabled()) {
LOG.info("processEvent: Job state changed, checking " +
"to see if it needs to restart");
}
JSONObject jsonObj = getJobState();
try {
if ((ApplicationState.valueOf(jsonObj.getString(JSONOBJ_STATE_KEY)) ==
ApplicationState.START_SUPERSTEP) &&
jsonObj.getLong(JSONOBJ_APPLICATION_ATTEMPT_KEY) !=
getApplicationAttempt()) {
LOG.fatal("processEvent: Worker will restart " +
"from command - " + jsonObj.toString());
System.exit(-1);
}
} catch (JSONException e) {
throw new RuntimeException(
"processEvent: Couldn't properly get job state from " +
jsonObj.toString());
}
return true;
}
return foundEvent;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void onlineZooKeeperServers() {
Integer taskId = zkServerPortMap.get(myHostname);
if ((taskId != null) && (taskId.intValue() == taskPartition)) {
File zkDirFile = new File(this.zkDir);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Trying to delete old " +
"directory " + this.zkDir);
}
FileUtils.deleteDirectory(zkDirFile);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Failed to delete " +
"directory " + this.zkDir);
}
generateZooKeeperConfigFile(
new ArrayList<String>(zkServerPortMap.keySet()));
ProcessBuilder processBuilder = new ProcessBuilder();
List<String> commandList = new ArrayList<String>();
String javaHome = System.getProperty("java.home");
if (javaHome == null) {
throw new IllegalArgumentException(
"onlineZooKeeperServers: java.home is not set!");
}
commandList.add(javaHome + "/bin/java");
commandList.add(conf.get(GiraphJob.ZOOKEEPER_JAVA_OPTS,
GiraphJob.ZOOKEEPER_JAVA_OPTS_DEFAULT));
commandList.add("-cp");
Path fullJarPath = new Path(conf.get(GiraphJob.ZOOKEEPER_JAR));
commandList.add(fullJarPath.toString());
commandList.add(QuorumPeerMain.class.getName());
commandList.add(configFilePath);
processBuilder.command(commandList);
File execDirectory = new File(zkDir);
processBuilder.directory(execDirectory);
processBuilder.redirectErrorStream(true);
LOG.info("onlineZooKeeperServers: Attempting to start ZooKeeper " +
"server with command " + commandList);
try {
synchronized (this) {
zkProcess = processBuilder.start();
}
zkProcessCollector =
new StreamCollector(zkProcess.getInputStream());
zkProcessCollector.start();
Runnable runnable = new Runnable() {
public void run() {
synchronized (this) {
if (zkProcess != null) {
LOG.warn("onlineZooKeeperServers: "+
"Forced a shutdown hook kill of the " +
"ZooKeeper process.");
zkProcess.destroy();
}
}
}
};
Runtime.getRuntime().addShutdownHook(new Thread(runnable));
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed to start " +
"ZooKeeper process");
throw new RuntimeException(e);
}
/*
* Once the server is up and running, notify that this server is up
* and running by dropping a ready stamp.
*/
int connectAttempts = 0;
while (connectAttempts < 5) {
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Connect attempt " +
connectAttempts + " trying to connect to " +
myHostname + ":" + zkBasePort +
" with poll msecs = " + pollMsecs);
}
InetSocketAddress zkServerAddress =
new InetSocketAddress(myHostname, zkBasePort);
Socket testServerSock = new Socket();
testServerSock.connect(zkServerAddress, 5000);
LOG.info("onlineZooKeeperServers: Connected!");
break;
} catch (SocketTimeoutException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"SocketTimeoutException", e);
} catch (ConnectException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"ConnectException", e);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"IOException", e);
}
++connectAttempts;
try {
Thread.sleep(pollMsecs);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Sleep of " + pollMsecs +
" interrupted - " + e.getMessage());
}
}
if (connectAttempts == 5) {
throw new IllegalStateException(
"onlineZooKeeperServers: Failed to connect in 5 tries!");
}
Path myReadyPath = new Path(
serverDirectory, myHostname +
HOSTNAME_TASK_SEPARATOR + taskPartition);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Creating my filestamp " +
myReadyPath);
}
fs.createNewFile(myReadyPath);
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed (maybe previous " +
"task failed) to create filestamp " + myReadyPath);
}
}
else {
List<String> foundList = new ArrayList<String>();
int readyRetrievalAttempt = 0;
while (true) {
try {
FileStatus [] fileStatusArray =
fs.listStatus(serverDirectory);
foundList.clear();
if ((fileStatusArray != null) &&
(fileStatusArray.length > 0)) {
for (int i = 0; i < fileStatusArray.length; ++i) {
String[] hostnameTaskArray =
fileStatusArray[i].getPath().getName().split(
HOSTNAME_TASK_SEPARATOR);
if (hostnameTaskArray.length != 2) {
throw new RuntimeException(
"getZooKeeperServerList: Task 0 failed " +
"to parse " +
fileStatusArray[i].getPath().getName());
}
foundList.add(hostnameTaskArray[0]);
}
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Got " +
foundList + " " +
foundList.size() + " hosts from " +
fileStatusArray.length +
" ready servers when " +
serverCount +
" required (polling period is " +
pollMsecs + ") on attempt " +
readyRetrievalAttempt);
}
if (foundList.containsAll(zkServerPortMap.keySet())) {
break;
}
} else {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperSErvers: Empty " +
"directory " + serverDirectory +
", waiting " + pollMsecs + " msecs.");
}
}
Thread.sleep(pollMsecs);
++readyRetrievalAttempt;
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Strange interrupt from " +
e.getMessage());
}
}
}
}
#location 43
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void onlineZooKeeperServers() {
Integer taskId = zkServerPortMap.get(myHostname);
if ((taskId != null) && (taskId.intValue() == taskPartition)) {
File zkDirFile = new File(this.zkDir);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Trying to delete old " +
"directory " + this.zkDir);
}
FileUtils.deleteDirectory(zkDirFile);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Failed to delete " +
"directory " + this.zkDir);
}
generateZooKeeperConfigFile(
new ArrayList<String>(zkServerPortMap.keySet()));
ProcessBuilder processBuilder = new ProcessBuilder();
List<String> commandList = new ArrayList<String>();
String javaHome = System.getProperty("java.home");
if (javaHome == null) {
throw new IllegalArgumentException(
"onlineZooKeeperServers: java.home is not set!");
}
commandList.add(javaHome + "/bin/java");
commandList.add(conf.get(GiraphJob.ZOOKEEPER_JAVA_OPTS,
GiraphJob.ZOOKEEPER_JAVA_OPTS_DEFAULT));
commandList.add("-cp");
Path fullJarPath = new Path(conf.get(GiraphJob.ZOOKEEPER_JAR));
commandList.add(fullJarPath.toString());
commandList.add(QuorumPeerMain.class.getName());
commandList.add(configFilePath);
processBuilder.command(commandList);
File execDirectory = new File(zkDir);
processBuilder.directory(execDirectory);
processBuilder.redirectErrorStream(true);
LOG.info("onlineZooKeeperServers: Attempting to start ZooKeeper " +
"server with command " + commandList);
try {
synchronized (this) {
zkProcess = processBuilder.start();
zkProcessCollector =
new StreamCollector(zkProcess.getInputStream());
zkProcessCollector.start();
}
Runnable runnable = new Runnable() {
public void run() {
synchronized (this) {
if (zkProcess != null) {
LOG.warn("onlineZooKeeperServers: "+
"Forced a shutdown hook kill of the " +
"ZooKeeper process.");
zkProcess.destroy();
}
}
}
};
Runtime.getRuntime().addShutdownHook(new Thread(runnable));
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed to start " +
"ZooKeeper process");
throw new RuntimeException(e);
}
/*
* Once the server is up and running, notify that this server is up
* and running by dropping a ready stamp.
*/
int connectAttempts = 0;
while (connectAttempts < 5) {
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Connect attempt " +
connectAttempts + " trying to connect to " +
myHostname + ":" + zkBasePort +
" with poll msecs = " + pollMsecs);
}
InetSocketAddress zkServerAddress =
new InetSocketAddress(myHostname, zkBasePort);
Socket testServerSock = new Socket();
testServerSock.connect(zkServerAddress, 5000);
LOG.info("onlineZooKeeperServers: Connected!");
break;
} catch (SocketTimeoutException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"SocketTimeoutException", e);
} catch (ConnectException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"ConnectException", e);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"IOException", e);
}
++connectAttempts;
try {
Thread.sleep(pollMsecs);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Sleep of " + pollMsecs +
" interrupted - " + e.getMessage());
}
}
if (connectAttempts == 5) {
throw new IllegalStateException(
"onlineZooKeeperServers: Failed to connect in 5 tries!");
}
Path myReadyPath = new Path(
serverDirectory, myHostname +
HOSTNAME_TASK_SEPARATOR + taskPartition);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Creating my filestamp " +
myReadyPath);
}
fs.createNewFile(myReadyPath);
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed (maybe previous " +
"task failed) to create filestamp " + myReadyPath);
}
}
else {
List<String> foundList = new ArrayList<String>();
int readyRetrievalAttempt = 0;
while (true) {
try {
FileStatus [] fileStatusArray =
fs.listStatus(serverDirectory);
foundList.clear();
if ((fileStatusArray != null) &&
(fileStatusArray.length > 0)) {
for (int i = 0; i < fileStatusArray.length; ++i) {
String[] hostnameTaskArray =
fileStatusArray[i].getPath().getName().split(
HOSTNAME_TASK_SEPARATOR);
if (hostnameTaskArray.length != 2) {
throw new RuntimeException(
"getZooKeeperServerList: Task 0 failed " +
"to parse " +
fileStatusArray[i].getPath().getName());
}
foundList.add(hostnameTaskArray[0]);
}
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Got " +
foundList + " " +
foundList.size() + " hosts from " +
fileStatusArray.length +
" ready servers when " +
serverCount +
" required (polling period is " +
pollMsecs + ") on attempt " +
readyRetrievalAttempt);
}
if (foundList.containsAll(zkServerPortMap.keySet())) {
break;
}
} else {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperSErvers: Empty " +
"directory " + serverDirectory +
", waiting " + pollMsecs + " msecs.");
}
}
Thread.sleep(pollMsecs);
++readyRetrievalAttempt;
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Strange interrupt from " +
e.getMessage());
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public SuperstepState coordinateSuperstep() throws
KeeperException, InterruptedException {
// 1. Get chosen workers and set up watches on them.
// 2. Assign partitions to the workers
// or possibly reload from a superstep
// 3. Wait for all workers to complete
// 4. Collect and process aggregators
// 5. Create superstep finished node
// 6. If the checkpoint frequency is met, finalize the checkpoint
Map<String, JSONArray> chosenWorkerHostnamePortMap = checkWorkers();
if (chosenWorkerHostnamePortMap == null) {
LOG.fatal("coordinateSuperstep: Not enough healthy workers for " +
"superstep " + getSuperstep());
setJobState(State.FAILED, -1, -1);
} else {
for (Entry<String, JSONArray> entry :
chosenWorkerHostnamePortMap.entrySet()) {
String workerHealthyPath =
getWorkerHealthyPath(getApplicationAttempt(),
getSuperstep()) + "/" + entry.getKey();
if (getZkExt().exists(workerHealthyPath, true) == null) {
LOG.warn("coordinateSuperstep: Chosen worker " +
workerHealthyPath +
" is no longer valid, failing superstep");
}
}
}
currentWorkersCounter.increment(chosenWorkerHostnamePortMap.size() -
currentWorkersCounter.getValue());
if (getRestartedSuperstep() == getSuperstep()) {
try {
if (LOG.isInfoEnabled()) {
LOG.info("coordinateSuperstep: Reloading from superstep " +
getSuperstep());
}
mapFilesToWorkers(
getRestartedSuperstep(),
new ArrayList<String>(
chosenWorkerHostnamePortMap.keySet()));
inputSplitsToVertexRanges(chosenWorkerHostnamePortMap);
} catch (IOException e) {
throw new IllegalStateException(
"coordinateSuperstep: Failed to reload", e);
}
} else {
if (getSuperstep() > 0) {
VertexRangeBalancer<I, V, E, M> vertexRangeBalancer =
BspUtils.<I, V, E, M>createVertexRangeBalancer(
getConfiguration());
synchronized (vertexRangeSynchronization) {
balanceVertexRanges(vertexRangeBalancer,
chosenWorkerHostnamePortMap);
}
}
}
String finishedWorkerPath =
getWorkerFinishedPath(getApplicationAttempt(), getSuperstep());
try {
getZkExt().createExt(finishedWorkerPath,
null,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true);
} catch (KeeperException.NodeExistsException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("coordinateSuperstep: finishedWorkers " +
finishedWorkerPath +
" already exists, no need to create");
}
}
String workerHealthyPath =
getWorkerHealthyPath(getApplicationAttempt(), getSuperstep());
List<String> finishedWorkerList = null;
long nextInfoMillis = System.currentTimeMillis();
while (true) {
try {
finishedWorkerList =
getZkExt().getChildrenExt(finishedWorkerPath,
true,
false,
false);
} catch (KeeperException e) {
throw new IllegalStateException(
"coordinateSuperstep: Couldn't get children of " +
finishedWorkerPath, e);
}
if (LOG.isDebugEnabled()) {
LOG.debug("coordinateSuperstep: Got finished worker list = " +
finishedWorkerList + ", size = " +
finishedWorkerList.size() +
", chosen worker list = " +
chosenWorkerHostnamePortMap.keySet() + ", size = " +
chosenWorkerHostnamePortMap.size() +
" from " + finishedWorkerPath);
}
if (LOG.isInfoEnabled() &&
(System.currentTimeMillis() > nextInfoMillis)) {
nextInfoMillis = System.currentTimeMillis() + 30000;
LOG.info("coordinateSuperstep: " + finishedWorkerList.size() +
" out of " +
chosenWorkerHostnamePortMap.size() +
" chosen workers finished on superstep " +
getSuperstep());
}
getContext().setStatus(getGraphMapper().getMapFunctions() + " " +
finishedWorkerList.size() +
" finished out of " +
chosenWorkerHostnamePortMap.size() +
" on superstep " + getSuperstep());
if (finishedWorkerList.containsAll(
chosenWorkerHostnamePortMap.keySet())) {
break;
}
getSuperstepStateChangedEvent().waitForever();
getSuperstepStateChangedEvent().reset();
// Did a worker die?
if ((getSuperstep() > 0) &&
!superstepChosenWorkerAlive(
workerHealthyPath,
chosenWorkerHostnamePortMap.keySet())) {
return SuperstepState.WORKER_FAILURE;
}
}
collectAndProcessAggregatorValues(getSuperstep());
JSONObject globalInfoObject = aggregateWorkerStats(getSuperstep());
// Convert the input split stats to vertex ranges in superstep 0
if (getSuperstep() == 0) {
inputSplitsToVertexRanges(chosenWorkerHostnamePortMap);
}
// Let everyone know the aggregated application state through the
// superstep
String superstepFinishedNode =
getSuperstepFinishedPath(getApplicationAttempt(), getSuperstep());
try {
getZkExt().createExt(superstepFinishedNode,
globalInfoObject.toString().getBytes(),
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true);
vertexCounter.increment(
globalInfoObject.getLong(JSONOBJ_NUM_VERTICES_KEY) -
vertexCounter.getValue());
finishedVertexCounter.increment(
globalInfoObject.getLong(JSONOBJ_FINISHED_VERTICES_KEY) -
finishedVertexCounter.getValue());
edgeCounter.increment(
globalInfoObject.getLong(JSONOBJ_NUM_EDGES_KEY) -
edgeCounter.getValue());
sentMessagesCounter.increment(
globalInfoObject.getLong(JSONOBJ_NUM_MESSAGES_KEY) -
sentMessagesCounter.getValue());
} catch (JSONException e) {
throw new IllegalStateException("coordinateSuperstep: " +
"JSONException", e);
}
// Finalize the valid checkpoint file prefixes and possibly
// the aggregators.
if (checkpointFrequencyMet(getSuperstep())) {
try {
finalizeCheckpoint(
getSuperstep(),
new ArrayList<String>(chosenWorkerHostnamePortMap.keySet()));
} catch (IOException e) {
throw new IllegalStateException(
"coordinateSuperstep: IOException on finalizing checkpoint",
e);
}
}
// Clean up the old supersteps (always keep this one)
long removeableSuperstep = getSuperstep() - 1;
if ((getConfiguration().getBoolean(
GiraphJob.KEEP_ZOOKEEPER_DATA,
GiraphJob.KEEP_ZOOKEEPER_DATA_DEFAULT) == false) &&
(removeableSuperstep >= 0)) {
String oldSuperstepPath =
getSuperstepPath(getApplicationAttempt()) + "/" +
(removeableSuperstep);
try {
if (LOG.isInfoEnabled()) {
LOG.info("coordinateSuperstep: Cleaning up old Superstep " +
oldSuperstepPath);
}
getZkExt().deleteExt(oldSuperstepPath,
-1,
true);
} catch (KeeperException.NoNodeException e) {
LOG.warn("coordinateBarrier: Already cleaned up " +
oldSuperstepPath);
} catch (KeeperException e) {
throw new IllegalStateException(
"coordinateSuperstep: KeeperException on " +
"finalizing checkpoint", e);
}
}
incrCachedSuperstep();
superstepCounter.increment(1);
try {
if ((globalInfoObject.getLong(JSONOBJ_FINISHED_VERTICES_KEY) ==
globalInfoObject.getLong(JSONOBJ_NUM_VERTICES_KEY)) &&
(globalInfoObject.getLong(JSONOBJ_NUM_MESSAGES_KEY)) == 0) {
return SuperstepState.ALL_SUPERSTEPS_DONE;
} else {
return SuperstepState.THIS_SUPERSTEP_DONE;
}
} catch (JSONException e) {
throw new IllegalStateException(
"coordinateSuperstep: JSONException on checking if " +
"the application is done", e);
}
}
#location 42
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public SuperstepState coordinateSuperstep() throws
KeeperException, InterruptedException {
// 1. Get chosen workers and set up watches on them.
// 2. Assign partitions to the workers
// or possibly reload from a superstep
// 3. Wait for all workers to complete
// 4. Collect and process aggregators
// 5. Create superstep finished node
// 6. If the checkpoint frequency is met, finalize the checkpoint
Map<String, JSONArray> chosenWorkerHostnamePortMap = checkWorkers();
if (chosenWorkerHostnamePortMap == null) {
LOG.fatal("coordinateSuperstep: Not enough healthy workers for " +
"superstep " + getSuperstep());
setJobState(ApplicationState.FAILED, -1, -1);
} else {
for (Entry<String, JSONArray> entry :
chosenWorkerHostnamePortMap.entrySet()) {
String workerHealthyPath =
getWorkerHealthyPath(getApplicationAttempt(),
getSuperstep()) + "/" + entry.getKey();
if (getZkExt().exists(workerHealthyPath, true) == null) {
LOG.warn("coordinateSuperstep: Chosen worker " +
workerHealthyPath +
" is no longer valid, failing superstep");
}
}
}
currentWorkersCounter.increment(chosenWorkerHostnamePortMap.size() -
currentWorkersCounter.getValue());
if (getRestartedSuperstep() == getSuperstep()) {
try {
if (LOG.isInfoEnabled()) {
LOG.info("coordinateSuperstep: Reloading from superstep " +
getSuperstep());
}
mapFilesToWorkers(
getRestartedSuperstep(),
new ArrayList<String>(
chosenWorkerHostnamePortMap.keySet()));
inputSplitsToVertexRanges(chosenWorkerHostnamePortMap);
} catch (IOException e) {
throw new IllegalStateException(
"coordinateSuperstep: Failed to reload", e);
}
} else {
if (getSuperstep() > INPUT_SUPERSTEP) {
VertexRangeBalancer<I, V, E, M> vertexRangeBalancer =
BspUtils.<I, V, E, M>createVertexRangeBalancer(
getConfiguration());
synchronized (vertexRangeSynchronization) {
balanceVertexRanges(vertexRangeBalancer,
chosenWorkerHostnamePortMap);
}
}
}
String finishedWorkerPath =
getWorkerFinishedPath(getApplicationAttempt(), getSuperstep());
try {
getZkExt().createExt(finishedWorkerPath,
null,
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true);
} catch (KeeperException.NodeExistsException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("coordinateSuperstep: finishedWorkers " +
finishedWorkerPath +
" already exists, no need to create");
}
}
String workerHealthyPath =
getWorkerHealthyPath(getApplicationAttempt(), getSuperstep());
List<String> finishedWorkerList = null;
long nextInfoMillis = System.currentTimeMillis();
while (true) {
try {
finishedWorkerList =
getZkExt().getChildrenExt(finishedWorkerPath,
true,
false,
false);
} catch (KeeperException e) {
throw new IllegalStateException(
"coordinateSuperstep: Couldn't get children of " +
finishedWorkerPath, e);
}
if (LOG.isDebugEnabled()) {
LOG.debug("coordinateSuperstep: Got finished worker list = " +
finishedWorkerList + ", size = " +
finishedWorkerList.size() +
", chosen worker list = " +
chosenWorkerHostnamePortMap.keySet() + ", size = " +
chosenWorkerHostnamePortMap.size() +
" from " + finishedWorkerPath);
}
if (LOG.isInfoEnabled() &&
(System.currentTimeMillis() > nextInfoMillis)) {
nextInfoMillis = System.currentTimeMillis() + 30000;
LOG.info("coordinateSuperstep: " + finishedWorkerList.size() +
" out of " +
chosenWorkerHostnamePortMap.size() +
" chosen workers finished on superstep " +
getSuperstep());
}
getContext().setStatus(getGraphMapper().getMapFunctions() + " - " +
finishedWorkerList.size() +
" finished out of " +
chosenWorkerHostnamePortMap.size() +
" on superstep " + getSuperstep());
if (finishedWorkerList.containsAll(
chosenWorkerHostnamePortMap.keySet())) {
break;
}
getSuperstepStateChangedEvent().waitForever();
getSuperstepStateChangedEvent().reset();
// Did a worker die?
if ((getSuperstep() > 0) &&
!superstepChosenWorkerAlive(
workerHealthyPath,
chosenWorkerHostnamePortMap.keySet())) {
return SuperstepState.WORKER_FAILURE;
}
}
collectAndProcessAggregatorValues(getSuperstep());
JSONObject globalInfoObject = aggregateWorkerStats(getSuperstep());
// Convert the input split stats to vertex ranges in INPUT_SUPERSTEP
if (getSuperstep() == INPUT_SUPERSTEP) {
inputSplitsToVertexRanges(chosenWorkerHostnamePortMap);
}
// Let everyone know the aggregated application state through the
// superstep
String superstepFinishedNode =
getSuperstepFinishedPath(getApplicationAttempt(), getSuperstep());
try {
getZkExt().createExt(superstepFinishedNode,
globalInfoObject.toString().getBytes(),
Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT,
true);
vertexCounter.increment(
globalInfoObject.getLong(JSONOBJ_NUM_VERTICES_KEY) -
vertexCounter.getValue());
finishedVertexCounter.increment(
globalInfoObject.getLong(JSONOBJ_FINISHED_VERTICES_KEY) -
finishedVertexCounter.getValue());
edgeCounter.increment(
globalInfoObject.getLong(JSONOBJ_NUM_EDGES_KEY) -
edgeCounter.getValue());
sentMessagesCounter.increment(
globalInfoObject.getLong(JSONOBJ_NUM_MESSAGES_KEY) -
sentMessagesCounter.getValue());
} catch (JSONException e) {
throw new IllegalStateException("coordinateSuperstep: " +
"JSONException", e);
}
// Finalize the valid checkpoint file prefixes and possibly
// the aggregators.
if (checkpointFrequencyMet(getSuperstep())) {
try {
finalizeCheckpoint(
getSuperstep(),
new ArrayList<String>(chosenWorkerHostnamePortMap.keySet()));
} catch (IOException e) {
throw new IllegalStateException(
"coordinateSuperstep: IOException on finalizing checkpoint",
e);
}
}
// Clean up the old supersteps (always keep this one)
long removeableSuperstep = getSuperstep() - 1;
if ((getConfiguration().getBoolean(
GiraphJob.KEEP_ZOOKEEPER_DATA,
GiraphJob.KEEP_ZOOKEEPER_DATA_DEFAULT) == false) &&
(removeableSuperstep >= 0)) {
String oldSuperstepPath =
getSuperstepPath(getApplicationAttempt()) + "/" +
(removeableSuperstep);
try {
if (LOG.isInfoEnabled()) {
LOG.info("coordinateSuperstep: Cleaning up old Superstep " +
oldSuperstepPath);
}
getZkExt().deleteExt(oldSuperstepPath,
-1,
true);
} catch (KeeperException.NoNodeException e) {
LOG.warn("coordinateBarrier: Already cleaned up " +
oldSuperstepPath);
} catch (KeeperException e) {
throw new IllegalStateException(
"coordinateSuperstep: KeeperException on " +
"finalizing checkpoint", e);
}
}
incrCachedSuperstep();
superstepCounter.increment(1);
try {
if ((globalInfoObject.getLong(JSONOBJ_FINISHED_VERTICES_KEY) ==
globalInfoObject.getLong(JSONOBJ_NUM_VERTICES_KEY)) &&
(globalInfoObject.getLong(JSONOBJ_NUM_MESSAGES_KEY)) == 0) {
return SuperstepState.ALL_SUPERSTEPS_DONE;
} else {
return SuperstepState.THIS_SUPERSTEP_DONE;
}
} catch (JSONException e) {
throw new IllegalStateException(
"coordinateSuperstep: JSONException on checking if " +
"the application is done", e);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void onlineZooKeeperServers() {
Integer taskId = zkServerPortMap.get(myHostname);
if ((taskId != null) && (taskId.intValue() == taskPartition)) {
File zkDirFile = new File(this.zkDir);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Trying to delete old " +
"directory " + this.zkDir);
}
FileUtils.deleteDirectory(zkDirFile);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Failed to delete " +
"directory " + this.zkDir);
}
generateZooKeeperConfigFile(
new ArrayList<String>(zkServerPortMap.keySet()));
ProcessBuilder processBuilder = new ProcessBuilder();
List<String> commandList = new ArrayList<String>();
String javaHome = System.getProperty("java.home");
if (javaHome == null) {
throw new IllegalArgumentException(
"onlineZooKeeperServers: java.home is not set!");
}
commandList.add(javaHome + "/bin/java");
commandList.add(conf.get(GiraphJob.ZOOKEEPER_JAVA_OPTS,
GiraphJob.ZOOKEEPER_JAVA_OPTS_DEFAULT));
commandList.add("-cp");
Path fullJarPath = new Path(conf.get(GiraphJob.ZOOKEEPER_JAR));
commandList.add(fullJarPath.toString());
commandList.add(QuorumPeerMain.class.getName());
commandList.add(configFilePath);
processBuilder.command(commandList);
File execDirectory = new File(zkDir);
processBuilder.directory(execDirectory);
processBuilder.redirectErrorStream(true);
LOG.info("onlineZooKeeperServers: Attempting to start ZooKeeper " +
"server with command " + commandList);
try {
synchronized (this) {
zkProcess = processBuilder.start();
}
zkProcessCollector =
new StreamCollector(zkProcess.getInputStream());
zkProcessCollector.start();
Runnable runnable = new Runnable() {
public void run() {
synchronized (this) {
if (zkProcess != null) {
LOG.warn("onlineZooKeeperServers: "+
"Forced a shutdown hook kill of the " +
"ZooKeeper process.");
zkProcess.destroy();
}
}
}
};
Runtime.getRuntime().addShutdownHook(new Thread(runnable));
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed to start " +
"ZooKeeper process");
throw new RuntimeException(e);
}
/*
* Once the server is up and running, notify that this server is up
* and running by dropping a ready stamp.
*/
int connectAttempts = 0;
while (connectAttempts < 5) {
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Connect attempt " +
connectAttempts + " trying to connect to " +
myHostname + ":" + zkBasePort +
" with poll msecs = " + pollMsecs);
}
InetSocketAddress zkServerAddress =
new InetSocketAddress(myHostname, zkBasePort);
Socket testServerSock = new Socket();
testServerSock.connect(zkServerAddress, 5000);
LOG.info("onlineZooKeeperServers: Connected!");
break;
} catch (SocketTimeoutException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"SocketTimeoutException", e);
} catch (ConnectException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"ConnectException", e);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"IOException", e);
}
++connectAttempts;
try {
Thread.sleep(pollMsecs);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Sleep of " + pollMsecs +
" interrupted - " + e.getMessage());
}
}
if (connectAttempts == 5) {
throw new IllegalStateException(
"onlineZooKeeperServers: Failed to connect in 5 tries!");
}
Path myReadyPath = new Path(
serverDirectory, myHostname +
HOSTNAME_TASK_SEPARATOR + taskPartition);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Creating my filestamp " +
myReadyPath);
}
fs.createNewFile(myReadyPath);
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed (maybe previous " +
"task failed) to create filestamp " + myReadyPath);
}
}
else {
List<String> foundList = new ArrayList<String>();
int readyRetrievalAttempt = 0;
while (true) {
try {
FileStatus [] fileStatusArray =
fs.listStatus(serverDirectory);
foundList.clear();
if ((fileStatusArray != null) &&
(fileStatusArray.length > 0)) {
for (int i = 0; i < fileStatusArray.length; ++i) {
String[] hostnameTaskArray =
fileStatusArray[i].getPath().getName().split(
HOSTNAME_TASK_SEPARATOR);
if (hostnameTaskArray.length != 2) {
throw new RuntimeException(
"getZooKeeperServerList: Task 0 failed " +
"to parse " +
fileStatusArray[i].getPath().getName());
}
foundList.add(hostnameTaskArray[0]);
}
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Got " +
foundList + " " +
foundList.size() + " hosts from " +
fileStatusArray.length +
" ready servers when " +
serverCount +
" required (polling period is " +
pollMsecs + ") on attempt " +
readyRetrievalAttempt);
}
if (foundList.containsAll(zkServerPortMap.keySet())) {
break;
}
} else {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperSErvers: Empty " +
"directory " + serverDirectory +
", waiting " + pollMsecs + " msecs.");
}
}
Thread.sleep(pollMsecs);
++readyRetrievalAttempt;
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Strange interrupt from " +
e.getMessage());
}
}
}
}
#location 44
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void onlineZooKeeperServers() {
Integer taskId = zkServerPortMap.get(myHostname);
if ((taskId != null) && (taskId.intValue() == taskPartition)) {
File zkDirFile = new File(this.zkDir);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Trying to delete old " +
"directory " + this.zkDir);
}
FileUtils.deleteDirectory(zkDirFile);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Failed to delete " +
"directory " + this.zkDir);
}
generateZooKeeperConfigFile(
new ArrayList<String>(zkServerPortMap.keySet()));
ProcessBuilder processBuilder = new ProcessBuilder();
List<String> commandList = new ArrayList<String>();
String javaHome = System.getProperty("java.home");
if (javaHome == null) {
throw new IllegalArgumentException(
"onlineZooKeeperServers: java.home is not set!");
}
commandList.add(javaHome + "/bin/java");
commandList.add(conf.get(GiraphJob.ZOOKEEPER_JAVA_OPTS,
GiraphJob.ZOOKEEPER_JAVA_OPTS_DEFAULT));
commandList.add("-cp");
Path fullJarPath = new Path(conf.get(GiraphJob.ZOOKEEPER_JAR));
commandList.add(fullJarPath.toString());
commandList.add(QuorumPeerMain.class.getName());
commandList.add(configFilePath);
processBuilder.command(commandList);
File execDirectory = new File(zkDir);
processBuilder.directory(execDirectory);
processBuilder.redirectErrorStream(true);
LOG.info("onlineZooKeeperServers: Attempting to start ZooKeeper " +
"server with command " + commandList);
try {
synchronized (this) {
zkProcess = processBuilder.start();
zkProcessCollector =
new StreamCollector(zkProcess.getInputStream());
zkProcessCollector.start();
}
Runnable runnable = new Runnable() {
public void run() {
synchronized (this) {
if (zkProcess != null) {
LOG.warn("onlineZooKeeperServers: "+
"Forced a shutdown hook kill of the " +
"ZooKeeper process.");
zkProcess.destroy();
}
}
}
};
Runtime.getRuntime().addShutdownHook(new Thread(runnable));
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed to start " +
"ZooKeeper process");
throw new RuntimeException(e);
}
/*
* Once the server is up and running, notify that this server is up
* and running by dropping a ready stamp.
*/
int connectAttempts = 0;
while (connectAttempts < 5) {
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Connect attempt " +
connectAttempts + " trying to connect to " +
myHostname + ":" + zkBasePort +
" with poll msecs = " + pollMsecs);
}
InetSocketAddress zkServerAddress =
new InetSocketAddress(myHostname, zkBasePort);
Socket testServerSock = new Socket();
testServerSock.connect(zkServerAddress, 5000);
LOG.info("onlineZooKeeperServers: Connected!");
break;
} catch (SocketTimeoutException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"SocketTimeoutException", e);
} catch (ConnectException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"ConnectException", e);
} catch (IOException e) {
LOG.warn("onlineZooKeeperServers: Got " +
"IOException", e);
}
++connectAttempts;
try {
Thread.sleep(pollMsecs);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Sleep of " + pollMsecs +
" interrupted - " + e.getMessage());
}
}
if (connectAttempts == 5) {
throw new IllegalStateException(
"onlineZooKeeperServers: Failed to connect in 5 tries!");
}
Path myReadyPath = new Path(
serverDirectory, myHostname +
HOSTNAME_TASK_SEPARATOR + taskPartition);
try {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Creating my filestamp " +
myReadyPath);
}
fs.createNewFile(myReadyPath);
} catch (IOException e) {
LOG.error("onlineZooKeeperServers: Failed (maybe previous " +
"task failed) to create filestamp " + myReadyPath);
}
}
else {
List<String> foundList = new ArrayList<String>();
int readyRetrievalAttempt = 0;
while (true) {
try {
FileStatus [] fileStatusArray =
fs.listStatus(serverDirectory);
foundList.clear();
if ((fileStatusArray != null) &&
(fileStatusArray.length > 0)) {
for (int i = 0; i < fileStatusArray.length; ++i) {
String[] hostnameTaskArray =
fileStatusArray[i].getPath().getName().split(
HOSTNAME_TASK_SEPARATOR);
if (hostnameTaskArray.length != 2) {
throw new RuntimeException(
"getZooKeeperServerList: Task 0 failed " +
"to parse " +
fileStatusArray[i].getPath().getName());
}
foundList.add(hostnameTaskArray[0]);
}
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperServers: Got " +
foundList + " " +
foundList.size() + " hosts from " +
fileStatusArray.length +
" ready servers when " +
serverCount +
" required (polling period is " +
pollMsecs + ") on attempt " +
readyRetrievalAttempt);
}
if (foundList.containsAll(zkServerPortMap.keySet())) {
break;
}
} else {
if (LOG.isInfoEnabled()) {
LOG.info("onlineZooKeeperSErvers: Empty " +
"directory " + serverDirectory +
", waiting " + pollMsecs + " msecs.");
}
}
Thread.sleep(pollMsecs);
++readyRetrievalAttempt;
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
LOG.warn("onlineZooKeeperServers: Strange interrupt from " +
e.getMessage());
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private Map<String, Object> doUpdate(EntityManager entityManager, int ciTypeId, Map<String, Object> ci, boolean enableStateTransition) {
DynamicEntityMeta entityMeta = getDynamicEntityMetaMap().get(ciTypeId);
Map<String, Object> convertedCi = MultiValueFeildOperationUtils.convertMultiValueFieldsForCICreation(entityManager, ciTypeId, ci, (String) ci.get(CmdbConstants.DEFAULT_FIELD_GUID), ciTypeAttrRepository, this);
String guid = convertedCi.get(GUID).toString();
Object entityBean = validateCi(ciTypeId, guid, entityMeta, entityManager, ACTION_MODIFICATION);
DynamicEntityHolder entityHolder = new DynamicEntityHolder(entityMeta, entityBean);
ciDataInterceptorService.preUpdate(entityHolder, convertedCi);
Map<String, Object> updatedMap = null;
if (onlyIncludeRefreshableFields(ciTypeId, convertedCi.keySet()) || !enableStateTransition) {
entityHolder.update(convertedCi, CmdbThreadLocal.getIntance().getCurrentUser(), entityManager);
entityManager.merge(entityHolder.getEntityObj());
} else {
updatedMap = stateTransEngine.process(entityManager, ciTypeId, guid, StateOperation.Update.getCode(), convertedCi, entityHolder);
}
ciDataInterceptorService.postUpdate(entityHolder, entityManager, ci);
updatedMap = ClassUtils.convertBeanToMap(entityHolder.getEntityObj(), entityHolder.getEntityMeta(), false);
return updatedMap;
}
#location 5
#vulnerability type NULL_DEREFERENCE | #fixed code
private Map<String, Object> doUpdate(EntityManager entityManager, int ciTypeId, Map<String, Object> ci, boolean enableStateTransition) {
DynamicEntityMeta entityMeta = getDynamicEntityMetaMap().get(ciTypeId);
String guid = ci.get(GUID).toString();
Object entityBean = validateCi(ciTypeId, guid, entityMeta, entityManager, ACTION_MODIFICATION);
DynamicEntityHolder entityHolder = new DynamicEntityHolder(entityMeta, entityBean);
ciDataInterceptorService.preUpdate(entityHolder, ci);
Map<String, Object> convertedCi = MultiValueFeildOperationUtils.convertMultiValueFieldsForCICreation(entityManager, ciTypeId, ci, (String) ci.get(CmdbConstants.DEFAULT_FIELD_GUID), ciTypeAttrRepository, this);
Map<String, Object> updatedMap = null;
if (onlyIncludeRefreshableFields(ciTypeId, convertedCi.keySet()) || !enableStateTransition) {
entityHolder.update(convertedCi, CmdbThreadLocal.getIntance().getCurrentUser(), entityManager);
entityManager.merge(entityHolder.getEntityObj());
} else {
updatedMap = stateTransEngine.process(entityManager, ciTypeId, guid, StateOperation.Update.getCode(), convertedCi, entityHolder);
}
ciDataInterceptorService.postUpdate(entityHolder, entityManager, ci);
updatedMap = ClassUtils.convertBeanToMap(entityHolder.getEntityObj(), entityHolder.getEntityMeta(), false);
return updatedMap;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public Result<Integer> save(RcRole role, List<Integer> permissionIds) {
Result<Integer> result=new Result<>();
result.setStatus(false);
result.setCode(MsgCode.FAILED);
if (selectByRoleName(role.getName()) != null){
result.setMsg("角色名已存在");
return result;
}
if (selectByRoleValue(role.getValue()) != null){
result.setMsg("角色值已存在");
return result;
}
roleMapper.insert(role);
role = selectByRoleName(role.getName());
result = relationService.save(role.getId(),permissionIds);
return result;
}
#location 16
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public Result<Integer> save(RcRole role, List<Integer> permissionIds) {
Result<Integer> result=new Result<>();
result.setStatus(false);
result.setCode(MsgCode.FAILED);
if (selectByRoleName(role.getName()) != null){
result.setMsg("角色名已存在");
return result;
}
if (selectByRoleValue(role.getValue()) != null){
result.setMsg("角色值已存在");
return result;
}
roleMapper.insert(role);
role = selectByRoleName(role.getName());
return result;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
private void load(String name, List<String> col) throws IOException {
BufferedReader r = new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(name),"US-ASCII"));
String line;
while ((line=r.readLine())!=null)
col.add(line);
r.close();
}
#location 7
#vulnerability type RESOURCE_LEAK | #fixed code
private void load(String name, List<String> col) throws IOException {
BufferedReader r = new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(name),"US-ASCII"));
try {
String line;
while ((line=r.readLine())!=null)
col.add(line);
} finally {
r.close();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void setPlatform(String platform) throws FileNotFoundException, IOException {
if(!platformPattern.matcher(platform).matches())
throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("PLATFORM"))) {
pw.println("PLATFORM " + platform);
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("PLATFORM " + platform);
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.platform = platform;
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
public void setPlatform(String platform) throws FileNotFoundException, IOException {
if(!platformPattern.matcher(platform).matches())
throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
this.platform = platform;
rewriteHeader();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void addSendJob(Frame f, int sec, int usec) {
synchronized(output) {
output.print("< add "
+ Integer.toString(sec) + " "
+ Integer.toString(usec) + " "
+ Integer.toHexString(f.getIdentifier()) + " "
+ Integer.toString(f.getLength()) + " "
+ Util.byteArrayToHexString(f.getData()) + " >");
output.flush();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void addSendJob(Frame f, int sec, int usec) {
StringBuilder sb = new StringBuilder(40);
sb.append("< add ");
sb.append(Integer.toString(sec));
sb.append(' ');
sb.append(Integer.toString(usec));
sb.append(' ');
sb.append(Integer.toHexString(f.getIdentifier()));
sb.append(' ');
sb.append(Integer.toString(f.getLength()));
sb.append(' ');
sb.append(Util.byteArrayToHexString(f.getData()));
sb.append(" >");
send(sb.toString());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
Object readProperties(java.util.Properties p) {
if (instance == null) {
instance = this;
}
instance.readPropertiesImpl(p);
return instance;
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
void writeProperties(java.util.Properties p) {
// better to version settings since initial version as advocated at
// http://wiki.apidesign.org/wiki/PropertyFiles
p.setProperty("version", "1.0");
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
Object readProperties(java.util.Properties p) {
if (instance == null) {
instance = this;
}
instance.readPropertiesImpl(p);
return instance;
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
void writeProperties(java.util.Properties p) {
p.setProperty("version", "1.0");
p.setProperty("busName", bus.getName());
ProjectManager manager = ProjectManager.getGlobalProjectManager();
p.setProperty("projectName", manager.getOpenedProject().getName());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void readDirectory() {
logger.log(Level.INFO, "Opening folder {0}", logFolder.getPath());
logFileDir.clear();
platforms.clear();
if (logFolder.isFolder()) {
Enumeration<? extends FileObject> children = logFolder.getChildren(true);
while (children.hasMoreElements()) {
FileObject file = children.nextElement();
if (file.getNameExt().endsWith(".log") || file.getNameExt().endsWith(".log.gz")) {
try {
LogFile logFile = LogFile.fromFile(FileUtil.toFile(file));
if (platformList.containsKey(logFile.getPlatform())) {
ArrayList<LogFile> platform = platformList.get(logFile.getPlatform());
platform.add(logFile);
} else {
ArrayList<LogFile> platform = new ArrayList<LogFile>();
platform.add(logFile);
platformList.put(logFile.getPlatform(), platform);
platforms.add(logFile.getPlatform());
}
} catch (Exception ex) {
logger.log(Level.WARNING, "Found malformed log file: {0}. Ignoring...", file.getName());
}
}
}
}
}
#location 16
#vulnerability type NULL_DEREFERENCE | #fixed code
public void readDirectory() {
logger.log(Level.INFO, "Opening folder {0}", logFolder.getPath());
logFileDir.clear();
platforms.clear();
if (logFolder.isFolder()) {
Enumeration<? extends FileObject> children = logFolder.getChildren(true);
while (children.hasMoreElements()) {
FileObject file = children.nextElement();
if (file.getNameExt().endsWith(".log") || file.getNameExt().endsWith(".log.gz")) {
try {
LogFile logFile = new LogFile(FileUtil.toFile(file));
if (platformList.containsKey(logFile.getPlatform())) {
ArrayList<LogFile> platform = platformList.get(logFile.getPlatform());
platform.add(logFile);
} else {
ArrayList<LogFile> platform = new ArrayList<LogFile>();
platform.add(logFile);
platformList.put(logFile.getPlatform(), platform);
platforms.add(logFile.getPlatform());
}
} catch (Exception ex) {
logger.log(Level.WARNING, "Found malformed log file: {0}. Ignoring...", file.getName());
}
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void unsubscribeFrom(int id) {
synchronized(output) {
output.print("< unsubscribe " + Integer.toHexString(id) + " >");
output.flush();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void unsubscribeFrom(int id) {
StringBuilder sb = new StringBuilder(30);
sb.append("< unsubscribe ");
sb.append(Integer.toHexString(id));
sb.append(" >");
send(sb.toString());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
protected Node[] createNodesForKey(BusDescription key) {
Bus bus = null;
for(Bus b : project.getBusses()) {
if(b.getDescription() == key)
bus = b;
}
AbstractNode node = new AbstractNode(Children.create(new MessageNodeFactory(key, bus), true), Lookups.fixed(key, bus));
node.setIconBaseWithExtension("org/freedesktop/tango/16x16/places/network-workgroup.png");
node.setDisplayName(bus.getName() + " (" + key.getName() + ")");
return new Node[] { node };
}
#location 11
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
protected Node[] createNodesForKey(BusDescription key) {
Bus bus = null;
for(Bus b : project.getBusses()) {
if(b.getDescription() == key)
bus = b;
}
return new Node[] { new BusNode(key, bus) };
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void setDescription(String description) throws FileNotFoundException, IOException {
if(!descriptionPattern.matcher(description).matches())
throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("DESCRIPTION"))) {
pw.println("DESCRIPTION \"" + description + "\"");
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("DESCRIPTION \"" + description + "\"");
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.description = description;
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
public void setDescription(String description) throws FileNotFoundException, IOException {
if(!descriptionPattern.matcher(description).matches())
throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
this.description = description;
rewriteHeader();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public Boolean checkConnection() {
Socket socket = new Socket();
InetSocketAddress address = new InetSocketAddress(host, port);
try {
socket.setSoTimeout(10);
socket.connect(address, 50);
InputStreamReader input = new InputStreamReader(
socket.getInputStream());
/*String ret = "";
for(int i=0;i<)
if (!ret.equals("< hi >")) {
logger.log(Level.SEVERE, "Did not receive greeting from host.");
}*/
} catch (IOException ex) {
return false;
}
return true;
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
public Boolean checkConnection() {
Socket socket = new Socket();
InetSocketAddress address = new InetSocketAddress(host, port);
InputStreamReader input = null;
try {
socket.setSoTimeout(10);
socket.connect(address, 50);
input = new InputStreamReader(
socket.getInputStream());
String ret = "< hi >";
for(int i=0;i<6;i++) {
if(input.read() != ret.charAt(i)) {
logger.log(Level.INFO, "Could not connect to host");
return false;
}
}
} catch (IOException ex) {
logger.log(Level.INFO, "Could not connect to host", ex);
return false;
}
finally {
if(input != null) {
try {
input.close();
} catch (IOException ex) {
Logger.getLogger(BusURL.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
return true;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void subscribeTo(int id, int sec, int usec) {
synchronized(output) {
output.print("< subscribe "
+ Integer.toString(sec) + " "
+ Integer.toString(usec) + " "
+ Integer.toHexString(id) + " >");
output.flush();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void subscribeTo(int id, int sec, int usec) {
StringBuilder sb = new StringBuilder(30);
sb.append("< subscribe ");
sb.append(String.valueOf(sec));
sb.append(' ');
sb.append(String.valueOf(usec));
sb.append(' ');
sb.append(Integer.toHexString(id));
sb.append(" >");
send(sb.toString());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void subscriptionAllChanged(boolean all, Subscription s) {
/* BCM subscription switched to RAW subscription */
if (all == true) {
subscriptionsBCM.remove(s);
if(!subscriptionsRAW.contains(s))
subscriptionsRAW.add(s);
Set<Integer> ids = s.getAllIdentifiers();
for(Integer identifier : ids) {
safeUnsubscribe(identifier);
}
if(mode == TimeSource.Mode.PLAY) {
openRAWConnection();
}
/* RAW subscription switched to BCM subscription */
} else {
subscriptionsRAW.remove(s);
if(!subscriptionsBCM.contains(s))
subscriptionsBCM.add(s);
if(subscriptionsRAW.isEmpty() && rawConnection != null && rawConnection.isConnected()) {
logger.log(Level.INFO, "No more raw subscriptions. Closing connection.");
rawConnection.close();
}
/* Make sure BCM connection is opened */
if(mode == TimeSource.Mode.PLAY) {
openBCMConnection();
for(Integer identifier : s.getAllIdentifiers()) {
bcmConnection.subscribeTo(identifier, 0, 0);
}
}
for(Integer identifier : s.getAllIdentifiers()) {
subscribedIDs.add(identifier);
}
}
}
#location 23
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void subscriptionAllChanged(boolean all, Subscription s) {
/* BCM subscription switched to RAW subscription */
if (all == true) {
subscriptionsBCM.remove(s);
if(!subscriptionsRAW.contains(s))
subscriptionsRAW.add(s);
Set<Integer> ids = s.getAllIdentifiers();
for(Integer identifier : ids) {
safeUnsubscribe(identifier);
}
if(mode == TimeSource.Mode.PLAY) {
openRAWConnection();
}
/* RAW subscription switched to BCM subscription */
} else {
subscriptionsRAW.remove(s);
if(!subscriptionsBCM.contains(s))
subscriptionsBCM.add(s);
if(subscriptionsRAW.isEmpty() && rawConnection != null && rawConnection.isConnected()) {
logger.log(Level.INFO, "No more raw subscriptions. Closing connection.");
rawConnection.close();
}
/* Make sure BCM connection is opened */
if(mode == TimeSource.Mode.PLAY) {
openBCMConnection();
for(Integer identifier : s.getAllIdentifiers()) {
bcmConnection.subscribeTo(identifier, 0, 0);
}
}
synchronized(subscribedIDs) {
for(Integer identifier : s.getAllIdentifiers()) {
subscribedIDs.add(identifier);
}
}
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public Boolean checkConnection() {
Socket socket = new Socket();
InetSocketAddress address = new InetSocketAddress(host, port);
InputStreamReader input = null;
try {
socket.setSoTimeout(10);
socket.connect(address, 50);
input = new InputStreamReader(
socket.getInputStream());
String ret = "< hi >";
for(int i=0;i<6;i++) {
if(input.read() != ret.charAt(i)) {
logger.log(Level.INFO, "Could not connect to host");
return false;
}
}
} catch (IOException ex) {
logger.log(Level.INFO, "Could not connect to host", ex);
return false;
}
finally {
if(input != null) {
try {
input.close();
} catch (IOException ex) {
Logger.getLogger(BusURL.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
return true;
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
public Boolean checkConnection() {
Socket socket = new Socket();
InetSocketAddress address = new InetSocketAddress(host, port);
InputStreamReader input = null;
OutputStreamWriter output = null;
try {
socket.setSoTimeout(10);
socket.connect(address, 50);
input = new InputStreamReader(
socket.getInputStream());
output = new OutputStreamWriter(socket.getOutputStream());
String ret = "< hi >";
for(int i=0;i<6;i++) {
if(input.read() != ret.charAt(i)) {
logger.log(Level.INFO, "Could not connect to host");
return false;
}
}
output.write("< open " + bus + " >");
output.flush();
ret = "< ok >";
for(int i=0;i<6;i++) {
if(input.read() != ret.charAt(i)) {
logger.log(Level.INFO, "Could not open bus");
return false;
}
}
} catch (IOException ex) {
logger.log(Level.INFO, "Could not connect to host", ex);
return false;
}
finally {
if(input != null) {
try {
input.close();
output.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
}
}
}
return true;
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void updateSendJob(Frame f) {
synchronized(output) {
output.print("< add "
+ Integer.toHexString(f.getIdentifier()) + " "
+ Integer.toString(f.getLength()) + " "
+ Util.byteArrayToHexString(f.getData()) + " >");
output.flush();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
public void updateSendJob(Frame f) {
StringBuilder sb = new StringBuilder(40);
sb.append("< add ");
sb.append(Integer.toHexString(f.getIdentifier()));
sb.append(' ');
sb.append(Integer.toString(f.getLength()));
sb.append(' ');
sb.append(Util.byteArrayToHexString(f.getData()));
sb.append(" >");
send(sb.toString());
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
public void setPlatform(String platform) throws FileNotFoundException, IOException {
if(!platformPattern.matcher(platform).matches())
throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("PLATFORM"))) {
pw.println("PLATFORM " + platform);
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("PLATFORM " + platform);
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.platform = platform;
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the platform field of the log file header and persists the change.
 *
 * @param platform new platform value; must match {@code platformPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setPlatform(String platform) throws FileNotFoundException, IOException {
    if(!platformPattern.matcher(platform).matches())
        throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.platform = platform;
    rewriteHeader();
}
#vulnerable code
public void setPlatform(String platform) throws FileNotFoundException, IOException {
if(!platformPattern.matcher(platform).matches())
throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("PLATFORM"))) {
pw.println("PLATFORM " + platform);
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("PLATFORM " + platform);
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.platform = platform;
}
#location 12
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the platform field of the log file header and persists the change.
 *
 * @param platform new platform value; must match {@code platformPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setPlatform(String platform) throws FileNotFoundException, IOException {
    if(!platformPattern.matcher(platform).matches())
        throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.platform = platform;
    rewriteHeader();
}
#vulnerable code
@Override
public void unsubscribed(int id, Subscription s) {
if(subscriptionsBCM.contains(s)) {
safeUnsubscribe(id);
}
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Callback invoked when a subscription drops an identifier; forwards the
 * unsubscribe to the connection only for subscriptions this object manages.
 *
 * @param id CAN identifier that was unsubscribed
 * @param s  subscription that triggered the change
 */
@Override
public void unsubscribed(int id, Subscription s) {
    if(subscriptionsBCM.contains(s)) {
        safeUnsubscribe(id);
    } else {
        // Defensive: a subscription we never registered must not mutate state.
        logger.log(Level.WARNING, "Unregistered subscription tried to unsubscribe!");
    }
}
#vulnerable code
public void setConnection(BusURL url) {
disconnect();
this.url = url;
rawConnection = new RAWConnection(url);
bcmConnection = new BCMConnection(url);
rawConnection.setReceiver(rawReceiver);
bcmConnection.setReceiver(bcmReceiver);
notifyListenersConnection();
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Replaces the current connection target: closes any open connections, then
 * creates fresh RAW and BCM connections for the new URL and wires their
 * receivers before notifying listeners.
 *
 * @param url new socketcand endpoint to connect to
 */
public void setConnection(BusURL url) {
    disconnect();
    this.url = url;
    // NOTE(review): url and the connection fields are written without
    // synchronization — confirm this is only called from a single thread.
    rawConnection = new RAWConnection(url);
    bcmConnection = new BCMConnection(url);
    rawConnection.setReceiver(rawReceiver);
    bcmConnection.setReceiver(bcmReceiver);
    notifyListenersConnection();
}
#vulnerable code
public void setDescription(String description) throws FileNotFoundException, IOException {
if(!descriptionPattern.matcher(description).matches())
throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("DESCRIPTION"))) {
pw.println("DESCRIPTION \"" + description + "\"");
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("DESCRIPTION \"" + description + "\"");
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.description = description;
}
#location 10
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the description field of the log file header and persists the change.
 *
 * @param description new description; must match {@code descriptionPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setDescription(String description) throws FileNotFoundException, IOException {
    if(!descriptionPattern.matcher(description).matches())
        throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.description = description;
    rewriteHeader();
}
#vulnerable code
public void unsubscribeRange(int from, int to) {
synchronized(this) {
for (int i = from; i <= to; i++) {
if(ids.contains(i)) {
ids.remove(i);
changeReceiver.unsubscribed(i, this);
}
}
}
logStatus();
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Unsubscribes every identifier in the inclusive range [from, to] that is
 * currently subscribed, notifying the change receiver for each removal.
 *
 * @param from first identifier (inclusive)
 * @param to   last identifier (inclusive)
 */
public void unsubscribeRange(int from, int to) {
    // Lock the ids set itself so concurrent subscribe/unsubscribe calls see a
    // consistent view.
    synchronized(ids) {
        for (int i = from; i <= to; i++) {
            if(ids.contains(i)) {
                ids.remove(i);
                // NOTE(review): callback fires while holding the ids lock —
                // confirm changeReceiver never calls back into this object.
                changeReceiver.unsubscribed(i, this);
            }
        }
    }
}
#vulnerable code
public void sendFrame(Frame f) {
StringBuilder sb = new StringBuilder();
sb.append("< send ");
sb.append(Integer.toHexString(f.getIdentifier()));
sb.append(' ');
sb.append(Integer.toString(f.getLength()));
sb.append(' ');
String data = Util.byteArrayToHexString(f.getData());
for(int i=0;i<data.length();i+=2) {
sb.append(data.charAt(i));
sb.append(data.charAt(i+1));
sb.append(' ');
}
sb.append(">");
synchronized(output) {
output.print(sb.toString());
output.flush();
}
}
#location 15
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Sends a single frame immediately via the "&lt; send id length data &gt;"
 * command, with the payload formatted as space-separated hex byte pairs.
 *
 * @param f frame to transmit
 */
public void sendFrame(Frame f) {
    // Pre-size for the typical command length to avoid builder growth.
    StringBuilder sb = new StringBuilder(50);
    sb.append("< send ");
    sb.append(Integer.toHexString(f.getIdentifier()));
    sb.append(' ');
    sb.append(Integer.toString(f.getLength()));
    sb.append(' ');
    String data = Util.byteArrayToHexString(f.getData());
    // Re-chunk the hex string into "xx " byte pairs.
    for(int i=0;i<data.length();i+=2) {
        sb.append(data.charAt(i));
        sb.append(data.charAt(i+1));
        sb.append(' ');
    }
    sb.append(">");
    // send(...) is assumed to serialize access to the output stream —
    // TODO confirm.
    send(sb.toString());
}
#vulnerable code
public void setDescription(String description) throws FileNotFoundException, IOException {
if(!descriptionPattern.matcher(description).matches())
throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("DESCRIPTION"))) {
pw.println("DESCRIPTION \"" + description + "\"");
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("DESCRIPTION \"" + description + "\"");
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.description = description;
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the description field of the log file header and persists the change.
 *
 * @param description new description; must match {@code descriptionPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setDescription(String description) throws FileNotFoundException, IOException {
    if(!descriptionPattern.matcher(description).matches())
        throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.description = description;
    rewriteHeader();
}
#vulnerable code
@Override
public void subscriptionAllChanged(boolean all, Subscription s) {
/* BCM subscription switched to RAW subscription */
if (all == true) {
subscriptionsBCM.remove(s);
if(!subscriptionsRAW.contains(s))
subscriptionsRAW.add(s);
Set<Integer> ids = s.getAllIdentifiers();
for(Integer identifier : ids) {
safeUnsubscribe(identifier);
}
if(mode == TimeSource.Mode.PLAY) {
openRAWConnection();
}
/* RAW subscription switched to BCM subscription */
} else {
subscriptionsRAW.remove(s);
if(!subscriptionsBCM.contains(s))
subscriptionsBCM.add(s);
if(subscriptionsRAW.isEmpty() && rawConnection != null && rawConnection.isConnected()) {
logger.log(Level.INFO, "No more raw subscriptions. Closing connection.");
rawConnection.close();
}
/* Make sure BCM connection is opened */
if(mode == TimeSource.Mode.PLAY) {
openBCMConnection();
for(Integer identifier : s.getAllIdentifiers()) {
bcmConnection.subscribeTo(identifier, 0, 0);
}
}
for(Integer identifier : s.getAllIdentifiers()) {
subscribedIDs.add(identifier);
}
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Handles a subscription toggling between "all frames" (RAW) mode and
 * per-identifier (BCM) mode: moves it between the two tracking lists and
 * opens or closes the matching connections.
 *
 * @param all {@code true} if the subscription now wants all frames (RAW)
 * @param s   the subscription that changed
 */
@Override
public void subscriptionAllChanged(boolean all, Subscription s) {
    /* BCM subscription switched to RAW subscription */
    if (all == true) {
        subscriptionsBCM.remove(s);
        if(!subscriptionsRAW.contains(s))
            subscriptionsRAW.add(s);
        // Drop the per-identifier BCM subscriptions; RAW delivers everything.
        Set<Integer> ids = s.getAllIdentifiers();
        for(Integer identifier : ids) {
            safeUnsubscribe(identifier);
        }
        if(mode == TimeSource.Mode.PLAY) {
            openRAWConnection();
        }
    /* RAW subscription switched to BCM subscription */
    } else {
        subscriptionsRAW.remove(s);
        if(!subscriptionsBCM.contains(s))
            subscriptionsBCM.add(s);
        // Last RAW subscriber gone: the raw connection is no longer needed.
        if(subscriptionsRAW.isEmpty() && rawConnection != null && rawConnection.isConnected()) {
            logger.log(Level.INFO, "No more raw subscriptions. Closing connection.");
            rawConnection.close();
        }
        /* Make sure BCM connection is opened */
        if(mode == TimeSource.Mode.PLAY) {
            openBCMConnection();
            for(Integer identifier : s.getAllIdentifiers()) {
                bcmConnection.subscribeTo(identifier, 0, 0);
            }
        }
        // Guard the shared id set against concurrent subscribe callbacks.
        synchronized(subscribedIDs) {
            for(Integer identifier : s.getAllIdentifiers()) {
                subscribedIDs.add(identifier);
            }
        }
    }
}
#vulnerable code
public void sendFrame(Frame frame) {
/* Try to open BCM connection if not present */
if(url != null) {
openBCMConnection();
if (bcmConnection != null) {
bcmConnection.sendFrame(frame);
}
/* If no BCM connection is present we have to do loopback locally */
} else {
frame.setTimestamp(timeSource.getTime());
deliverBCMFrame(frame);
deliverRAWFrame(frame);
}
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Sends a frame on this bus: via the BCM connection when a URL is
 * configured, otherwise by looping the frame back locally to all listeners.
 *
 * @param frame frame to transmit; timestamped locally in loopback mode
 */
public void sendFrame(Frame frame) {
    /* Try to open BCM connection if not present */
    if(url != null) {
        openBCMConnection();
        if (bcmConnection != null) {
            bcmConnection.sendFrame(frame);
        }
    /* If no BCM connection is present we have to do loopback locally */
    } else {
        frame.setTimestamp(timeSource.getTime());
        deliverBCMFrame(frame);
        deliverRAWFrame(frame);
    }
}
#vulnerable code
public void setDescription(String description) throws FileNotFoundException, IOException {
if(!descriptionPattern.matcher(description).matches())
throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("DESCRIPTION"))) {
pw.println("DESCRIPTION \"" + description + "\"");
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("DESCRIPTION \"" + description + "\"");
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.description = description;
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the description field of the log file header and persists the change.
 *
 * @param description new description; must match {@code descriptionPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setDescription(String description) throws FileNotFoundException, IOException {
    if(!descriptionPattern.matcher(description).matches())
        throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.description = description;
    rewriteHeader();
}
#vulnerable code
public void setPlatform(String platform) throws FileNotFoundException, IOException {
if(!platformPattern.matcher(platform).matches())
throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("PLATFORM"))) {
pw.println("PLATFORM " + platform);
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("PLATFORM " + platform);
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.platform = platform;
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the platform field of the log file header and persists the change.
 *
 * @param platform new platform value; must match {@code platformPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setPlatform(String platform) throws FileNotFoundException, IOException {
    if(!platformPattern.matcher(platform).matches())
        throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.platform = platform;
    rewriteHeader();
}
#vulnerable code
Object readProperties(java.util.Properties p) {
if (instance == null) {
instance = this;
}
instance.readPropertiesImpl(p);
return instance;
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Serializes this component's state for settings persistence: stores a
 * format version plus the bus and project names needed to re-resolve the
 * bus on load.
 *
 * @param p properties object written to the settings store
 */
void writeProperties(java.util.Properties p) {
    p.setProperty("version", "1.0");
    p.setProperty("busName", bus.getName());
    ProjectManager manager = ProjectManager.getGlobalProjectManager();
    // NOTE(review): assumes a project is currently open — confirm
    // getOpenedProject() cannot return null here.
    p.setProperty("projectName", manager.getOpenedProject().getName());
}
#vulnerable code
public void clear() {
Integer[] identifiers = new Integer[0];
synchronized(this) {
identifiers = ids.toArray(new Integer[ids.size()]);
}
for (int i=0;i<identifiers.length;i++) {
unsubscribe(identifiers[i]);
}
logStatus();
}
#location 9
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Removes every subscribed identifier. The snapshot is taken and the set
 * cleared atomically under the ids lock; receiver callbacks then run
 * outside the lock.
 */
public void clear() {
    Integer[] identifiers = null;
    synchronized(ids) {
        identifiers = ids.toArray(new Integer[ids.size()]);
        ids.clear();
    }
    // Notify after releasing the lock to avoid callback re-entrancy under it.
    for (int i=0;i<identifiers.length;i++) {
        changeReceiver.unsubscribed(identifiers[i], this);
    }
}
#vulnerable code
@Override
public void subscribed(int id, Subscription s) {
if (subscriptionsBCM.contains(s)) {
/* Check if the ID was already subscribed in any subscription */
if(!subscribedIDs.contains(id)) {
subscribedIDs.add(id);
if (bcmConnection != null && bcmConnection.isConnected()) {
bcmConnection.subscribeTo(id, 0, 0);
} else {
logger.log(Level.WARNING, "A BCM subscription was made but no BCM connection is present");
}
}
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Callback invoked when a subscription adds an identifier. Forwards the
 * subscribe to the BCM connection only the first time any subscription
 * requests that id.
 *
 * @param id CAN identifier being subscribed
 * @param s  subscription that triggered the change
 */
@Override
public void subscribed(int id, Subscription s) {
    if (subscriptionsBCM.contains(s)) {
        /* Check if the ID was already subscribed in any subscription */
        // The check-then-add must be atomic with respect to other callers.
        synchronized(subscribedIDs) {
            if(!subscribedIDs.contains(id)) {
                subscribedIDs.add(id);
                if (bcmConnection != null && bcmConnection.isConnected()) {
                    bcmConnection.subscribeTo(id, 0, 0);
                } else {
                    logger.log(Level.WARNING, "A BCM subscription was made but no BCM connection is present");
                }
            }
        }
    } else {
        // Defensive: a subscription we never registered must not mutate state.
        logger.log(Level.WARNING, "Unregistered subscription tried to subscribe!");
    }
}
#vulnerable code
public void disconnect() {
if (rawConnection != null && rawConnection.isConnected()) {
rawConnection.close();
}
if (bcmConnection != null && bcmConnection.isConnected()) {
bcmConnection.close();
}
url = null;
notifyListenersConnection();
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Closes the RAW and BCM connections if they are open and notifies
 * listeners of the connection change. The url field is left untouched —
 * presumably so a later reconnect can reuse it; TODO confirm intent.
 */
public void disconnect() {
    if (rawConnection != null && rawConnection.isConnected()) {
        rawConnection.close();
    }
    if (bcmConnection != null && bcmConnection.isConnected()) {
        bcmConnection.close();
    }
    notifyListenersConnection();
}
#vulnerable code
public static void print(EventFrame ef) {
synchronized(io) {
createOutput();
OutputWriter out = io.getOut();
Date date = new Date();
out.write("[");
out.write(dateFormat.format(date));
out.write("] EVENT ");
out.write(ef.getMessage());
out.write("\n");
out.close();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Writes a timestamped EVENT line for the given frame to the shared
 * output window.
 *
 * @param ef event frame whose message is logged
 */
public static void print(EventFrame ef) {
    // Ensure the shared output window exists before taking its lock.
    createOutput();
    synchronized(io) {
        OutputWriter out = io.getOut();
        Date date = new Date();
        out.write("[");
        out.write(dateFormat.format(date));
        out.write("] EVENT ");
        out.write(ef.getMessage());
        out.write("\n");
        out.close();
    }
}
#vulnerable code
@Override
public void subscriptionAllChanged(boolean all, Subscription s) {
/* BCM subscription switched to RAW subscription */
if (all == true) {
subscriptionsBCM.remove(s);
if(!subscriptionsRAW.contains(s))
subscriptionsRAW.add(s);
Set<Integer> ids = s.getAllIdentifiers();
for(Integer identifier : ids) {
safeUnsubscribe(identifier);
}
if(mode == TimeSource.Mode.PLAY) {
openRAWConnection();
}
/* RAW subscription switched to BCM subscription */
} else {
subscriptionsRAW.remove(s);
if(!subscriptionsBCM.contains(s))
subscriptionsBCM.add(s);
if(subscriptionsRAW.isEmpty() && rawConnection != null && rawConnection.isConnected()) {
logger.log(Level.INFO, "No more raw subscriptions. Closing connection.");
rawConnection.close();
}
/* Make sure BCM connection is opened */
if(mode == TimeSource.Mode.PLAY) {
openBCMConnection();
for(Integer identifier : s.getAllIdentifiers()) {
bcmConnection.subscribeTo(identifier, 0, 0);
}
}
for(Integer identifier : s.getAllIdentifiers()) {
subscribedIDs.add(identifier);
}
}
}
#location 25
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Handles a subscription toggling between "all frames" (RAW) mode and
 * per-identifier (BCM) mode: moves it between the two tracking lists and
 * opens or closes the matching connections.
 *
 * @param all {@code true} if the subscription now wants all frames (RAW)
 * @param s   the subscription that changed
 */
@Override
public void subscriptionAllChanged(boolean all, Subscription s) {
    /* BCM subscription switched to RAW subscription */
    if (all == true) {
        subscriptionsBCM.remove(s);
        if(!subscriptionsRAW.contains(s))
            subscriptionsRAW.add(s);
        // Drop the per-identifier BCM subscriptions; RAW delivers everything.
        Set<Integer> ids = s.getAllIdentifiers();
        for(Integer identifier : ids) {
            safeUnsubscribe(identifier);
        }
        if(mode == TimeSource.Mode.PLAY) {
            openRAWConnection();
        }
    /* RAW subscription switched to BCM subscription */
    } else {
        subscriptionsRAW.remove(s);
        if(!subscriptionsBCM.contains(s))
            subscriptionsBCM.add(s);
        // Last RAW subscriber gone: the raw connection is no longer needed.
        if(subscriptionsRAW.isEmpty() && rawConnection != null && rawConnection.isConnected()) {
            logger.log(Level.INFO, "No more raw subscriptions. Closing connection.");
            rawConnection.close();
        }
        /* Make sure BCM connection is opened */
        if(mode == TimeSource.Mode.PLAY) {
            openBCMConnection();
            for(Integer identifier : s.getAllIdentifiers()) {
                bcmConnection.subscribeTo(identifier, 0, 0);
            }
        }
        // Guard the shared id set against concurrent subscribe callbacks.
        synchronized(subscribedIDs) {
            for(Integer identifier : s.getAllIdentifiers()) {
                subscribedIDs.add(identifier);
            }
        }
    }
}
#vulnerable code
public void setPlatform(String platform) throws FileNotFoundException, IOException {
if(!platformPattern.matcher(platform).matches())
throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("PLATFORM"))) {
pw.println("PLATFORM " + platform);
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("PLATFORM " + platform);
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.platform = platform;
}
#location 17
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the platform field of the log file header and persists the change.
 *
 * @param platform new platform value; must match {@code platformPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setPlatform(String platform) throws FileNotFoundException, IOException {
    if(!platformPattern.matcher(platform).matches())
        throw new IllegalArgumentException("Platform must match " + platformPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.platform = platform;
    rewriteHeader();
}
#vulnerable code
Object readProperties(java.util.Properties p) {
if (instance == null) {
instance = this;
}
instance.readPropertiesImpl(p);
return instance;
}
#location 3
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Serializes this component's state for settings persistence; currently
 * stores only the format version.
 *
 * @param p properties object written to the settings store
 */
void writeProperties(java.util.Properties p) {
    // better to version settings since initial version as advocated at
    // http://wiki.apidesign.org/wiki/PropertyFiles
    p.setProperty("version", "1.0");
}
#vulnerable code
public void setDescription(String description) throws FileNotFoundException, IOException {
if(!descriptionPattern.matcher(description).matches())
throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
File tempFile = new File(file.getAbsolutePath() + ".tmp");
BufferedReader br;
PrintWriter pw;
if(compressed) {
GZIPInputStream zipStream = new GZIPInputStream(new FileInputStream(file));
br = new BufferedReader(new InputStreamReader(zipStream));
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(tempFile));
pw = new PrintWriter(outStream);
} else {
br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
pw = new PrintWriter(new FileWriter(tempFile));
}
String line = null;
boolean written = false;
while ((line = br.readLine()) != null) {
/* If line is found overwrite it */
if (!written && line.startsWith(("DESCRIPTION"))) {
pw.println("DESCRIPTION \"" + description + "\"");
written = true;
/* If header has no such field add it */
} else if(!written && line.startsWith("(")) {
pw.println("DESCRIPTION \"" + description + "\"");
pw.println(line);
written = true;
/* Write all other header lines */
} else {
pw.println(line);
pw.flush();
}
}
pw.close();
br.close();
if (!file.delete()) {
logger.log(Level.WARNING, "Could not delete old file");
return;
}
if (!tempFile.renameTo(file)) {
logger.log(Level.WARNING, "Could not rename new file to old filename");
}
this.description = description;
}
#location 12
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Sets the description field of the log file header and persists the change.
 *
 * @param description new description; must match {@code descriptionPattern}
 * @throws IllegalArgumentException if the value does not match the pattern
 * @throws FileNotFoundException if the backing file cannot be opened
 * @throws IOException if rewriting the header fails
 */
public void setDescription(String description) throws FileNotFoundException, IOException {
    if(!descriptionPattern.matcher(description).matches())
        throw new IllegalArgumentException("Description must match " + descriptionPattern.pattern());
    // Update in-memory state first, then delegate the on-disk update to the
    // shared header rewriter (assumed to handle compressed and plain files —
    // TODO confirm).
    this.description = description;
    rewriteHeader();
}
#vulnerable code
public static void printInfo(String info) {
synchronized(io) {
createOutput();
OutputWriter out = io.getOut();
Date date = new Date();
out.write("[");
out.write(dateFormat.format(date));
out.write("] INFO ");
out.write(info);
out.write("\n");
out.close();
}
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
/**
 * Writes a timestamped INFO line to the shared output window.
 *
 * @param info message text to log
 */
public static void printInfo(String info) {
    // Ensure the shared output window exists before taking its lock.
    createOutput();
    synchronized(io) {
        OutputWriter out = io.getOut();
        Date date = new Date();
        out.write("[");
        out.write(dateFormat.format(date));
        out.write("] INFO ");
        out.write(info);
        out.write("\n");
        out.close();
    }
}
#vulnerable code
public void waitForReady() {
while (true) {
System.out.println("Waiting for SonarQube to be available at " + getUrl());
try {
HttpURLConnection conn = (HttpURLConnection)getUrl("/api/settings/values.protobuf").openConnection();
conn.connect();
int code = conn.getResponseCode();
if (code == 200) {
break;
}
} catch (IOException e) {
/* noop */
}
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
/* noop */
}
}
System.out.println("SonarQube is ready");
}
#location 7
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Blocks until the SonarQube instance answers its readiness probe,
 * polling every 5 seconds for at most 3 minutes.
 *
 * @throws MalformedURLException if the server URL cannot be built
 */
public void waitForReady() throws MalformedURLException {
    LOGGER.info("Waiting for SonarQube to be available at {}", getUrl());
    Awaitility
            .await("SonarQube is ready")
            .atMost(3, TimeUnit.MINUTES)
            .pollInterval(5, TimeUnit.SECONDS)
            // Connection refused just means the server is still starting up.
            .ignoreExceptionsInstanceOf(ConnectException.class)
            .until(this::isReady)
            ;
}
#vulnerable code
public int run(String[] args) throws Exception {
final FileSystem fs = FileSystem.get(getConf());
Options options = new Options();
// automatically generate the help statement
HelpFormatter formatter = new HelpFormatter();
// create the parser
CommandLineParser parser = new GnuParser();
options.addOption("h", "help", false, "print this message");
options.addOption("i", "input", true, "input file or directory");
options.addOption("o", "output", true, "output Behemoth corpus");
Path inputPath = null;
Path outputPath = null;
// parse the command line arguments
CommandLine cmdLine = null;
try {
cmdLine = parser.parse(options, args);
String input = cmdLine.getOptionValue("i");
String output = cmdLine.getOptionValue("o");
if (cmdLine.hasOption("help")) {
formatter.printHelp("LanguageIdDriver", options);
return 0;
}
if (input == null | output == null) {
formatter.printHelp("LanguageIdDriver", options);
return -1;
}
inputPath = new Path(input);
outputPath = new Path(output);
} catch (ParseException e) {
formatter.printHelp("LanguageIdDriver", options);
}
// check whether needs overwriting
if (fs.exists(outputPath)){
System.out.println("Output path "+outputPath + " already exists. Overwrite [y/n]?");
InputStreamReader inp = new InputStreamReader(System.in);
BufferedReader br = new BufferedReader(inp);
String str = br.readLine();
br.close();
if (str.equals("y")){
fs.delete(outputPath, true);
}
else return 0;
}
JobConf job = new JobConf(getConf());
job.setJarByClass(this.getClass());
job.setJobName("Processing with Language Identifier");
job.setInputFormat(SequenceFileInputFormat.class);
job.setOutputFormat(SequenceFileOutputFormat.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(BehemothDocument.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(BehemothDocument.class);
job.setMapperClass(LanguageIdMapper.class);
job.setNumReduceTasks(0);
FileInputFormat.addInputPath(job, inputPath);
FileOutputFormat.setOutputPath(job, outputPath);
try {
JobClient.runJob(job);
} catch (Exception e) {
e.printStackTrace();
fs.delete(outputPath, true);
} finally {
}
return 0;
}
#location 26
#vulnerability type RESOURCE_LEAK | #fixed code
/**
 * Tool entry point: configures and runs a Hadoop job that tags each
 * document of a Behemoth corpus with its detected language.
 *
 * Command line: -i input path, -o output path, -w overwrite existing
 * output, -h help.
 *
 * @param args command-line arguments
 * @return 0 on success or user abort, -1 on missing required arguments
 * @throws Exception on unrecoverable job-setup failures
 */
public int run(String[] args) throws Exception {
    final FileSystem fs = FileSystem.get(getConf());
    Options options = new Options();
    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    // create the parser
    CommandLineParser parser = new GnuParser();
    options.addOption("h", "help", false, "print this message");
    options.addOption("i", "input", true, "input file or directory");
    options.addOption("o", "output", true, "output Behemoth corpus");
    options.addOption("w", "overwrite", false, "overwrite the output");
    Path inputPath = null;
    Path outputPath = null;
    boolean overWrite = false;
    // parse the command line arguments
    CommandLine cmdLine = null;
    try {
        cmdLine = parser.parse(options, args);
        String input = cmdLine.getOptionValue("i");
        String output = cmdLine.getOptionValue("o");
        if (cmdLine.hasOption("help")) {
            formatter.printHelp("LanguageIdDriver", options);
            return 0;
        }
        // NOTE(review): non-short-circuit '|' works here but '||' is the
        // conventional operator for this null check.
        if (input == null | output == null) {
            formatter.printHelp("LanguageIdDriver", options);
            return -1;
        }
        inputPath = new Path(input);
        outputPath = new Path(output);
        if (cmdLine.hasOption("overwrite")) {
            overWrite = true;
        }
    } catch (ParseException e) {
        // NOTE(review): falls through with null outputPath and will NPE at
        // fs.exists below — consider returning here.
        formatter.printHelp("LanguageIdDriver", options);
    }
    // check whether needs overwriting
    if (fs.exists(outputPath)) {
        if (!overWrite) {
            System.out.println("Output path " + outputPath
                    + " already exists. Use option -w to overwrite.");
            return 0;
        } else
            fs.delete(outputPath, true);
    }
    JobConf job = new JobConf(getConf());
    job.setJarByClass(this.getClass());
    job.setJobName("Processing with Language Identifier");
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(BehemothDocument.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(BehemothDocument.class);
    job.setMapperClass(LanguageIdMapper.class);
    // TODO make this optional based on presence of parameters
    job.setReducerClass(BehemothReducer.class);
    FileInputFormat.addInputPath(job, inputPath);
    FileOutputFormat.setOutputPath(job, outputPath);
    try {
        JobClient.runJob(job);
    } catch (Exception e) {
        e.printStackTrace();
        // Clean up partial output so a rerun starts fresh.
        fs.delete(outputPath, true);
    } finally {
    }
    return 0;
}
#vulnerable code
public void map(IntWritable key, WeightedVectorWritable value,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
Vector v = value.getVector();
if (v instanceof NamedVector) {
String name = ((NamedVector) v).getName();
if (name != null & name.length() > 2)
output.collect(new Text(name), new Text(key.toString()));
else
reporter.incrCounter(CLUSTER_DOC_ID_DUMPER, "Missing name", 1);
} else
reporter.incrCounter(CLUSTER_DOC_ID_DUMPER, "Unnamed vector", 1);
}
#location 7
#vulnerability type NULL_DEREFERENCE | #fixed code
public void map(IntWritable key, WeightedVectorWritable value,
OutputCollector<Text, Text> output, Reporter reporter)
throws IOException {
Vector v = value.getVector();
if (v instanceof NamedVector) {
String name = ((NamedVector) v).getName();
if (name != null && name.length() > 2)
output.collect(new Text(name), new Text(key.toString()));
else
reporter.incrCounter(CLUSTER_DOC_ID_DUMPER, "Missing name", 1);
} else
reporter.incrCounter(CLUSTER_DOC_ID_DUMPER, "Unnamed vector", 1);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public Response getNonOAuth(String path, Map<String, String> parameters) {
InputStream in = null;
try {
URL url = UrlUtilities.buildUrl(getScheme(), getHost(), getPort(), path, parameters);
if (Flickr.debugRequest) {
logger.debug("GET: " + url);
}
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
if (proxyAuth) {
conn.setRequestProperty("Proxy-Authorization", "Basic " + getProxyCredentials());
}
setTimeouts(conn);
conn.connect();
if (Flickr.debugStream) {
in = new DebugInputStream(conn.getInputStream(), System.out);
} else {
in = conn.getInputStream();
}
Response response;
synchronized (mutex) {
Document document = builder.parse(in);
response = (Response) responseClass.newInstance();
response.parse(document);
}
return response;
} catch (IllegalAccessException | SAXException | IOException | InstantiationException e) {
throw new FlickrRuntimeException(e);
} finally {
IOUtilities.close(in);
}
}
#location 12
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public Response getNonOAuth(String path, Map<String, String> parameters) {
InputStream in = null;
try {
URL url = UrlUtilities.buildUrl(getScheme(), getHost(), getPort(), path, parameters);
if (Flickr.debugRequest) {
logger.debug("GET: " + url);
}
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
if (proxyAuth) {
conn.setRequestProperty("Proxy-Authorization", "Basic " + getProxyCredentials());
}
setTimeouts(conn);
conn.connect();
if (Flickr.debugStream) {
in = new DebugInputStream(conn.getInputStream(), System.out);
} else {
in = conn.getInputStream();
}
Response response;
DocumentBuilder builder = getDocumentBuilder();
Document document = builder.parse(in);
response = (Response) responseClass.newInstance();
response.parse(document);
return response;
} catch (IllegalAccessException | SAXException | IOException | InstantiationException | ParserConfigurationException e) {
throw new FlickrRuntimeException(e);
} finally {
IOUtilities.close(in);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public com.flickr4java.flickr.Response get(String path, Map<String, Object> parameters, String apiKey, String sharedSecret) throws FlickrException {
OAuthRequest request = new OAuthRequest(Verb.GET, getScheme() + "://" + getHost() + path);
for (Map.Entry<String, Object> entry : parameters.entrySet()) {
request.addQuerystringParameter(entry.getKey(), String.valueOf(entry.getValue()));
}
if (proxyAuth) {
request.addHeader("Proxy-Authorization", "Basic " + getProxyCredentials());
}
RequestContext requestContext = RequestContext.getRequestContext();
Auth auth = requestContext.getAuth();
if (auth != null) {
Token requestToken = new Token(auth.getToken(), auth.getTokenSecret());
OAuthService service = createOAuthService(parameters, apiKey, sharedSecret);
service.signRequest(requestToken, request);
} else {
// For calls that do not require authorization e.g. flickr.people.findByUsername which could be the
// first call if the user did not supply the user-id (i.e. nsid).
if (!parameters.containsKey(Flickr.API_KEY)) {
request.addQuerystringParameter(Flickr.API_KEY, apiKey);
}
}
if (Flickr.debugRequest) {
logger.debug("GET: " + request.getCompleteUrl());
}
setTimeouts(request);
org.scribe.model.Response scribeResponse = request.send();
try {
com.flickr4java.flickr.Response response = null;
synchronized (mutex) {
String strXml = scribeResponse.getBody().trim();
if (Flickr.debugStream) {
logger.debug(strXml);
}
if (strXml.startsWith("oauth_problem=")) {
throw new FlickrRuntimeException(strXml);
}
Document document = builder.parse(new InputSource(new StringReader(strXml)));
response = (com.flickr4java.flickr.Response) responseClass.newInstance();
response.parse(document);
}
return response;
} catch (IllegalAccessException e) {
throw new FlickrRuntimeException(e);
} catch (InstantiationException e) {
throw new FlickrRuntimeException(e);
} catch (SAXException e) {
throw new FlickrRuntimeException(e);
} catch (IOException e) {
throw new FlickrRuntimeException(e);
}
}
#location 30
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public com.flickr4java.flickr.Response get(String path, Map<String, Object> parameters, String apiKey, String sharedSecret) throws FlickrException {
OAuthRequest request = new OAuthRequest(Verb.GET, getScheme() + "://" + getHost() + path);
for (Map.Entry<String, Object> entry : parameters.entrySet()) {
request.addQuerystringParameter(entry.getKey(), String.valueOf(entry.getValue()));
}
if (proxyAuth) {
request.addHeader("Proxy-Authorization", "Basic " + getProxyCredentials());
}
RequestContext requestContext = RequestContext.getRequestContext();
Auth auth = requestContext.getAuth();
OAuth1AccessToken requestToken = new OAuth1AccessToken(auth.getToken(), auth.getTokenSecret());
OAuth10aService service = createOAuthService(apiKey, sharedSecret);
if (auth != null) {
service.signRequest(requestToken, request);
} else {
// For calls that do not require authorization e.g. flickr.people.findByUsername which could be the
// first call if the user did not supply the user-id (i.e. nsid).
if (!parameters.containsKey(Flickr.API_KEY)) {
request.addQuerystringParameter(Flickr.API_KEY, apiKey);
}
}
if (Flickr.debugRequest) {
logger.debug("GET: " + request.getCompleteUrl());
}
setTimeouts(request);
try {
com.github.scribejava.core.model.Response scribeResponse = service.execute(request);
com.flickr4java.flickr.Response f4jResponse;
synchronized (mutex) {
String strXml = scribeResponse.getBody().trim();
if (Flickr.debugStream) {
logger.debug(strXml);
}
if (strXml.startsWith("oauth_problem=")) {
throw new FlickrRuntimeException(strXml);
}
Document document = builder.parse(new InputSource(new StringReader(strXml)));
f4jResponse = (com.flickr4java.flickr.Response) responseClass.newInstance();
f4jResponse.parse(document);
}
return f4jResponse;
} catch (IllegalAccessException | InstantiationException | SAXException | IOException | InterruptedException | ExecutionException e) {
throw new FlickrRuntimeException(e);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public Response getNonOAuth(String path, Map<String, String> parameters) {
InputStream in = null;
try {
URL url = UrlUtilities.buildUrl(getScheme(), getHost(), getPort(), path, parameters);
if (Flickr.debugRequest) {
logger.debug("GET: " + url);
}
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
if (proxyAuth) {
conn.setRequestProperty("Proxy-Authorization", "Basic " + getProxyCredentials());
}
setTimeouts(conn);
conn.connect();
if (Flickr.debugStream) {
in = new DebugInputStream(conn.getInputStream(), System.out);
} else {
in = conn.getInputStream();
}
Response response;
synchronized (mutex) {
Document document = builder.parse(in);
response = (Response) responseClass.newInstance();
response.parse(document);
}
return response;
} catch (IllegalAccessException | SAXException | IOException | InstantiationException e) {
throw new FlickrRuntimeException(e);
} finally {
IOUtilities.close(in);
}
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public Response getNonOAuth(String path, Map<String, String> parameters) {
InputStream in = null;
try {
URL url = UrlUtilities.buildUrl(getScheme(), getHost(), getPort(), path, parameters);
if (Flickr.debugRequest) {
logger.debug("GET: " + url);
}
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
if (proxyAuth) {
conn.setRequestProperty("Proxy-Authorization", "Basic " + getProxyCredentials());
}
setTimeouts(conn);
conn.connect();
if (Flickr.debugStream) {
in = new DebugInputStream(conn.getInputStream(), System.out);
} else {
in = conn.getInputStream();
}
Response response;
DocumentBuilder builder = getDocumentBuilder();
Document document = builder.parse(in);
response = (Response) responseClass.newInstance();
response.parse(document);
return response;
} catch (IllegalAccessException | SAXException | IOException | InstantiationException | ParserConfigurationException e) {
throw new FlickrRuntimeException(e);
} finally {
IOUtilities.close(in);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public Response getNonOAuth(String path, Map<String, String> parameters) {
InputStream in = null;
try {
URL url = UrlUtilities.buildUrl(getScheme(), getHost(), getPort(), path, parameters);
if (Flickr.debugRequest) {
logger.debug("GET: " + url);
}
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
if (proxyAuth) {
conn.setRequestProperty("Proxy-Authorization", "Basic " + getProxyCredentials());
}
setTimeouts(conn);
conn.connect();
if (Flickr.debugStream) {
in = new DebugInputStream(conn.getInputStream(), System.out);
} else {
in = conn.getInputStream();
}
Response response;
synchronized (mutex) {
Document document = builder.parse(in);
response = (Response) responseClass.newInstance();
response.parse(document);
}
return response;
} catch (IllegalAccessException | SAXException | IOException | InstantiationException e) {
throw new FlickrRuntimeException(e);
} finally {
IOUtilities.close(in);
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public Response getNonOAuth(String path, Map<String, String> parameters) {
InputStream in = null;
try {
URL url = UrlUtilities.buildUrl(getScheme(), getHost(), getPort(), path, parameters);
if (Flickr.debugRequest) {
logger.debug("GET: " + url);
}
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
if (proxyAuth) {
conn.setRequestProperty("Proxy-Authorization", "Basic " + getProxyCredentials());
}
setTimeouts(conn);
conn.connect();
if (Flickr.debugStream) {
in = new DebugInputStream(conn.getInputStream(), System.out);
} else {
in = conn.getInputStream();
}
Response response;
DocumentBuilder builder = getDocumentBuilder();
Document document = builder.parse(in);
response = (Response) responseClass.newInstance();
response.parse(document);
return response;
} catch (IllegalAccessException | SAXException | IOException | InstantiationException | ParserConfigurationException e) {
throw new FlickrRuntimeException(e);
} finally {
IOUtilities.close(in);
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
lock.writeLock().unlock();
}
}
#location 12
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
writeLock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
writeUnlock();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
@JsonIgnore
public Set<Aggregation> getAggregations() {
return getInnerQuery().getAggregations();
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
@JsonIgnore
public Set<Aggregation> getAggregations() {
return getInnerQueryUnchecked().getAggregations();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
@JsonIgnore
public List<Interval> getIntervals() {
return getInnerQuery().getIntervals();
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
@JsonIgnore
public List<Interval> getIntervals() {
return getInnerQueryUnchecked().getIntervals();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(LUCENE_ANALYZER).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
} finally {
lock.writeLock().unlock();
}
reopenIndexSearcher(true);
refreshCardinality();
}
#location 44
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(LUCENE_ANALYZER).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
lock.writeLock().unlock();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public LookbackQuery withIntervals(Collection<Interval> intervals) {
return withDataSource(new QueryDataSource(getInnerQuery().withIntervals(intervals)));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public LookbackQuery withIntervals(Collection<Interval> intervals) {
return withDataSource(new QueryDataSource(getInnerQueryUnchecked().withIntervals(intervals)));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
@JsonIgnore
public Filter getFilter() {
return getInnerQuery().getFilter();
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
@JsonIgnore
public Filter getFilter() {
return getInnerQueryUnchecked().getFilter();
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public LookbackQuery withFilter(Filter filter) {
return withDataSource(new QueryDataSource(getInnerQuery().withFilter(filter)));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public LookbackQuery withFilter(Filter filter) {
return withDataSource(new QueryDataSource(getInnerQueryUnchecked().withFilter(filter)));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
protected Pagination<DimensionRow> getResultsPage(Query query, PaginationParameters paginationParameters)
throws PageNotFoundException {
int perPage = paginationParameters.getPerPage();
validatePerPage(perPage);
TreeSet<DimensionRow> filteredDimRows;
int documentCount;
initializeIndexSearcher();
LOG.trace("Lucene Query {}", query);
lock.readLock().lock();
try {
ScoreDoc[] hits;
try (TimedPhase timer = RequestLog.startTiming("QueryingLucene")) {
TopDocs hitDocs = getPageOfData(
luceneIndexSearcher,
null,
query,
perPage
);
hits = hitDocs.scoreDocs;
// The change to supprt long document sizes is incompletely supported in Lucene
// Since we can't request up to long documents we'll only expect to receive up to Integer.MAX_VALUE
// responses, and throw an error if we exceed that.
if (hitDocs.totalHits > Integer.MAX_VALUE) {
String message = String.format(TOO_MANY_DOCUMENTS, hitDocs.totalHits);
RowLimitReachedException exception = new RowLimitReachedException(message);
LOG.error(exception.getMessage(), exception);
throw exception;
}
documentCount = (int) hitDocs.totalHits;
int requestedPageNumber = paginationParameters.getPage(documentCount);
if (hits.length == 0) {
if (requestedPageNumber == 1) {
return new SinglePagePagination<>(Collections.emptyList(), paginationParameters, 0);
} else {
throw new PageNotFoundException(requestedPageNumber, perPage, 0);
}
}
for (int currentPage = 1; currentPage < requestedPageNumber; currentPage++) {
ScoreDoc lastEntry = hits[hits.length - 1];
hits = getPageOfData(luceneIndexSearcher, lastEntry, query, perPage).scoreDocs;
if (hits.length == 0) {
throw new PageNotFoundException(requestedPageNumber, perPage, 0);
}
}
}
// convert hits to dimension rows
try (TimedPhase timer = RequestLog.startTiming("LuceneHydratingDimensionRows")) {
String idKey = DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName());
filteredDimRows = Arrays.stream(hits)
.map(
hit -> {
try {
return luceneIndexSearcher.doc(hit.doc);
} catch (IOException e) {
LOG.error("Unable to convert hit " + hit);
throw new RuntimeException(e);
}
}
)
.map(document -> document.get(idKey))
.map(dimension::findDimensionRowByKeyValue)
.collect(Collectors.toCollection(TreeSet::new));
}
} finally {
lock.readLock().unlock();
}
return new SinglePagePagination<>(
Collections.unmodifiableList(filteredDimRows.stream().collect(Collectors.toList())),
paginationParameters,
documentCount
);
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
protected Pagination<DimensionRow> getResultsPage(Query query, PaginationParameters paginationParameters)
throws PageNotFoundException {
int perPage = paginationParameters.getPerPage();
validatePerPage(perPage);
TreeSet<DimensionRow> filteredDimRows;
int documentCount;
initializeIndexSearcher();
LOG.trace("Lucene Query {}", query);
readLock();
try {
ScoreDoc[] hits;
try (TimedPhase timer = RequestLog.startTiming("QueryingLucene")) {
TopDocs hitDocs = getPageOfData(
luceneIndexSearcher,
null,
query,
perPage
);
hits = hitDocs.scoreDocs;
// The change to supprt long document sizes is incompletely supported in Lucene
// Since we can't request up to long documents we'll only expect to receive up to Integer.MAX_VALUE
// responses, and throw an error if we exceed that.
if (hitDocs.totalHits > Integer.MAX_VALUE) {
String message = String.format(TOO_MANY_DOCUMENTS, hitDocs.totalHits);
RowLimitReachedException exception = new RowLimitReachedException(message);
LOG.error(exception.getMessage(), exception);
throw exception;
}
documentCount = (int) hitDocs.totalHits;
int requestedPageNumber = paginationParameters.getPage(documentCount);
if (hits.length == 0) {
if (requestedPageNumber == 1) {
return new SinglePagePagination<>(Collections.emptyList(), paginationParameters, 0);
} else {
throw new PageNotFoundException(requestedPageNumber, perPage, 0);
}
}
for (int currentPage = 1; currentPage < requestedPageNumber; currentPage++) {
ScoreDoc lastEntry = hits[hits.length - 1];
hits = getPageOfData(luceneIndexSearcher, lastEntry, query, perPage).scoreDocs;
if (hits.length == 0) {
throw new PageNotFoundException(requestedPageNumber, perPage, 0);
}
}
}
// convert hits to dimension rows
try (TimedPhase timer = RequestLog.startTiming("LuceneHydratingDimensionRows")) {
String idKey = DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName());
filteredDimRows = Arrays.stream(hits)
.map(
hit -> {
try {
return luceneIndexSearcher.doc(hit.doc);
} catch (IOException e) {
LOG.error("Unable to convert hit " + hit);
throw new RuntimeException(e);
}
}
)
.map(document -> document.get(idKey))
.map(dimension::findDimensionRowByKeyValue)
.collect(Collectors.toCollection(TreeSet::new));
}
} finally {
readUnlock();
}
return new SinglePagePagination<>(
Collections.unmodifiableList(filteredDimRows.stream().collect(Collectors.toList())),
paginationParameters,
documentCount
);
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
lock.writeLock().unlock();
}
}
#location 14
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
writeLock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
writeUnlock();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void clearDimension() {
Set<DimensionRow> dimensionRows = findAllDimensionRows();
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try {
try (IndexWriter writer = new IndexWriter(luceneDirectory, indexWriterConfig)) {
//Remove all dimension data from the store.
String rowId = dimension.getKey().getName();
dimensionRows.stream()
.map(DimensionRow::getRowMap)
.map(map -> map.get(rowId))
.map(id -> DimensionStoreKeyUtils.getRowKey(rowId, id))
.forEach(keyValueStore::remove);
//Since Lucene's indices are being dropped, the dimension field stored via the columnKey is becoming
//stale.
keyValueStore.remove(DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName()));
//The allValues key mapping needs to reflect the fact that we are dropping all dimension data.
keyValueStore.put(DimensionStoreKeyUtils.getAllValuesKey(), "[]");
//We're resetting the keyValueStore, so we don't want any stale last updated date floating around.
keyValueStore.remove(DimensionStoreKeyUtils.getLastUpdatedKey());
//In addition to clearing the keyValueStore, we also need to delete all of Lucene's segment files.
writer.deleteAll();
writer.commit();
} catch (IOException e) {
LOG.error(ErrorMessageFormat.FAIL_TO_WIPTE_LUCENE_INDEX_DIR.format(luceneDirectory));
throw new RuntimeException(e);
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
lock.writeLock().unlock();
}
}
#location 4
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void clearDimension() {
Set<DimensionRow> dimensionRows = findAllDimensionRows();
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
writeLock();
try {
try (IndexWriter writer = new IndexWriter(luceneDirectory, indexWriterConfig)) {
//Remove all dimension data from the store.
String rowId = dimension.getKey().getName();
dimensionRows.stream()
.map(DimensionRow::getRowMap)
.map(map -> map.get(rowId))
.map(id -> DimensionStoreKeyUtils.getRowKey(rowId, id))
.forEach(keyValueStore::remove);
//Since Lucene's indices are being dropped, the dimension field stored via the columnKey is becoming
//stale.
keyValueStore.remove(DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName()));
//The allValues key mapping needs to reflect the fact that we are dropping all dimension data.
keyValueStore.put(DimensionStoreKeyUtils.getAllValuesKey(), "[]");
//We're resetting the keyValueStore, so we don't want any stale last updated date floating around.
keyValueStore.remove(DimensionStoreKeyUtils.getLastUpdatedKey());
//In addition to clearing the keyValueStore, we also need to delete all of Lucene's segment files.
writer.deleteAll();
writer.commit();
} catch (IOException e) {
LOG.error(ErrorMessageFormat.FAIL_TO_WIPTE_LUCENE_INDEX_DIR.format(luceneDirectory));
throw new RuntimeException(e);
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
writeUnlock();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public LookbackQuery withAggregations(Collection<Aggregation> aggregations) {
return withDataSource(new QueryDataSource(getInnerQuery().withAggregations(aggregations)));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public LookbackQuery withAggregations(Collection<Aggregation> aggregations) {
return withDataSource(new QueryDataSource(getInnerQueryUnchecked().withAggregations(aggregations)));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
lock.writeLock().unlock();
}
}
#location 23
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void refreshIndex(Map<String, Pair<DimensionRow, DimensionRow>> changedRows) {
// Make a single Document instance to hold field data being updated to Lucene
// Creating documents is costly and so Document will be reused for each record being processed due to
// performance best practices.
Document doc = new Document();
Map<DimensionField, Field> dimFieldToLuceneField = new HashMap<>(dimension.getDimensionFields().size());
// Create the document fields for this dimension and add them to the document
for (DimensionField dimensionField : dimension.getDimensionFields()) {
Field luceneField = new StringField(
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()),
"",
dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO
);
// Store the lucene field in the doc and in our lookup map
dimFieldToLuceneField.put(dimensionField, luceneField);
doc.add(luceneField);
}
// Write the rows to the document
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
writeLock();
try {
try (IndexWriter luceneIndexWriter = new IndexWriter(luceneDirectory, indexWriterConfig)) {
// Update the document fields for each row and update the document
for (String rowId : changedRows.keySet()) {
// Get the new row from the pair
DimensionRow newDimensionRow = changedRows.get(rowId).getKey();
// Update the index
updateDimensionRow(doc, dimFieldToLuceneField, luceneIndexWriter, newDimensionRow);
}
} catch (IOException e) {
luceneIndexIsHealthy = false;
LOG.error("Failed to refresh index for dimension rows", e);
throw new RuntimeException(e);
// Commit all the changes to the index (on .close, called by try-resources) and refresh the cardinality
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
writeUnlock();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public void clearDimension() {
Set<DimensionRow> dimensionRows = findAllDimensionRows();
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
lock.writeLock().lock();
try {
try (IndexWriter writer = new IndexWriter(luceneDirectory, indexWriterConfig)) {
//Remove all dimension data from the store.
String rowId = dimension.getKey().getName();
dimensionRows.stream()
.map(DimensionRow::getRowMap)
.map(map -> map.get(rowId))
.map(id -> DimensionStoreKeyUtils.getRowKey(rowId, id))
.forEach(keyValueStore::remove);
//Since Lucene's indices are being dropped, the dimension field stored via the columnKey is becoming
//stale.
keyValueStore.remove(DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName()));
//The allValues key mapping needs to reflect the fact that we are dropping all dimension data.
keyValueStore.put(DimensionStoreKeyUtils.getAllValuesKey(), "[]");
//We're resetting the keyValueStore, so we don't want any stale last updated date floating around.
keyValueStore.remove(DimensionStoreKeyUtils.getLastUpdatedKey());
//In addition to clearing the keyValueStore, we also need to delete all of Lucene's segment files.
writer.deleteAll();
writer.commit();
} catch (IOException e) {
LOG.error(ErrorMessageFormat.FAIL_TO_WIPTE_LUCENE_INDEX_DIR.format(luceneDirectory));
throw new RuntimeException(e);
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
lock.writeLock().unlock();
}
}
#location 18
#vulnerability type THREAD_SAFETY_VIOLATION | #fixed code
@Override
public void clearDimension() {
Set<DimensionRow> dimensionRows = findAllDimensionRows();
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer).setRAMBufferSizeMB(BUFFER_SIZE);
writeLock();
try {
try (IndexWriter writer = new IndexWriter(luceneDirectory, indexWriterConfig)) {
//Remove all dimension data from the store.
String rowId = dimension.getKey().getName();
dimensionRows.stream()
.map(DimensionRow::getRowMap)
.map(map -> map.get(rowId))
.map(id -> DimensionStoreKeyUtils.getRowKey(rowId, id))
.forEach(keyValueStore::remove);
//Since Lucene's indices are being dropped, the dimension field stored via the columnKey is becoming
//stale.
keyValueStore.remove(DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName()));
//The allValues key mapping needs to reflect the fact that we are dropping all dimension data.
keyValueStore.put(DimensionStoreKeyUtils.getAllValuesKey(), "[]");
//We're resetting the keyValueStore, so we don't want any stale last updated date floating around.
keyValueStore.remove(DimensionStoreKeyUtils.getLastUpdatedKey());
//In addition to clearing the keyValueStore, we also need to delete all of Lucene's segment files.
writer.deleteAll();
writer.commit();
} catch (IOException e) {
LOG.error(ErrorMessageFormat.FAIL_TO_WIPTE_LUCENE_INDEX_DIR.format(luceneDirectory));
throw new RuntimeException(e);
}
//This must be outside the try-resources block because it may _also_ need to open an IndexWriter, and
//opening an IndexWriter involves taking a write lock on lucene, of which there can only be one at a time.
reopenIndexSearcher(true);
refreshCardinality();
} finally {
writeUnlock();
}
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
public LookbackQuery withGranularity(Granularity granularity) {
return withDataSource(new QueryDataSource(getInnerQuery().withGranularity(granularity)));
}
#location 3
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
public LookbackQuery withGranularity(Granularity granularity) {
return withDataSource(new QueryDataSource(getInnerQueryUnchecked().withGranularity(granularity)));
} | Below is the vulnerable code, please generate the patch based on the following information. |
#vulnerable code
@Override
@JsonIgnore
public Collection<Dimension> getDimensions() {
return getInnerQuery().getDimensions();
}
#location 4
#vulnerability type NULL_DEREFERENCE | #fixed code
@Override
@JsonIgnore
public Collection<Dimension> getDimensions() {
return getInnerQueryUnchecked().getDimensions();
} | Below is the vulnerable code, please generate the patch based on the following information. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.