output
stringlengths 64
73.2k
| input
stringlengths 208
73.3k
| instruction
stringclasses 1
value |
---|---|---|
#fixed code
@Test
void resolveFieldOfEnumAsInternalClassOfInterfaceUnqualifiedDifferentPackage() throws IOException {
File src = new File("src/test/resources/internalClassInInterface");
File aClass = new File(src.getPath() + File.separator + "foo" + File.separator + "bar"
+ File.separator + "differentpackage" + File.separator + "AClass2.java");
CombinedTypeSolver localCts = new CombinedTypeSolver();
localCts.add(new ReflectionTypeSolver());
localCts.add(new JavaParserTypeSolver(src));
ParserConfiguration parserConfiguration = new ParserConfiguration().setSymbolResolver(new JavaSymbolSolver(localCts));
JavaParser parser = new JavaParser(parserConfiguration);
StreamProvider classProvider = new StreamProvider(new FileInputStream(aClass), StandardCharsets.UTF_8);
CompilationUnit cu = parser.parse(ParseStart.COMPILATION_UNIT, classProvider).getResult().get();
Optional<FieldAccessExpr> fae = cu.findFirst(FieldAccessExpr.class, n -> n.toString().equals("AnInterface.ListChangeType.ADDITION") && n.getRange().get().begin.line == 6);
assertTrue(fae.isPresent());
assertEquals("foo.bar.AnInterface.ListChangeType", fae.get().resolve().getType().describe());
assertEquals("ADDITION", fae.get().resolve().getName());
} | #vulnerable code
@Test
void resolveFieldOfEnumAsInternalClassOfInterfaceUnqualifiedDifferentPackage() throws IOException {
File src = new File("src/test/resources/internalClassInInterface");
File aClass = new File(src.getPath() + File.separator + "foo" + File.separator + "bar"
+ File.separator + "differentpackage" + File.separator + "AClass2.java");
CombinedTypeSolver localCts = new CombinedTypeSolver();
localCts.add(new ReflectionTypeSolver());
localCts.add(new JavaParserTypeSolver(src));
ParserConfiguration parserConfiguration = new ParserConfiguration().setSymbolResolver(new JavaSymbolSolver(localCts));
JavaParser parser = new JavaParser(parserConfiguration);
StreamProvider classProvider = new StreamProvider(new FileInputStream(aClass));
CompilationUnit cu = parser.parse(ParseStart.COMPILATION_UNIT, classProvider).getResult().get();
Optional<FieldAccessExpr> fae = cu.findFirst(FieldAccessExpr.class, n -> n.toString().equals("AnInterface.ListChangeType.ADDITION") && n.getRange().get().begin.line == 6);
assertTrue(fae.isPresent());
assertEquals("foo.bar.AnInterface.ListChangeType", fae.get().resolve().getType().describe());
assertEquals("ADDITION", fae.get().resolve().getName());
}
#location 15
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void issue506() throws IOException {
ensureAllNodesHaveValidBeginPosition( "SourcesHelperOldVersion.java.txt" );
} | #vulnerable code
@Test
public void issue506() throws IOException {
InputStream is = this.getClass().getResourceAsStream("/com/github/javaparser/SourcesHelperOldVersion.java.txt");
ParseResult<CompilationUnit> res = new JavaParser().parse(ParseStart.COMPILATION_UNIT, new StreamProvider(is));
assertTrue(res.getProblems().isEmpty());
CompilationUnit cu = res.getResult().get();
getAllNodes(cu).forEach(n -> {
if (n.getRange() == null) {
throw new IllegalArgumentException("There should be no node without a range: " + n + " (class: "
+ n.getClass().getCanonicalName() + ")");
}
if (n.getBegin().get().line == 0 && !n.toString().isEmpty() && !(n instanceof ArrayBracketPair)) {
throw new IllegalArgumentException("There should be no node at line 0: " + n + " (class: "
+ n.getClass().getCanonicalName() + ")");
}
});
}
#location 4
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Generated("com.github.javaparser.generator.core.visitor.NoCommentHashCodeVisitorGenerator")
public Integer visit(final ConstructorDeclaration n, final Void arg) {
return (n.getBody().accept(this, arg)) * 31 + (n.getModifiers().hashCode()) * 31 + (n.getName().accept(this, arg)) * 31 + (n.getParameters().accept(this, arg)) * 31 + (n.getThrownExceptions().accept(this, arg)) * 31 + (n.getTypeParameters().accept(this, arg)) * 31 + (n.getAnnotations().accept(this, arg));
} | #vulnerable code
@Generated("com.github.javaparser.generator.core.visitor.NoCommentHashCodeVisitorGenerator")
public Integer visit(final ConstructorDeclaration n, final Void arg) {
return (n.getBody().accept(this, arg)) * 31 + (n.getModifiers().hashCode()) * 31 + (n.getName().accept(this, arg)) * 31 + (n.getParameters().accept(this, arg)) * 31 + (n.getReceiverParameter().isPresent() ? n.getReceiverParameter().get().accept(this, arg) : 0) * 31 + (n.getThrownExceptions().accept(this, arg)) * 31 + (n.getTypeParameters().accept(this, arg)) * 31 + (n.getAnnotations().accept(this, arg));
}
#location 3
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public CommentsCollection parse(final InputStream in, final String encoding) throws IOException, UnsupportedEncodingException {
boolean lastWasASlashR = false;
BufferedReader br = new BufferedReader(new InputStreamReader(in));
CommentsCollection comments = new CommentsCollection();
int r;
Deque prevTwoChars = new LinkedList<Character>(Arrays.asList('z','z'));
State state = State.CODE;
LineComment currentLineComment = null;
BlockComment currentBlockComment = null;
StringBuffer currentContent = null;
int currLine = 1;
int currCol = 1;
while ((r=br.read()) != -1){
char c = (char)r;
if (c=='\r'){
lastWasASlashR = true;
} else if (c=='\n'&&lastWasASlashR){
lastWasASlashR=false;
continue;
} else {
lastWasASlashR=false;
}
switch (state) {
case CODE:
if (prevTwoChars.peekLast().equals('/') && c == '/') {
currentLineComment = new LineComment();
currentLineComment.setBeginLine(currLine);
currentLineComment.setBeginColumn(currCol - 1);
state = State.IN_LINE_COMMENT;
currentContent = new StringBuffer();
} else if (prevTwoChars.peekLast().equals('/') && c == '*') {
currentBlockComment = new BlockComment();
currentBlockComment.setBeginLine(currLine);
currentBlockComment.setBeginColumn(currCol - 1);
state = State.IN_BLOCK_COMMENT;
currentContent = new StringBuffer();
} else if (c == '"') {
state = State.IN_STRING;
} else {
// nothing to do
}
break;
case IN_LINE_COMMENT:
if (c=='\n' || c=='\r'){
currentLineComment.setContent(currentContent.toString());
currentLineComment.setEndLine(currLine);
currentLineComment.setEndColumn(currCol);
comments.addComment(currentLineComment);
state = State.CODE;
} else {
currentContent.append(c);
}
break;
case IN_BLOCK_COMMENT:
if (prevTwoChars.peekLast().equals('*') && c=='/' && !prevTwoChars.peekFirst().equals('/')){
// delete last character
String content = currentContent.deleteCharAt(currentContent.toString().length()-1).toString();
if (content.startsWith("*")){
JavadocComment javadocComment = new JavadocComment();
javadocComment.setContent(content.substring(1));
javadocComment.setBeginLine(currentBlockComment.getBeginLine());
javadocComment.setBeginColumn(currentBlockComment.getBeginColumn());
javadocComment.setEndLine(currLine);
javadocComment.setEndColumn(currCol+1);
comments.addComment(javadocComment);
} else {
currentBlockComment.setContent(content);
currentBlockComment.setEndLine(currLine);
currentBlockComment.setEndColumn(currCol+1);
comments.addComment(currentBlockComment);
}
state = State.CODE;
} else {
currentContent.append(c=='\r'?'\n':c);
}
break;
case IN_STRING:
if (!prevTwoChars.peekLast().equals('\\') && c == '"') {
state = State.CODE;
}
break;
default:
throw new RuntimeException("Unexpected");
}
switch (c){
case '\n':
case '\r':
currLine+=1;
currCol = 1;
break;
case '\t':
currCol+=COLUMNS_PER_TAB;
break;
default:
currCol+=1;
}
prevTwoChars.remove();
prevTwoChars.add(c);
}
if (state==State.IN_LINE_COMMENT){
currentLineComment.setContent(currentContent.toString());
currentLineComment.setEndLine(currLine);
currentLineComment.setEndColumn(currCol);
comments.addComment(currentLineComment);
}
return comments;
} | #vulnerable code
public CommentsCollection parse(final InputStream in, final String encoding) throws IOException, UnsupportedEncodingException {
boolean lastWasASlashR = false;
BufferedReader br = new BufferedReader(new InputStreamReader(in));
CommentsCollection comments = new CommentsCollection();
int r;
Deque prevTwoChars = new LinkedList<Character>(Arrays.asList('z','z'));
State state = State.CODE;
LineComment currentLineComment = null;
BlockComment currentBlockComment = null;
StringBuffer currentContent = null;
int currLine = 1;
int currCol = 1;
while ((r=br.read()) != -1){
char c = (char)r;
if (c=='\r'){
lastWasASlashR = true;
} else if (c=='\n'&&lastWasASlashR){
lastWasASlashR=false;
continue;
} else {
lastWasASlashR=false;
}
switch (state){
case CODE:
if (prevTwoChars.peekLast().equals('/') && c=='/'){
currentLineComment = new LineComment();
currentLineComment.setBeginLine(currLine);
currentLineComment.setBeginColumn(currCol-1);
state = State.IN_LINE_COMMENT;
currentContent = new StringBuffer();
} else if (prevTwoChars.peekLast().equals('/') && c=='*'){
currentBlockComment= new BlockComment();
currentBlockComment.setBeginLine(currLine);
currentBlockComment.setBeginColumn(currCol-1);
state = State.IN_BLOCK_COMMENT;
currentContent = new StringBuffer();
} else {
// nothing to do
}
break;
case IN_LINE_COMMENT:
if (c=='\n' || c=='\r'){
currentLineComment.setContent(currentContent.toString());
currentLineComment.setEndLine(currLine);
currentLineComment.setEndColumn(currCol);
comments.addComment(currentLineComment);
state = State.CODE;
} else {
currentContent.append(c);
}
break;
case IN_BLOCK_COMMENT:
if (prevTwoChars.peekLast().equals('*') && c=='/' && !prevTwoChars.peekFirst().equals('/')){
// delete last character
String content = currentContent.deleteCharAt(currentContent.toString().length()-1).toString();
if (content.startsWith("*")){
JavadocComment javadocComment = new JavadocComment();
javadocComment.setContent(content.substring(1));
javadocComment.setBeginLine(currentBlockComment.getBeginLine());
javadocComment.setBeginColumn(currentBlockComment.getBeginColumn());
javadocComment.setEndLine(currLine);
javadocComment.setEndColumn(currCol+1);
comments.addComment(javadocComment);
} else {
currentBlockComment.setContent(content);
currentBlockComment.setEndLine(currLine);
currentBlockComment.setEndColumn(currCol+1);
comments.addComment(currentBlockComment);
}
state = State.CODE;
} else {
currentContent.append(c=='\r'?'\n':c);
}
break;
default:
throw new RuntimeException("Unexpected");
}
switch (c){
case '\n':
case '\r':
currLine+=1;
currCol = 1;
break;
case '\t':
currCol+=COLUMNS_PER_TAB;
break;
default:
currCol+=1;
}
prevTwoChars.remove();
prevTwoChars.add(c);
}
if (state==State.IN_LINE_COMMENT){
currentLineComment.setContent(currentContent.toString());
currentLineComment.setEndLine(currLine);
currentLineComment.setEndColumn(currCol);
comments.addComment(currentLineComment);
}
return comments;
}
#location 78
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void solve(Path path) throws IOException {
Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (file.toString().endsWith(".java")) {
if (printFileName) {
out.println("- parsing " + file.toAbsolutePath());
}
CompilationUnit cu = JavaParser.parse(file);
List<Node> nodes = collectAllNodes(cu);
nodes.forEach(n -> solve(n));
}
return FileVisitResult.CONTINUE;
}
});
} | #vulnerable code
public void solve(Path path) throws IOException {
File file = path.toFile();
if (file.isDirectory()) {
for (File f : file.listFiles()) {
solve(f.toPath());
}
} else {
if (file.getName().endsWith(".java")) {
if (printFileName) {
out.println("- parsing " + file.getAbsolutePath());
}
CompilationUnit cu = JavaParser.parse(file);
List<Node> nodes = collectAllNodes(cu);
nodes.forEach(n -> solve(n));
}
}
}
#location 4
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
void resolveFieldOfEnumAsInternalClassOfClassQualifiedSamePackage() throws IOException {
File src = new File("src/test/resources/enumLiteralsInAnnotatedClass");
File aClass = new File(src.getPath() + File.separator + "foo" + File.separator + "bar"
+ File.separator + "AClass.java");
CombinedTypeSolver localCts = new CombinedTypeSolver();
localCts.add(new ReflectionTypeSolver());
localCts.add(new JavaParserTypeSolver(src));
ParserConfiguration parserConfiguration = new ParserConfiguration().setSymbolResolver(new JavaSymbolSolver(localCts));
JavaParser parser = new JavaParser(parserConfiguration);
StreamProvider classProvider = new StreamProvider(new FileInputStream(aClass), StandardCharsets.UTF_8);
CompilationUnit cu = parser.parse(ParseStart.COMPILATION_UNIT, classProvider).getResult().get();
Optional<FieldAccessExpr> fae = cu.findFirst(FieldAccessExpr.class, n -> n.toString().equals("foo.bar.BinaryExpr.Operator.AND") && n.getRange().get().begin.line == 5);
assertTrue(fae.isPresent());
assertEquals("foo.bar.BinaryExpr.Operator", fae.get().resolve().getType().describe());
assertEquals("AND", fae.get().resolve().getName());
} | #vulnerable code
@Test
void resolveFieldOfEnumAsInternalClassOfClassQualifiedSamePackage() throws IOException {
File src = new File("src/test/resources/enumLiteralsInAnnotatedClass");
File aClass = new File(src.getPath() + File.separator + "foo" + File.separator + "bar"
+ File.separator + "AClass.java");
CombinedTypeSolver localCts = new CombinedTypeSolver();
localCts.add(new ReflectionTypeSolver());
localCts.add(new JavaParserTypeSolver(src));
ParserConfiguration parserConfiguration = new ParserConfiguration().setSymbolResolver(new JavaSymbolSolver(localCts));
JavaParser parser = new JavaParser(parserConfiguration);
StreamProvider classProvider = new StreamProvider(new FileInputStream(aClass));
CompilationUnit cu = parser.parse(ParseStart.COMPILATION_UNIT, classProvider).getResult().get();
Optional<FieldAccessExpr> fae = cu.findFirst(FieldAccessExpr.class, n -> n.toString().equals("foo.bar.BinaryExpr.Operator.AND") && n.getRange().get().begin.line == 5);
assertTrue(fae.isPresent());
assertEquals("foo.bar.BinaryExpr.Operator", fae.get().resolve().getType().describe());
assertEquals("AND", fae.get().resolve().getName());
}
#location 15
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsMappingSettingAndTheMappingsProvided() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("f1", new FieldAlias("col3"));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(4, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("f1"));
assertEquals(newAliasMap.get("f1").getName(), "col3");
assertEquals(newAliasMap.get("f1").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsMappingSettingAndTheMappingsProvided() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("f1", new FieldAlias("col3"));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(4, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("f1"));
assertEquals(newAliasMap.get("f1").getName(), "col3");
assertEquals(newAliasMap.get("f1").isPrimaryKey(), false);
}
#location 25
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void returnAllFieldsAndApplyMappings() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
double threshold = 215.66612;
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("age", 30)
.put("threshold", threshold);
Map<String, FieldAlias> mappings = Maps.newHashMap();
mappings.put("lastName", new FieldAlias("Name"));
mappings.put("age", new FieldAlias("a"));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
List<PreparedStatementBinder> binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : binders)
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.size(), 4);
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("firstName")).getValue(), "Alex");
assertTrue(map.containsKey("Name"));
assertTrue(map.get("Name").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("Name")).getValue(), "Smith");
assertTrue(map.containsKey("a"));
assertTrue(map.get("a").getClass() == IntPreparedStatementBinder.class);
assertEquals(((IntPreparedStatementBinder) map.get("a")).getValue(), 30);
assertTrue(map.containsKey("threshold"));
assertTrue(map.get("threshold").getClass() == DoublePreparedStatementBinder.class);
assertTrue(Double.compare(((DoublePreparedStatementBinder) map.get("threshold")).getValue(), threshold) == 0);
} | #vulnerable code
@Test
public void returnAllFieldsAndApplyMappings() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
double threshold = 215.66612;
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("age", 30)
.put("threshold", threshold);
Map<String, FieldAlias> mappings = Maps.newHashMap();
mappings.put("lastName", new FieldAlias("Name"));
mappings.put("age", new FieldAlias("a"));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
StructFieldsDataExtractor.PreparedStatementBinders binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : Iterables.concat(binders.getKeyColumns(), binders.getNonKeyColumns()))
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.getKeyColumns().size() + binders.getNonKeyColumns().size(), 4);
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("firstName")).getValue(), "Alex");
assertTrue(map.containsKey("Name"));
assertTrue(map.get("Name").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("Name")).getValue(), "Smith");
assertTrue(map.containsKey("a"));
assertTrue(map.get("a").getClass() == IntPreparedStatementBinder.class);
assertEquals(((IntPreparedStatementBinder) map.get("a")).getValue(), 30);
assertTrue(map.containsKey("threshold"));
assertTrue(map.get("threshold").getClass() == DoublePreparedStatementBinder.class);
assertTrue(Double.compare(((DoublePreparedStatementBinder) map.get("threshold")).getValue(), threshold) == 0);
}
#location 33
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void shouldReturnThePrimaryKeysAtTheEndWhenMultipleFieldsFormThePrimaryKey() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
Map<String, FieldAlias> mappings = new HashMap<>();
mappings.put("firstName", new FieldAlias("fName", true));
mappings.put("lastName", new FieldAlias("lName", true));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
List<PreparedStatementBinder> binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
List<PreparedStatementBinder> pkBinders = new LinkedList<>();
for (PreparedStatementBinder p : binders) {
if (p.isPrimaryKey()) {
pkBinders.add(p);
}
map.put(p.getFieldName(), p);
}
assertTrue(!binders.isEmpty());
assertEquals(binders.size(), 10);
assertEquals(pkBinders.size(), 2);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "fName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "fName")
);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "lName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "lName")
);
assertTrue(map.containsKey("fName"));
assertTrue(map.get("fName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lName"));
assertTrue(map.get("lName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
} | #vulnerable code
@Test
public void shouldReturnThePrimaryKeysAtTheEndWhenMultipleFieldsFormThePrimaryKey() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
Map<String, FieldAlias> mappings = new HashMap<>();
mappings.put("firstName", new FieldAlias("fName", true));
mappings.put("lastName", new FieldAlias("lName", true));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
StructFieldsDataExtractor.PreparedStatementBinders binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : Iterables.concat(binders.getNonKeyColumns(), binders.getKeyColumns()))
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.getNonKeyColumns().size() + binders.getKeyColumns().size(), 10);
List<PreparedStatementBinder> pkBinders = binders.getKeyColumns();
assertEquals(pkBinders.size(), 2);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "fName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "fName")
);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "lName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "lName")
);
assertTrue(map.containsKey("fName"));
assertTrue(map.get("fName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lName"));
assertTrue(map.get("lName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
}
#location 49
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void returnAllFieldsAndTheirBytesValue() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
FieldsMappings tm = new FieldsMappings("table", "topic", true, new HashMap<String, FieldAlias>());
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
List<PreparedStatementBinder> binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : binders)
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.size(), 10);
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lastName"));
assertTrue(map.get("lastName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
} | #vulnerable code
@Test
public void returnAllFieldsAndTheirBytesValue() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
FieldsMappings tm = new FieldsMappings("table", "topic", true, new HashMap<String, FieldAlias>());
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
StructFieldsDataExtractor.PreparedStatementBinders binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : Iterables.concat(binders.getNonKeyColumns(), binders.getKeyColumns()))
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.getKeyColumns().size() + binders.getNonKeyColumns().size(), 10);
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lastName"));
assertTrue(map.get("lastName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
}
#location 45
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsMappingSettingAndTheMappingsProvided() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("f1", new FieldAlias("col3"));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(4, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("f1"));
assertEquals(newAliasMap.get("f1").getName(), "col3");
assertEquals(newAliasMap.get("f1").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsMappingSettingAndTheMappingsProvided() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("f1", new FieldAlias("col3"));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(4, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("f1"));
assertEquals(newAliasMap.get("f1").getName(), "col3");
assertEquals(newAliasMap.get("f1").isPrimaryKey(), false);
}
#location 21
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void returnAllFieldsAndApplyMappings() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
double threshold = 215.66612;
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("age", 30)
.put("threshold", threshold);
Map<String, FieldAlias> mappings = Maps.newHashMap();
mappings.put("lastName", new FieldAlias("Name"));
mappings.put("age", new FieldAlias("a"));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
List<PreparedStatementBinder> binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : binders)
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.size(), 4);
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("firstName")).getValue(), "Alex");
assertTrue(map.containsKey("Name"));
assertTrue(map.get("Name").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("Name")).getValue(), "Smith");
assertTrue(map.containsKey("a"));
assertTrue(map.get("a").getClass() == IntPreparedStatementBinder.class);
assertEquals(((IntPreparedStatementBinder) map.get("a")).getValue(), 30);
assertTrue(map.containsKey("threshold"));
assertTrue(map.get("threshold").getClass() == DoublePreparedStatementBinder.class);
assertTrue(Double.compare(((DoublePreparedStatementBinder) map.get("threshold")).getValue(), threshold) == 0);
} | #vulnerable code
@Test
public void returnAllFieldsAndApplyMappings() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
double threshold = 215.66612;
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("age", 30)
.put("threshold", threshold);
Map<String, FieldAlias> mappings = Maps.newHashMap();
mappings.put("lastName", new FieldAlias("Name"));
mappings.put("age", new FieldAlias("a"));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
StructFieldsDataExtractor.PreparedStatementBinders binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : Iterables.concat(binders.getKeyColumns(), binders.getNonKeyColumns()))
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.getKeyColumns().size() + binders.getNonKeyColumns().size(), 4);
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("firstName")).getValue(), "Alex");
assertTrue(map.containsKey("Name"));
assertTrue(map.get("Name").getClass() == StringPreparedStatementBinder.class);
assertEquals(((StringPreparedStatementBinder) map.get("Name")).getValue(), "Smith");
assertTrue(map.containsKey("a"));
assertTrue(map.get("a").getClass() == IntPreparedStatementBinder.class);
assertEquals(((IntPreparedStatementBinder) map.get("a")).getValue(), 30);
assertTrue(map.containsKey("threshold"));
assertTrue(map.get("threshold").getClass() == DoublePreparedStatementBinder.class);
assertTrue(Double.compare(((DoublePreparedStatementBinder) map.get("threshold")).getValue(), threshold) == 0);
}
#location 33
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsMappingSetting() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size());
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsMappingSetting() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size());
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
}
#location 29
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test(expected = ConfigException.class)
public void throwAnExceptionWhenForANewTableToCreateWhichDoesNotAllowAutoCreation() throws SQLException {
Database changesExecutor = new Database(new HashSet<String>(),
new HashSet<String>(),
new DatabaseMetadata(null, new ArrayList<DbTable>()),
DbDialect.fromConnectionString(SQL_LITE_URI),
2);
String tableName = "tableA";
Map<String, Collection<SinkRecordField>> map = new HashMap<>();
map.put(tableName, Lists.newArrayList(
new SinkRecordField(Schema.Type.INT32, "col1", true),
new SinkRecordField(Schema.Type.STRING, "col2", false),
new SinkRecordField(Schema.Type.INT8, "col3", false),
new SinkRecordField(Schema.Type.INT64, "col3", false),
new SinkRecordField(Schema.Type.FLOAT64, "col4", false)
));
try (Connection connection = connectionProvider.getConnection()) {
changesExecutor.update(map, connection);
}
} | #vulnerable code
@Test(expected = ConfigException.class)
public void throwAnExceptionWhenForANewTableToCreateWhichDoesNotAllowAutoCreation() throws SQLException {
Database changesExecutor = new Database(new HashSet<String>(),
new HashSet<String>(),
new DatabaseMetadata(null, new ArrayList<DbTable>()),
DbDialect.fromConnectionString(SQL_LITE_URI),
2);
String tableName = "tableA";
Map<String, Collection<SinkRecordField>> map = new HashMap<>();
map.put(tableName, Lists.newArrayList(
new SinkRecordField(Schema.Type.INT32, "col1", true),
new SinkRecordField(Schema.Type.STRING, "col2", false),
new SinkRecordField(Schema.Type.INT8, "col3", false),
new SinkRecordField(Schema.Type.INT64, "col3", false),
new SinkRecordField(Schema.Type.FLOAT64, "col4", false)
));
Connection connection = null;
try {
connection = connectionProvider.getConnection();
changesExecutor.update(map, connection);
} finally {
AutoCloseableHelper.close(connection);
}
}
#location 24
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsIncludedAndAnExistingMapping() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("col3", new FieldAlias("col3", true));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsIncludedAndAnExistingMapping() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("col3", new FieldAlias("col3", true));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
}
#location 21
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleBatchedStatementPerRecordInsertingSameRecord100Times() throws SQLException {
String tableName = "batched_statement_test_100";
String createTable = "CREATE TABLE " + tableName + " (" +
" firstName TEXT," +
" lastName TEXT," +
" age INTEGER," +
" bool NUMERIC," +
" byte INTEGER," +
" short INTEGER," +
" long INTEGER," +
" float NUMERIC," +
" double NUMERIC," +
" bytes BLOB " +
");";
SqlLiteHelper.deleteTable(SQL_LITE_URI, tableName);
SqlLiteHelper.createTable(SQL_LITE_URI, createTable);
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
.field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
.field("age", Schema.OPTIONAL_INT32_SCHEMA)
.field("bool", Schema.OPTIONAL_BOOLEAN_SCHEMA)
.field("short", Schema.OPTIONAL_INT16_SCHEMA)
.field("byte", Schema.OPTIONAL_INT8_SCHEMA)
.field("long", Schema.OPTIONAL_INT64_SCHEMA)
.field("float", Schema.OPTIONAL_FLOAT32_SCHEMA)
.field("double", Schema.OPTIONAL_FLOAT64_SCHEMA)
.field("bytes", Schema.OPTIONAL_BYTES_SCHEMA);
final String fName1 = "Alex";
final String lName1 = "Smith";
final int age1 = 21;
final boolean bool1 = true;
final short s1 = 1234;
final byte b1 = -32;
final long l1 = 12425436;
final float f1 = (float) 2356.3;
final double d1 = -2436546.56457;
final byte[] bs1 = new byte[]{-32, 124};
Struct struct1 = new Struct(schema)
.put("firstName", fName1)
.put("lastName", lName1)
.put("bool", bool1)
.put("short", s1)
.put("byte", b1)
.put("long", l1)
.put("float", f1)
.put("double", d1)
.put("bytes", bs1)
.put("age", age1);
String topic = "topic";
int partition = 2;
Collection<SinkRecord> records = Collections.nCopies(
100,
new SinkRecord(topic, partition, null, null, schema, struct1, 1));
Map<String, StructFieldsDataExtractor> map = new HashMap<>();
map.put(topic.toLowerCase(),
new StructFieldsDataExtractor(new FieldsMappings(tableName, topic, true, new HashMap<String, FieldAlias>())));
List<DbTable> dbTables = Lists.newArrayList(
new DbTable(tableName, Lists.<DbTableColumn>newArrayList(
new DbTableColumn("firstName", true, false, 1),
new DbTableColumn("lastName", true, false, 1),
new DbTableColumn("age", false, false, 1),
new DbTableColumn("bool", true, false, 1),
new DbTableColumn("byte", true, false, 1),
new DbTableColumn("short", true, false, 1),
new DbTableColumn("long", true, false, 1),
new DbTableColumn("float", true, false, 1),
new DbTableColumn("double", true, false, 1),
new DbTableColumn("BLOB", true, false, 1)
)));
DatabaseMetadata dbMetadata = new DatabaseMetadata(null, dbTables);
HikariDataSource ds = HikariHelper.from(SQL_LITE_URI, null, null);
DatabaseChangesExecutor executor = new DatabaseChangesExecutor(
ds,
Sets.<String>newHashSet(),
Sets.<String>newHashSet(),
dbMetadata,
new SQLiteDialect(),
1);
JdbcDbWriter writer = new JdbcDbWriter(ds,
new BatchedPreparedStatementBuilder(map, new InsertQueryBuilder(new SQLiteDialect())),
new ThrowErrorHandlingPolicy(),
executor,
10);
writer.write(records);
String query = "SELECT * FROM " + tableName + " ORDER BY firstName";
SqlLiteHelper.ResultSetReadCallback callback = new SqlLiteHelper.ResultSetReadCallback() {
int index = 0;
@Override
public void read(ResultSet rs) throws SQLException {
if (index < 100) {
assertEquals(rs.getString("firstName"), fName1);
assertEquals(rs.getString("lastName"), lName1);
assertEquals(rs.getBoolean("bool"), bool1);
assertEquals(rs.getShort("short"), s1);
assertEquals(rs.getByte("byte"), b1);
assertEquals(rs.getLong("long"), l1);
assertEquals(Float.compare(rs.getFloat("float"), f1), 0);
assertEquals(Double.compare(rs.getDouble("double"), d1), 0);
assertTrue(Arrays.equals(rs.getBytes("bytes"), bs1));
assertEquals(rs.getInt("age"), age1);
} else throw new RuntimeException(String.format("%d is too high", index));
index++;
}
};
SqlLiteHelper.select(SQL_LITE_URI, query, callback);
} | #vulnerable code
@Test
public void handleBatchedStatementPerRecordInsertingSameRecord100Times() throws SQLException {
String tableName = "batched_statement_test_100";
String createTable = "CREATE TABLE " + tableName + " (" +
" firstName TEXT," +
" lastName TEXT," +
" age INTEGER," +
" bool NUMERIC," +
" byte INTEGER," +
" short INTEGER," +
" long INTEGER," +
" float NUMERIC," +
" double NUMERIC," +
" bytes BLOB " +
");";
SqlLiteHelper.deleteTable(SQL_LITE_URI, tableName);
SqlLiteHelper.createTable(SQL_LITE_URI, createTable);
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
.field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
.field("age", Schema.OPTIONAL_INT32_SCHEMA)
.field("bool", Schema.OPTIONAL_BOOLEAN_SCHEMA)
.field("short", Schema.OPTIONAL_INT16_SCHEMA)
.field("byte", Schema.OPTIONAL_INT8_SCHEMA)
.field("long", Schema.OPTIONAL_INT64_SCHEMA)
.field("float", Schema.OPTIONAL_FLOAT32_SCHEMA)
.field("double", Schema.OPTIONAL_FLOAT64_SCHEMA)
.field("bytes", Schema.OPTIONAL_BYTES_SCHEMA);
final String fName1 = "Alex";
final String lName1 = "Smith";
final int age1 = 21;
final boolean bool1 = true;
final short s1 = 1234;
final byte b1 = -32;
final long l1 = 12425436;
final float f1 = (float) 2356.3;
final double d1 = -2436546.56457;
final byte[] bs1 = new byte[]{-32, 124};
Struct struct1 = new Struct(schema)
.put("firstName", fName1)
.put("lastName", lName1)
.put("bool", bool1)
.put("short", s1)
.put("byte", b1)
.put("long", l1)
.put("float", f1)
.put("double", d1)
.put("bytes", bs1)
.put("age", age1);
String topic = "topic";
int partition = 2;
Collection<SinkRecord> records = Collections.nCopies(
100,
new SinkRecord(topic, partition, null, null, schema, struct1, 1));
Map<String, StructFieldsDataExtractor> map = new HashMap<>();
map.put(topic.toLowerCase(),
new StructFieldsDataExtractor(new FieldsMappings(tableName, topic, true, new HashMap<String, FieldAlias>())));
List<DbTable> dbTables = Lists.newArrayList(
new DbTable(tableName, Lists.<DbTableColumn>newArrayList(
new DbTableColumn("firstName", true, false, 1),
new DbTableColumn("lastName", true, false, 1),
new DbTableColumn("age", false, false, 1),
new DbTableColumn("bool", true, false, 1),
new DbTableColumn("byte", true, false, 1),
new DbTableColumn("short", true, false, 1),
new DbTableColumn("long", true, false, 1),
new DbTableColumn("float", true, false, 1),
new DbTableColumn("double", true, false, 1),
new DbTableColumn("BLOB", true, false, 1)
)));
DatabaseMetadata dbMetadata = new DatabaseMetadata(null, dbTables);
HikariDataSource ds = HikariHelper.from(SQL_LITE_URI, null, null);
DatabaseChangesExecutor executor = new DatabaseChangesExecutor(
ds,
Sets.<String>newHashSet(),
Sets.<String>newHashSet(),
dbMetadata,
new SQLiteDialect(),
1);
JdbcDbWriter writer = new JdbcDbWriter(ds,
new BatchedPreparedStatementBuilder(map, new InsertQueryBuilder()),
new ThrowErrorHandlingPolicy(),
executor,
10);
writer.write(records);
String query = "SELECT * FROM " + tableName + " ORDER BY firstName";
SqlLiteHelper.ResultSetReadCallback callback = new SqlLiteHelper.ResultSetReadCallback() {
int index = 0;
@Override
public void read(ResultSet rs) throws SQLException {
if (index < 100) {
assertEquals(rs.getString("firstName"), fName1);
assertEquals(rs.getString("lastName"), lName1);
assertEquals(rs.getBoolean("bool"), bool1);
assertEquals(rs.getShort("short"), s1);
assertEquals(rs.getByte("byte"), b1);
assertEquals(rs.getLong("long"), l1);
assertEquals(Float.compare(rs.getFloat("float"), f1), 0);
assertEquals(Double.compare(rs.getDouble("double"), d1), 0);
assertTrue(Arrays.equals(rs.getBytes("bytes"), bs1));
assertEquals(rs.getInt("age"), age1);
} else throw new RuntimeException(String.format("%d is too high", index));
index++;
}
};
SqlLiteHelper.select(SQL_LITE_URI, query, callback);
}
#location 94
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void shouldReturnThePrimaryKeysAtTheEndWhenOneFieldIsPK() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
Map<String, FieldAlias> mappings = new HashMap<>();
mappings.put("long", new FieldAlias("long", true));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
List<PreparedStatementBinder> binders = dataExtractor.get(struct,
new SinkRecord("", 2, null, null, schema, struct, 2));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
LinkedList<PreparedStatementBinder> pkBinders = new LinkedList<>();
for (PreparedStatementBinder p : binders) {
if (p.isPrimaryKey()) {
pkBinders.add(p);
}
map.put(p.getFieldName(), p);
}
assertTrue(!binders.isEmpty());
assertEquals(map.size(), 10);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "long"));
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lastName"));
assertTrue(map.get("lastName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
} | #vulnerable code
@Test
public void shouldReturnThePrimaryKeysAtTheEndWhenOneFieldIsPK() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
Map<String, FieldAlias> mappings = new HashMap<>();
mappings.put("long", new FieldAlias("long", true));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
StructFieldsDataExtractor.PreparedStatementBinders binders = dataExtractor.get(struct,
new SinkRecord("", 2, null, null, schema, struct, 2));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : Iterables.concat(binders.getNonKeyColumns(), binders.getKeyColumns()))
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(map.size(), 10);
assertTrue(Objects.equals(binders.getKeyColumns().get(0).getFieldName(), "long"));
assertTrue(map.containsKey("firstName"));
assertTrue(map.get("firstName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lastName"));
assertTrue(map.get("lastName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
}
#location 51
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsIncludedAndAnExistingMapping() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("col3", new FieldAlias("col3", true));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsIncludedAndAnExistingMapping() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
aliasMap.put("col3", new FieldAlias("col3", true));
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size()); //+ the specific mapping
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
}
#location 25
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleBatchedStatementPerRecordInsertingSameRecord100Times() throws SQLException {
String tableName = "batched_statement_test_100";
String createTable = "CREATE TABLE " + tableName + " (" +
" firstName TEXT," +
" lastName TEXT," +
" age INTEGER," +
" bool NUMERIC," +
" byte INTEGER," +
" short INTEGER," +
" long INTEGER," +
" float NUMERIC," +
" double NUMERIC," +
" bytes BLOB " +
");";
SqlLiteHelper.deleteTable(SQL_LITE_URI, tableName);
SqlLiteHelper.createTable(SQL_LITE_URI, createTable);
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
.field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
.field("age", Schema.OPTIONAL_INT32_SCHEMA)
.field("bool", Schema.OPTIONAL_BOOLEAN_SCHEMA)
.field("short", Schema.OPTIONAL_INT16_SCHEMA)
.field("byte", Schema.OPTIONAL_INT8_SCHEMA)
.field("long", Schema.OPTIONAL_INT64_SCHEMA)
.field("float", Schema.OPTIONAL_FLOAT32_SCHEMA)
.field("double", Schema.OPTIONAL_FLOAT64_SCHEMA)
.field("bytes", Schema.OPTIONAL_BYTES_SCHEMA);
final String fName1 = "Alex";
final String lName1 = "Smith";
final int age1 = 21;
final boolean bool1 = true;
final short s1 = 1234;
final byte b1 = -32;
final long l1 = 12425436;
final float f1 = (float) 2356.3;
final double d1 = -2436546.56457;
final byte[] bs1 = new byte[]{-32, 124};
Struct struct1 = new Struct(schema)
.put("firstName", fName1)
.put("lastName", lName1)
.put("bool", bool1)
.put("short", s1)
.put("byte", b1)
.put("long", l1)
.put("float", f1)
.put("double", d1)
.put("bytes", bs1)
.put("age", age1);
String topic = "topic";
int partition = 2;
Collection<SinkRecord> records = Collections.nCopies(
100,
new SinkRecord(topic, partition, null, null, schema, struct1, 1));
Map<String, StructFieldsDataExtractor> map = new HashMap<>();
map.put(topic.toLowerCase(),
new StructFieldsDataExtractor(new FieldsMappings(tableName, topic, true, new HashMap<String, FieldAlias>())));
List<DbTable> dbTables = Lists.newArrayList(
new DbTable(tableName, Lists.<DbTableColumn>newArrayList(
new DbTableColumn("firstName", true, false, 1),
new DbTableColumn("lastName", true, false, 1),
new DbTableColumn("age", false, false, 1),
new DbTableColumn("bool", true, false, 1),
new DbTableColumn("byte", true, false, 1),
new DbTableColumn("short", true, false, 1),
new DbTableColumn("long", true, false, 1),
new DbTableColumn("float", true, false, 1),
new DbTableColumn("double", true, false, 1),
new DbTableColumn("BLOB", true, false, 1)
)));
DatabaseMetadata dbMetadata = new DatabaseMetadata(null, dbTables);
HikariDataSource ds = HikariHelper.from(SQL_LITE_URI, null, null);
DatabaseChangesExecutor executor = new DatabaseChangesExecutor(
ds,
Sets.<String>newHashSet(),
Sets.<String>newHashSet(),
dbMetadata,
new SQLiteDialect(),
1);
JdbcDbWriter writer = new JdbcDbWriter(ds,
new BatchedPreparedStatementBuilder(map, new InsertQueryBuilder(new SQLiteDialect())),
new ThrowErrorHandlingPolicy(),
executor,
10);
writer.write(records);
String query = "SELECT * FROM " + tableName + " ORDER BY firstName";
SqlLiteHelper.ResultSetReadCallback callback = new SqlLiteHelper.ResultSetReadCallback() {
int index = 0;
@Override
public void read(ResultSet rs) throws SQLException {
if (index < 100) {
assertEquals(rs.getString("firstName"), fName1);
assertEquals(rs.getString("lastName"), lName1);
assertEquals(rs.getBoolean("bool"), bool1);
assertEquals(rs.getShort("short"), s1);
assertEquals(rs.getByte("byte"), b1);
assertEquals(rs.getLong("long"), l1);
assertEquals(Float.compare(rs.getFloat("float"), f1), 0);
assertEquals(Double.compare(rs.getDouble("double"), d1), 0);
assertTrue(Arrays.equals(rs.getBytes("bytes"), bs1));
assertEquals(rs.getInt("age"), age1);
} else throw new RuntimeException(String.format("%d is too high", index));
index++;
}
};
SqlLiteHelper.select(SQL_LITE_URI, query, callback);
} | #vulnerable code
@Test
public void handleBatchedStatementPerRecordInsertingSameRecord100Times() throws SQLException {
String tableName = "batched_statement_test_100";
String createTable = "CREATE TABLE " + tableName + " (" +
" firstName TEXT," +
" lastName TEXT," +
" age INTEGER," +
" bool NUMERIC," +
" byte INTEGER," +
" short INTEGER," +
" long INTEGER," +
" float NUMERIC," +
" double NUMERIC," +
" bytes BLOB " +
");";
SqlLiteHelper.deleteTable(SQL_LITE_URI, tableName);
SqlLiteHelper.createTable(SQL_LITE_URI, createTable);
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
.field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
.field("age", Schema.OPTIONAL_INT32_SCHEMA)
.field("bool", Schema.OPTIONAL_BOOLEAN_SCHEMA)
.field("short", Schema.OPTIONAL_INT16_SCHEMA)
.field("byte", Schema.OPTIONAL_INT8_SCHEMA)
.field("long", Schema.OPTIONAL_INT64_SCHEMA)
.field("float", Schema.OPTIONAL_FLOAT32_SCHEMA)
.field("double", Schema.OPTIONAL_FLOAT64_SCHEMA)
.field("bytes", Schema.OPTIONAL_BYTES_SCHEMA);
final String fName1 = "Alex";
final String lName1 = "Smith";
final int age1 = 21;
final boolean bool1 = true;
final short s1 = 1234;
final byte b1 = -32;
final long l1 = 12425436;
final float f1 = (float) 2356.3;
final double d1 = -2436546.56457;
final byte[] bs1 = new byte[]{-32, 124};
Struct struct1 = new Struct(schema)
.put("firstName", fName1)
.put("lastName", lName1)
.put("bool", bool1)
.put("short", s1)
.put("byte", b1)
.put("long", l1)
.put("float", f1)
.put("double", d1)
.put("bytes", bs1)
.put("age", age1);
String topic = "topic";
int partition = 2;
Collection<SinkRecord> records = Collections.nCopies(
100,
new SinkRecord(topic, partition, null, null, schema, struct1, 1));
Map<String, StructFieldsDataExtractor> map = new HashMap<>();
map.put(topic.toLowerCase(),
new StructFieldsDataExtractor(new FieldsMappings(tableName, topic, true, new HashMap<String, FieldAlias>())));
List<DbTable> dbTables = Lists.newArrayList(
new DbTable(tableName, Lists.<DbTableColumn>newArrayList(
new DbTableColumn("firstName", true, false, 1),
new DbTableColumn("lastName", true, false, 1),
new DbTableColumn("age", false, false, 1),
new DbTableColumn("bool", true, false, 1),
new DbTableColumn("byte", true, false, 1),
new DbTableColumn("short", true, false, 1),
new DbTableColumn("long", true, false, 1),
new DbTableColumn("float", true, false, 1),
new DbTableColumn("double", true, false, 1),
new DbTableColumn("BLOB", true, false, 1)
)));
DatabaseMetadata dbMetadata = new DatabaseMetadata(null, dbTables);
HikariDataSource ds = HikariHelper.from(SQL_LITE_URI, null, null);
DatabaseChangesExecutor executor = new DatabaseChangesExecutor(
ds,
Sets.<String>newHashSet(),
Sets.<String>newHashSet(),
dbMetadata,
new SQLiteDialect(),
1);
JdbcDbWriter writer = new JdbcDbWriter(ds,
new BatchedPreparedStatementBuilder(map, new InsertQueryBuilder()),
new ThrowErrorHandlingPolicy(),
executor,
10);
writer.write(records);
String query = "SELECT * FROM " + tableName + " ORDER BY firstName";
SqlLiteHelper.ResultSetReadCallback callback = new SqlLiteHelper.ResultSetReadCallback() {
int index = 0;
@Override
public void read(ResultSet rs) throws SQLException {
if (index < 100) {
assertEquals(rs.getString("firstName"), fName1);
assertEquals(rs.getString("lastName"), lName1);
assertEquals(rs.getBoolean("bool"), bool1);
assertEquals(rs.getShort("short"), s1);
assertEquals(rs.getByte("byte"), b1);
assertEquals(rs.getLong("long"), l1);
assertEquals(Float.compare(rs.getFloat("float"), f1), 0);
assertEquals(Double.compare(rs.getDouble("double"), d1), 0);
assertTrue(Arrays.equals(rs.getBytes("bytes"), bs1));
assertEquals(rs.getInt("age"), age1);
} else throw new RuntimeException(String.format("%d is too high", index));
index++;
}
};
SqlLiteHelper.select(SQL_LITE_URI, query, callback);
}
#location 115
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsMappingSetting() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size());
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsMappingSetting() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size());
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
}
#location 25
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void shouldReturnThePrimaryKeysAtTheEndWhenMultipleFieldsFormThePrimaryKey() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
Map<String, FieldAlias> mappings = new HashMap<>();
mappings.put("firstName", new FieldAlias("fName", true));
mappings.put("lastName", new FieldAlias("lName", true));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
List<PreparedStatementBinder> binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
List<PreparedStatementBinder> pkBinders = new LinkedList<>();
for (PreparedStatementBinder p : binders) {
if (p.isPrimaryKey()) {
pkBinders.add(p);
}
map.put(p.getFieldName(), p);
}
assertTrue(!binders.isEmpty());
assertEquals(binders.size(), 10);
assertEquals(pkBinders.size(), 2);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "fName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "fName")
);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "lName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "lName")
);
assertTrue(map.containsKey("fName"));
assertTrue(map.get("fName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lName"));
assertTrue(map.get("lName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
} | #vulnerable code
@Test
public void shouldReturnThePrimaryKeysAtTheEndWhenMultipleFieldsFormThePrimaryKey() {
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.STRING_SCHEMA)
.field("lastName", Schema.STRING_SCHEMA)
.field("age", Schema.INT32_SCHEMA)
.field("bool", Schema.BOOLEAN_SCHEMA)
.field("short", Schema.INT16_SCHEMA)
.field("byte", Schema.INT8_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("bytes", Schema.BYTES_SCHEMA)
.field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build();
short s = 1234;
byte b = -32;
long l = 12425436;
float f = (float) 2356.3;
double d = -2436546.56457;
byte[] bs = new byte[]{-32, 124};
Struct struct = new Struct(schema)
.put("firstName", "Alex")
.put("lastName", "Smith")
.put("bool", true)
.put("short", s)
.put("byte", b)
.put("long", l)
.put("float", f)
.put("double", d)
.put("bytes", bs)
.put("age", 30);
Map<String, FieldAlias> mappings = new HashMap<>();
mappings.put("firstName", new FieldAlias("fName", true));
mappings.put("lastName", new FieldAlias("lName", true));
FieldsMappings tm = new FieldsMappings("table", "topic", true, mappings);
StructFieldsDataExtractor dataExtractor = new StructFieldsDataExtractor(tm);
StructFieldsDataExtractor.PreparedStatementBinders binders = dataExtractor.get(struct,
new SinkRecord("", 1, null, null, schema, struct, 0));
HashMap<String, PreparedStatementBinder> map = new HashMap<>();
for (PreparedStatementBinder p : Iterables.concat(binders.getNonKeyColumns(), binders.getKeyColumns()))
map.put(p.getFieldName(), p);
assertTrue(!binders.isEmpty());
assertEquals(binders.getNonKeyColumns().size() + binders.getKeyColumns().size(), 10);
List<PreparedStatementBinder> pkBinders = binders.getKeyColumns();
assertEquals(pkBinders.size(), 2);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "fName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "fName")
);
assertTrue(Objects.equals(pkBinders.get(0).getFieldName(), "lName") ||
Objects.equals(pkBinders.get(1).getFieldName(), "lName")
);
assertTrue(map.containsKey("fName"));
assertTrue(map.get("fName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("lName"));
assertTrue(map.get("lName").getClass() == StringPreparedStatementBinder.class);
assertTrue(map.containsKey("age"));
assertTrue(map.get("age").getClass() == IntPreparedStatementBinder.class);
assertTrue(map.get("long").getClass() == LongPreparedStatementBinder.class);
assertEquals(((LongPreparedStatementBinder) map.get("long")).getValue(), l);
assertTrue(map.get("short").getClass() == ShortPreparedStatementBinder.class);
assertEquals(((ShortPreparedStatementBinder) map.get("short")).getValue(), s);
assertTrue(map.get("byte").getClass() == BytePreparedStatementBinder.class);
assertEquals(((BytePreparedStatementBinder) map.get("byte")).getValue(), b);
assertTrue(map.get("float").getClass() == FloatPreparedStatementBinder.class);
assertEquals(Float.compare(((FloatPreparedStatementBinder) map.get("float")).getValue(), f), 0);
assertTrue(map.get("double").getClass() == DoublePreparedStatementBinder.class);
assertEquals(Double.compare(((DoublePreparedStatementBinder) map.get("double")).getValue(), d), 0);
assertTrue(map.get("bytes").getClass() == BytesPreparedStatementBinder.class);
assertTrue(Arrays.equals(bs, ((BytesPreparedStatementBinder) map.get("bytes")).getValue()));
}
#location 49
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleBatchStatementPerRecordInsertingWithAutoCreatedColumnForPK() throws SQLException {
String tableName = "batch_100_auto_create_column";
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
.field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
.field("age", Schema.OPTIONAL_INT32_SCHEMA)
.field("bool", Schema.OPTIONAL_BOOLEAN_SCHEMA)
.field("short", Schema.OPTIONAL_INT16_SCHEMA)
.field("byte", Schema.OPTIONAL_INT8_SCHEMA)
.field("long", Schema.OPTIONAL_INT64_SCHEMA)
.field("float", Schema.OPTIONAL_FLOAT32_SCHEMA)
.field("double", Schema.OPTIONAL_FLOAT64_SCHEMA)
.field("bytes", Schema.OPTIONAL_BYTES_SCHEMA);
final String fName1 = "Alex";
final String lName1 = "Smith";
final int age1 = 21;
final boolean bool1 = true;
final short s1 = 1234;
final byte b1 = -32;
final long l1 = 12425436;
final float f1 = (float) 2356.3;
final double d1 = -2436546.56457;
final byte[] bs1 = new byte[]{-32, 124};
Struct struct1 = new Struct(schema)
.put("firstName", fName1)
.put("lastName", lName1)
.put("bool", bool1)
.put("short", s1)
.put("byte", b1)
.put("long", l1)
.put("float", f1)
.put("double", d1)
.put("bytes", bs1)
.put("age", age1);
final String topic = "topic";
final int partition = 2;
List<SinkRecord> records = new ArrayList<>(100);
for (int i = 0; i < 100; ++i) {
records.add(new SinkRecord(topic, partition, null, null, schema, struct1, i));
}
Map<String, StructFieldsDataExtractor> map = new HashMap<>();
Map<String, FieldAlias> fields = new HashMap<>();
fields.put(FieldsMappings.CONNECT_AUTO_ID_COLUMN, new FieldAlias(FieldsMappings.CONNECT_AUTO_ID_COLUMN, true));
map.put(topic.toLowerCase(),
new StructFieldsDataExtractor(new FieldsMappings(tableName, topic, true, fields,
true, true)));
List<DbTable> dbTables = Lists.newArrayList();
DatabaseMetadata dbMetadata = new DatabaseMetadata(null, dbTables);
HikariDataSource ds = HikariHelper.from(SQL_LITE_URI, null, null);
DatabaseChangesExecutor executor = new DatabaseChangesExecutor(
ds,
Sets.<String>newHashSet(tableName),
Sets.<String>newHashSet(),
dbMetadata,
new SQLiteDialect(),
1);
JdbcDbWriter writer = new JdbcDbWriter(
ds,
new BatchedPreparedStatementBuilder(map, new InsertQueryBuilder(new SQLiteDialect())),
new ThrowErrorHandlingPolicy(),
executor,
10);
writer.write(records);
String query = "SELECT * FROM " + tableName + " ORDER BY firstName";
SqlLiteHelper.ResultSetReadCallback callback = new SqlLiteHelper.ResultSetReadCallback() {
int index = 0;
@Override
public void read(ResultSet rs) throws SQLException {
if (index < 100) {
assertEquals(rs.getString(FieldsMappings.CONNECT_AUTO_ID_COLUMN), String.format("%s.%s.%d", topic, partition, index));
assertEquals(rs.getString("firstName"), fName1);
assertEquals(rs.getString("lastName"), lName1);
assertEquals(rs.getBoolean("bool"), bool1);
assertEquals(rs.getShort("short"), s1);
assertEquals(rs.getByte("byte"), b1);
assertEquals(rs.getLong("long"), l1);
assertEquals(Float.compare(rs.getFloat("float"), f1), 0);
assertEquals(Double.compare(rs.getDouble("double"), d1), 0);
assertTrue(Arrays.equals(rs.getBytes("bytes"), bs1));
assertEquals(rs.getInt("age"), age1);
} else throw new RuntimeException(String.format("%d is too high", index));
index++;
}
};
SqlLiteHelper.select(SQL_LITE_URI, query, callback);
} | #vulnerable code
@Test
public void handleBatchStatementPerRecordInsertingWithAutoCreatedColumnForPK() throws SQLException {
String tableName = "batch_100_auto_create_column";
Schema schema = SchemaBuilder.struct().name("com.example.Person")
.field("firstName", Schema.OPTIONAL_STRING_SCHEMA)
.field("lastName", Schema.OPTIONAL_STRING_SCHEMA)
.field("age", Schema.OPTIONAL_INT32_SCHEMA)
.field("bool", Schema.OPTIONAL_BOOLEAN_SCHEMA)
.field("short", Schema.OPTIONAL_INT16_SCHEMA)
.field("byte", Schema.OPTIONAL_INT8_SCHEMA)
.field("long", Schema.OPTIONAL_INT64_SCHEMA)
.field("float", Schema.OPTIONAL_FLOAT32_SCHEMA)
.field("double", Schema.OPTIONAL_FLOAT64_SCHEMA)
.field("bytes", Schema.OPTIONAL_BYTES_SCHEMA);
final String fName1 = "Alex";
final String lName1 = "Smith";
final int age1 = 21;
final boolean bool1 = true;
final short s1 = 1234;
final byte b1 = -32;
final long l1 = 12425436;
final float f1 = (float) 2356.3;
final double d1 = -2436546.56457;
final byte[] bs1 = new byte[]{-32, 124};
Struct struct1 = new Struct(schema)
.put("firstName", fName1)
.put("lastName", lName1)
.put("bool", bool1)
.put("short", s1)
.put("byte", b1)
.put("long", l1)
.put("float", f1)
.put("double", d1)
.put("bytes", bs1)
.put("age", age1);
final String topic = "topic";
final int partition = 2;
List<SinkRecord> records = new ArrayList<>(100);
for (int i = 0; i < 100; ++i) {
records.add(new SinkRecord(topic, partition, null, null, schema, struct1, i));
}
Map<String, StructFieldsDataExtractor> map = new HashMap<>();
Map<String, FieldAlias> fields = new HashMap<>();
fields.put(FieldsMappings.CONNECT_AUTO_ID_COLUMN, new FieldAlias(FieldsMappings.CONNECT_AUTO_ID_COLUMN, true));
map.put(topic.toLowerCase(),
new StructFieldsDataExtractor(new FieldsMappings(tableName, topic, true, fields,
true, true)));
List<DbTable> dbTables = Lists.newArrayList();
DatabaseMetadata dbMetadata = new DatabaseMetadata(null, dbTables);
HikariDataSource ds = HikariHelper.from(SQL_LITE_URI, null, null);
DatabaseChangesExecutor executor = new DatabaseChangesExecutor(
ds,
Sets.<String>newHashSet(tableName),
Sets.<String>newHashSet(),
dbMetadata,
new SQLiteDialect(),
1);
JdbcDbWriter writer = new JdbcDbWriter(
ds,
new BatchedPreparedStatementBuilder(map, new InsertQueryBuilder()),
new ThrowErrorHandlingPolicy(),
executor,
10);
writer.write(records);
String query = "SELECT * FROM " + tableName + " ORDER BY firstName";
SqlLiteHelper.ResultSetReadCallback callback = new SqlLiteHelper.ResultSetReadCallback() {
int index = 0;
@Override
public void read(ResultSet rs) throws SQLException {
if (index < 100) {
assertEquals(rs.getString(FieldsMappings.CONNECT_AUTO_ID_COLUMN), String.format("%s.%s.%d", topic, partition, index));
assertEquals(rs.getString("firstName"), fName1);
assertEquals(rs.getString("lastName"), lName1);
assertEquals(rs.getBoolean("bool"), bool1);
assertEquals(rs.getShort("short"), s1);
assertEquals(rs.getByte("byte"), b1);
assertEquals(rs.getLong("long"), l1);
assertEquals(Float.compare(rs.getFloat("float"), f1), 0);
assertEquals(Double.compare(rs.getDouble("double"), d1), 0);
assertTrue(Arrays.equals(rs.getBytes("bytes"), bs1));
assertEquals(rs.getInt("age"), age1);
} else throw new RuntimeException(String.format("%d is too high", index));
index++;
}
};
SqlLiteHelper.select(SQL_LITE_URI, query, callback);
}
#location 46
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void handleAllFieldsMappingSetting() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table, InsertModeEnum.INSERT);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size());
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
} | #vulnerable code
@Test
public void handleAllFieldsMappingSetting() {
List<DbTableColumn> columns = Lists.newArrayList(
new DbTableColumn("col1", true, false, 1),
new DbTableColumn("col2", false, false, 1),
new DbTableColumn("col3", false, false, 1));
DbTable table = new DbTable("tableA", columns);
Map<String, FieldAlias> aliasMap = new HashMap<>();
FieldsMappings mappings = new FieldsMappings("tableA", "topic1", true, aliasMap);
FieldsMappings newMappings = PreparedStatementBuilderHelper.validateAndMerge(mappings, table);
assertEquals(newMappings.getTableName(), mappings.getTableName());
assertEquals(newMappings.getIncomingTopic(), mappings.getIncomingTopic());
assertEquals(newMappings.areAllFieldsIncluded(), false);
Map<String, FieldAlias> newAliasMap = newMappings.getMappings();
assertEquals(3, newAliasMap.size());
assertTrue(newAliasMap.containsKey("col1"));
assertEquals(newAliasMap.get("col1").getName(), "col1");
assertEquals(newAliasMap.get("col1").isPrimaryKey(), true);
assertTrue(newAliasMap.containsKey("col2"));
assertEquals(newAliasMap.get("col2").getName(), "col2");
assertEquals(newAliasMap.get("col2").isPrimaryKey(), false);
assertTrue(newAliasMap.containsKey("col3"));
assertEquals(newAliasMap.get("col3").getName(), "col3");
assertEquals(newAliasMap.get("col3").isPrimaryKey(), false);
}
#location 21
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 31
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void listenForEventPackets() throws IOException {
ByteArrayInputStream inputStream = channel.getInputStream();
try {
while (inputStream.peek() != -1) {
int packetLength = inputStream.readInteger(3);
inputStream.skip(1); // 1 byte for sequence
int marker = inputStream.read();
if (marker == 0xFF) {
ErrorPacket errorPacket = new ErrorPacket(inputStream.read(packetLength - 1));
throw new IOException(errorPacket.getErrorCode() + " - " + errorPacket.getErrorMessage());
}
Event event;
try {
event = eventDeserializer.nextEvent(inputStream);
} catch (Exception e) {
if (isConnected()) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onEventDeserializationFailure(this, e);
}
}
}
continue;
}
if (isConnected()) {
notifyEventListeners(event);
updateClientBinlogFilenameAndPosition(event);
}
}
} catch (Exception e) {
if (isConnected()) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onCommunicationFailure(this, e);
}
}
}
} finally {
if (isConnected()) {
disconnectChannel();
}
}
} | #vulnerable code
private void listenForEventPackets() throws IOException {
ByteArrayInputStream inputStream = channel.getInputStream();
try {
while (inputStream.peek() != -1) {
int packetLength = inputStream.readInteger(3);
inputStream.skip(1); // 1 byte for sequence
int marker = inputStream.read();
byte[] bytes = inputStream.read(packetLength - 1);
if (marker == 0xFF) {
ErrorPacket errorPacket = new ErrorPacket(bytes);
throw new IOException(errorPacket.getErrorCode() + " - " + errorPacket.getErrorMessage());
}
Event event;
try {
event = eventDeserializer.nextEvent(new ByteArrayInputStream(bytes));
} catch (Exception e) {
if (isConnected()) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onEventDeserializationFailure(this, e);
}
}
}
continue;
}
if (isConnected()) {
notifyEventListeners(event);
updateClientBinlogFilenameAndPosition(event);
}
}
} catch (Exception e) {
if (isConnected()) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onCommunicationFailure(this, e);
}
}
}
} finally {
if (isConnected()) {
disconnectChannel();
}
}
}
#location 15
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 8
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void listenForEventPackets() throws IOException {
latch.countDown();
ByteArrayInputStream inputStream = channel.getInputStream();
try {
while (true) {
try {
inputStream.peek();
} catch (SocketException e) {
if (!connected) {
break;
}
throw e;
}
int packetLength = inputStream.readInteger(3);
inputStream.skip(1); // 1 byte for sequence
int marker = inputStream.read();
byte[] bytes = inputStream.read(packetLength - 1);
if (marker == 0xFF) {
ErrorPacket errorPacket = new ErrorPacket(bytes);
throw new IOException(errorPacket.getErrorCode() + " - " + errorPacket.getErrorMessage());
}
Event event = eventDeserializer.nextEvent(new ByteArrayInputStream(bytes));
notifyEventListeners(event);
}
} finally {
disconnect();
}
} | #vulnerable code
private void listenForEventPackets() throws IOException {
latch.countDown();
ByteArrayInputStream inputStream = channel.getInputStream();
while (channel.isOpen()) {
int packetLength = inputStream.readInteger(3);
inputStream.skip(2); // 1 byte for sequence and 1 for marker
ByteArrayInputStream eventByteArray = new ByteArrayInputStream(inputStream.read(packetLength - 1));
Event event = eventDeserializer.nextEvent(eventByteArray);
notifyEventListeners(event);
}
}
#location 8
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 29
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
GreetingPacket greetingPacket = new GreetingPacket(channel.read());
authenticate(greetingPacket.getScramble(), greetingPacket.getServerCollation());
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
channel.write(new DumpBinaryLogCommand(serverId, binlogFilename, binlogPosition));
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
if (logger.isLoggable(Level.INFO)) {
logger.info("Connected to " + hostname + ":" + port + " at " + binlogFilename + "/" + binlogPosition);
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
EventDataDeserializer eventDataDeserializer = eventDeserializer.getEventDataDeserializer(EventType.ROTATE);
if (eventDataDeserializer.getClass() != RotateEventDataDeserializer.class &&
eventDataDeserializer.getClass() != EventDeserializer.EventDataWrapper.Deserializer.class) {
eventDeserializer.setEventDataDeserializer(EventType.ROTATE,
new EventDeserializer.EventDataWrapper.Deserializer(new RotateEventDataDeserializer(),
eventDataDeserializer));
}
listenForEventPackets();
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
GreetingPacket greetingPacket = new GreetingPacket(channel.read());
authenticate(greetingPacket.getScramble(), greetingPacket.getServerCollation());
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
channel.write(new DumpBinaryLogCommand(serverId, binlogFilename, binlogPosition));
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
if (logger.isLoggable(Level.INFO)) {
logger.info("Connected to " + hostname + ":" + port + " at " + binlogFilename + "/" + binlogPosition);
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
listenForEventPackets();
}
#location 51
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 21
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void disconnect() throws IOException {
terminateKeepAliveThread();
terminateConnect();
} | #vulnerable code
public void disconnect() throws IOException {
shutdownLock.lock();
try {
if (isKeepAliveThreadRunning()) {
keepAliveThreadExecutor.shutdownNow();
}
disconnectChannel();
} finally {
shutdownLock.unlock();
}
if (isKeepAliveThreadRunning()) {
waitForKeepAliveThreadToBeTerminated();
}
}
#location 7
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 33
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 58
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 47
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 64
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 19
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 10
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 55
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void connect() throws IOException {
if (!connectLock.tryLock()) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
boolean notifyWhenDisconnected = false;
try {
try {
channel = openChannel();
GreetingPacket greetingPacket = receiveGreeting();
authenticate(greetingPacket);
connectionId = greetingPacket.getThreadId();
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
disconnectChannel();
throw e;
}
connected = true;
notifyWhenDisconnected = true;
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
} finally {
connectLock.unlock();
if (notifyWhenDisconnected) {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
} | #vulnerable code
public void connect() throws IOException {
if (connected) {
throw new IllegalStateException("BinaryLogClient is already connected");
}
GreetingPacket greetingPacket;
try {
try {
Socket socket = socketFactory != null ? socketFactory.createSocket() : new Socket();
socket.connect(new InetSocketAddress(hostname, port));
channel = new PacketChannel(socket);
if (channel.getInputStream().peek() == -1) {
throw new EOFException();
}
} catch (IOException e) {
throw new IOException("Failed to connect to MySQL on " + hostname + ":" + port +
". Please make sure it's running.", e);
}
greetingPacket = receiveGreeting();
authenticate(greetingPacket);
if (binlogFilename == null) {
fetchBinlogFilenameAndPosition();
}
if (binlogPosition < 4) {
if (logger.isLoggable(Level.WARNING)) {
logger.warning("Binary log position adjusted from " + binlogPosition + " to " + 4);
}
binlogPosition = 4;
}
ChecksumType checksumType = fetchBinlogChecksum();
if (checksumType != ChecksumType.NONE) {
confirmSupportOfChecksum(checksumType);
}
requestBinaryLogStream();
} catch (IOException e) {
if (channel != null && channel.isOpen()) {
channel.close();
}
throw e;
}
connected = true;
connectionId = greetingPacket.getThreadId();
if (logger.isLoggable(Level.INFO)) {
String position;
synchronized (gtidSetAccessLock) {
position = gtidSet != null ? gtidSet.toString() : binlogFilename + "/" + binlogPosition;
}
logger.info("Connected to " + hostname + ":" + port + " at " + position +
" (" + (blocking ? "sid:" + serverId + ", " : "") + "cid:" + connectionId + ")");
}
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onConnect(this);
}
}
if (keepAlive && !isKeepAliveThreadRunning()) {
spawnKeepAliveThread();
}
ensureEventDataDeserializer(EventType.ROTATE, RotateEventDataDeserializer.class);
synchronized (gtidSetAccessLock) {
if (gtidSet != null) {
ensureEventDataDeserializer(EventType.GTID, GtidEventDataDeserializer.class);
}
}
listenForEventPackets();
}
#location 18
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testChecksumNONE() throws Exception {
EventDeserializer eventDeserializer = new EventDeserializer();
BinaryLogFileReader reader = new BinaryLogFileReader(
new FileInputStream("src/test/resources/mysql-bin.checksum-none"), eventDeserializer);
readAll(reader, 191);
} | #vulnerable code
@Test
public void testChecksumNONE() throws Exception {
EventDeserializer eventDeserializer = new EventDeserializer();
BinaryLogFileReader reader = new BinaryLogFileReader(new GZIPInputStream(
new FileInputStream("src/test/resources/mysql-bin.sakila.gz")), eventDeserializer);
readAll(reader, 1462);
}
#location 4
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void disconnect() throws IOException {
shutdownLock.lock();
try {
if (isKeepAliveThreadRunning()) {
keepAliveThreadExecutor.shutdownNow();
}
disconnectChannel();
} finally {
shutdownLock.unlock();
}
if (isKeepAliveThreadRunning()) {
waitForKeepAliveThreadToBeTerminated();
}
} | #vulnerable code
public void disconnect() throws IOException {
try {
connected = false;
if (channel != null) {
channel.close();
}
} finally {
synchronized (lifecycleListeners) {
for (LifecycleListener lifecycleListener : lifecycleListeners) {
lifecycleListener.onDisconnect(this);
}
}
}
}
#location 5
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private List<WordFrequency> loadFrequencies(final String input) {
try {
final FrequencyAnalyzer frequencyAnalyzer = new FrequencyAnalyzer();
frequencyAnalyzer.setWordFrequenciesToReturn(cliParameters.getWordCount());
frequencyAnalyzer.setMinWordLength(cliParameters.getMinWordLength());
frequencyAnalyzer.setStopWords(cliParameters.getStopWords());
frequencyAnalyzer.setCharacterEncoding(cliParameters.getCharacterEncoding());
if (cliParameters.getNormalizers().isEmpty()) {
cliParameters.getNormalizers().addAll(Arrays.asList(NormalizerType.TRIM, NormalizerType.CHARACTER_STRIPPING, NormalizerType.LOWERCASE));
}
for (final NormalizerType normalizer : cliParameters.getNormalizers()) {
frequencyAnalyzer.addNormalizer(buildNormalizer(normalizer));
}
frequencyAnalyzer.setWordTokenizer(buildTokenizer());
return frequencyAnalyzer.load(toInputStream(input));
} catch (final IOException e) {
throw new RuntimeException(e.getMessage(), e);
}
} | #vulnerable code
private List<WordFrequency> loadFrequencies(final String input) {
try {
final FrequencyAnalyzer frequencyAnalyzer = new FrequencyAnalyzer();
frequencyAnalyzer.setWordFrequenciesToReturn(cliParameters.getWordCount());
frequencyAnalyzer.setMinWordLength(cliParameters.getMinWordLength());
frequencyAnalyzer.setStopWords(cliParameters.getStopWords());
frequencyAnalyzer.setCharacterEncoding(cliParameters.getCharacterEncoding());
for (final NormalizerType normalizer : cliParameters.getNormalizers()) {
frequencyAnalyzer.setNormalizer(buildNormalizer(normalizer));
}
frequencyAnalyzer.setWordTokenizer(buildTokenizer());
return frequencyAnalyzer.load(toInputStream(input));
} catch (final IOException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
#location 13
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
protected void preOperation(AipRequest request) {
if (needAuth()) {
getAccessToken(config);
}
request.setHttpMethod(HttpMethodName.POST);
request.addHeader(Headers.CONTENT_TYPE, HttpContentType.FORM_URLENCODE_DATA);
request.addHeader("accept", "*/*");
request.setConfig(config);
} | #vulnerable code
protected void preOperation(AipRequest request) {
if (needAuth()) {
getAccessToken();
}
request.setHttpMethod(HttpMethodName.POST);
request.addHeader(Headers.CONTENT_TYPE, HttpContentType.FORM_URLENCODE_DATA);
request.addHeader("accept", "*/*");
request.setConfig(config);
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testSyncHandleTimeout() throws Exception {
RpcFuture<String> rpcFuture = new RpcFuture<String>(timeout, methodInfo, null, channelInfo, rpcClient);
try {
rpcFuture.get(100, TimeUnit.MILLISECONDS);
} catch (RpcException ex2) {
Assert.assertTrue(ex2.getCode() == RpcException.TIMEOUT_EXCEPTION);
}
} | #vulnerable code
@Test
public void testSyncHandleTimeout() throws Exception {
RpcFuture rpcFuture = new RpcFuture<String>(timeout, methodInfo, null, channelInfo, rpcClient);
Response resp = rpcFuture.get(100, TimeUnit.MILLISECONDS);
assertThat(resp.getException(), instanceOf(RpcException.class));
assertThat(((RpcException) resp.getException()).getCode(), is(RpcException.TIMEOUT_EXCEPTION));
}
#location 5
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public Response decodeResponse(Object msg, ChannelHandlerContext ctx) {
FullHttpResponse httpResponse = (FullHttpResponse) msg;
try {
ChannelInfo channelInfo = ChannelInfo.getClientChannelInfo(ctx.channel());
Long correlationId = parseCorrelationId(httpResponse.headers().get(CORRELATION_ID), channelInfo.getCorrelationId());
HttpResponse response = new HttpResponse();
response.setCorrelationId(correlationId);
RpcFuture future = channelInfo.removeRpcFuture(response.getCorrelationId());
if (future == null) {
return response;
}
response.setRpcFuture(future);
if (!httpResponse.status().equals(HttpResponseStatus.OK)) {
LOG.warn("status={}", httpResponse.status());
response.setException(new RpcException(RpcException.SERVICE_EXCEPTION,
"http status=" + httpResponse.status()));
return response;
}
int bodyLen = httpResponse.content().readableBytes();
byte[] bytes = new byte[bodyLen];
httpResponse.content().readBytes(bytes);
String contentTypeAndEncoding = httpResponse.headers().get(HttpHeaderNames.CONTENT_TYPE).toLowerCase();
String[] splits = StringUtils.split(contentTypeAndEncoding, ";");
int protocolType = HttpRpcProtocol.parseProtocolType(splits[0]);
String encoding = this.encoding;
// 由于uc服务返回的encoding是错误的,所以这里以client端设置的encoding为准。
// for (String split : splits) {
// split = split.trim();
// if (split.startsWith("charset=")) {
// encoding = split.substring("charset=".length());
// }
// }
Object body = null;
if (bodyLen != 0) {
try {
body = decodeBody(protocolType, encoding, bytes);
} catch (Exception ex) {
LOG.error("decode response body failed");
response.setException(ex);
return response;
}
}
if (body != null) {
try {
response.setResult(parseHttpResponse(body, future.getRpcMethodInfo()));
} catch (Exception ex) {
LOG.error("failed to parse result from HTTP body");
response.setException(ex);
}
} else {
response.setResult(null);
}
// set response attachment
if (response.getKvAttachment() == null) {
response.setKvAttachment(new HashMap<String, Object>());
}
for (Map.Entry<String, String> entry : httpResponse.headers()) {
response.getKvAttachment().put(entry.getKey(), entry.getValue());
}
return response;
} finally {
httpResponse.release();
}
} | #vulnerable code
@Override
public Response decodeResponse(Object msg, ChannelHandlerContext ctx) {
FullHttpResponse httpResponse = (FullHttpResponse) msg;
try {
ChannelInfo channelInfo = ChannelInfo.getClientChannelInfo(ctx.channel());
Long logId = parseLogId(httpResponse.headers().get(LOG_ID), channelInfo.getLogId());
HttpResponse response = new HttpResponse();
response.setLogId(logId);
RpcFuture future = channelInfo.removeRpcFuture(response.getLogId());
if (future == null) {
return response;
}
response.setRpcFuture(future);
if (!httpResponse.status().equals(HttpResponseStatus.OK)) {
LOG.warn("status={}", httpResponse.status());
response.setException(new RpcException(RpcException.SERVICE_EXCEPTION,
"http status=" + httpResponse.status()));
return response;
}
int bodyLen = httpResponse.content().readableBytes();
byte[] bytes = new byte[bodyLen];
httpResponse.content().readBytes(bytes);
String contentTypeAndEncoding = httpResponse.headers().get(HttpHeaderNames.CONTENT_TYPE).toLowerCase();
String[] splits = StringUtils.split(contentTypeAndEncoding, ";");
int protocolType = HttpRpcProtocol.parseProtocolType(splits[0]);
String encoding = this.encoding;
// 由于uc服务返回的encoding是错误的,所以这里以client端设置的encoding为准。
// for (String split : splits) {
// split = split.trim();
// if (split.startsWith("charset=")) {
// encoding = split.substring("charset=".length());
// }
// }
Object body = null;
if (bodyLen != 0) {
try {
body = decodeBody(protocolType, encoding, bytes);
} catch (Exception ex) {
LOG.error("decode response body failed");
response.setException(ex);
return response;
}
}
if (body != null) {
try {
response.setResult(parseHttpResponse(body, future.getRpcMethodInfo()));
} catch (Exception ex) {
LOG.error("failed to parse result from HTTP body");
response.setException(ex);
}
} else {
response.setResult(null);
}
// set response attachment
if (response.getKvAttachment() == null) {
response.setKvAttachment(new HashMap<String, Object>());
}
for (Map.Entry<String, String> entry : httpResponse.headers()) {
response.getKvAttachment().put(entry.getKey(), entry.getValue());
}
return response;
} finally {
httpResponse.release();
}
}
#location 8
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testSyncHandleSuccessfulResponse() throws Exception {
RpcFuture<String> rpcFuture = new RpcFuture<String>(timeout, methodInfo, null, channelInfo, rpcClient);
RpcResponse response = new RpcResponse();
response.setResult("hello world");
rpcFuture.handleResponse(response);
String resp = rpcFuture.get(1, TimeUnit.SECONDS);
assertThat(resp, is("hello world"));
} | #vulnerable code
@Test
public void testSyncHandleSuccessfulResponse() throws Exception {
RpcFuture rpcFuture = new RpcFuture<String>(timeout, methodInfo, null, channelInfo, rpcClient);
RpcResponse response = new RpcResponse();
response.setResult("hello world");
rpcFuture.handleResponse(response);
Response resp = rpcFuture.get(1, TimeUnit.SECONDS);
assertThat((String) resp.getResult(), is("hello world"));
}
#location 8
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public ThreadPool getOrCreateClientWorkThreadPool(String serviceName, boolean isSharing, int threadNum) {
if (isSharing) {
if (defaultWorkThreadPool == null) {
synchronized (BrpcThreadPoolManager.class) {
if (defaultWorkThreadPool == null) {
defaultWorkThreadPool = new ThreadPool(threadNum,
new CustomThreadFactory("brpc-client-work-thread-default"));
}
}
}
return defaultWorkThreadPool;
}
ThreadPool threadPool;
if ((threadPool = workThreadPoolMap.get(serviceName)) == null) {
synchronized (serviceName.intern()) {
if ((threadPool = workThreadPoolMap.get(serviceName)) == null) {
threadPool = new ThreadPool(threadNum,
new CustomThreadFactory("brpc-client-work-thread-" + serviceName));
workThreadPoolMap.put(serviceName, threadPool);
}
}
}
return threadPool;
} | #vulnerable code
public ThreadPool getOrCreateClientWorkThreadPool(String serviceName, boolean isSharing, int threadNum) {
if (isSharing) {
if (defaultWorkThreadPool == null) {
synchronized (BrpcThreadPoolManager.class) {
if (defaultWorkThreadPool == null) {
defaultWorkThreadPool = new ThreadPool(threadNum,
new CustomThreadFactory("brpc-client-work-thread-default"));
}
}
}
return defaultWorkThreadPool;
}
ThreadPool threadPool = workThreadPoolMap.get(serviceName);
if (threadPool != null) {
return threadPool;
}
threadPool = new ThreadPool(threadNum,
new CustomThreadFactory("brpc-client-work-thread-" + serviceName));
workThreadPoolMap.put(serviceName, threadPool);
return threadPool;
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void parseRpcExporterAnnotation(RpcExporter rpcExporter,
ConfigurableListableBeanFactory beanFactory,
Object bean) {
Class<?> serviceClass = AopUtils.getTargetClass(bean);
Class<?>[] interfaces = ClassUtils.getAllInterfacesForClass(serviceClass);
if (interfaces.length != 1) {
throw new RuntimeException("service interface num must equal 1, " + serviceClass.getName());
}
Class<?> serviceInterface = interfaces[0];
BrpcConfig brpcConfig = getServiceConfig(beanFactory, serviceInterface);
// if there are multi service on one port, the first service configs effect only.
Integer port = brpcConfig.getServer().getPort();
RpcServiceExporter rpcServiceExporter = portMappingExporters.get(port);
if (rpcServiceExporter == null) {
rpcServiceExporter = new RpcServiceExporter();
portMappingExporters.put(port, rpcServiceExporter);
rpcServiceExporter.setServicePort(port);
rpcServiceExporter.copyFrom(brpcConfig.getServer());
if (brpcConfig.getNaming() != null) {
rpcServiceExporter.setNamingServiceUrl(brpcConfig.getNaming().getNamingServiceUrl());
}
}
// interceptor
if (brpcConfig.getServer() != null
&& StringUtils.isNoneBlank(brpcConfig.getServer().getInterceptorBeanName())) {
Interceptor interceptor = beanFactory.getBean(
brpcConfig.getServer().getInterceptorBeanName(), Interceptor.class);
if (rpcServiceExporter.getInterceptors() != null &&
!rpcServiceExporter.getInterceptors().contains(interceptor)) {
rpcServiceExporter.getInterceptors().add(interceptor);
} else {
List<Interceptor> interceptors = new ArrayList<>();
interceptors.add(interceptor);
rpcServiceExporter.setInterceptors(interceptors); // must be immutable
}
}
// naming options
rpcServiceExporter.getServiceNamingOptions().put(bean, brpcConfig.getNaming());
if (brpcConfig.getServer() != null && brpcConfig.getServer().isUseSharedThreadPool()) {
rpcServiceExporter.getCustomOptionsServiceMap().put(brpcConfig.getServer(), bean);
} else {
rpcServiceExporter.getRegisterServices().add(bean);
}
if (protobufRpcAnnotationResolverListener != null) {
protobufRpcAnnotationResolverListener.onRpcExporterAnnotationParsered(
rpcExporter, port, bean, rpcServiceExporter.getRegisterServices());
}
} | #vulnerable code
private void parseRpcExporterAnnotation(RpcExporter rpcExporter,
ConfigurableListableBeanFactory beanFactory,
Object bean) {
Class<?> serviceClass = AopUtils.getTargetClass(bean);
Class<?>[] interfaces = ClassUtils.getAllInterfacesForClass(serviceClass);
if (interfaces.length != 1) {
throw new RuntimeException("service interface num must equal 1, " + serviceClass.getName());
}
Class<?> serviceInterface = interfaces[0];
BrpcConfig brpcConfig = getServiceConfig(beanFactory, serviceInterface);
// if there are multi service on one port, the first service configs effect only.
Integer port = brpcConfig.getServer().getPort();
RpcServiceExporter rpcServiceExporter = portMappingExporters.get(port);
if (rpcServiceExporter == null) {
rpcServiceExporter = new RpcServiceExporter();
portMappingExporters.put(port, rpcServiceExporter);
rpcServiceExporter.setServicePort(port);
rpcServiceExporter.copyFrom(brpcConfig.getServer());
if (brpcConfig.getNaming() != null) {
rpcServiceExporter.setNamingServiceUrl(brpcConfig.getNaming().getNamingServiceUrl());
}
}
// interceptor
if (brpcConfig.getServer() != null
&& StringUtils.isNoneBlank(brpcConfig.getServer().getInterceptorBeanName())) {
Interceptor interceptor = beanFactory.getBean(
brpcConfig.getServer().getInterceptorBeanName(), Interceptor.class);
if (rpcServiceExporter.getInterceptors() != null) {
rpcServiceExporter.getInterceptors().add(interceptor);
} else {
rpcServiceExporter.setInterceptors(Arrays.asList(interceptor));
}
}
// naming options
rpcServiceExporter.getServiceNamingOptions().put(bean, brpcConfig.getNaming());
if (brpcConfig.getServer() != null && brpcConfig.getServer().isUseSharedThreadPool()) {
rpcServiceExporter.getCustomOptionsServiceMap().put(brpcConfig.getServer(), bean);
} else {
rpcServiceExporter.getRegisterServices().add(bean);
}
if (protobufRpcAnnotationResolverListener != null) {
protobufRpcAnnotationResolverListener.onRpcExporterAnnotationParsered(
rpcExporter, port, bean, rpcServiceExporter.getRegisterServices());
}
}
#location 13
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public EventLoopGroup getOrCreateClientIoThreadPool(String serviceName, boolean isSharing,
int threadNum, int ioEventType) {
if (isSharing) {
if (defaultIoThreadPool == null) {
synchronized (BrpcThreadPoolManager.class) {
if (defaultIoThreadPool == null) {
defaultIoThreadPool = createClientIoThreadPool(
threadNum, "brpc-client-io-thread-default", ioEventType);
}
}
}
return defaultIoThreadPool;
}
EventLoopGroup threadPool;
if ((threadPool = ioThreadPoolMap.get(serviceName)) == null) {
synchronized (serviceName.intern()) {
if ((threadPool = ioThreadPoolMap.get(serviceName)) == null) {
threadPool = createClientIoThreadPool(
threadNum, "brpc-client-io-thread-" + serviceName, ioEventType);
EventLoopGroup prev = ioThreadPoolMap.putIfAbsent(serviceName, threadPool);
if (prev != null) {
log.warn("brpc io thread pool exist for service:{}", serviceName);
threadPool.shutdownGracefully().awaitUninterruptibly();
}
}
}
}
return threadPool;
} | #vulnerable code
public EventLoopGroup getOrCreateClientIoThreadPool(String serviceName, boolean isSharing,
int threadNum, int ioEventType) {
if (isSharing) {
if (defaultIoThreadPool == null) {
synchronized (BrpcThreadPoolManager.class) {
if (defaultIoThreadPool == null) {
defaultIoThreadPool = createClientIoThreadPool(
threadNum, "brpc-client-io-thread-default", ioEventType);
}
}
}
return defaultIoThreadPool;
}
EventLoopGroup threadPool = ioThreadPoolMap.get(serviceName);
if (threadPool != null) {
return threadPool;
}
threadPool = createClientIoThreadPool(
threadNum, "brpc-client-io-thread-" + serviceName, ioEventType);
EventLoopGroup prev = ioThreadPoolMap.putIfAbsent(serviceName, threadPool);
if (prev != null) {
log.warn("brpc io thread pool exist for service:{}", serviceName);
threadPool.shutdownGracefully().awaitUninterruptibly();
}
return threadPool;
}
#location 12
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public ByteBuf encodeResponse(Request request, Response response) {
FullHttpRequest httpRequest = (FullHttpRequest) request.getMsg();
FullHttpResponse httpResponse = null;
try {
byte[] responseBytes;
if (response.getException() != null) {
httpResponse = new DefaultFullHttpResponse(
HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR);
responseBytes = response.getException().toString().getBytes();
} else {
httpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
int protocolType = Integer.parseInt(httpRequest.headers().get(PROTOCOL_TYPE));
Object body = makeResponse(protocolType, response);
// encode body
try {
responseBytes = encodeBody(protocolType,
httpRequest.headers().get(HttpHeaderNames.CONTENT_ENCODING),
body, response.getRpcMethodInfo());
} catch (Exception e) {
LOG.warn("encode response failed", e);
response.setException(e);
httpResponse = new DefaultFullHttpResponse(
HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR);
responseBytes = response.getException().toString().getBytes();
}
}
httpResponse.content().writeBytes(responseBytes);
addHttpResponseHeaders(httpResponse, response, httpRequest);
// encode full http response
BrpcHttpResponseEncoder encoder = new BrpcHttpResponseEncoder();
return encoder.encode(httpResponse);
} catch (Exception e) {
LOG.warn("encode response failed", e);
response.setException(e);
return null;
} finally {
if (httpResponse != null) {
httpResponse.release();
}
}
} | #vulnerable code
@Override
public ByteBuf encodeResponse(Request request, Response response) {
FullHttpRequest httpRequest = (FullHttpRequest) request.getMsg();
FullHttpResponse httpResponse = null;
try {
if (response.getException() != null) {
httpResponse =
new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR);
} else {
httpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
addHttpResponseHeaders(httpResponse, response, httpRequest);
int protocolType = Integer.parseInt(httpRequest.headers().get(PROTOCOL_TYPE));
Object body = makeResponse(protocolType, response);
// encode body
try {
byte[] responseBytes = encodeBody(protocolType,
httpRequest.headers().get(HttpHeaderNames.CONTENT_ENCODING),
body, response.getRpcMethodInfo());
httpResponse.content().writeBytes(responseBytes);
httpResponse.headers().set(HttpHeaderNames.CONTENT_LENGTH, responseBytes.length);
} catch (Exception e) {
LOG.warn("encode response failed", e);
response.setException(e);
httpResponse =
new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
}
// encode full http response
BrpcHttpResponseEncoder encoder = new BrpcHttpResponseEncoder();
return encoder.encode(httpResponse);
} catch (Exception e) {
LOG.warn("encode response failed", e);
response.setException(e);
return null;
} finally {
if (httpResponse != null) {
httpResponse.release();
}
}
}
#location 21
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public Response executeWithRetry(Request request) {
Response response = null;
RpcException exception = null;
int currentTryTimes = 0;
int maxTryTimes = rpcClient.getRpcClientOptions().getMaxTryTimes();
while (currentTryTimes < maxTryTimes) {
try {
// if it is a retry request, add the last selected instance to request,
// so that load balance strategy can exclude the selected instance.
// if it is the initial request, not init HashSet, so it is more fast.
// therefore, it need LoadBalanceStrategy to judge if selectInstances is null.
if (currentTryTimes > 0) {
if (request.getChannel() != null) {
if (request.getSelectedInstances() == null) {
request.setSelectedInstances(new HashSet<CommunicationClient>(maxTryTimes - 1));
}
request.getSelectedInstances().add(request.getCommunicationClient());
}
}
response = rpcClient.execute(request, rpcClient.getCommunicationOptions());
break;
} catch (RpcException ex) {
exception = ex;
if (exception.getCode() == RpcException.INTERCEPT_EXCEPTION) {
break;
}
} finally {
currentTryTimes++;
}
}
if (response == null || (response.getResult() == null && response.getRpcFuture() == null)) {
if (exception == null) {
exception = new RpcException(RpcException.UNKNOWN_EXCEPTION, "unknown error");
}
throw exception;
}
return response;
} | #vulnerable code
public Response executeWithRetry(Request request) {
Response response = null;
RpcException exception = null;
int currentTryTimes = 0;
int maxTryTimes = rpcClient.getRpcClientOptions().getMaxTryTimes();
while (currentTryTimes < maxTryTimes) {
try {
// if it is a retry request, add the last selected instance to request,
// so that load balance strategy can exclude the selected instance.
// if it is the initial request, not init HashSet, so it is more fast.
// therefore, it need LoadBalanceStrategy to judge if selectInstances is null.
if (currentTryTimes > 0) {
if (request.getChannel() != null) {
if (request.getSelectedInstances() == null) {
request.setSelectedInstances(new HashSet<CommunicationClient>(maxTryTimes - 1));
}
request.getSelectedInstances().add(request.getCommunicationClient());
}
}
response = rpcClient.execute(request, rpcClient.getCommunicationOptions());
break;
} catch (RpcException ex) {
exception = ex;
if (exception.getCode() == RpcException.INTERCEPT_EXCEPTION) {
break;
}
} finally {
currentTryTimes++;
}
}
if (response.getResult() == null && response.getRpcFuture() == null) {
if (exception == null) {
exception = new RpcException(RpcException.UNKNOWN_EXCEPTION, "unknown error");
}
throw exception;
}
return response;
}
#location 32
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testSyncHandleFailResponse() throws Exception {
RpcFuture<String> rpcFuture = new RpcFuture<String>(timeout, methodInfo, null, channelInfo, rpcClient);
RpcResponse response = new RpcResponse();
RuntimeException ex = new RuntimeException("dummy");
response.setException(ex);
rpcFuture.handleResponse(response);
try {
rpcFuture.get(1, TimeUnit.SECONDS);
} catch (RpcException ex2) {
Assert.assertTrue(ex2.getCause() == ex);
}
} | #vulnerable code
@Test
public void testSyncHandleFailResponse() throws Exception {
RpcFuture rpcFuture = new RpcFuture<String>(timeout, methodInfo, null, channelInfo, rpcClient);
RpcResponse response = new RpcResponse();
RuntimeException ex = new RuntimeException("dummy");
response.setException(ex);
rpcFuture.handleResponse(response);
Response resp = rpcFuture.get(1, TimeUnit.SECONDS);
assertThat((RuntimeException) resp.getException(), is(ex));
}
#location 9
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public VerbalExpression replace(String source, String value) {
this.updatePattern();
this.source.replaceAll(pattern,value);
return this;
} | #vulnerable code
public VerbalExpression replace(String source, String value) {
this.add("");
this.source.replaceAll(pattern,value);
return this;
}
#location 3
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void collectLibraryInputFiles()
{
if ( parsedIncludeJdkLibs )
{
final String slash = File.separator;
// we have to add the Java framework classes to the library JARs, since they are not
// distributed with the JAR on Central, and since we'll strip them out of the android.jar
// that is shipped with the SDK (since that is not a complete Java distribution)
String javaHome = System.getProperty( "java.home" );
String jdkLibsPath = null;
if ( isMacOSXJDKbyApple( javaHome ) )
{
// MacOS X uses different naming conventions for JDK installations
jdkLibsPath = appleJDKLibsPath( javaHome );
addLibraryJar( jdkLibsPath + "/classes.jar" );
}
else
{
jdkLibsPath = javaHome + slash + "lib";
addLibraryJar( jdkLibsPath + slash + "rt.jar" );
}
// we also need to add the JAR containing e.g. javax.servlet
addLibraryJar( jdkLibsPath + slash + "jsse.jar" );
// and the javax.crypto stuff
addLibraryJar( jdkLibsPath + slash + "jce.jar" );
}
// we treat any dependencies with provided scope as library JARs
for ( Artifact artifact : project.getArtifacts() )
{
if ( artifact.getScope().equals( JavaScopes.PROVIDED ) )
{
if ( artifact.getArtifactId().equals( "android" ) && parsedIncludeJdkLibs )
{
addLibraryJar( artifact.getFile().getAbsolutePath(), ANDROID_LIBRARY_EXCLUDED_FILTER );
}
else
{
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
else
{
if ( isShiftedArtifact( artifact ) )
{
// this is a blacklisted artifact that should be processed as a library instead
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
}
} | #vulnerable code
private void collectLibraryInputFiles()
{
if ( parsedIncludeJdkLibs )
{
final String slash = File.separator;
// we have to add the Java framework classes to the library JARs, since they are not
// distributed with the JAR on Central, and since we'll strip them out of the android.jar
// that is shipped with the SDK (since that is not a complete Java distribution)
String javaHome = System.getProperty( "java.home" );
String jdkLibsPath = null;
if ( javaHome.startsWith( "/System/Library/Java" ) || javaHome.startsWith( "/Library/Java" ) )
{
// MacOS X uses different naming conventions for JDK installations
jdkLibsPath = javaHome + "/../Classes";
addLibraryJar( jdkLibsPath + "/classes.jar" );
}
else
{
jdkLibsPath = javaHome + slash + "lib";
addLibraryJar( jdkLibsPath + slash + "rt.jar" );
}
// we also need to add the JAR containing e.g. javax.servlet
addLibraryJar( jdkLibsPath + slash + "jsse.jar" );
// and the javax.crypto stuff
addLibraryJar( jdkLibsPath + slash + "jce.jar" );
}
// we treat any dependencies with provided scope as library JARs
for ( Artifact artifact : project.getArtifacts() )
{
if ( artifact.getScope().equals( JavaScopes.PROVIDED ) )
{
if ( artifact.getArtifactId().equals( "android" ) && parsedIncludeJdkLibs )
{
addLibraryJar( artifact.getFile().getAbsolutePath(), ANDROID_LIBRARY_EXCLUDED_FILTER );
}
else
{
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
else
{
if ( isShiftedArtifact( artifact ) )
{
// this is a blacklisted artifact that should be processed as a library instead
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
}
}
#location 11
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public File getPlatform()
{
assertPathIsDirectory( sdkPath );
final File platformsDirectory = new File( sdkPath, PLATFORMS_FOLDER_NAME );
assertPathIsDirectory( platformsDirectory );
final File platformDirectory;
if ( androidTarget == null )
{
IAndroidTarget latestTarget = null;
for ( IAndroidTarget target: sdkManager.getTargets() )
{
if ( target.isPlatform() )
{
if ( latestTarget == null
|| target.getVersion().getApiLevel() > latestTarget.getVersion().getApiLevel() )
{
latestTarget = target;
}
}
}
platformDirectory = new File ( latestTarget.getLocation() );
}
else
{
platformDirectory = new File( androidTarget.getLocation() );
}
assertPathIsDirectory( platformDirectory );
return platformDirectory;
} | #vulnerable code
public File getPlatform()
{
assertPathIsDirectory( sdkPath );
final File platformsDirectory = new File( sdkPath, PLATFORMS_FOLDER_NAME );
assertPathIsDirectory( platformsDirectory );
final File platformDirectory;
if ( platform == null )
{
final File[] platformDirectories = platformsDirectory.listFiles();
Arrays.sort( platformDirectories );
platformDirectory = platformDirectories[ platformDirectories.length - 1 ];
}
else
{
platformDirectory = new File( platform.path );
}
assertPathIsDirectory( platformDirectory );
return platformDirectory;
}
#location 13
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void execute() throws MojoExecutionException, MojoFailureException {
CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
executor.setLogger(this.getLog());
if (androidManifestFile == null) {
androidManifestFile = new File(resourceDirectory.getParent(), "AndroidManifest.xml");
}
Artifact artifact = artifactFactory.createArtifact("android", "android", androidVersion, "jar", "jar");
ArtifactRepositoryLayout defaultLayout = new DefaultRepositoryLayout();
File androidJar = new File(localRepository, defaultLayout.pathOf(artifact));
artifact.setFile(androidJar);
File outputFile = new File(project.getBuild().getDirectory(), project.getBuild().getFinalName() + ".ap_");
List<String> commands = new ArrayList<String>();
commands.add("package");
commands.add("-f");
commands.add("-M");
commands.add(androidManifestFile.getAbsolutePath());
if (resourceDirectory.exists()) {
commands.add("-S");
commands.add(resourceDirectory.getAbsolutePath());
}
commands.add("-I");
commands.add(androidJar.getAbsolutePath());
commands.add("-F");
commands.add(outputFile.getAbsolutePath());
getLog().info("aapt " + commands.toString());
try {
executor.executeCommand("aapt", commands, project.getBasedir(), false);
} catch (ExecutionException e) {
throw new MojoExecutionException("", e);
}
/*
File dexClassesFile = new File(project.getBuild().getDirectory(), project.getBuild().getFinalName() + ".classes-dex");
ZipOutputStream os = null;
InputStream is = null;
try {
ZipFile zipFile = new ZipFile(tmpOutputFile);
os = new ZipOutputStream(new FileOutputStream(outputFile));
for (ZipEntry entry : (List<ZipEntry>) Collections.list(zipFile.entries())) {
os.putNextEntry(new ZipEntry(entry.getName()));
is = zipFile.getInputStream(entry);
byte[] buffer = new byte[1024];
int i;
while ((i = is.read(buffer)) > 0) {
os.write(buffer, 0, i);
}
is.close();
}
os.putNextEntry(new ZipEntry("classes.dex"));
is = new FileInputStream(dexClassesFile);
byte[] buffer = new byte[1024];
int i;
while ((i = is.read(buffer)) > 0) {
os.write(buffer, 0, i);
}
is.close();
os.close();
} catch (IOException e) {
throw new MojoExecutionException("", e);
}
finally {
if (os != null) {
try {
os.close();
} catch (IOException e) {
}
}
if (is != null) {
try {
is.close();
} catch (IOException e) {
}
}
}
*/
// project.getArtifact().setFile(outputFile);
} | #vulnerable code
public void execute() throws MojoExecutionException, MojoFailureException {
CommandExecutor executor = CommandExecutor.Factory.createDefaultCommmandExecutor();
executor.setLogger(this.getLog());
if (androidManifestFile == null) {
androidManifestFile = new File(resourceDirectory.getParent(), "AndroidManifest.xml");
}
File tmpOutputFile;
try {
tmpOutputFile = File.createTempFile("android", "apk");
} catch (IOException e) {
throw new MojoExecutionException("", e);
}
Artifact artifact = artifactFactory.createArtifact("android", "android", androidVersion, "jar", "jar");
ArtifactRepositoryLayout defaultLayout = new DefaultRepositoryLayout();
File androidJar = new File(localRepository, defaultLayout.pathOf(artifact));
artifact.setFile(androidJar);
tmpOutputFile.deleteOnExit();
File outputFile = new File(project.getBuild().getDirectory(), project.getArtifactId() + "-"
+ project.getVersion() + ".apk");
List<String> commands = new ArrayList<String>();
commands.add("package");
commands.add("-f");
commands.add("-M");
commands.add(androidManifestFile.getAbsolutePath());
if (resourceDirectory.exists()) {
commands.add("-S");
commands.add(resourceDirectory.getAbsolutePath());
}
commands.add("-I");
commands.add(androidJar.getAbsolutePath());
commands.add("-F");
commands.add(tmpOutputFile.getAbsolutePath());
getLog().info("aapt " + commands.toString());
try {
executor.executeCommand("aapt", commands, project.getBasedir(), false);
} catch (ExecutionException e) {
throw new MojoExecutionException("", e);
}
File dexClassesFile = new File(project.getBasedir(), "target" + File.separator + project.getArtifactId() + "-"
+ project.getVersion() + "-classes.dex");
ZipOutputStream os = null;
InputStream is = null;
try {
ZipFile zipFile = new ZipFile(tmpOutputFile);
os = new ZipOutputStream(new FileOutputStream(outputFile));
for (ZipEntry entry : (List<ZipEntry>) Collections.list(zipFile.entries())) {
os.putNextEntry(new ZipEntry(entry.getName()));
is = zipFile.getInputStream(entry);
byte[] buffer = new byte[1024];
int i;
while ((i = is.read(buffer)) > 0) {
os.write(buffer, 0, i);
}
is.close();
}
os.putNextEntry(new ZipEntry("classes.dex"));
is = new FileInputStream(dexClassesFile);
byte[] buffer = new byte[1024];
int i;
while ((i = is.read(buffer)) > 0) {
os.write(buffer, 0, i);
}
is.close();
os.close();
} catch (IOException e) {
throw new MojoExecutionException("", e);
}
finally {
if (os != null) {
try {
os.close();
} catch (IOException e) {
}
}
if (is != null) {
try {
is.close();
} catch (IOException e) {
}
}
}
project.getArtifact().setFile(outputFile);
}
#location 13
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void collectLibraryInputFiles()
{
if ( parsedIncludeJdkLibs )
{
// we have to add the Java framework classes to the library JARs, since they are not
// distributed with the JAR on Central, and since we'll strip them out of the android.jar
// that is shipped with the SDK (since that is not a complete Java distribution)
File rtJar = getJVMLibrary( "rt.jar" );
if ( rtJar == null )
{
rtJar = getJVMLibrary( "classes.jar" );
}
if ( rtJar != null )
{
addLibraryJar( rtJar.getPath() );
}
// we also need to add the JAR containing e.g. javax.servlet
File jsseJar = getJVMLibrary( "jsse.jar" );
if ( jsseJar != null )
{
addLibraryJar( jsseJar.getPath() );
}
// and the javax.crypto stuff
File jceJar = getJVMLibrary( "jce.jar" );
if ( jceJar != null )
{
addLibraryJar( jceJar.getPath() );
}
}
// we treat any dependencies with provided scope as library JARs
for ( Artifact artifact : project.getArtifacts() )
{
if ( artifact.getScope().equals( JavaScopes.PROVIDED ) )
{
if ( artifact.getArtifactId().equals( "android" ) && parsedIncludeJdkLibs )
{
addLibraryJar( artifact.getFile().getAbsolutePath(), ANDROID_LIBRARY_EXCLUDED_FILTER );
}
else
{
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
else
{
if ( isShiftedArtifact( artifact ) )
{
// this is a blacklisted artifact that should be processed as a library instead
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
}
} | #vulnerable code
private void collectLibraryInputFiles()
{
if ( parsedIncludeJdkLibs )
{
final String slash = File.separator;
// we have to add the Java framework classes to the library JARs, since they are not
// distributed with the JAR on Central, and since we'll strip them out of the android.jar
// that is shipped with the SDK (since that is not a complete Java distribution)
String javaHome = System.getProperty( "java.home" );
String jdkLibsPath = null;
if ( isMacOSXJDKbyApple( javaHome ) )
{
// MacOS X uses different naming conventions for JDK installations
jdkLibsPath = appleJDKLibsPath( javaHome );
addLibraryJar( jdkLibsPath + "/classes.jar" );
}
else
{
jdkLibsPath = javaHome + slash + "lib";
addLibraryJar( jdkLibsPath + slash + "rt.jar" );
}
// we also need to add the JAR containing e.g. javax.servlet
addLibraryJar( jdkLibsPath + slash + "jsse.jar" );
// and the javax.crypto stuff
addLibraryJar( jdkLibsPath + slash + "jce.jar" );
}
// we treat any dependencies with provided scope as library JARs
for ( Artifact artifact : project.getArtifacts() )
{
if ( artifact.getScope().equals( JavaScopes.PROVIDED ) )
{
if ( artifact.getArtifactId().equals( "android" ) && parsedIncludeJdkLibs )
{
addLibraryJar( artifact.getFile().getAbsolutePath(), ANDROID_LIBRARY_EXCLUDED_FILTER );
}
else
{
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
else
{
if ( isShiftedArtifact( artifact ) )
{
// this is a blacklisted artifact that should be processed as a library instead
addLibraryJar( artifact.getFile().getAbsolutePath() );
}
}
}
}
#location 11
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void runLint() throws MojoExecutionException
{
IssueRegistry registry = new BuiltinIssueRegistry();
LintCliFlags flags = new LintCliFlags();
flags.setQuiet( false );
LintCliClient client = new LintCliClient( flags );
try
{
if ( isNotNull( parsedIgnoreWarnings ) )
{
flags.setIgnoreWarnings( parsedIgnoreWarnings );
}
if ( isNotNull( parsedWarnAll ) )
{
flags.setCheckAllWarnings( parsedWarnAll );
}
if ( isNotNull( parsedWarningsAsErrors ) )
{
flags.setWarningsAsErrors( parsedWarningsAsErrors );
}
if ( isNotNullAndNotEquals( parsedConfig, "null" ) )
{
flags.setDefaultConfiguration( new File( parsedConfig ) );
}
if ( isNotNull( parsedFullPath ) )
{
flags.setFullPath( parsedFullPath );
}
if ( isNotNull( parsedShowAll ) )
{
flags.setShowEverything( parsedShowAll );
}
if ( isNotNull( parsedDisableSourceLines ) )
{
flags.setShowSourceLines( !parsedDisableSourceLines );
}
if ( isNotNullAndTrue( parsedEnableHtml ) )
{
File outHtml = new File( parsedHtmlOutputPath );
flags.getReporters().add( new MultiProjectHtmlReporter( client, outHtml ) );
getLog().info( "Writing Lint HTML report in " + parsedHtmlOutputPath );
}
if ( isNotNullAndNotEquals( parsedUrl, "none" ) )
{
// TODO what is this?
// parameters.add( "--url" );
// parameters.add( parsedUrl );
}
if ( isNotNullAndTrue( parsedEnableSimpleHtml ) )
{
File outSimpleHtml = new File( parsedSimpleHtmlOutputPath );
flags.getReporters().add( new MultiProjectHtmlReporter( client, outSimpleHtml ) );
getLog().info( "Writing Lint simple HTML report in " + parsedSimpleHtmlOutputPath );
}
if ( isNotNullAndTrue( parsedEnableXml ) )
{
flags.getReporters().add( new XmlReporter( client, new File( parsedXmlOutputPath ) ) );
getLog().info( "Writing Lint XML report in " + parsedXmlOutputPath );
}
if ( isNotNullAndTrue( parsedEnableSources ) )
{
// TODO what is this?
// parameters.add( "--sources" );
// parameters.add( parsedSources );
}
if ( isNotNullAndTrue( parsedEnableClasspath ) )
{
// TODO what is this?
// parameters.add( "--classpath" );
// parameters.add( parsedClasspath );
}
if ( isNotNullAndTrue( parsedEnableLibraries ) )
{
// TODO libraries
// parameters.add( "--libraries" );
// parameters.add( parsedLibraries );
}
List< File > files = new ArrayList< File >();
files.add( resourceDirectory );
files.add( androidManifestFile );
files.add( sourceDirectory );
files.add( assetsDirectory );
client.run( registry, files );
}
catch ( IOException ex )
{
throw new MojoExecutionException( ex.getMessage(), ex );
}
} | #vulnerable code
private void runLint()
{
IssueRegistry registry = new BuiltinIssueRegistry();
LintCliFlags flags = new LintCliFlags();
flags.setQuiet( false );
LintCliClient client = new LintCliClient( flags );
File outHtmlFile = new File( getHtmlOutputPath() );
try
{
File outFile = new File( getHtmlOutputPath() + "/lint-results.txt" );
outFile.createNewFile();
FileOutputStream out = new FileOutputStream( outFile );
TextReporter reporter = new TextReporter( client, flags, new PrintWriter( out, true ), false );
flags.getReporters().add( reporter );
MultiProjectHtmlReporter htmlReporter = new MultiProjectHtmlReporter( client, outHtmlFile );
flags.getReporters().add( htmlReporter );
List< File > files = new ArrayList< File >();
files.add( resourceDirectory );
files.add( androidManifestFile );
files.add( sourceDirectory );
files.add( assetsDirectory );
client.run( registry, files );
}
catch ( IOException ex )
{
// TODO Error
}
}
#location 16
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public MakefileHolder createMakefileFromArtifacts( File outputDir, Set<Artifact> artifacts,
String ndkArchitecture,
boolean useHeaderArchives )
throws IOException, MojoExecutionException
{
final StringBuilder makeFile = new StringBuilder( "# Generated by Android Maven Plugin\n" );
final List<File> includeDirectories = new ArrayList<File>();
// Add now output - allows us to somewhat intelligently determine the include paths to use for the header
// archive
makeFile.append( "$(shell echo \"LOCAL_C_INCLUDES=$(LOCAL_C_INCLUDES)\" > $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_PATH=$(LOCAL_PATH)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE_FILENAME=$(LOCAL_MODULE_FILENAME)\" >> $("
+ MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE=$(LOCAL_MODULE)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_CFLAGS=$(LOCAL_CFLAGS)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
if ( ! artifacts.isEmpty() )
{
for ( Artifact artifact : artifacts )
{
final String architecture = NativeHelper.extractArchitectureFromArtifact( artifact );
makeFile.append( '\n' );
makeFile.append( "ifeq ($(TARGET_ARCH_ABI)," ).append( architecture ).append( ")\n" );
makeFile.append( "#\n" );
makeFile.append( "# Group ID: " );
makeFile.append( artifact.getGroupId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact ID: " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact Type: " );
makeFile.append( artifact.getType() );
makeFile.append( '\n' );
makeFile.append( "# Version: " );
makeFile.append( artifact.getVersion() );
makeFile.append( '\n' );
makeFile.append( "include $(CLEAR_VARS)" );
makeFile.append( '\n' );
makeFile.append( "LOCAL_MODULE := " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
final boolean apklibStatic = addLibraryDetails( makeFile, outputDir, artifact, ndkArchitecture );
if ( useHeaderArchives )
{
try
{
Artifact harArtifact = new DefaultArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getScope(), "har", artifact.getClassifier(),
artifact.getArtifactHandler() );
final Artifact resolvedHarArtifact = AetherHelper
.resolveArtifact( harArtifact, repoSystem, repoSession, projectRepos );
File includeDir = new File( System.getProperty( "java.io.tmpdir" ),
"android_maven_plugin_native_includes" + System.currentTimeMillis() + "_"
+ resolvedHarArtifact.getArtifactId() );
includeDir.deleteOnExit();
includeDirectories.add( includeDir );
JarHelper.unjar( new JarFile( resolvedHarArtifact.getFile() ), includeDir,
new JarHelper.UnjarListener()
{
@Override
public boolean include( JarEntry jarEntry )
{
return ! jarEntry.getName().startsWith( "META-INF" );
}
} );
makeFile.append( "LOCAL_EXPORT_C_INCLUDES := " );
final String str = includeDir.getAbsolutePath();
makeFile.append( str );
makeFile.append( '\n' );
if ( log.isDebugEnabled() )
{
Collection<File> includes = FileUtils.listFiles( includeDir,
TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE );
log.debug( "Listing LOCAL_EXPORT_C_INCLUDES for " + artifact.getId() + ": " + includes );
}
}
catch ( Exception e )
{
throw new MojoExecutionException(
"Error while resolving header archive file for: " + artifact.getArtifactId(), e );
}
}
if ( "a".equals( artifact.getType() ) || apklibStatic )
{
makeFile.append( "include $(PREBUILT_STATIC_LIBRARY)\n" );
}
else
{
makeFile.append( "include $(PREBUILT_SHARED_LIBRARY)\n" );
}
makeFile.append( "endif #" ).append( artifact.getClassifier() ).append( '\n' );
makeFile.append( '\n' );
}
}
return new MakefileHolder( includeDirectories, makeFile.toString() );
} | #vulnerable code
public MakefileHolder createMakefileFromArtifacts( File outputDir, Set<Artifact> artifacts,
String ndkArchitecture,
boolean useHeaderArchives )
throws IOException, MojoExecutionException
{
final StringBuilder makeFile = new StringBuilder( "# Generated by Android Maven Plugin\n" );
final List<File> includeDirectories = new ArrayList<File>();
// Add now output - allows us to somewhat intelligently determine the include paths to use for the header
// archive
makeFile.append( "$(shell echo \"LOCAL_C_INCLUDES=$(LOCAL_C_INCLUDES)\" > $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_PATH=$(LOCAL_PATH)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE_FILENAME=$(LOCAL_MODULE_FILENAME)\" >> $("
+ MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE=$(LOCAL_MODULE)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_CFLAGS=$(LOCAL_CFLAGS)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
if ( ! artifacts.isEmpty() )
{
for ( Artifact artifact : artifacts )
{
if ( artifact.hasClassifier() )
{
makeFile.append( '\n' );
makeFile.append( "ifeq ($(TARGET_ARCH_ABI)," ).append( artifact.getClassifier() ).append( ")\n" );
}
makeFile.append( "#\n" );
makeFile.append( "# Group ID: " );
makeFile.append( artifact.getGroupId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact ID: " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact Type: " );
makeFile.append( artifact.getType() );
makeFile.append( '\n' );
makeFile.append( "# Version: " );
makeFile.append( artifact.getVersion() );
makeFile.append( '\n' );
makeFile.append( "include $(CLEAR_VARS)" );
makeFile.append( '\n' );
makeFile.append( "LOCAL_MODULE := " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
final boolean apklibStatic = addLibraryDetails( makeFile, outputDir, artifact, ndkArchitecture );
if ( useHeaderArchives )
{
try
{
Artifact harArtifact = new DefaultArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getScope(), "har", artifact.getClassifier(),
artifact.getArtifactHandler() );
final Artifact resolvedHarArtifact = AetherHelper
.resolveArtifact( harArtifact, repoSystem, repoSession, projectRepos );
File includeDir = new File( System.getProperty( "java.io.tmpdir" ),
"android_maven_plugin_native_includes" + System.currentTimeMillis() + "_"
+ resolvedHarArtifact.getArtifactId() );
includeDir.deleteOnExit();
includeDirectories.add( includeDir );
JarHelper.unjar( new JarFile( resolvedHarArtifact.getFile() ), includeDir,
new JarHelper.UnjarListener()
{
@Override
public boolean include( JarEntry jarEntry )
{
return ! jarEntry.getName().startsWith( "META-INF" );
}
} );
makeFile.append( "LOCAL_EXPORT_C_INCLUDES := " );
final String str = includeDir.getAbsolutePath();
makeFile.append( str );
makeFile.append( '\n' );
if ( log.isDebugEnabled() )
{
Collection<File> includes = FileUtils.listFiles( includeDir,
TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE );
log.debug( "Listing LOCAL_EXPORT_C_INCLUDES for " + artifact.getId() + ": " + includes );
}
}
catch ( Exception e )
{
throw new MojoExecutionException(
"Error while resolving header archive file for: " + artifact.getArtifactId(), e );
}
}
if ( "a".equals( artifact.getType() ) || apklibStatic )
{
makeFile.append( "include $(PREBUILT_STATIC_LIBRARY)\n" );
}
else
{
makeFile.append( "include $(PREBUILT_SHARED_LIBRARY)\n" );
}
if ( artifact.hasClassifier() )
{
makeFile.append( "endif #" ).append( artifact.getClassifier() ).append( '\n' );
makeFile.append( '\n' );
}
}
}
return new MakefileHolder( includeDirectories, makeFile.toString() );
}
#location 71
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void generateBuildConfig() throws MojoExecutionException
{
getLog().debug( "Generating BuildConfig file" );
// Create the BuildConfig for our package.
String packageName = extractPackageNameFromAndroidManifest( androidManifestFile );
if ( StringUtils.isNotBlank( customPackage ) )
{
packageName = customPackage;
}
generateBuildConfigForPackage( packageName );
// Generate the BuildConfig for any APKLIB and AAR dependencies.
// Need to generate for AAR, because some old AARs like ActionBarSherlock do not have BuildConfig (or R)
for ( Artifact artifact : getTransitiveDependencyArtifacts( APKLIB, AAR ) )
{
if ( skipBuildConfigGeneration( artifact ) )
{
continue;
}
final File manifest = new File( getUnpackedLibFolder( artifact ), "AndroidManifest.xml" );
final String depPackageName = extractPackageNameFromAndroidManifest( manifest );
generateBuildConfigForPackage( depPackageName );
}
} | #vulnerable code
private void generateBuildConfig() throws MojoExecutionException
{
getLog().debug( "Generating BuildConfig file" );
// Create the BuildConfig for our package.
String packageName = extractPackageNameFromAndroidManifest( androidManifestFile );
if ( StringUtils.isNotBlank( customPackage ) )
{
packageName = customPackage;
}
generateBuildConfigForPackage( packageName );
try
{
// Generate the BuildConfig for any APKLIB and AAR dependencies.
// Need to generate for AAR, because some old AARs like ActionBarSherlock do not have BuildConfig (or R)
for ( Artifact artifact : getTransitiveDependencyArtifacts( APKLIB, AAR ) )
{
final File manifest = new File( getUnpackedLibFolder( artifact ), "AndroidManifest.xml" );
final String depPackageName = extractPackageNameFromAndroidManifest( manifest );
if ( artifact.getType().equals( AAR ) )
{
final JarFile jar = new JarFile( getUnpackedAarClassesJar( artifact ) );
final JarEntry entry = jar.getJarEntry( depPackageName.replace( '.', '/' ) + "/BuildConfig.class" );
if ( entry != null )
{
getLog().info( "Skip BuildConfig.java generation for "
+ artifact.getGroupId() + " " + artifact.getArtifactId() );
continue;
}
}
generateBuildConfigForPackage( depPackageName );
}
}
catch ( IOException e )
{
getLog().error( "Error generating BuildConfig ", e );
throw new MojoExecutionException( "Error generating BuildConfig", e );
}
}
#location 25
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public MakefileHolder createMakefileFromArtifacts( File outputDir, Set<Artifact> artifacts,
String ndkArchitecture,
boolean useHeaderArchives )
throws IOException, MojoExecutionException
{
final StringBuilder makeFile = new StringBuilder( "# Generated by Android Maven Plugin\n" );
final List<File> includeDirectories = new ArrayList<File>();
// Add now output - allows us to somewhat intelligently determine the include paths to use for the header
// archive
makeFile.append( "$(shell echo \"LOCAL_C_INCLUDES=$(LOCAL_C_INCLUDES)\" > $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_PATH=$(LOCAL_PATH)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE_FILENAME=$(LOCAL_MODULE_FILENAME)\" >> $("
+ MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE=$(LOCAL_MODULE)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_CFLAGS=$(LOCAL_CFLAGS)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
if ( ! artifacts.isEmpty() )
{
for ( Artifact artifact : artifacts )
{
if ( !NativeHelper.isMatchinArchitecture( ndkArchitecture, artifact ) )
{
continue;
}
makeFile.append( "#\n" );
makeFile.append( "# Group ID: " );
makeFile.append( artifact.getGroupId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact ID: " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact Type: " );
makeFile.append( artifact.getType() );
makeFile.append( '\n' );
makeFile.append( "# Version: " );
makeFile.append( artifact.getVersion() );
makeFile.append( '\n' );
makeFile.append( "include $(CLEAR_VARS)" );
makeFile.append( '\n' );
makeFile.append( "LOCAL_MODULE := " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
final boolean apklibStatic = addLibraryDetails( makeFile, outputDir, artifact, ndkArchitecture );
if ( useHeaderArchives )
{
try
{
Artifact harArtifact = new DefaultArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getScope(), "har", artifact.getClassifier(),
artifact.getArtifactHandler() );
final Artifact resolvedHarArtifact = AetherHelper
.resolveArtifact( harArtifact, repoSystem, repoSession, projectRepos );
File includeDir = new File( System.getProperty( "java.io.tmpdir" ),
"android_maven_plugin_native_includes" + System.currentTimeMillis() + "_"
+ resolvedHarArtifact.getArtifactId() );
includeDir.deleteOnExit();
includeDirectories.add( includeDir );
JarHelper.unjar( new JarFile( resolvedHarArtifact.getFile() ), includeDir,
new JarHelper.UnjarListener()
{
@Override
public boolean include( JarEntry jarEntry )
{
return ! jarEntry.getName().startsWith( "META-INF" );
}
} );
makeFile.append( "LOCAL_EXPORT_C_INCLUDES := " );
final String str = includeDir.getAbsolutePath();
makeFile.append( str );
makeFile.append( '\n' );
if ( log.isDebugEnabled() )
{
Collection<File> includes = FileUtils.listFiles( includeDir,
TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE );
log.debug( "Listing LOCAL_EXPORT_C_INCLUDES for " + artifact.getId() + ": " + includes );
}
}
catch ( Exception e )
{
throw new MojoExecutionException(
"Error while resolving header archive file for: " + artifact.getArtifactId(), e );
}
}
if ( "a".equals( artifact.getType() ) || apklibStatic )
{
makeFile.append( "include $(PREBUILT_STATIC_LIBRARY)\n" );
}
else
{
makeFile.append( "include $(PREBUILT_SHARED_LIBRARY)\n" );
}
}
}
return new MakefileHolder( includeDirectories, makeFile.toString() );
} | #vulnerable code
public MakefileHolder createMakefileFromArtifacts( File outputDir, Set<Artifact> artifacts,
String ndkArchitecture,
boolean useHeaderArchives )
throws IOException, MojoExecutionException
{
final StringBuilder makeFile = new StringBuilder( "# Generated by Android Maven Plugin\n" );
final List<File> includeDirectories = new ArrayList<File>();
// Add now output - allows us to somewhat intelligently determine the include paths to use for the header
// archive
makeFile.append( "$(shell echo \"LOCAL_C_INCLUDES=$(LOCAL_C_INCLUDES)\" > $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_PATH=$(LOCAL_PATH)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE_FILENAME=$(LOCAL_MODULE_FILENAME)\" >> $("
+ MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_MODULE=$(LOCAL_MODULE)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
makeFile.append( "$(shell echo \"LOCAL_CFLAGS=$(LOCAL_CFLAGS)\" >> $(" + MAKEFILE_CAPTURE_FILE + "))" );
makeFile.append( '\n' );
if ( ! artifacts.isEmpty() )
{
for ( Artifact artifact : artifacts )
{
boolean apklibStatic = false;
makeFile.append( "#\n" );
makeFile.append( "# Group ID: " );
makeFile.append( artifact.getGroupId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact ID: " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
makeFile.append( "# Artifact Type: " );
makeFile.append( artifact.getType() );
makeFile.append( '\n' );
makeFile.append( "# Version: " );
makeFile.append( artifact.getVersion() );
makeFile.append( '\n' );
makeFile.append( "include $(CLEAR_VARS)" );
makeFile.append( '\n' );
makeFile.append( "LOCAL_MODULE := " );
makeFile.append( artifact.getArtifactId() );
makeFile.append( '\n' );
apklibStatic = addLibraryDetails( makeFile, outputDir, artifact, ndkArchitecture );
if ( useHeaderArchives )
{
try
{
Artifact harArtifact = new DefaultArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getScope(), "har", artifact.getClassifier(),
artifact.getArtifactHandler() );
final Artifact resolvedHarArtifact = AetherHelper
.resolveArtifact( harArtifact, repoSystem, repoSession, projectRepos );
File includeDir = new File( System.getProperty( "java.io.tmpdir" ),
"android_maven_plugin_native_includes" + System.currentTimeMillis() + "_"
+ resolvedHarArtifact.getArtifactId() );
includeDir.deleteOnExit();
includeDirectories.add( includeDir );
JarHelper.unjar( new JarFile( resolvedHarArtifact.getFile() ), includeDir,
new JarHelper.UnjarListener()
{
@Override
public boolean include( JarEntry jarEntry )
{
return ! jarEntry.getName().startsWith( "META-INF" );
}
} );
makeFile.append( "LOCAL_EXPORT_C_INCLUDES := " );
final String str = includeDir.getAbsolutePath();
makeFile.append( str );
makeFile.append( '\n' );
if ( log.isDebugEnabled() )
{
Collection<File> includes = FileUtils.listFiles( includeDir,
TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE );
log.debug( "Listing LOCAL_EXPORT_C_INCLUDES for " + artifact.getId() + ": " + includes );
}
}
catch ( Exception e )
{
throw new MojoExecutionException(
"Error while resolving header archive file for: " + artifact.getArtifactId(), e );
}
}
if ( "a".equals( artifact.getType() ) || apklibStatic )
{
makeFile.append( "include $(PREBUILT_STATIC_LIBRARY)\n" );
}
else
{
makeFile.append( "include $(PREBUILT_SHARED_LIBRARY)\n" );
}
}
}
return new MakefileHolder( includeDirectories, makeFile.toString() );
}
#location 65
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void performVersionCodeUpdateFromVersion( Element manifestElement )
{
String verString = project.getVersion();
getLog().debug( "Generating versionCode for " + verString );
String verCode = generateVersionCodeFromVersionName( verString );
getLog().info( "Setting " + ATTR_VERSION_CODE + " to " + verCode );
manifestElement.setAttribute( ATTR_VERSION_CODE, verCode );
project.getProperties().setProperty( "android.manifest.versionCode", String.valueOf( verCode ) );
} | #vulnerable code
private void performVersionCodeUpdateFromVersion( Element manifestElement )
{
String verString = project.getVersion();
getLog().debug( "Generating versionCode for " + verString );
ArtifactVersion artifactVersion = new DefaultArtifactVersion( verString );
String verCode;
if ( artifactVersion.getMajorVersion() < 1 && artifactVersion.getMinorVersion() < 1
&& artifactVersion.getIncrementalVersion() < 1 )
{
getLog().warn( "Problem parsing version number occurred. Using fall back to determine version code. " );
verCode = verString.replaceAll( "\\D", "" );
Attr versionCodeAttr = manifestElement.getAttributeNode( ATTR_VERSION_CODE );
int currentVersionCode = 0;
if ( versionCodeAttr != null )
{
currentVersionCode = NumberUtils.toInt( versionCodeAttr.getValue(), 0 );
}
if ( Integer.parseInt( verCode ) < currentVersionCode )
{
getLog().info( verCode + " < " + currentVersionCode + " so padding versionCode" );
verCode = StringUtils.rightPad( verCode, versionCodeAttr.getValue().length(), "0" );
}
}
else
{
verCode = Integer.toString( artifactVersion.getMajorVersion() * MAJOR_VERSION_POSITION
+ artifactVersion.getMinorVersion() * MINOR_VERSION_POSITION
+ artifactVersion.getIncrementalVersion() * INCREMENTAL_VERSION_POSITION );
}
getLog().info( "Setting " + ATTR_VERSION_CODE + " to " + verCode );
manifestElement.setAttribute( ATTR_VERSION_CODE, verCode );
project.getProperties().setProperty( "android.manifest.versionCode", String.valueOf( verCode ) );
}
#location 24
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void updateWithMetaInf( ZipOutputStream zos, File jarFile, Set<String> entries, boolean metaInfOnly )
throws ZipException, IOException
{
ZipFile zin = new ZipFile( jarFile );
for( Enumeration<? extends ZipEntry> en = zin.entries(); en.hasMoreElements(); ) {
ZipEntry ze = en.nextElement();
if( ze.isDirectory() )
continue;
String zn = ze.getName();
if( metaInfOnly ) {
if( !zn.startsWith( "META-INF/" ) ) {
continue;
}
if( this.extractDuplicates && !entries.add( zn ) ) {
continue;
}
if( !metaInfMatches( zn ) ) {
continue;
}
}
zos.putNextEntry( new ZipEntry( zn ) );
InputStream is = zin.getInputStream( ze );
copyStreamWithoutClosing( is, zos );
is.close();
zos.closeEntry();
}
zin.close();
} | #vulnerable code
private void updateWithMetaInf( ZipOutputStream zos, File jarFile, Set<String> entries, boolean metaInfOnly )
throws ZipException, IOException
{
ZipFile zin = new ZipFile( jarFile );
for( Enumeration<? extends ZipEntry> en = zin.entries(); en.hasMoreElements(); ) {
ZipEntry ze = en.nextElement();
if( ze.isDirectory() )
continue;
String zn = ze.getName();
if( metaInfOnly ) {
if( !zn.startsWith( "META-INF/" ) ) {
continue;
}
if( !entries.add( zn ) ) {
continue;
}
if( !metaInfMatches( zn ) ) {
continue;
}
}
zos.putNextEntry( new ZipEntry( zn ) );
InputStream is = zin.getInputStream( ze );
copyStreamWithoutClosing( is, zos );
is.close();
zos.closeEntry();
}
zin.close();
}
#location 17
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void testTokenFilter() throws IOException{
StringReader sr = new StringReader("刘德华");
Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_41);
PinyinTokenFilter filter = new PinyinTokenFilter(analyzer.tokenStream("f",sr),"","none");
List<String> pinyin= new ArrayList<String>();
filter.reset();
while (filter.incrementToken())
{
CharTermAttribute ta = filter.getAttribute(CharTermAttribute.class);
pinyin.add(ta.toString());
}
// Assert.assertEquals(3,pinyin.size());
System.out.println(pinyin.get(0));
System.out.println(pinyin.get(1));
System.out.println(pinyin.get(2));
Assert.assertEquals("liu",pinyin.get(0));
Assert.assertEquals("de",pinyin.get(1));
Assert.assertEquals("hua",pinyin.get(2));
sr = new StringReader("刘德华");
analyzer = new KeywordAnalyzer();
filter = new PinyinTokenFilter(analyzer.tokenStream("f",sr),"","only");
pinyin.clear();
while (filter.incrementToken())
{
CharTermAttribute ta = filter.getAttribute(CharTermAttribute.class);
pinyin.add(ta.toString());
}
Assert.assertEquals(1,pinyin.size());
Assert.assertEquals("ldh",pinyin.get(0));
} | #vulnerable code
@Test
public void testTokenFilter() throws IOException{
StringReader sr = new StringReader("刘德华");
Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_36);
PinyinTokenFilter filter = new PinyinTokenFilter(analyzer.tokenStream("f",sr),"","none");
List<String> pinyin= new ArrayList<String>();
while (filter.incrementToken())
{
CharTermAttribute ta = filter.getAttribute(CharTermAttribute.class);
pinyin.add(ta.toString());
}
Assert.assertEquals(3,pinyin.size());
Assert.assertEquals("liu",pinyin.get(0));
Assert.assertEquals("de",pinyin.get(1));
Assert.assertEquals("hua",pinyin.get(2));
sr = new StringReader("刘德华");
analyzer = new KeywordAnalyzer();
filter = new PinyinTokenFilter(analyzer.tokenStream("f",sr),"","only");
pinyin.clear();
while (filter.incrementToken())
{
CharTermAttribute ta = filter.getAttribute(CharTermAttribute.class);
pinyin.add(ta.toString());
}
Assert.assertEquals(1,pinyin.size());
Assert.assertEquals("ldh",pinyin.get(0));
}
#location 19
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public V put(K key, V value) {
return (V)doOp(ClusterOperation.CONCURRENT_MAP_PUT, Serializer.toByte(key), Serializer.toByte(value));
} | #vulnerable code
public V put(K key, V value) {
Packet request = createRequestPacket();
request.setTxnId(0);
request.setOperation(ClusterOperation.CONCURRENT_MAP_PUT);
request.setKey(Serializer.toByte(key));
request.setValue(Serializer.toByte(value));
Packet response = callAndGetResult(request);
if(response.getValue()!=null){
return (V)Serializer.toObject(response.getValue());
}
return null;
}
#location 11
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void process(Object obj) {
long processStart = System.nanoTime();
if (obj instanceof Invocation) {
Invocation inv = (Invocation) obj;
MemberImpl memberFrom = getMember(inv.conn.getEndPoint());
if (memberFrom != null) {
memberFrom.didRead();
}
int operation = inv.operation;
if (operation < 50) {
ClusterManager.get().handle(inv);
} else if (operation < 300) {
ListenerManager.get().handle(inv);
} else if (operation < 400) {
ExecutorManager.get().handle(inv);
} else if (operation < 500) {
BlockingQueueManager.get().handle(inv);
} else if (operation < 600) {
ConcurrentMapManager.get().handle(inv);
} else
throw new RuntimeException("Unknown operation " + operation);
} else if (obj instanceof Processable) {
((Processable) obj).process();
} else if (obj instanceof Runnable) {
synchronized (obj) {
((Runnable) obj).run();
obj.notify();
}
} else
throw new RuntimeException("Unkown obj " + obj);
long processEnd = System.nanoTime();
long elipsedTime = processEnd - processStart;
totalProcessTime += elipsedTime;
long duration = (processEnd - start);
if (duration > UTILIZATION_CHECK_INTERVAL) {
if (DEBUG) {
System.out.println("ServiceProcessUtilization: " + ((totalProcessTime * 100) / duration) + " %");
}
start = processEnd;
totalProcessTime = 0;
}
} | #vulnerable code
public void process(Object obj) {
long processStart = System.nanoTime();
if (obj instanceof Invocation) {
Invocation inv = (Invocation) obj;
MemberImpl memberFrom = getMember(inv.conn.getEndPoint());
if (memberFrom != null) {
memberFrom.didRead();
}
int operation = inv.operation;
if (operation < 50) {
ClusterManager.get().handle(inv);
} else if (operation < 300) {
ListenerManager.get().handle(inv);
} else if (operation < 400) {
ExecutorManager.get().handle(inv);
} else if (operation < 500) {
BlockingQueueManager.get().handle(inv);
} else if (operation < 600) {
ConcurrentMapManager.get().handle(inv);
} else
throw new RuntimeException("Unknown operation " + operation);
} else if (obj instanceof Processable) {
((Processable) obj).process();
} else if (obj instanceof Runnable) {
synchronized (obj) {
((Runnable) obj).run();
obj.notify();
}
} else
throw new RuntimeException("Unkown obj " + obj);
long processEnd = System.nanoTime();
long elipsedTime = processEnd - processStart;
totalProcessTime += elipsedTime;
long duration = (processEnd - start);
if (duration > TimeUnit.SECONDS.toNanos(10)) {
if (DEBUG) {
System.out.println("ServiceProcessUtilization: " + ((totalProcessTime * 100) / duration) + " %");
}
start = processEnd;
totalProcessTime = 0;
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void run() {
while (running) {
Object obj = null;
try {
lsBuffer.clear();
queue.drainTo(lsBuffer);
int size = lsBuffer.size();
if (size > 0) {
for (int i = 0; i < size; i++) {
obj = lsBuffer.get(i);
checkHeartbeat();
process(obj);
}
lsBuffer.clear();
} else {
obj = queue.poll(100, TimeUnit.MILLISECONDS);
checkHeartbeat();
if (obj != null) {
process(obj);
}
}
} catch (InterruptedException e) {
Node.get().handleInterruptedException(Thread.currentThread(), e);
} catch (Throwable e) {
if (DEBUG) {
System.out.println(e + ", message: " + e + ", obj=" + obj);
}
e.printStackTrace(System.out);
}
}
} | #vulnerable code
public void run() {
while (running) {
Object obj = null;
try {
lsBuffer.clear();
queue.drainTo(lsBuffer);
int size = lsBuffer.size();
if (size > 0) {
for (int i = 0; i < size; i++) {
obj = lsBuffer.get(i);
process(obj);
}
lsBuffer.clear();
} else {
obj = queue.take();
process(obj);
}
} catch (InterruptedException e) {
Node.get().handleInterruptedException(Thread.currentThread(), e);
} catch (Throwable e) {
if (DEBUG) {
System.out.println(e + ", message: " + e + ", obj=" + obj);
}
e.printStackTrace(System.out);
}
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void handle(Invocation inv) {
try {
if (inv.operation == OP_RESPONSE) {
handleResponse(inv);
} else if (inv.operation == OP_REMOTELY_PROCESS_AND_RESPONSE) {
Data data = inv.doTake(inv.data);
RemotelyProcessable rp = (RemotelyProcessable) ThreadContext.get().toObject(data);
rp.setConnection(inv.conn);
rp.process();
sendResponse(inv);
} else if (inv.operation == OP_REMOTELY_PROCESS) {
Data data = inv.doTake(inv.data);
RemotelyProcessable rp = (RemotelyProcessable) ThreadContext.get().toObject(data);
rp.setConnection(inv.conn);
rp.process();
inv.returnToContainer();
} else
throw new RuntimeException("Unhandled message " + inv.name);
} catch (Exception e) {
log(e);
e.printStackTrace();
}
} | #vulnerable code
public void handle(Invocation inv) {
try {
if (inv.operation == OP_RESPONSE) {
handleResponse(inv);
} else if (inv.operation == OP_BIND) {
Address addressEndPoint = (Address) inv.getValueObject();
ConnectionManager.get().bind(addressEndPoint, inv.conn);
inv.returnToContainer();
} else if (inv.operation == OP_REMOTELY_PROCESS_AND_RESPONSE) {
Data data = inv.doTake(inv.data);
RemotelyProcessable rp = (RemotelyProcessable) ThreadContext.get().toObject(data);
rp.setConnection(inv.conn);
rp.process();
sendResponse(inv);
} else if (inv.operation == OP_REMOTELY_PROCESS) {
Data data = inv.doTake(inv.data);
RemotelyProcessable rp = (RemotelyProcessable) ThreadContext.get().toObject(data);
rp.setConnection(inv.conn);
rp.process();
inv.returnToContainer();
} else
throw new RuntimeException("Unhandled message " + inv.name);
} catch (Exception e) {
log(e);
e.printStackTrace();
}
}
#location 7
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public V get(Object key) {
return (V)doOp(ClusterOperation.CONCURRENT_MAP_GET, Serializer.toByte(key), null);
} | #vulnerable code
public V get(Object key) {
// MapGetCall mGet = new MapGetCall();
Packet request = createRequestPacket();
request.setOperation(ClusterOperation.CONCURRENT_MAP_GET);
request.setKey(Serializer.toByte(key));
Packet response = callAndGetResult(request);
if(response.getValue()!=null){
return (V)Serializer.toObject(response.getValue());
}
return null;
}
#location 9
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void run() {
while (running) {
Object obj = null;
try {
lsBuffer.clear();
queue.drainTo(lsBuffer);
int size = lsBuffer.size();
if (size > 0) {
for (int i = 0; i < size; i++) {
obj = lsBuffer.get(i);
checkHeartbeat();
process(obj);
}
lsBuffer.clear();
} else {
obj = queue.poll(100, TimeUnit.MILLISECONDS);
checkHeartbeat();
if (obj != null) {
process(obj);
}
}
} catch (InterruptedException e) {
Node.get().handleInterruptedException(Thread.currentThread(), e);
} catch (Throwable e) {
if (DEBUG) {
System.out.println(e + ", message: " + e + ", obj=" + obj);
}
e.printStackTrace(System.out);
}
}
} | #vulnerable code
public void run() {
while (running) {
Object obj = null;
try {
lsBuffer.clear();
queue.drainTo(lsBuffer);
int size = lsBuffer.size();
if (size > 0) {
for (int i = 0; i < size; i++) {
obj = lsBuffer.get(i);
process(obj);
}
lsBuffer.clear();
} else {
obj = queue.take();
process(obj);
}
} catch (InterruptedException e) {
Node.get().handleInterruptedException(Thread.currentThread(), e);
} catch (Throwable e) {
if (DEBUG) {
System.out.println(e + ", message: " + e + ", obj=" + obj);
}
e.printStackTrace(System.out);
}
}
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void readData(DataInput in) throws IOException {
numberOfPuts = in.readLong();
numberOfGets = in.readLong();
numberOfRemoves = in.readLong();
numberOfOtherOperations = in.readLong();
periodStart = in.readLong();
periodEnd = in.readLong();
} | #vulnerable code
public void readData(DataInput in) throws IOException {
mapPuts.set(in.readLong());
mapGets.set(in.readLong());
mapRemoves.set(in.readLong());
startTime = in.readLong();
endTime = in.readLong();
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void run() {
boolean readPackets = false;
boolean readProcessables = false;
while (running) {
readPackets = (dequeuePackets() != 0);
readProcessables = (dequeueProcessables() != 0);
if (!readPackets && !readProcessables) {
try {
synchronized (notEmptyLock) {
notEmptyLock.wait(100);
}
checkPeriodics();
} catch (InterruptedException e) {
node.handleInterruptedException(Thread.currentThread(), e);
}
}
}
packetQueue.clear();
processableQueue.clear();
} | #vulnerable code
public void run() {
boolean readPackets = false;
boolean readProcessables = false;
while (running) {
readPackets = (dequeuePackets() != 0);
readProcessables = (dequeueProcessables() != 0);
if (!readPackets && !readProcessables) {
enqueueLock.lock();
try {
notEmpty.await(100, TimeUnit.MILLISECONDS);
checkPeriodics();
} catch (InterruptedException e) {
node.handleInterruptedException(Thread.currentThread(), e);
} finally {
enqueueLock.unlock();
}
}
}
packetQueue.clear();
processableQueue.clear();
}
#location 11
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
void reArrangeBlocks() {
if (concurrentMapManager.isMaster()) {
Map<Address, Integer> addressBlocks = getCurrentMemberBlocks();
if (addressBlocks.size() == 0) {
return;
}
List<Block> lsBlocksToRedistribute = new ArrayList<Block>();
int aveBlockOwnCount = BLOCK_COUNT / (addressBlocks.size());
for (Block blockReal : blocks) {
if (blockReal.getOwner() == null) {
logger.log(Level.SEVERE, "Master cannot have null block owner " + blockReal);
return;
}
if (blockReal.isMigrating()) {
logger.log(Level.SEVERE, "Cannot have migrating block " + blockReal);
return;
}
Integer countInt = addressBlocks.get(blockReal.getOwner());
int count = (countInt == null) ? 0 : countInt;
if (count >= aveBlockOwnCount) {
lsBlocksToRedistribute.add(new Block(blockReal));
} else {
addressBlocks.put(blockReal.getOwner(), ++count);
}
}
Collection<Address> allAddress = addressBlocks.keySet();
lsBlocksToMigrate.clear();
for (Address address : allAddress) {
Integer countInt = addressBlocks.get(address);
int count = (countInt == null) ? 0 : countInt;
while (count < aveBlockOwnCount && lsBlocksToRedistribute.size() > 0) {
Block blockToMigrate = lsBlocksToRedistribute.remove(0);
if (!blockToMigrate.getOwner().equals(address)) {
blockToMigrate.setMigrationAddress(address);
lsBlocksToMigrate.add(blockToMigrate);
}
count++;
}
}
Collections.shuffle(lsBlocksToMigrate);
}
} | #vulnerable code
void migrateBlock(final Block blockInfo) {
if (!concurrentMapManager.isBlockInfoValid(blockInfo)) {
return;
}
if (!thisAddress.equals(blockInfo.getOwner())) {
throw new RuntimeException();
}
if (!blockInfo.isMigrating()) {
throw new RuntimeException();
}
if (blockInfo.getOwner().equals(blockInfo.getMigrationAddress())) {
throw new RuntimeException();
}
Block blockReal = blocks[blockInfo.getBlockId()];
if (blockReal.isMigrating()) {
if (!blockInfo.getMigrationAddress().equals(blockReal.getMigrationAddress())) {
logger.log(Level.WARNING, blockReal + ". Already migrating blockInfo is migrating again to " + blockInfo);
} else {
logger.log(Level.WARNING, blockInfo + " migration unknown " + blockReal);
}
return;
}
blockReal.setOwner(blockInfo.getOwner());
blockReal.setMigrationAddress(blockInfo.getMigrationAddress());
logger.log(Level.FINEST, "migrate blockInfo " + blockInfo);
if (!node.isActive() || node.factory.restarted) {
return;
}
if (concurrentMapManager.isSuperClient()) {
return;
}
List<Record> lsRecordsToMigrate = new ArrayList<Record>(1000);
Collection<CMap> cmaps = concurrentMapManager.maps.values();
for (final CMap cmap : cmaps) {
if (cmap.locallyOwnedMap != null) {
cmap.locallyOwnedMap.reset();
}
final Object[] records = cmap.ownedRecords.toArray();
for (Object recObj : records) {
final Record rec = (Record) recObj;
if (rec.isActive()) {
if (rec.getKey() == null || rec.getKey().size() == 0) {
throw new RuntimeException("Record.key is null or empty " + rec.getKey());
}
if (rec.getBlockId() == blockInfo.getBlockId()) {
lsRecordsToMigrate.add(rec);
cmap.markAsRemoved(rec);
}
}
}
}
final CountDownLatch latch = new CountDownLatch(lsRecordsToMigrate.size());
for (final Record rec : lsRecordsToMigrate) {
final CMap cmap = concurrentMapManager.getMap(rec.getName());
node.executorManager.executeMigrationTask(new FallThroughRunnable() {
public void doRun() {
try {
concurrentMapManager.migrateRecord(cmap, rec);
} finally {
latch.countDown();
}
}
});
}
node.executorManager.executeMigrationTask(new FallThroughRunnable() {
public void doRun() {
try {
logger.log(Level.FINEST, "migrate blockInfo " + blockInfo + " await ");
latch.await(10, TimeUnit.SECONDS);
concurrentMapManager.enqueueAndReturn(new Processable() {
public void process() {
Block blockReal = blocks[blockInfo.getBlockId()];
logger.log(Level.FINEST, "migrate completing [" + blockInfo + "] realBlock " + blockReal);
blockReal.setOwner(blockReal.getMigrationAddress());
blockReal.setMigrationAddress(null);
logger.log(Level.FINEST, "migrate complete [" + blockInfo.getMigrationAddress() + "] now realBlock " + blockReal);
for (MemberImpl member : concurrentMapManager.lsMembers) {
if (!member.localMember()) {
concurrentMapManager.sendBlockInfo(new Block(blockReal), member.getAddress());
}
}
}
});
} catch (InterruptedException ignored) {
}
}
});
}
#location 5
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
void syncForDead(Address addressDead) {
MemberImpl member = getNextMemberBeforeSync(addressDead, true, 1);
if (DEBUG) {
log(addressDead + " is dead and its backup was " + member);
}
Address addressNewOwner = (member == null) ? thisAddress : member.getAddress();
Collection<Q> queues = mapQueues.values();
for (Q q : queues) {
List<Block> lsBlocks = q.lsBlocks;
for (Block block : lsBlocks) {
if (block.address.equals(addressDead)) {
// set the new owner
block.address = addressNewOwner;
block.resetAddIndex();
if (lsMembers.size() > 1) {
if (addressNewOwner.equals(thisAddress)) {
// I am the new owner so backup to next member
int indexUpto = block.size() - 1;
if (DEBUG) {
log("IndexUpto " + indexUpto);
}
if (indexUpto > -1) {
executeLocally(new BlockBackupSyncRunner(new BlockBackupSync(q, block,
indexUpto)));
}
}
}
} else if (block.address.equals(thisAddress)) {
// If I am/was the owner of this block
// did my backup change..
// if so backup to the new next
if (lsMembers.size() > 1) {
MemberImpl memberBackupWas = getNextMemberBeforeSync(thisAddress, true, 1);
if (memberBackupWas == null
|| memberBackupWas.getAddress().equals(addressDead)) {
int indexUpto = block.size() - 1;
if (indexUpto > -1) {
executeLocally(new BlockBackupSyncRunner(new BlockBackupSync(q, block,
indexUpto)));
}
}
}
}
}
// packetalidate the dead member's scheduled actions
List<ScheduledPollAction> scheduledPollActions = q.lsScheduledPollActions;
for (ScheduledPollAction scheduledAction : scheduledPollActions) {
if (addressDead.equals(scheduledAction.request.caller)) {
scheduledAction.setValid(false);
ClusterManager.get().deregisterScheduledAction(scheduledAction);
}
}
List<ScheduledOfferAction> scheduledOfferActions = q.lsScheduledOfferActions;
for (ScheduledOfferAction scheduledAction : scheduledOfferActions) {
if (addressDead.equals(scheduledAction.request.caller)) {
scheduledAction.setValid(false);
ClusterManager.get().deregisterScheduledAction(scheduledAction);
}
}
}
doResetBlockSizes();
} | #vulnerable code
void doPublish(Request req) {
Q q = getQ(req.name);
if (q.blCurrentPut == null) {
q.setCurrentPut();
}
int index = q.publish(req);
req.longValue = index;
req.response = Boolean.TRUE;
}
#location 3
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
void reArrangeBlocks() {
if (concurrentMapManager.isMaster()) {
List<MemberImpl> lsMembers = concurrentMapManager.lsMembers;
// make sue that all blocks are actually created
for (int i = 0; i < BLOCK_COUNT; i++) {
Block block = blocks[i];
if (block == null) {
concurrentMapManager.getOrCreateBlock(i);
}
}
List<Block> lsBlocksToRedistribute = new ArrayList<Block>();
Map<Address, Integer> addressBlocks = new HashMap<Address, Integer>();
int storageEnabledMemberCount = 0;
for (MemberImpl member : lsMembers) {
if (!member.isSuperClient()) {
addressBlocks.put(member.getAddress(), 0);
storageEnabledMemberCount++;
}
}
if (storageEnabledMemberCount == 0) {
return;
}
int aveBlockOwnCount = BLOCK_COUNT / (storageEnabledMemberCount);
for (Block block : blocks) {
if (block.getOwner() == null) {
lsBlocksToRedistribute.add(new Block(block));
} else {
if (!block.isMigrating()) {
Integer countInt = addressBlocks.get(block.getOwner());
int count = (countInt == null) ? 0 : countInt;
if (count >= aveBlockOwnCount) {
lsBlocksToRedistribute.add(new Block(block));
} else {
count++;
addressBlocks.put(block.getOwner(), count);
}
}
}
}
Set<Address> allAddress = addressBlocks.keySet();
lsBlocksToMigrate.clear();
setNewMembers:
for (Address address : allAddress) {
Integer countInt = addressBlocks.get(address);
int count = (countInt == null) ? 0 : countInt;
while (count < aveBlockOwnCount) {
if (lsBlocksToRedistribute.size() > 0) {
Block blockToMigrate = lsBlocksToRedistribute.remove(0);
if (blockToMigrate.getOwner() == null) {
blockToMigrate.setOwner(address);
} else {
blockToMigrate.setMigrationAddress(address);
if (blockToMigrate.getOwner().equals(blockToMigrate.getMigrationAddress())) {
blockToMigrate.setMigrationAddress(null);
}
}
lsBlocksToMigrate.add(blockToMigrate);
count++;
} else {
break setNewMembers;
}
}
}
int addressIndex = 0;
final Address[] addresses = addressBlocks.keySet().toArray(new Address[]{});
final int addressLength = addresses.length;
for (int i = 0; i < BLOCK_COUNT; i++) {
Block block = blocks[i];
if (block.getOwner() == null) {
block = new Block(block);
int index = addressIndex++ % addressLength;
block.setOwner(addresses[index]);
lsBlocksToRedistribute.add(block);
}
}
}
} | #vulnerable code
void migrateBlock(final Block block) {
if (!concurrentMapManager.isBlockInfoValid(block)) {
return;
}
if (!thisAddress.equals(block.getOwner())) {
throw new RuntimeException();
}
if (block.getMigrationAddress() == null) {
throw new RuntimeException();
}
if (block.getOwner().equals(block.getMigrationAddress())) {
throw new RuntimeException();
}
Block blockReal = blocks[block.getBlockId()];
if (blockReal.isMigrating()) {
if (!block.getOwner().equals(blockReal.getOwner()) || !block.getMigrationAddress().equals(blockReal.getMigrationAddress())) {
logger.log(Level.WARNING, blockReal + ". Already migrating block is migrating again to " + block);
}
return;
}
blockReal.setOwner(block.getOwner());
blockReal.setMigrationAddress(block.getMigrationAddress());
logger.log(Level.FINEST, "migrate block " + block);
if (!node.isActive() || node.factory.restarted) {
return;
}
if (concurrentMapManager.isSuperClient()) {
return;
}
blockMigrating = block;
List<Record> lsRecordsToMigrate = new ArrayList<Record>(1000);
Collection<CMap> cmaps = concurrentMapManager.maps.values();
for (final CMap cmap : cmaps) {
if (cmap.locallyOwnedMap != null) {
cmap.locallyOwnedMap.reset();
}
final Object[] records = cmap.ownedRecords.toArray();
for (Object recObj : records) {
final Record rec = (Record) recObj;
if (rec.isActive()) {
if (rec.getKey() == null || rec.getKey().size() == 0) {
throw new RuntimeException("Record.key is null or empty " + rec.getKey());
}
if (rec.getBlockId() == block.getBlockId()) {
lsRecordsToMigrate.add(rec);
cmap.markAsRemoved(rec);
}
}
}
}
final CountDownLatch latch = new CountDownLatch(lsRecordsToMigrate.size());
for (final Record rec : lsRecordsToMigrate) {
final CMap cmap = concurrentMapManager.getMap(rec.getName());
node.executorManager.executeMigrationTask(new FallThroughRunnable() {
public void doRun() {
try {
concurrentMapManager.migrateRecord(cmap, rec);
} finally {
latch.countDown();
}
}
});
}
node.executorManager.executeMigrationTask(new FallThroughRunnable() {
public void doRun() {
try {
logger.log(Level.FINEST, "migrate block " + block + " await ");
latch.await(10, TimeUnit.SECONDS);
concurrentMapManager.enqueueAndReturn(new Processable() {
public void process() {
Block blockReal = blocks[block.getBlockId()];
logger.log(Level.FINEST, "migrate completing [" + block+ "] realBlock " + blockReal);
blockReal.setOwner(blockReal.getMigrationAddress());
blockReal.setMigrationAddress(null);
logger.log(Level.FINEST, "migrate complete [" + block.getMigrationAddress() + "] now realBlock " + blockReal);
for (MemberImpl member : concurrentMapManager.lsMembers) {
if (!member.localMember()) {
concurrentMapManager.sendBlockInfo(blockReal, member.getAddress());
}
}
}
});
} catch (InterruptedException ignored) {
}
}
});
}
#location 5
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void writeData(DataOutput out) throws IOException {
out.writeLong(numberOfPuts);
out.writeLong(numberOfGets);
out.writeLong(numberOfRemoves);
out.writeLong(numberOfOtherOperations);
out.writeLong(periodStart);
out.writeLong(periodEnd);
} | #vulnerable code
public void writeData(DataOutput out) throws IOException {
out.writeLong(mapPuts.get());
out.writeLong(mapGets.get());
out.writeLong(mapRemoves.get());
out.writeLong(startTime);
out.writeLong(endTime);
}
#location 6
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void readFrom(DataInputStream dis) throws IOException {
headerSize = dis.readInt();
keySize = dis.readInt();
valueSize = dis.readInt();
headerInBytes = new byte[headerSize];
dis.read(headerInBytes);
ByteArrayInputStream bis = new ByteArrayInputStream(headerInBytes);
DataInputStream dis2 = new DataInputStream(bis);
this.operation = ClusterOperation.create(dis2.readInt());
this.blockId = dis2.readInt();
this.threadId = dis2.readInt();
this.lockCount = dis2.readInt();
this.timeout = dis2.readLong();
this.txnId = dis2.readLong();
this.longValue = dis2.readLong();
this.recordId = dis2.readLong();
this.version = dis2.readLong();
this.callId = (int) dis2.readLong();
this.client = dis2.readByte()==1;
this.responseType = dis2.readByte();
int nameLength = dis2.readInt();
byte[] b = new byte[nameLength];
dis2.read(b);
this.name = new String(b);
this.lockAddressIsNull = dis2.readBoolean();
indexCount = dis2.readByte();
for (int i=0; i<indexCount ; i++) {
indexes[i] = dis2.readLong();
indexTypes[i] = dis2.readByte();
}
key = new byte[keySize];
dis.read(key);
value = new byte[valueSize];
dis.read(value);
} | #vulnerable code
public void readFrom(DataInputStream dis) throws IOException {
System.out.println("Available:" + dis.available());
headerSize = dis.readInt();
keySize = dis.readInt();
valueSize = dis.readInt();
headerInBytes = new byte[headerSize];
dis.read(headerInBytes);
ByteArrayInputStream bis = new ByteArrayInputStream(headerInBytes);
DataInputStream dis2 = new DataInputStream(bis);
this.operation = ClusterOperation.create(dis2.readInt());
this.blockId = dis2.readInt();
this.threadId = dis2.readInt();
this.lockCount = dis2.readInt();
this.timeout = dis2.readLong();
this.txnId = dis2.readLong();
this.longValue = dis2.readLong();
this.recordId = dis2.readLong();
this.version = dis2.readLong();
this.callId = (int) dis2.readLong();
this.client = dis2.readByte()==1;
this.responseType = dis2.readByte();
int nameLength = dis2.readInt();
byte[] b = new byte[nameLength];
dis2.read(b);
this.name = new String(b);
this.lockAddressIsNull = dis2.readBoolean();
key = new byte[keySize];
dis.read(key);
value = new byte[valueSize];
dis.read(value);
}
#location 33
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
void syncForDead(Address addressDead) {
MemberImpl member = getNextMemberBeforeSync(addressDead, true, 1);
if (DEBUG) {
log(addressDead + " is dead and its backup was " + member);
}
Address addressNewOwner = (member == null) ? thisAddress : member.getAddress();
Collection<Q> queues = mapQueues.values();
for (Q q : queues) {
List<Block> lsBlocks = q.lsBlocks;
for (Block block : lsBlocks) {
if (block.address.equals(addressDead)) {
// set the new owner
block.address = addressNewOwner;
block.resetAddIndex();
if (lsMembers.size() > 1) {
if (addressNewOwner.equals(thisAddress)) {
// I am the new owner so backup to next member
int indexUpto = block.size() - 1;
if (DEBUG) {
log("IndexUpto " + indexUpto);
}
if (indexUpto > -1) {
executeLocally(new BlockBackupSyncRunner(new BlockBackupSync(q, block,
indexUpto)));
}
}
}
} else if (block.address.equals(thisAddress)) {
// If I am/was the owner of this block
// did my backup change..
// if so backup to the new next
if (lsMembers.size() > 1) {
MemberImpl memberBackupWas = getNextMemberBeforeSync(thisAddress, true, 1);
if (memberBackupWas == null
|| memberBackupWas.getAddress().equals(addressDead)) {
int indexUpto = block.size() - 1;
if (indexUpto > -1) {
executeLocally(new BlockBackupSyncRunner(new BlockBackupSync(q, block,
indexUpto)));
}
}
}
}
}
// packetalidate the dead member's scheduled actions
List<ScheduledPollAction> scheduledPollActions = q.lsScheduledPollActions;
for (ScheduledPollAction scheduledAction : scheduledPollActions) {
if (addressDead.equals(scheduledAction.request.caller)) {
scheduledAction.setValid(false);
ClusterManager.get().deregisterScheduledAction(scheduledAction);
}
}
List<ScheduledOfferAction> scheduledOfferActions = q.lsScheduledOfferActions;
for (ScheduledOfferAction scheduledAction : scheduledOfferActions) {
if (addressDead.equals(scheduledAction.request.caller)) {
scheduledAction.setValid(false);
ClusterManager.get().deregisterScheduledAction(scheduledAction);
}
}
}
doResetBlockSizes();
} | #vulnerable code
void doAddTopicListener(Request req) {
for (MemberImpl member : lsMembers) {
if (member.localMember()) {
handleListenerRegisterations(true, req.name, req.key, req.caller, true);
} else if (!member.getAddress().equals(req.caller)) {
sendProcessableTo(new TopicListenerRegistration(req.name, true, req.caller), member
.getAddress());
}
}
Q q = getQ(req.name);
if (q.blCurrentPut == null) {
q.setCurrentPut();
}
req.recordId = q.getRecordId(q.blCurrentPut.blockId, q.blCurrentPut.addIndex);
}
#location 11
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
void reArrangeBlocks() {
if (concurrentMapManager.isMaster()) {
Map<Address, Integer> addressBlocks = getCurrentMemberBlocks();
if (addressBlocks.size() == 0) {
return;
}
List<Block> lsBlocksToRedistribute = new ArrayList<Block>();
int aveBlockOwnCount = BLOCK_COUNT / (addressBlocks.size());
for (Block blockReal : blocks) {
if (blockReal.getOwner() == null) {
logger.log(Level.SEVERE, "Master cannot have null block owner " + blockReal);
return;
}
if (blockReal.isMigrating()) {
logger.log(Level.SEVERE, "Cannot have migrating block " + blockReal);
return;
}
Integer countInt = addressBlocks.get(blockReal.getOwner());
int count = (countInt == null) ? 0 : countInt;
if (count >= aveBlockOwnCount) {
lsBlocksToRedistribute.add(new Block(blockReal));
} else {
addressBlocks.put(blockReal.getOwner(), ++count);
}
}
Collection<Address> allAddress = addressBlocks.keySet();
lsBlocksToMigrate.clear();
for (Address address : allAddress) {
Integer countInt = addressBlocks.get(address);
int count = (countInt == null) ? 0 : countInt;
while (count < aveBlockOwnCount && lsBlocksToRedistribute.size() > 0) {
Block blockToMigrate = lsBlocksToRedistribute.remove(0);
if (!blockToMigrate.getOwner().equals(address)) {
blockToMigrate.setMigrationAddress(address);
lsBlocksToMigrate.add(blockToMigrate);
}
count++;
}
}
Collections.shuffle(lsBlocksToMigrate);
}
} | #vulnerable code
void initiateMigration() {
for (int i = 0; i < BLOCK_COUNT; i++) {
Block block = blocks[i];
if (block == null) {
block = concurrentMapManager.getOrCreateBlock(i);
block.setOwner(thisAddress);
}
}
if (concurrentMapManager.getMembers().size() < 2) {
return;
}
if (lsBlocksToMigrate.size() == 0) {
reArrangeBlocks();
}
if (lsBlocksToMigrate.size() > 0) {
Block block = lsBlocksToMigrate.remove(0);
if (concurrentMapManager.isBlockInfoValid(block)) {
if (thisAddress.equals(block.getOwner())) {
concurrentMapManager.doBlockInfo(block);
} else {
concurrentMapManager.sendBlockInfo(block, block.getOwner());
}
}
}
}
#location 18
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void readFrom(DataInputStream dis) throws IOException {
headerSize = dis.readInt();
keySize = dis.readInt();
valueSize = dis.readInt();
headerInBytes = new byte[headerSize];
dis.read(headerInBytes);
ByteArrayInputStream bis = new ByteArrayInputStream(headerInBytes);
DataInputStream dis2 = new DataInputStream(bis);
this.operation = ClusterOperation.create(dis2.readInt());
this.blockId = dis2.readInt();
this.threadId = dis2.readInt();
this.lockCount = dis2.readInt();
this.timeout = dis2.readLong();
this.txnId = dis2.readLong();
this.longValue = dis2.readLong();
this.recordId = dis2.readLong();
this.version = dis2.readLong();
this.callId = (int) dis2.readLong();
this.client = dis2.readByte()==1;
this.responseType = dis2.readByte();
int nameLength = dis2.readInt();
byte[] b = new byte[nameLength];
dis2.read(b);
this.name = new String(b);
this.lockAddressIsNull = dis2.readBoolean();
indexCount = dis2.readByte();
for (int i=0; i<indexCount ; i++) {
indexes[i] = dis2.readLong();
indexTypes[i] = dis2.readByte();
}
key = new byte[keySize];
dis.read(key);
value = new byte[valueSize];
dis.read(value);
} | #vulnerable code
public void readFrom(DataInputStream dis) throws IOException {
System.out.println("Available:" + dis.available());
headerSize = dis.readInt();
keySize = dis.readInt();
valueSize = dis.readInt();
headerInBytes = new byte[headerSize];
dis.read(headerInBytes);
ByteArrayInputStream bis = new ByteArrayInputStream(headerInBytes);
DataInputStream dis2 = new DataInputStream(bis);
this.operation = ClusterOperation.create(dis2.readInt());
this.blockId = dis2.readInt();
this.threadId = dis2.readInt();
this.lockCount = dis2.readInt();
this.timeout = dis2.readLong();
this.txnId = dis2.readLong();
this.longValue = dis2.readLong();
this.recordId = dis2.readLong();
this.version = dis2.readLong();
this.callId = (int) dis2.readLong();
this.client = dis2.readByte()==1;
this.responseType = dis2.readByte();
int nameLength = dis2.readInt();
byte[] b = new byte[nameLength];
dis2.read(b);
this.name = new String(b);
this.lockAddressIsNull = dis2.readBoolean();
key = new byte[keySize];
dis.read(key);
value = new byte[valueSize];
dis.read(value);
}
#location 28
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public long getPeriodEnd() {
return periodEnd;
} | #vulnerable code
public long getPeriodEnd() {
return endTime;
}
#location 2
#vulnerability type THREAD_SAFETY_VIOLATION | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void describeAsAdditionalInfo_notEmpty() {
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
addCompareException(exceptions);
assertExpectedFacts(
exceptions.describeAsAdditionalInfo(),
"additionally, one or more exceptions were thrown while comparing elements");
} | #vulnerable code
@Test
public void describeAsAdditionalInfo_notEmpty() {
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
addCompareException(exceptions);
assertExpectedFacts(
exceptions.describeAsAdditionalInfo().asIterable(),
"additionally, one or more exceptions were thrown while comparing elements");
}
#location 6
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public void hasField(String fieldName) {
if (getSubject() == null) {
failureStrategy.fail("Cannot determine a field name from a null object.");
return; // not all failures throw exceptions.
}
check().that(getSubject().getClass()).hasField(fieldName);
} | #vulnerable code
public void hasField(String fieldName) {
if (getSubject() == null) {
failWithoutSubject("<null> has a field named <" + fieldName + ">");
}
Class<?> clazz = getSubject().getClass();
try {
clazz.getField(fieldName);
} catch (NoSuchFieldException e) {
StringBuilder message = new StringBuilder("Not true that ");
message.append("<").append(getSubject().getClass().getSimpleName()).append(">");
message.append(" has a field named <").append(fieldName).append(">");
failureStrategy.fail(message.toString());
}
}
#location 5
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Test
public void describeAsAdditionalInfo_empty() {
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
assertThat(exceptions.describeAsAdditionalInfo()).isEmpty();
} | #vulnerable code
@Test
public void describeAsAdditionalInfo_empty() {
Correspondence.ExceptionStore exceptions = Correspondence.ExceptionStore.forIterable();
assertThat(exceptions.describeAsAdditionalInfo().asIterable()).isEmpty();
}
#location 4
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public String getConceptNetUrl() {
String urlFromConfigOrDefault = (String)getConfiguration().getSettingValueFor(CONFIG_KEY_URL);
return urlFromConfigOrDefault == null
? DEFAULT_CONCEPTNET_URL
: urlFromConfigOrDefault;
} | #vulnerable code
public String getConceptNetUrl() {
String urlFromConfigOrDefault = getConfiguration().getSettingValueFor(CONFIG_KEY_URL).toString();
return urlFromConfigOrDefault == null
? DEFAULT_CONCEPTNET_URL
: urlFromConfigOrDefault;
}
#location 2
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public SingleResult process(TextRankRequest request) {
TextRank textrank = new TextRank(getDatabase(), getNLPManager().getConfiguration());
if (request.getStopWords() != null
&& !request.getStopWords().isEmpty()) {
textrank.setStopwords(request.getStopWords());
}
textrank.removeStopWords(request.isDoStopwords());
textrank.respectDirections(request.isRespectDirections());
textrank.respectSentences(request.isRespectSentences());
textrank.useTfIdfWeights(request.isUseTfIdfWeights());
textrank.useDependencies(request.isUseDependencies());
textrank.setCooccurrenceWindow(request.getCooccurrenceWindow());
textrank.setMaxSingleKeywords(request.getMaxSingleKeywords());
textrank.setKeywordLabel(request.getKeywordLabel());
Map<Long, Map<Long, CoOccurrenceItem>> coOccurrence = textrank.createCooccurrences(request.getNode());
boolean res = textrank.evaluate(request.getNode(),
coOccurrence,
request.getIterations(),
request.getDamp(),
request.getThreshold());
if (!res) {
return SingleResult.fail();
}
LOG.info("AnnotatedText with ID " + request.getNode().getId() + " processed.");
return SingleResult.success();
} | #vulnerable code
public SingleResult process(TextRankRequest request) {
TextRank textrank = new TextRank(getDatabase(), getNLPManager().getConfiguration());
if (request.getStopWords() != null
&& !request.getStopWords().isEmpty()) {
textrank.setStopwords(request.getStopWords());
}
textrank.removeStopWords(request.isDoStopwords());
textrank.respectDirections(request.isRespectDirections());
textrank.respectSentences(request.isRespectSentences());
textrank.useTfIdfWeights(request.isUseTfIdfWeights());
textrank.useDependencies(request.isUseDependencies());
textrank.setCooccurrenceWindow(request.getCooccurrenceWindow());
Map<Long, Map<Long, CoOccurrenceItem>> coOccurrence = textrank.createCooccurrences(request.getNode());
boolean res = textrank.evaluate(request.getNode(),
coOccurrence,
request.getIterations(),
request.getDamp(),
request.getThreshold());
if (!res) {
return SingleResult.fail();
}
LOG.info("AnnotatedText with ID " + request.getNode().getId() + " processed.");
return SingleResult.success();
}
#location 16
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public Map<Long, Map<Long, CoOccurrenceItem>> createCooccurrences(Node annotatedText) {
Map<String, Object> params = new HashMap<>();
params.put("id", annotatedText.getId());
String query;
if (respectSentences) {
query = COOCCURRENCE_QUERY_BY_SENTENCE;
} else {
query = COOCCURRENCE_QUERY;
}
Result res = null;
try (Transaction tx = database.beginTx();) {
res = database.execute(query, params);
tx.success();
} catch (Exception e) {
LOG.error("Error while creating co-occurrences: ", e);
}
List<CoOccurrenceItem> prelim = new ArrayList<>();
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
Long tag1 = toLong(next.get("tag1"));
Long tag2 = toLong(next.get("tag2"));
String tagVal1 = (String) next.get("tag1_val");
String tagVal2 = (String) next.get("tag2_val");
int tag1Start = (toLong(next.get("sourceStartPosition"))).intValue();
int tag2Start = (toLong(next.get("destinationStartPosition"))).intValue();
List<String> pos1 = next.get("pos1") != null ? Arrays.asList((String[]) next.get("pos1")) : new ArrayList<>();
List<String> pos2 = next.get("pos2") != null ? Arrays.asList((String[]) next.get("pos2")) : new ArrayList<>();
// check whether POS of both tags are admitted
boolean bPOS1 = pos1.stream().filter(pos -> admittedPOSs.contains(pos)).count() != 0 || pos1.size() == 0;
boolean bPOS2 = pos2.stream().filter(pos -> admittedPOSs.contains(pos)).count() != 0 || pos2.size() == 0;
// fill tag co-occurrences (adjacency matrix)
if (bPOS1 && bPOS2) {
prelim.add(new CoOccurrenceItem(tag1, tag1Start, tag2, tag2Start));
}
// for logging purposses and for `expandNamedEntities()`
idToValue.put(tag1, tagVal1);
idToValue.put(tag2, tagVal2);
}
Map<Long, List<Pair<Long, Long>>> neExp;
if (expandNEs) {
// process named entities: split them into individual tokens by calling ga.nlp.annotate(), assign them IDs and create co-occurrences
neExp = expandNamedEntities();
neExpanded = neExp.entrySet().stream()
.collect(Collectors.toMap( Map.Entry::getKey, e -> e.getValue().stream().map(p -> p.second()).collect(Collectors.toList()) ));
} else
neExp = new HashMap<>();
Map<Long, Map<Long, CoOccurrenceItem>> results = new HashMap<>();
long neVisited = 0L;
for (CoOccurrenceItem it: prelim) {
Long tag1 = it.getSource();
Long tag2 = it.getDestination();
int tag1Start = it.getSourceStartingPositions().get(0).first().intValue();
int tag2Start = it.getSourceStartingPositions().get(0).second().intValue();
if (expandNEs) {
if (neExp.containsKey(tag1)) {
if (neVisited == 0L || neVisited != tag1.longValue()) {
connectTagsInNE(results, neExp.get(tag1), tag1Start);
neVisited = 0L;
}
tag1Start += neExp.get(tag1).get( neExp.get(tag1).size() - 1 ).first().intValue();
tag1 = neExp.get(tag1).get( neExp.get(tag1).size() - 1 ).second();
}
if (neExp.containsKey(tag2)) {
connectTagsInNE(results, neExp.get(tag2), tag2Start);
neVisited = tag2;
tag2 = neExp.get(tag2).get(0).second();
} else
neVisited = 0L;
}
addTagToCoOccurrence(results, tag1, tag1Start, tag2, tag2Start);
if (!directionsMatter) { // when direction of co-occurrence relationships is not important
addTagToCoOccurrence(results, tag2, tag2Start, tag1, tag1Start);
}
}
return results;
} | #vulnerable code
public Map<Long, Map<Long, CoOccurrenceItem>> createCooccurrences(Node annotatedText) {
Map<String, Object> params = new HashMap<>();
params.put("id", annotatedText.getId());
String query;
if (respectSentences) {
query = COOCCURRENCE_QUERY_BY_SENTENCE;
} else {
query = COOCCURRENCE_QUERY;
}
Result res = null;
try (Transaction tx = database.beginTx();) {
res = database.execute(query, params);
tx.success();
} catch (Exception e) {
LOG.error("Error while creating co-occurrences: ", e);
}
List<CoOccurrenceItem> prelim = new ArrayList<>();
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
Long tag1 = toLong(next.get("tag1"));
Long tag2 = toLong(next.get("tag2"));
String tagVal1 = (String) next.get("tag1_val");
String tagVal2 = (String) next.get("tag2_val");
int tag1Start = (toLong(next.get("sourceStartPosition"))).intValue();
int tag2Start = (toLong(next.get("destinationStartPosition"))).intValue();
List<String> pos1 = Arrays.asList((String[]) next.get("pos1"));
List<String> pos2 = Arrays.asList((String[]) next.get("pos2"));
// check whether POS of both tags are admitted
boolean bPOS1 = pos1.stream().filter(pos -> admittedPOSs.contains(pos)).count() != 0 || pos1.size() == 0;
boolean bPOS2 = pos2.stream().filter(pos -> admittedPOSs.contains(pos)).count() != 0 || pos2.size() == 0;
// fill tag co-occurrences (adjacency matrix)
if (bPOS1 && bPOS2) {
prelim.add(new CoOccurrenceItem(tag1, tag1Start, tag2, tag2Start));
}
// for logging purposses and for `handleNamedEntities()`
idToValue.put(tag1, tagVal1);
idToValue.put(tag2, tagVal2);
}
Map<Long, List<Pair<Long, Long>>> neExp = expandNamedEntities();
neExpanded = neExp.entrySet().stream()
.collect(Collectors.toMap( Map.Entry::getKey, e -> e.getValue().stream().map(p -> p.second()).collect(Collectors.toList()) ));
Map<Long, Map<Long, CoOccurrenceItem>> results = new HashMap<>();
long neVisited = 0L;
for (CoOccurrenceItem it: prelim) {
Long tag1 = it.getSource();
Long tag2 = it.getDestination();
int tag1Start = it.getSourceStartingPositions().get(0).first().intValue();
int tag2Start = it.getSourceStartingPositions().get(0).second().intValue();
if (neExp.containsKey(tag1)) {
if (neVisited == 0L || neVisited != tag1.longValue()) {
connectTagsInNE(results, neExp.get(tag1), tag1Start);
neVisited = 0L;
}
tag1Start += neExp.get(tag1).get( neExp.get(tag1).size() - 1 ).first().intValue();
tag1 = neExp.get(tag1).get( neExp.get(tag1).size() - 1 ).second();
}
if (neExp.containsKey(tag2)) {
connectTagsInNE(results, neExp.get(tag2), tag2Start);
neVisited = tag2;
tag2 = neExp.get(tag2).get(0).second();
} else
neVisited = 0L;
addTagToCoOccurrence(results, tag1, tag1Start, tag2, tag2Start);
if (!directionsMatter) { // when direction of co-occurrence relationships is not important
addTagToCoOccurrence(results, tag2, tag2Start, tag1, tag1Start);
}
}
return results;
}
#location 62
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private Map<Long, Float> createFeatureMapWithCN5New(long firstNode) throws QueryExecutionException {
Map<String, Object> params = new HashMap<>();
params.put("id", firstNode);
Result res = database.execute(DEFAULT_VECTOR_QUERY_WITH_CONCEPT, params);
Map<Long, Float> result = new HashMap<>();
Map<Long, Float> result_idf = new HashMap<>();
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
long id = (long) next.get("tagId");
int nTerms = (int) next.get("nTerms");
//float tf = getFloatValue(next.get("tf"));
float tf = getFloatValue(next.get("tf")) / nTerms;
float idf = Double.valueOf(Math.log10(Float.valueOf(getFloatValue(next.get("idf"))).doubleValue())).floatValue();
// ConceptNet5 Level_1 tags
//long cn5_tag = Long.valueOf((String) next.get("cn5_l1_tag"));
long cn5_tag = (long) next.get("cn5_l1_tag");
float cn5_tag_w = getFloatValue(next.get("cn5_l1_tag_w"));
if (cn5_tag > -1) {
if (!result.containsKey(cn5_tag)) {
result.put(cn5_tag, tf);
result_idf.put(cn5_tag, idf);
} else {
result.put(cn5_tag, result.get(cn5_tag) + tf);
if (result_idf.get(cn5_tag) < idf) // use the highest idf
{
result_idf.put(cn5_tag, idf);
}
}
} else {
result.put(id, tf);
result_idf.put(id, idf);
}
}
result.keySet().forEach((key) -> {
result.put(key, result.get(key) * result_idf.get(key));
});
return result;
} | #vulnerable code
private Map<Long, Float> createFeatureMapWithCN5New(long firstNode) throws QueryExecutionException {
Map<String, Object> params = new HashMap<>();
params.put("id", firstNode);
Result res = database.execute("MATCH (doc:AnnotatedText)\n"
+ "WITH count(doc) as documentsCount\n"
+ "MATCH (document:AnnotatedText)-[:CONTAINS_SENTENCE]->(s:Sentence)-[ht:HAS_TAG]->(tag:Tag)\n"
+ "WHERE id(document) = {id} and not any (p in tag.pos where p in [\"CC\", \"CD\", \"DT\", \"IN\", \"MD\", \"PRP\", \"PRP$\", \"UH\", \"WDT\", \"WP\", \"WRB\", \"TO\", \"PDT\", \"RP\", \"WP$\"])\n" // JJR, JJS ?
+ "WITH tag, sum(ht.tf) as tf, documentsCount, document.numTerms as nTerms\n"
+ "OPTIONAL MATCH (tag)-[rt:IS_RELATED_TO]->(t2_l1:Tag)\n"
+ "WHERE id(t2_l1) = tag.idMaxConcept and exists(t2_l1.word2vec) and com.graphaware.nlp.ml.similarity.cosine(tag.word2vec, t2_l1.word2vec)>0.2\n"
+ "WITH tag, tf, nTerms, id(t2_l1) as cn5_l1_tag, rt.weight as cn5_l1_tag_w, documentsCount\n"
+ "MATCH (a:AnnotatedText)-[:CONTAINS_SENTENCE]->(s:Sentence)-[ht:HAS_TAG]->(tag)\n"
+ "RETURN id(tag) as tagId, tf, (1.0f*documentsCount)/count(distinct a) as idf, nTerms, (case cn5_l1_tag when null then -1 else cn5_l1_tag end) as cn5_l1_tag, cn5_l1_tag_w\n"
+ "ORDER BY tagId, cn5_l1_tag", params);
Map<Long, Float> result = new HashMap<>();
Map<Long, Float> result_idf = new HashMap<>();
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
long id = (long) next.get("tagId");
int nTerms = (int) next.get("nTerms");
//float tf = getFloatValue(next.get("tf"));
float tf = getFloatValue(next.get("tf")) / nTerms;
float idf = Double.valueOf(Math.log10(Float.valueOf(getFloatValue(next.get("idf"))).doubleValue())).floatValue();
// ConceptNet5 Level_1 tags
//long cn5_tag = Long.valueOf((String) next.get("cn5_l1_tag"));
long cn5_tag = (long) next.get("cn5_l1_tag");
float cn5_tag_w = getFloatValue(next.get("cn5_l1_tag_w"));
if (cn5_tag>-1) {
if (!result.containsKey(cn5_tag)) {
result.put(cn5_tag, tf);
result_idf.put(cn5_tag, idf);
} else {
result.put(cn5_tag, result.get(cn5_tag) + tf);
if (result_idf.get(cn5_tag) < idf) // use the highest idf
result_idf.put(cn5_tag, idf);
}
} else {
result.put(id, tf);
result_idf.put(id, idf);
}
}
for (Long key: result.keySet()) {
result.put(key, result.get(key) * result_idf.get(key));
}
return result;
}
#location 46
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public SingleResult process(TextRankRequest request) {
TextRankResult result = compute(request);
TextRankPersister persister = new TextRankPersister(Label.label(request.getKeywordLabel()));
persister.peristKeywords(result.getResult(), request.getNode());
return result.getStatus().equals(TextRankResult.TextRankStatus.SUCCESS)
? SingleResult.success()
: SingleResult.fail();
} | #vulnerable code
public SingleResult process(TextRankRequest request) {
TextRank.Builder textrankBuilder = new TextRank.Builder(getDatabase(), getNLPManager().getConfiguration());
if (request.getStopWords() != null
&& !request.getStopWords().isEmpty()) {
textrankBuilder.setStopwords(request.getStopWords());
}
textrankBuilder.removeStopWords(request.isDoStopwords())
.respectDirections(request.isRespectDirections())
.respectSentences(request.isRespectSentences())
.useDependencies(request.isUseDependencies())
.useDependenciesForCooccurrences(request.isUseDependenciesForCooccurrences())
//.setCooccurrenceWindow(request.getCooccurrenceWindow())
.setTopXTags(request.getTopXTags())
.setCleanKeywords(request.isCleanKeywords())
.setKeywordLabel(request.getKeywordLabel());
TextRank textRank = textrankBuilder.build();
boolean res = textRank.evaluate(request.getNode(),
request.getIterations(),
request.getDamp(),
request.getThreshold());
LOG.info("AnnotatedText with ID " + request.getNode().getId() + " processed. Result: " + res);
return res ? SingleResult.success() : SingleResult.fail();
}
#location 2
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public boolean evaluate(Node annotatedText, int iter, double damp, double threshold) {
Map<Long, Map<Long, CoOccurrenceItem>> coOccurrence = createCooccurrences(annotatedText);
PageRank pageRank = new PageRank(database);
if (useTfIdfWeights) {
pageRank.setNodeWeights(initializeNodeWeights_TfIdf(annotatedText, coOccurrence));
}
Map<Long, Double> pageRanks = pageRank.run(coOccurrence, iter, damp, threshold);
int n_oneThird = (int) (pageRanks.size() * phrasesTopxWords);
List<Long> topThird = getTopX(pageRanks, n_oneThird);
LOG.info("Top " + n_oneThird + " tags: " + topThird.stream().map(id -> idToValue.get(id)).collect(Collectors.joining(", ")));
Map<String, Object> params = new HashMap<>();
params.put("id", annotatedText.getId());
//params.put("nodeList", topThird); // new (also changed the GET_TAG_QUERY)
params.put("posList", admittedPOSs);
List<KeywordExtractedItem> keywordsOccurrences = new ArrayList<>();
Map<Long, KeywordExtractedItem> keywordMap = new HashMap<>();
try (Transaction tx = database.beginTx()) {
Result res = database.execute(GET_TAG_QUERY, params);
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
long tagId = (long) next.get("tagId");
KeywordExtractedItem item = new KeywordExtractedItem(tagId);
item.setStartPosition(((Number) next.get("sP")).intValue());
item.setValue(((String) next.get("tag")));
item.setEndPosition(((Number) next.get("eP")).intValue());
item.setRelatedTags(iterableToList((Iterable<String>) next.get("rel_tags")));
item.setRelTagStartingPoints(iterableToList((Iterable<Number>) next.get("rel_tos")));
item.setRelTagEndingPoints(iterableToList((Iterable<Number>) next.get("rel_toe")));
item.setRelevance(pageRanks.containsKey(tagId) ? pageRanks.get(tagId) : 0);
keywordsOccurrences.add(item);
if (!keywordMap.containsKey(tagId)) {
keywordMap.put(tagId, item);
}
}
if (res != null) {
res.close();
}
tx.success();
} catch (Exception e) {
LOG.error("Error while running TextRank evaluation: ", e);
return false;
}
Map<String, Long> valToId = idToValue.entrySet().stream().collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey));
Map<String, Keyword> results = new HashMap<>();
while (!keywordsOccurrences.isEmpty()) {
final AtomicReference<KeywordExtractedItem> keywordOccurrence
= new AtomicReference<>(keywordsOccurrences.remove(0));
final AtomicReference<String> currValue = new AtomicReference<>(keywordOccurrence.get().getValue());
final AtomicReference<Double> currRelevance = new AtomicReference<>(keywordOccurrence.get().getRelevance());
List<Long> relTagIDs = keywordOccurrence.get().getRelatedTags().stream().map(el -> valToId.get(el)).collect(Collectors.toList()); // new
relTagIDs.retainAll(topThird); // new
if (!topThird.contains(keywordOccurrence.get().getTagId()) && relTagIDs.size()==0) // new
continue;
//System.out.println("\n> " + currValue.get() + " - " + keywordOccurrence.get().getStartPosition());
Map<String, Keyword> localResults;
do {
long tagId = keywordOccurrence.get().getTagId();
//System.out.println(" cur: " + currValue.get() + ". Examining next level");
localResults = checkNextKeyword(tagId, keywordOccurrence.get(), coOccurrence, keywordMap);
if (localResults.size() > 0) {
//System.out.println(" related tags: " + localResults.entrySet().stream().map(en -> en.getKey()).collect(Collectors.joining(", ")));
localResults.entrySet().stream().forEach((item) -> {
KeywordExtractedItem nextKeyword = keywordsOccurrences.get(0);
if (nextKeyword != null && nextKeyword.value.equalsIgnoreCase(item.getKey())) {
String newCurrValue = currValue.get().split("_")[0] + " " + item.getKey();
//System.out.println(">> " + newCurrValue);
double newCurrRelevance = currRelevance.get() + item.getValue().getRelevance();
currValue.set(newCurrValue);
currRelevance.set(newCurrRelevance);
keywordOccurrence.set(nextKeyword);
keywordsOccurrences.remove(0);
} else {
LOG.warn("Next keyword not found!");
keywordOccurrence.set(null);
}
});
}
} while (!localResults.isEmpty() && keywordOccurrence.get() != null);
addToResults(currValue.get(), currRelevance.get(), results, 1);
//System.out.println("< " + currValue.get());
}
// add named entities that contain at least some of the top 1/3 of words
for (Long key: neExpanded.keySet()) {
if (neExpanded.get(key).stream().filter(v -> topThird.contains(v)).count() == 0)
continue;
String keystr = idToValue.get(key) + "_en"; // + lang;
addToResults(keystr, pageRanks.containsKey(key) ? pageRanks.get(key) : 0, results, 1);
}
computeTotalOccurrence(results);
if (cleanSingleWordKeyword) {
results = cleanSingleWordKeyword(results);
}
peristKeyword(results, annotatedText);
return true;
} | #vulnerable code
public boolean evaluate(Node annotatedText, int iter, double damp, double threshold) {
Map<Long, Map<Long, CoOccurrenceItem>> coOccurrence = createCooccurrences(annotatedText);
PageRank pageRank = new PageRank(database);
if (useTfIdfWeights) {
pageRank.setNodeWeights(initializeNodeWeights_TfIdf(annotatedText, coOccurrence));
}
Map<Long, Double> pageRanks = pageRank.run(coOccurrence, iter, damp, threshold);
int n_oneThird = (int) (pageRanks.size() * phrasesTopxWords);
List<Long> topThird = getTopX(pageRanks, n_oneThird);
LOG.info("Top " + n_oneThird + " tags: " + topThird.stream().map(id -> idToValue.get(id)).collect(Collectors.joining(", ")));
Map<String, Object> params = new HashMap<>();
params.put("id", annotatedText.getId());
params.put("nodeList", topThird);
params.put("posList", admittedPOSs);
List<KeywordExtractedItem> keywordsOccurrences = new ArrayList<>();
Map<Long, KeywordExtractedItem> keywordMap = new HashMap<>();
try (Transaction tx = database.beginTx()) {
Result res = database.execute(GET_TAG_QUERY, params);
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
long tagId = (long) next.get("tagId");
KeywordExtractedItem item = new KeywordExtractedItem(tagId);
item.setStartPosition(((Number) next.get("sP")).intValue());
item.setValue(((String) next.get("tag")));
item.setEndPosition(((Number) next.get("eP")).intValue());
item.setRelatedTags(iterableToList((Iterable<String>) next.get("rel_tags")));
item.setRelTagStartingPoints(iterableToList((Iterable<Number>) next.get("rel_tos")));
item.setRelTagEndingPoints(iterableToList((Iterable<Number>) next.get("rel_toe")));
item.setRelevance(pageRanks.get(tagId));
keywordsOccurrences.add(item);
if (!keywordMap.containsKey(tagId)) {
keywordMap.put(tagId, item);
}
}
if (res != null) {
res.close();
}
tx.success();
} catch (Exception e) {
LOG.error("Error while running TextRank evaluation: ", e);
return false;
}
Map<String, Keyword> results = new HashMap<>();
while (!keywordsOccurrences.isEmpty()) {
final AtomicReference<KeywordExtractedItem> keywordOccurrence
= new AtomicReference<>(keywordsOccurrences.remove(0));
final AtomicReference<String> currValue = new AtomicReference<>(keywordOccurrence.get().getValue());
final AtomicReference<Double> currRelevance = new AtomicReference<>(keywordOccurrence.get().getRelevance());
//System.out.println("> " + currValue.get() + " - " + keywordOccurrence.get().getStartPosition());
Map<String, Keyword> localResults;
do {
long tagId = keywordOccurrence.get().getTagId();
//System.out.println("cur: " + currValue.get() + " examinating next level");
localResults = checkNextKeyword(tagId, keywordOccurrence.get(), coOccurrence, keywordMap);
if (localResults.size() > 0) {
localResults.entrySet().stream().forEach((item) -> {
KeywordExtractedItem nextKeyword = keywordsOccurrences.get(0);
if (nextKeyword != null && nextKeyword.value.equalsIgnoreCase(item.getKey())) {
String newCurrValue = currValue.get().split("_")[0] + " " + item.getKey();
//System.out.println(">> " + newCurrValue);
double newCurrRelevance = currRelevance.get() + item.getValue().getRelevance();
currValue.set(newCurrValue);
currRelevance.set(newCurrRelevance);
keywordOccurrence.set(nextKeyword);
keywordsOccurrences.remove(0);
} else {
LOG.warn("Next keyword not found!");
keywordOccurrence.set(null);
}
});
}
} while (!localResults.isEmpty() && keywordOccurrence.get() != null);
addToResults(currValue.get(), currRelevance.get(), results, 1);
//System.out.println("< " + currValue.get());
}
// add named entities that contain at least some of the top 1/3 of words
/*neExpanded.entrySet().stream()
.filter(en -> {
long n = en.getValue().stream().filter(v -> topThird.contains(v)).count();
return n > 0;
//return n >= en.getValue().size() / 3.0f || (n > 0 && en.getValue().size() == 2);
})
.forEach(en -> {
final String key = idToValue.get(en.getKey()) + "_en"; // + lang;
results.put(key, new Keyword(key)); // TO DO: handle counters exactMatch and total
});*/
for (Long key: neExpanded.keySet()) {
if (neExpanded.get(key).stream().filter(v -> topThird.contains(v)).count() == 0)
continue;
String keystr = idToValue.get(key) + "_en"; // + lang;
results.put(keystr, new Keyword(keystr)); // TO DO: handle counters exactMatch and total
}
computeTotalOccurrence(results);
if (cleanSingleWordKeyword) {
results = cleanSingleWordKeyword(results);
}
peristKeyword(results, annotatedText);
return true;
}
#location 93
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
public Tag annotateTag(String text, String language) {
PipelineSpecification spec = getDefaultPipeline(language);
if (spec == null) {
LOG.warn("No default annotator for language: " + language);
return null;
}
TextProcessor processor = getTextProcessor(spec.getTextProcessor());
return processor.annotateTag(text, spec);
} | #vulnerable code
public Tag annotateTag(String text, String language) {
PipelineSpecification spec = getDefaultPipeline(language);
TextProcessor processor = getTextProcessor(spec.getTextProcessor());
return processor.annotateTag(text, spec);
}
#location 3
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private void initializePipelineWithoutNEs() {
if (!NLPManager.getInstance().hasPipeline(PIPELINE_WITHOUT_NER)) {
Map<String, Object> params = new HashMap<>();
params.put("tokenize", true);
params.put("ner", false);
String processor = NLPManager.getInstance().getTextProcessorsManager().getDefaultProcessor().getClass().getName();
PipelineSpecification ps = new PipelineSpecification(PIPELINE_WITHOUT_NER, processor);
ps.setProcessingSteps(params);
NLPManager.getInstance().addPipeline(ps);
}
} | #vulnerable code
private void initializePipelineWithoutNEs() {
//System.out.println(" >>> default processor: " + NLPManager.getInstance().getTextProcessorsManager().getDefaultProcessor().getAlias());
Map<String, Object> params = new HashMap<>();
params.put("tokenize", true);
params.put("ner", false);
PipelineSpecification ps = new PipelineSpecification(PIPELINE_WITHOUT_NER, null);
ps.setProcessingSteps(params);
NLPManager.getInstance().getTextProcessorsManager().getDefaultProcessor()
.createPipeline(ps);
}
#location 8
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Procedure(name = "ga.nlp.parser.powerpoint")
public Stream<Page> parsePowerpoint(@Name("file") String filename, @Name(value = "filterPatterns", defaultValue = "") List<String> filterPatterns) {
PowerpointParser parser = (PowerpointParser) getNLPManager().getExtension(PowerpointParser.class);
return getPages(parser, filename, filterPatterns).stream();
} | #vulnerable code
@Procedure(name = "ga.nlp.parser.powerpoint")
public Stream<Page> parsePowerpoint(@Name("file") String filename, @Name(value = "filterPatterns", defaultValue = "") List<String> filterPatterns) {
PowerpointParser parser = (PowerpointParser) getNLPManager().getExtension(PowerpointParser.class);
List<String> filters = filterPatterns.equals("") ? new ArrayList<>() : filterPatterns;
try {
List<Page> pages = parser.parse(filename, filters);
return pages.stream();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
#location 6
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private Map<String, Keyword> checkNextKeyword(long tagId, KeywordExtractedItem keywordOccurrence, Map<Long, Map<Long, CoOccurrenceItem>> coOccurrences, Map<Long, KeywordExtractedItem> keywords) {
Map<String, Keyword> results = new HashMap<>();
if (!coOccurrences.containsKey(tagId))
return results;
Map<Integer, Set<Long>> mapStartId = createThisMapping(coOccurrences.get(tagId), tagId);
Set<Long> coOccurrence = mapStartId.get(keywordOccurrence.startPosition);
if (coOccurrence == null) {
return results;
}
Iterator<Long> iterator = coOccurrence.stream()
.filter((ccEntry) -> ccEntry != tagId)
.filter((ccEntry) -> keywords.containsKey(ccEntry)).iterator();
while (iterator.hasNext()) {
Long ccEntry = iterator.next();
String relValue = keywords.get(ccEntry).getValue();
//System.out.println("checkNextKeyword >> " + relValue);
//if (!useDependencies || keywordOccurrence.getRelatedTags().contains(relValue.split("_")[0])) {
List<String> merged = new ArrayList<>(keywords.get(ccEntry).getRelatedTags()); // new
merged.retainAll(keywordOccurrence.getRelatedTags()); // new
//System.out.println(" co-occurring tag = " + idToValue.get(ccEntry) + ", related tags = " + keywords.get(ccEntry).getRelatedTags().stream().collect(Collectors.joining(", ")));
//System.out.println(" merged = " + merged.stream().collect(Collectors.joining(", ")));
if (!useDependencies || keywordOccurrence.getRelatedTags().contains(relValue.split("_")[0]) || merged.size()>0) { // new
//System.out.println("checkNextKeyword >>> " + relValue);
addToResults(relValue,
keywords.get(ccEntry).getRelevance(),
results, 1);
}
}
return results;
} | #vulnerable code
private Map<String, Keyword> checkNextKeyword(long tagId, KeywordExtractedItem keywordOccurrence, Map<Long, Map<Long, CoOccurrenceItem>> coOccurrences, Map<Long, KeywordExtractedItem> keywords) {
Map<String, Keyword> results = new HashMap<>();
if (!coOccurrences.containsKey(tagId))
return results;
Map<Integer, Set<Long>> mapStartId = createThisMapping(coOccurrences.get(tagId), tagId);
Set<Long> coOccurrence = mapStartId.get(keywordOccurrence.startPosition);
if (coOccurrence == null) {
return results;
}
Iterator<Long> iterator = coOccurrence.stream()
.filter((ccEntry) -> ccEntry != tagId)
.filter((ccEntry) -> keywords.containsKey(ccEntry)).iterator();
while (iterator.hasNext()) {
Long ccEntry = iterator.next();
String relValue = keywords.get(ccEntry).getValue();
//System.out.println("checkNextKeyword >> " + relValue);
//if (!useDependencies || keywordOccurrence.getRelatedTags().contains(relValue.split("_")[0])) {
List<String> merged = new ArrayList<>(keywords.get(tagId).getRelatedTags()); // new
merged.retainAll(keywordOccurrence.getRelatedTags()); // new
//System.out.println(" related tag = " + idToValue.get(ccEntry) + ", related tags = " + keywords.get(tagId).getRelatedTags().stream().collect(Collectors.joining(", ")));
//System.out.println(" merged = " + merged.stream().collect(Collectors.joining(", ")));
if (!useDependencies || keywordOccurrence.getRelatedTags().contains(relValue.split("_")[0]) || merged.size()>0) { // new
//System.out.println("checkNextKeyword >>> " + relValue);
addToResults(relValue,
keywords.get(ccEntry).getRelevance(),
results, 1);
}
}
return results;
}
#location 21
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Procedure(name = "ga.nlp.parser.pdf")
public Stream<Page> parsePdf(@Name("file") String filename, @Name(value = "filterPatterns", defaultValue = "") List<String> filterPatterns) {
TikaPDFParser parser = (TikaPDFParser) getNLPManager().getExtension(TikaPDFParser.class);
return getPages(parser, filename, filterPatterns).stream();
} | #vulnerable code
@Procedure(name = "ga.nlp.parser.pdf")
public Stream<Page> parsePdf(@Name("file") String filename, @Name(value = "filterPatterns", defaultValue = "") List<String> filterPatterns) {
TikaPDFParser parser = (TikaPDFParser) getNLPManager().getExtension(TikaPDFParser.class);
List<String> filters = filterPatterns.equals("") ? new ArrayList<>() : filterPatterns;
try {
List<Page> pages = parser.parse(filename, filters);
return pages.stream();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
#location 6
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
private Map<Long, Float> createFeatureMapWithCN5New(long firstNode) throws QueryExecutionException {
Map<String, Object> params = new HashMap<>();
params.put("id", firstNode);
Result res = database.execute(DEFAULT_VECTOR_QUERY_WITH_CONCEPT, params);
Map<Long, Float> result = new HashMap<>();
Map<Long, Float> result_idf = new HashMap<>();
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
long id = (long) next.get("tagId");
int nTerms = (int) next.get("nTerms");
//float tf = getFloatValue(next.get("tf"));
float tf = getFloatValue(next.get("tf")) / nTerms;
float idf = Double.valueOf(Math.log10(Float.valueOf(getFloatValue(next.get("idf"))).doubleValue())).floatValue();
// ConceptNet5 Level_1 tags
//long cn5_tag = Long.valueOf((String) next.get("cn5_l1_tag"));
long cn5_tag = (long) next.get("cn5_l1_tag");
float cn5_tag_w = getFloatValue(next.get("cn5_l1_tag_w"));
if (cn5_tag > -1) {
if (!result.containsKey(cn5_tag)) {
result.put(cn5_tag, tf);
result_idf.put(cn5_tag, idf);
} else {
result.put(cn5_tag, result.get(cn5_tag) + tf);
if (result_idf.get(cn5_tag) < idf) // use the highest idf
{
result_idf.put(cn5_tag, idf);
}
}
} else {
result.put(id, tf);
result_idf.put(id, idf);
}
}
result.keySet().forEach((key) -> {
result.put(key, result.get(key) * result_idf.get(key));
});
return result;
} | #vulnerable code
private Map<Long, Float> createFeatureMapWithCN5New(long firstNode) throws QueryExecutionException {
Map<String, Object> params = new HashMap<>();
params.put("id", firstNode);
Result res = database.execute("MATCH (doc:AnnotatedText)\n"
+ "WITH count(doc) as documentsCount\n"
+ "MATCH (document:AnnotatedText)-[:CONTAINS_SENTENCE]->(s:Sentence)-[ht:HAS_TAG]->(tag:Tag)\n"
+ "WHERE id(document) = {id} and not any (p in tag.pos where p in [\"CC\", \"CD\", \"DT\", \"IN\", \"MD\", \"PRP\", \"PRP$\", \"UH\", \"WDT\", \"WP\", \"WRB\", \"TO\", \"PDT\", \"RP\", \"WP$\"])\n" // JJR, JJS ?
+ "WITH tag, sum(ht.tf) as tf, documentsCount, document.numTerms as nTerms\n"
+ "OPTIONAL MATCH (tag)-[rt:IS_RELATED_TO]->(t2_l1:Tag)\n"
+ "WHERE id(t2_l1) = tag.idMaxConcept and exists(t2_l1.word2vec) and com.graphaware.nlp.ml.similarity.cosine(tag.word2vec, t2_l1.word2vec)>0.2\n"
+ "WITH tag, tf, nTerms, id(t2_l1) as cn5_l1_tag, rt.weight as cn5_l1_tag_w, documentsCount\n"
+ "MATCH (a:AnnotatedText)-[:CONTAINS_SENTENCE]->(s:Sentence)-[ht:HAS_TAG]->(tag)\n"
+ "RETURN id(tag) as tagId, tf, (1.0f*documentsCount)/count(distinct a) as idf, nTerms, (case cn5_l1_tag when null then -1 else cn5_l1_tag end) as cn5_l1_tag, cn5_l1_tag_w\n"
+ "ORDER BY tagId, cn5_l1_tag", params);
Map<Long, Float> result = new HashMap<>();
Map<Long, Float> result_idf = new HashMap<>();
while (res != null && res.hasNext()) {
Map<String, Object> next = res.next();
long id = (long) next.get("tagId");
int nTerms = (int) next.get("nTerms");
//float tf = getFloatValue(next.get("tf"));
float tf = getFloatValue(next.get("tf")) / nTerms;
float idf = Double.valueOf(Math.log10(Float.valueOf(getFloatValue(next.get("idf"))).doubleValue())).floatValue();
// ConceptNet5 Level_1 tags
//long cn5_tag = Long.valueOf((String) next.get("cn5_l1_tag"));
long cn5_tag = (long) next.get("cn5_l1_tag");
float cn5_tag_w = getFloatValue(next.get("cn5_l1_tag_w"));
if (cn5_tag>-1) {
if (!result.containsKey(cn5_tag)) {
result.put(cn5_tag, tf);
result_idf.put(cn5_tag, idf);
} else {
result.put(cn5_tag, result.get(cn5_tag) + tf);
if (result_idf.get(cn5_tag) < idf) // use the highest idf
result_idf.put(cn5_tag, idf);
}
} else {
result.put(id, tf);
result_idf.put(id, idf);
}
}
for (Long key: result.keySet()) {
result.put(key, result.get(key) * result_idf.get(key));
}
return result;
}
#location 46
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public boolean handle(LoginPacket packet) {
// Check for supported protocol
int index = Arrays.binarySearch(DragonProxy.BEDROCK_SUPPORTED_PROTOCOLS, packet.getProtocolVersion());
if (index < 0) {
session.getBedrockSession().disconnect();
return true;
}
session.getBedrockSession().setPacketCodec(DragonProxy.BEDROCK_SUPPORTED_CODECS[index]);
JsonNode certData;
try {
certData = DragonProxy.JSON_MAPPER.readTree(packet.getChainData().toByteArray());
} catch (IOException ex) {
throw new RuntimeException("Certificate JSON could not be read");
}
JsonNode certChainData = certData.get("chain");
if (certChainData.getNodeType() != JsonNodeType.ARRAY) {
throw new RuntimeException("Certificate data is not valid");
}
boolean validChain;
try {
validChain = BedrockLoginUtils.validateChainData(certChainData);
JWSObject jwt = JWSObject.parse(certChainData.get(certChainData.size() - 1).asText());
JsonNode payload = DragonProxy.JSON_MAPPER.readTree(jwt.getPayload().toBytes());
if (payload.get("extraData").getNodeType() != JsonNodeType.OBJECT) {
throw new RuntimeException("AuthData was not found!");
}
JSONObject extraData = (JSONObject) jwt.getPayload().toJSONObject().get("extraData");
session.setAuthData(DragonProxy.JSON_MAPPER.convertValue(extraData, AuthData.class));
if (payload.get("identityPublicKey").getNodeType() != JsonNodeType.STRING) {
throw new RuntimeException("Identity Public Key was not found!");
}
if(!validChain) {
if(proxy.getConfiguration().isXboxAuth()) {
session.disconnect("You must be authenticated with xbox live");
return true;
}
session.getAuthData().setXuid(null); // TODO: ideally the class should be immutable
}
ECPublicKey identityPublicKey = EncryptionUtils.generateKey(payload.get("identityPublicKey").textValue());
JWSObject clientJwt = JWSObject.parse(packet.getSkinData().toString());
EncryptionUtils.verifyJwt(clientJwt, identityPublicKey);
JsonNode clientPayload = DragonProxy.JSON_MAPPER.readTree(clientJwt.getPayload().toBytes());
session.setClientData(DragonProxy.JSON_MAPPER.convertValue(clientPayload, ClientData.class));
session.setUsername(session.getAuthData().getDisplayName());
if (EncryptionUtils.canUseEncryption()) {
//BedrockLoginUtils.startEncryptionHandshake(session, identityPublicKey);
}
} catch (Exception ex) {
session.disconnect("disconnectionScreen.internalError.cantConnect");
throw new RuntimeException("Unable to complete login", ex);
}
// Tell the Bedrock client login was successful
PlayStatusPacket playStatus = new PlayStatusPacket();
playStatus.setStatus(PlayStatusPacket.Status.LOGIN_SUCCESS);
session.getBedrockSession().sendPacketImmediately(playStatus);
// Start Resource pack handshake
ResourcePacksInfoPacket resourcePacksInfo = new ResourcePacksInfoPacket();
session.getBedrockSession().sendPacketImmediately(resourcePacksInfo);
return true;
} | #vulnerable code
@Override
public boolean handle(LoginPacket packet) {
// Check for supported protocol
int index = Arrays.binarySearch(DragonProxy.BEDROCK_SUPPORTED_PROTOCOLS, packet.getProtocolVersion());
if (index < 0) {
session.getBedrockSession().disconnect();
return true;
}
session.getBedrockSession().setPacketCodec(DragonProxy.BEDROCK_SUPPORTED_CODECS[index]);
try {
// Get chain data that contains identity info
JSONObject chainData = (JSONObject) JSONValue.parse(packet.getChainData().array());
JSONArray chainArray = (JSONArray) chainData.get("chain");
Object identityObject = chainArray.get(chainArray.size() - 1);
JWSObject identity = JWSObject.parse((String) identityObject);
JSONObject extraData = (JSONObject) identity.getPayload().toJSONObject().get("extraData");
session.setAuthData(new AuthData(
extraData.getAsString("displayName"),
extraData.getAsString("identity"),
extraData.getAsString("XUID")
));
session.setUsername(session.getAuthData().getDisplayName());
} catch (ParseException | ClassCastException | NullPointerException e) {
// Invalid chain data
session.getBedrockSession().disconnect();
return true;
}
// Tell the Bedrock client login was successful.
PlayStatusPacket playStatus = new PlayStatusPacket();
playStatus.setStatus(PlayStatusPacket.Status.LOGIN_SUCCESS);
session.getBedrockSession().sendPacketImmediately(playStatus);
// Start Resource pack handshake
ResourcePacksInfoPacket resourcePacksInfo = new ResourcePacksInfoPacket();
session.getBedrockSession().sendPacketImmediately(resourcePacksInfo);
return true;
}
#location 16
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public boolean handle(LoginPacket packet) {
// Check for supported protocol
int index = Arrays.binarySearch(DragonProxy.BEDROCK_SUPPORTED_PROTOCOLS, packet.getProtocolVersion());
if (index < 0) {
upstream.disconnect();
return true;
}
upstream.setPacketCodec(DragonProxy.BEDROCK_SUPPORTED_CODECS[index]);
try {
// Get chain data that contains identity info
JSONObject chainData = (JSONObject) JSONValue.parse(packet.getChainData().array());
JSONArray chainArray = (JSONArray) chainData.get("chain");
Object identityObject = chainArray.get(chainArray.size() - 1);
JWSObject identity = JWSObject.parse((String) identityObject);
JSONObject extraData = (JSONObject) identity.getPayload().toJSONObject().get("extraData");
this.upstream.setAuthData(new AuthDataImpl(
extraData.getAsString("displayName"),
extraData.getAsString("identity"),
extraData.getAsString("XUID")
));
} catch (ParseException | ClassCastException | NullPointerException e) {
// Invalid chain data
this.upstream.disconnect();
return true;
}
// Tell the Bedrock client login was successful.
PlayStatusPacket playStatus = new PlayStatusPacket();
playStatus.setStatus(PlayStatusPacket.Status.LOGIN_SUCCESS);
upstream.sendPacketImmediately(playStatus);
// Start Resource pack handshake
ResourcePacksInfoPacket resourcePacksInfo = new ResourcePacksInfoPacket();
upstream.sendPacketImmediately(resourcePacksInfo);
return true;
} | #vulnerable code
@Override
public boolean handle(LoginPacket packet) {
// TODO: move out of here? idk
UpstreamSession session = new UpstreamSession(this.session);
this.session.setPlayer(session);
try {
// Get chain data that contains identity info
JSONObject chainData = (JSONObject) JSONValue.parse(packet.getChainData().array());
JSONArray chainArray = (JSONArray) chainData.get("chain");
Object identityObject = chainArray.get(chainArray.size() - 1);
JWSObject identity = JWSObject.parse((String) identityObject);
JSONObject extraData = (JSONObject) identity.getPayload().toJSONObject().get("extraData");
this.session.setAuthData(new AuthDataImpl(
extraData.getAsString("displayName"),
extraData.getAsString("identity"),
extraData.getAsString("XUID")
));
} catch (ParseException | ClassCastException | NullPointerException e) {
// Invalid chain data
this.session.disconnect();
return true;
}
session.setRemoteServer(new RemoteServer("local", proxy.getConfiguration().getRemoteAddress(), proxy.getConfiguration().getRemotePort()));
return true;
}
#location 29
#vulnerability type RESOURCE_LEAK | Below is the vulnerable code, please generate the patch based on the following information. |
#fixed code
@Override
public boolean handle(LoginPacket packet) {
// Check for supported protocol
int index = Arrays.binarySearch(DragonProxy.BEDROCK_SUPPORTED_PROTOCOLS, packet.getProtocolVersion());
if (index < 0) {
session.getBedrockSession().disconnect();
return true;
}
session.getBedrockSession().setPacketCodec(DragonProxy.BEDROCK_SUPPORTED_CODECS[index]);
JsonNode certData;
try {
certData = DragonProxy.JSON_MAPPER.readTree(packet.getChainData().toByteArray());
} catch (IOException ex) {
throw new RuntimeException("Certificate JSON could not be read");
}
JsonNode certChainData = certData.get("chain");
if (certChainData.getNodeType() != JsonNodeType.ARRAY) {
throw new RuntimeException("Certificate data is not valid");
}
boolean validChain;
try {
validChain = BedrockLoginUtils.validateChainData(certChainData);
JWSObject jwt = JWSObject.parse(certChainData.get(certChainData.size() - 1).asText());
JsonNode payload = DragonProxy.JSON_MAPPER.readTree(jwt.getPayload().toBytes());
if (payload.get("extraData").getNodeType() != JsonNodeType.OBJECT) {
throw new RuntimeException("AuthData was not found!");
}
JSONObject extraData = (JSONObject) jwt.getPayload().toJSONObject().get("extraData");
session.setAuthData(DragonProxy.JSON_MAPPER.convertValue(extraData, AuthData.class));
if (payload.get("identityPublicKey").getNodeType() != JsonNodeType.STRING) {
throw new RuntimeException("Identity Public Key was not found!");
}
if(!validChain) {
if(proxy.getConfiguration().isXboxAuth()) {
session.disconnect("You must be authenticated with xbox live");
return true;
}
session.getAuthData().setXuid(null); // TODO: ideally the class should be immutable
}
ECPublicKey identityPublicKey = EncryptionUtils.generateKey(payload.get("identityPublicKey").textValue());
JWSObject clientJwt = JWSObject.parse(packet.getSkinData().toString());
EncryptionUtils.verifyJwt(clientJwt, identityPublicKey);
JsonNode clientPayload = DragonProxy.JSON_MAPPER.readTree(clientJwt.getPayload().toBytes());
session.setClientData(DragonProxy.JSON_MAPPER.convertValue(clientPayload, ClientData.class));
session.setUsername(session.getAuthData().getDisplayName());
if (EncryptionUtils.canUseEncryption()) {
//BedrockLoginUtils.startEncryptionHandshake(session, identityPublicKey);
}
} catch (Exception ex) {
session.disconnect("disconnectionScreen.internalError.cantConnect");
throw new RuntimeException("Unable to complete login", ex);
}
// Tell the Bedrock client login was successful
PlayStatusPacket playStatus = new PlayStatusPacket();
playStatus.setStatus(PlayStatusPacket.Status.LOGIN_SUCCESS);
session.getBedrockSession().sendPacketImmediately(playStatus);
// Start Resource pack handshake
ResourcePacksInfoPacket resourcePacksInfo = new ResourcePacksInfoPacket();
session.getBedrockSession().sendPacketImmediately(resourcePacksInfo);
return true;
} | #vulnerable code
@Override
public boolean handle(LoginPacket packet) {
// Check for supported protocol
int index = Arrays.binarySearch(DragonProxy.BEDROCK_SUPPORTED_PROTOCOLS, packet.getProtocolVersion());
if (index < 0) {
session.getBedrockSession().disconnect();
return true;
}
session.getBedrockSession().setPacketCodec(DragonProxy.BEDROCK_SUPPORTED_CODECS[index]);
try {
// Get chain data that contains identity info
JSONObject chainData = (JSONObject) JSONValue.parse(packet.getChainData().array());
JSONArray chainArray = (JSONArray) chainData.get("chain");
Object identityObject = chainArray.get(chainArray.size() - 1);
JWSObject identity = JWSObject.parse((String) identityObject);
JSONObject extraData = (JSONObject) identity.getPayload().toJSONObject().get("extraData");
session.setAuthData(new AuthData(
extraData.getAsString("displayName"),
extraData.getAsString("identity"),
extraData.getAsString("XUID")
));
session.setUsername(session.getAuthData().getDisplayName());
} catch (ParseException | ClassCastException | NullPointerException e) {
// Invalid chain data
session.getBedrockSession().disconnect();
return true;
}
// Tell the Bedrock client login was successful.
PlayStatusPacket playStatus = new PlayStatusPacket();
playStatus.setStatus(PlayStatusPacket.Status.LOGIN_SUCCESS);
session.getBedrockSession().sendPacketImmediately(playStatus);
// Start Resource pack handshake
ResourcePacksInfoPacket resourcePacksInfo = new ResourcePacksInfoPacket();
session.getBedrockSession().sendPacketImmediately(resourcePacksInfo);
return true;
}
#location 22
#vulnerability type NULL_DEREFERENCE | Below is the vulnerable code, please generate the patch based on the following information. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.