method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
public static Collection<AttachProvider> getAttachProviders() {
return Collections.unmodifiableCollection(apInstances.values());
}
| static Collection<AttachProvider> function() { return Collections.unmodifiableCollection(apInstances.values()); } | /**
* Returns a collection of all known attach providers
* @return a collection of attach providers
*/ | Returns a collection of all known attach providers | getAttachProviders | {
"repo_name": "nickman/tsdb-csf",
"path": "csf-core/src/main/java/com/heliosapm/attachme/AttachProvider.java",
"license": "apache-2.0",
"size": 6505
} | [
"java.util.Collection",
"java.util.Collections"
] | import java.util.Collection; import java.util.Collections; | import java.util.*; | [
"java.util"
] | java.util; | 2,125,813 |
@Command
@NotifyChange({SELECTED_BRANCH, MODERATING_BRANCHES, SHOW_MODERATOR_GROUP_SELECTION_PART})
public void saveModeratorForBranches() {
branches.setModeratingGroupForAllBranches(selectedGroup);
closeDeleteModeratorGroupDialog();
} | @NotifyChange({SELECTED_BRANCH, MODERATING_BRANCHES, SHOW_MODERATOR_GROUP_SELECTION_PART}) void function() { branches.setModeratingGroupForAllBranches(selectedGroup); closeDeleteModeratorGroupDialog(); } | /**
* Saves moderator group for all branches, was moderated by group to delete.
*/ | Saves moderator group for all branches, was moderated by group to delete | saveModeratorForBranches | {
"repo_name": "jtalks-org/poulpe",
"path": "poulpe-view/poulpe-web-controller/src/main/java/org/jtalks/poulpe/web/controller/group/UserGroupVm.java",
"license": "lgpl-2.1",
"size": 13715
} | [
"org.zkoss.bind.annotation.NotifyChange"
] | import org.zkoss.bind.annotation.NotifyChange; | import org.zkoss.bind.annotation.*; | [
"org.zkoss.bind"
] | org.zkoss.bind; | 1,312,115 |
FileSystem localFileSystem = FileSystem.getLocal(new Configuration());
String[] vols = new String[]{TEST_ROOT_DIR + "/0",
TEST_ROOT_DIR + "/1"};
MRAsyncDiskService service = new MRAsyncDiskService(
localFileSystem, vols);
String a = "a";
String b = "b";
String c = "b/c";
String d = "d";
File fa = new File(vols[0], a);
File fb = new File(vols[1], b);
File fc = new File(vols[1], c);
File fd = new File(vols[1], d);
// Create the directories
fa.mkdirs();
fb.mkdirs();
fc.mkdirs();
fd.mkdirs();
assertTrue(fa.exists());
assertTrue(fb.exists());
assertTrue(fc.exists());
assertTrue(fd.exists());
// Move and delete them
service.moveAndDeleteRelativePath(vols[0], a);
assertFalse(fa.exists());
service.moveAndDeleteRelativePath(vols[1], b);
assertFalse(fb.exists());
assertFalse(fc.exists());
// asyncDiskService is NOT able to delete files outside all volumes.
IOException ee = null;
try {
service.moveAndDeleteAbsolutePath(TEST_ROOT_DIR + "/2");
} catch (IOException e) {
ee = e;
}
assertNotNull("asyncDiskService should not be able to delete files "
+ "outside all volumes", ee);
// asyncDiskService is able to automatically find the file in one
// of the volumes.
assertTrue(service.moveAndDeleteAbsolutePath(vols[1] + Path.SEPARATOR_CHAR + d));
// Make sure everything is cleaned up
makeSureCleanedUp(vols, service);
} | FileSystem localFileSystem = FileSystem.getLocal(new Configuration()); String[] vols = new String[]{TEST_ROOT_DIR + "/0", TEST_ROOT_DIR + "/1"}; MRAsyncDiskService service = new MRAsyncDiskService( localFileSystem, vols); String a = "a"; String b = "b"; String c = "b/c"; String d = "d"; File fa = new File(vols[0], a); File fb = new File(vols[1], b); File fc = new File(vols[1], c); File fd = new File(vols[1], d); fa.mkdirs(); fb.mkdirs(); fc.mkdirs(); fd.mkdirs(); assertTrue(fa.exists()); assertTrue(fb.exists()); assertTrue(fc.exists()); assertTrue(fd.exists()); service.moveAndDeleteRelativePath(vols[0], a); assertFalse(fa.exists()); service.moveAndDeleteRelativePath(vols[1], b); assertFalse(fb.exists()); assertFalse(fc.exists()); IOException ee = null; try { service.moveAndDeleteAbsolutePath(TEST_ROOT_DIR + "/2"); } catch (IOException e) { ee = e; } assertNotNull(STR + STR, ee); assertTrue(service.moveAndDeleteAbsolutePath(vols[1] + Path.SEPARATOR_CHAR + d)); makeSureCleanedUp(vols, service); } | /**
* This test creates some directories and then removes them through
* MRAsyncDiskService.
*/ | This test creates some directories and then removes them through MRAsyncDiskService | testMRAsyncDiskService | {
"repo_name": "jayantgolhar/Hadoop-0.21.0",
"path": "mapred/src/test/mapred/org/apache/hadoop/mapreduce/util/TestMRAsyncDiskService.java",
"license": "apache-2.0",
"size": 6777
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path",
"org.apache.hadoop.mapreduce.util.MRAsyncDiskService"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.util.MRAsyncDiskService; | import java.io.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.mapreduce.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,554,253 |
public SAMRecord next() {
if (!hasNext())
throw new NoSuchElementException("BAMQueryFilteringIterator: no next element available");
final SAMRecord currentRead = mNextRecord;
mNextRecord = advance();
return currentRead;
} | SAMRecord function() { if (!hasNext()) throw new NoSuchElementException(STR); final SAMRecord currentRead = mNextRecord; mNextRecord = advance(); return currentRead; } | /**
* Gets the next record from the given iterator.
*
* @return The next SAM record in the iterator.
*/ | Gets the next record from the given iterator | next | {
"repo_name": "jn7163/cramtools",
"path": "src/main/java/net/sf/cram/index/BAMQueryFilteringIterator.java",
"license": "apache-2.0",
"size": 4614
} | [
"java.util.NoSuchElementException"
] | import java.util.NoSuchElementException; | import java.util.*; | [
"java.util"
] | java.util; | 607,109 |
public Map<String, RrdGraphAttribute> getRrdGraphAttributes() {
Map<String, RrdGraphAttribute> attributes = new HashMap<String, RrdGraphAttribute>();
for (OnmsAttribute attribute : getAttributes()) {
if (RrdGraphAttribute.class.isAssignableFrom(attribute.getClass())) {
RrdGraphAttribute graphAttribute = (RrdGraphAttribute) attribute;
attributes.put(graphAttribute.getName(), graphAttribute);
}
}
return attributes;
} | Map<String, RrdGraphAttribute> function() { Map<String, RrdGraphAttribute> attributes = new HashMap<String, RrdGraphAttribute>(); for (OnmsAttribute attribute : getAttributes()) { if (RrdGraphAttribute.class.isAssignableFrom(attribute.getClass())) { RrdGraphAttribute graphAttribute = (RrdGraphAttribute) attribute; attributes.put(graphAttribute.getName(), graphAttribute); } } return attributes; } | /**
* Get the RRD graph attributes for this resource, if any.
*
* @return a {@link java.util.Map} object.
*/ | Get the RRD graph attributes for this resource, if any | getRrdGraphAttributes | {
"repo_name": "dzonekl/oss2nms",
"path": "plugins/com.netxforge.oss2.model/src/com/netxforge/oss2/model/OnmsResource.java",
"license": "gpl-3.0",
"size": 10450
} | [
"java.util.HashMap",
"java.util.Map"
] | import java.util.HashMap; import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 76,657 |
AffineTransform handleImage(RenderedImage image, Element imageElement,
int x, int y,
int width, int height,
SVGGeneratorContext generatorContext); | AffineTransform handleImage(RenderedImage image, Element imageElement, int x, int y, int width, int height, SVGGeneratorContext generatorContext); | /**
* The handler should set the xlink:href tag and return a transform
*
* @param image the image under consideration
* @param imageElement the DOM Element for this image
* @param x x coordinate
* @param y y coordinate
* @param width width for rendering
* @param height height for rendering
* @param generatorContext the SVGGeneratorContext
*
* @return transform converting the image dimension to rendered dimension
*/ | The handler should set the xlink:href tag and return a transform | handleImage | {
"repo_name": "Squeegee/batik",
"path": "sources/org/apache/batik/svggen/GenericImageHandler.java",
"license": "apache-2.0",
"size": 4124
} | [
"java.awt.geom.AffineTransform",
"java.awt.image.RenderedImage",
"org.w3c.dom.Element"
] | import java.awt.geom.AffineTransform; import java.awt.image.RenderedImage; import org.w3c.dom.Element; | import java.awt.geom.*; import java.awt.image.*; import org.w3c.dom.*; | [
"java.awt",
"org.w3c.dom"
] | java.awt; org.w3c.dom; | 1,566,234 |
public static PushButton createPushButton(ImageResource img, String tip,
ClickHandler handler) {
PushButton pb = new PushButton(new Image(img));
pb.addClickHandler(handler);
pb.setTitle(tip);
return pb;
} | static PushButton function(ImageResource img, String tip, ClickHandler handler) { PushButton pb = new PushButton(new Image(img)); pb.addClickHandler(handler); pb.setTitle(tip); return pb; } | /**
* Helper method to create push buttons.
*
* @param img image to shown on face of push button
* @param tip text to show in tooltip
* @return newly created push button
*/ | Helper method to create push buttons | createPushButton | {
"repo_name": "AppScale/appinventor",
"path": "src/com/google/appinventor/client/Ode.java",
"license": "mit",
"size": 34647
} | [
"com.google.gwt.event.dom.client.ClickHandler",
"com.google.gwt.resources.client.ImageResource",
"com.google.gwt.user.client.ui.Image",
"com.google.gwt.user.client.ui.PushButton"
] | import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.ui.Image; import com.google.gwt.user.client.ui.PushButton; | import com.google.gwt.event.dom.client.*; import com.google.gwt.resources.client.*; import com.google.gwt.user.client.ui.*; | [
"com.google.gwt"
] | com.google.gwt; | 1,639,115 |
public Field getField() {
return field;
} | Field function() { return field; } | /**
* Access the Field associated with the FieldModel
* @return The associated Field
*/ | Access the Field associated with the FieldModel | getField | {
"repo_name": "olliefreeman/jadira",
"path": "cloning/src/main/java/org/jadira/reflection/access/model/FieldModel.java",
"license": "apache-2.0",
"size": 4894
} | [
"java.lang.reflect.Field"
] | import java.lang.reflect.Field; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 2,841,784 |
public static UpdateSyndicationFeedBuilder update(String id, BaseSyndicationFeed syndicationFeed) {
return new UpdateSyndicationFeedBuilder(id, syndicationFeed);
} | static UpdateSyndicationFeedBuilder function(String id, BaseSyndicationFeed syndicationFeed) { return new UpdateSyndicationFeedBuilder(id, syndicationFeed); } | /**
* Update Syndication Feed by ID
*
* @param id
* @param syndicationFeed
*/ | Update Syndication Feed by ID | update | {
"repo_name": "kaltura/KalturaGeneratedAPIClientsJava",
"path": "src/main/java/com/kaltura/client/services/SyndicationFeedService.java",
"license": "agpl-3.0",
"size": 6867
} | [
"com.kaltura.client.types.BaseSyndicationFeed"
] | import com.kaltura.client.types.BaseSyndicationFeed; | import com.kaltura.client.types.*; | [
"com.kaltura.client"
] | com.kaltura.client; | 648,214 |
@WeelMethod
public final static void clear(final ValueMap thiz)
{
final StringBuilder sb = WeelOop.getInstance(thiz, StringBuilder.class);
sb.setLength(0);
} | final static void function(final ValueMap thiz) { final StringBuilder sb = WeelOop.getInstance(thiz, StringBuilder.class); sb.setLength(0); } | /**
* Sets the length of this StringBuilder to 0.
*
* @param thiz This.
* @see java.lang.StringBuilder#setLength(int)
*/ | Sets the length of this StringBuilder to 0 | clear | {
"repo_name": "rjeschke/weel",
"path": "src/main/java/com/github/rjeschke/weel/jclass/WeelStringBuilder.java",
"license": "apache-2.0",
"size": 4104
} | [
"com.github.rjeschke.weel.ValueMap",
"com.github.rjeschke.weel.WeelOop"
] | import com.github.rjeschke.weel.ValueMap; import com.github.rjeschke.weel.WeelOop; | import com.github.rjeschke.weel.*; | [
"com.github.rjeschke"
] | com.github.rjeschke; | 112,951 |
@SuppressWarnings({ "rawtypes", "unchecked"})
public static void JSONToShapefile(String json, String shpFileName, List<String> names, boolean keep, String crsFrom, String crsTo, boolean compressed) {
try {
if (json == null) {
throw new Exception("No JSON file specified");
} else {
if (json.isEmpty()) {
throw new Exception("No JSON file specified");
}
}
if (shpFileName == null) {
throw new Exception("No Shapefile specified");
} else {
if (shpFileName.isEmpty()) {
throw new Exception("No Shapefile specified");
}
}
String path;
String fileName;
int loc = shpFileName.lastIndexOf(File.separatorChar);
if (loc == -1) {
// loc = 0; // no path - ie. "hills.shp"
// path = "";
// fname = shpfileName;
//probably using the wrong path separator character.
throw new Exception("Couldn't find the path separator character '" +
File.separatorChar +
"' in your shape file name. This you're probably using the unix (or dos) one.");
} else {
path = shpFileName.substring(0, loc + 1); // ie. "/data1/hills.shp" -> "/data1/"
fileName = shpFileName.substring(loc + 1); // ie. "/data1/hills.shp" -> "hills.shp"
}
loc = fileName.lastIndexOf(".");
if (loc == -1) {
throw new Exception("Filename must end in '.shp'");
}
String fileNameWithoutExtention = fileName.substring(0, loc); // ie. "hills.shp" -> "hills."
String dbfFileName = path + fileNameWithoutExtention + ".dbf";
int numRecords = 0;
Envelope bounds = null;
int fileLength = 0;
double[] boundsArr = new double[4];
boundsArr[0] = Double.MAX_VALUE;
boundsArr[1] = Double.MAX_VALUE;
boundsArr[2] = -Double.MAX_VALUE;
boundsArr[3] = -Double.MAX_VALUE;
InputStream in1 = new FileInputStream(json);
InputStreamReader inStream1 = new InputStreamReader(in1);
BufferedReader buff1 = new BufferedReader(inStream1);
JsonFactory jfactory = new JsonFactory();
GeometryParser geometryParser = new GeometryParser();
ShapeHandler handler = null;
List<DbfFieldDef> fieldDefs = new ArrayList<DbfFieldDef>();
Map<String,Integer> attribNameToIndex = new HashMap<String,Integer>();
String line1 = null;
boolean first1 = true;
//boolean flagContinue1 = false;
boolean flagFinished1 = false;
//while ((line1 = buff1.readLine()) != null) {
while (true) {
if (compressed) {
if (first1) {
byte[] header = new byte[7];
in1.read(header);
first1 = false;
}
ByteArrayOutputStream global = new ByteArrayOutputStream();
while (true) {
byte[] blockheader = new byte[7];
int size = in1.read(blockheader);
if (size != 7) {
flagFinished1 = true;
break;
}
byte[] h = Arrays.copyOfRange(blockheader, 1, 7);
if (new String(h).equals("snappy")) {
in1.read();
//flagContinue1 = true;
break;
}
int flagCompressed = blockheader[0];
int a = blockheader[1] & 0xFF;
int b = blockheader[2] & 0xFF;
int compressedSize = (a << 8) | b;
byte[] compressedData = new byte[compressedSize];
in1.read(compressedData);
ByteArrayOutputStream local = new ByteArrayOutputStream( );
local.write(blockheader);
if (flagCompressed == 0) {
global.write( compressedData );
//System.out.println(new String(compressedData));
} else {
local.write(compressedData);
byte[] decompressed = ByteStreams.toByteArray(new SnappyInputStream(new ByteArrayInputStream(blockToStream(local.toByteArray()))));
global.write(decompressed);
//System.out.println(new String(decompressed));
}
local.close();
}
line1 = new String(global.toByteArray());
global.close();
//System.out.println(line1);
} else {
line1 = buff1.readLine();
if (line1 == null)
break;
}
//System.out.println(line1);
Geometry geometry;
JsonParser jParser = jfactory.createJsonParser(line1);
int countEndObject = 0;
while (true) {
JsonToken token = jParser.nextToken();
if (token==JsonToken.END_OBJECT)
countEndObject++;
if (countEndObject>1) //skip data '}' character
break;
String fieldname = jParser.getCurrentName();
if ("geometry".equals(fieldname)) {
jParser.nextToken();
//TODO: Should work with wkb
geometry = geometryParser.parseGeometry(jParser.getText());
CRS.convert(crsFrom, crsTo, geometry);
Envelope envelope = geometry.getEnvelopeInternal();
if (envelope.getMinX() < boundsArr[0])
boundsArr[0] = envelope.getMinX();
if (envelope.getMinY() < boundsArr[1])
boundsArr[1] = envelope.getMinY();
if (envelope.getMaxX() > boundsArr[2])
boundsArr[2] = envelope.getMaxX();
if (envelope.getMaxY() > boundsArr[3])
boundsArr[3] = envelope.getMaxY();
if (handler == null) {
handler = Shapefile.getShapeHandler(geometry,2);
}
fileLength=fileLength + handler.getLength(geometry);
fileLength+=4;//for each header
}
if ((numRecords==0) && ("properties".equals(fieldname))) {
jParser.nextToken(); //skip '{' character
int idx = 0;
while (jParser.nextToken() != JsonToken.END_OBJECT) {
String columnName = jParser.getText();
boolean bool;
if (names != null) {
if (names.size() > 0) {
if (keep) {
bool = names.contains(columnName);
} else {
bool = !names.contains(columnName);
}
} else {
bool = true;
}
} else {
bool = true;
}
jParser.nextToken();
if (bool) {
String value = jParser.getText().trim();
try {
//Tests if it's an integer
@SuppressWarnings("unused")
Integer intValue = Integer.parseInt(value);
fieldDefs.add(new DbfFieldDef(columnName, 'N', 16, 0));
} catch (NumberFormatException nfe) {
//Not an integer. Tests if it's a double
try {
@SuppressWarnings("unused")
Double doubleValue = Double.parseDouble(value);
fieldDefs.add(new DbfFieldDef(columnName, 'F', 33, 16));
} catch (NumberFormatException nfe2) {
//Not a double. Assuming it's a string
fieldDefs.add(new DbfFieldDef(columnName, 'C', 255, 0));
}
}
attribNameToIndex.put(columnName, idx);
idx++;
}
}
}
}
numRecords++;
jParser.close();
//data1 = in1.read();
if (flagFinished1) {
break;
}
}
//in1.close();
buff1.close();
bounds = new Envelope(boundsArr[0], boundsArr[2], boundsArr[1], boundsArr[3]);
DbfFileWriter dbf;
dbf = new DbfFileWriter(dbfFileName);
DbfFieldDef[] fields = new DbfFieldDef[fieldDefs.size()];
int countf = 0;
for (DbfFieldDef f : fieldDefs) {
fields[countf] = f;
countf++;
}
dbf.writeHeader(fields, numRecords);
OutputStream out2 = new FileOutputStream(shpFileName);
EndianDataOutputStream shapeFile = new EndianDataOutputStream(out2);
writeShapefileHeader(shapeFile, fileLength, bounds);
String shxFileName = path + fileNameWithoutExtention + ".shx";
BufferedOutputStream out3 = new BufferedOutputStream(new FileOutputStream(shxFileName));
EndianDataOutputStream indexfile = new EndianDataOutputStream(out3);
int indexLength = 0;
indexLength = 50+(4*numRecords);
writeShapefileIndexHeader(indexfile, indexLength, bounds);
InputStream in = new FileInputStream(json);
InputStreamReader inStream = new InputStreamReader(in);
BufferedReader buff = new BufferedReader(inStream);
int indexPos = 50;
int indexLen = 0;
int pos = 50;
int count = 1;
String line = null;
boolean first = true;
//boolean flagContinue = false;
boolean flagFinished = false;
//while ((line = buff.readLine()) != null) {
while (true) {
if (compressed) {
if (first) {
byte[] header = new byte[7];
in.read(header);
first = false;
}
ByteArrayOutputStream global = new ByteArrayOutputStream();
while (true) {
byte[] blockheader = new byte[7];
int size = in.read(blockheader);
if (size != 7) {
flagFinished = true;
break;
}
byte[] h = Arrays.copyOfRange(blockheader, 1, 7);
if (new String(h).equals("snappy")) {
in.read();
//flagContinue = true;
break;
}
int flagCompressed = blockheader[0];
int a = blockheader[1] & 0xFF;
int b = blockheader[2] & 0xFF;
int compressedSize = (a << 8) | b;
byte[] compressedData = new byte[compressedSize];
in.read(compressedData);
ByteArrayOutputStream local = new ByteArrayOutputStream( );
local.write(blockheader);
if (flagCompressed == 0) {
global.write( compressedData );
} else {
local.write(compressedData);
byte[] decompressed = ByteStreams.toByteArray(new SnappyInputStream(new ByteArrayInputStream(blockToStream(local.toByteArray()))));
global.write(decompressed);
}
local.close();
}
line = new String(global.toByteArray());
global.close();
} else {
line = buff.readLine();
if (line == null)
break;
}
Vector DBFrow = new Vector();
DBFrow.setSize(fieldDefs.size());
Geometry geometry;
JsonParser jParser = jfactory.createJsonParser(line);
int countEndObject = 0;
while (true) {
JsonToken token = jParser.nextToken();
if (token==JsonToken.END_OBJECT)
countEndObject++;
if (countEndObject>1) //skip data '}' character
break;
String fieldname = jParser.getCurrentName();
if ("geometry".equals(fieldname)) {
jParser.nextToken();
//TODO: Should work with wkb
geometry = geometryParser.parseGeometry(jParser.getText());
CRS.convert(crsFrom, crsTo, geometry);
if (handler == null) {
handler = Shapefile.getShapeHandler(geometry,2);
}
indexLen = handler.getLength(geometry);
indexfile.writeIntBE(indexPos);
indexfile.writeIntBE(indexLen);
indexPos = indexPos+indexLen+4;
shapeFile.writeIntBE(count++);
shapeFile.writeIntBE(handler.getLength(geometry));
// file.setLittleEndianMode(true);
pos=pos+4; // length of header in WORDS
handler.write(geometry,shapeFile);
pos+=handler.getLength(geometry); // length of shape in WORDS
}
if ("properties".equals(fieldname)) {
jParser.nextToken(); //skip '{' character
while (jParser.nextToken() != JsonToken.END_OBJECT) {
String columnName = jParser.getText();
boolean bool;
if (names != null) {
if (names.size() > 0) {
if (keep) {
bool = names.contains(columnName);
} else {
bool = !names.contains(columnName);
}
} else {
bool = true;
}
} else {
bool = true;
}
jParser.nextToken();
if (bool) {
String value = jParser.getText().trim();
try {
//Tests if it's an integer
Integer intValue = Integer.parseInt(value);
DBFrow.setElementAt(intValue, attribNameToIndex.get(columnName));
} catch (NumberFormatException nfe) {
//Not an integer. Tests if it's a double
try {
Double doubleValue = Double.parseDouble(value);
DBFrow.setElementAt(doubleValue, attribNameToIndex.get(columnName));
} catch (NumberFormatException nfe2) {
//Not a double. Assuming it's a string
DBFrow.setElementAt(value, attribNameToIndex.get(columnName));
}
}
}
}
}
}
dbf.writeRecord(DBFrow);
jParser.close();
if (flagFinished) {
break;
}
}
shapeFile.close();
indexfile.close();
dbf.close();
buff.close();
//in.close();
} catch (Exception e) {
System.err.println("Failed to create shapefile; error - " + e.getMessage());
e.printStackTrace();
}
}
| @SuppressWarnings({ STR, STR}) static void function(String json, String shpFileName, List<String> names, boolean keep, String crsFrom, String crsTo, boolean compressed) { try { if (json == null) { throw new Exception(STR); } else { if (json.isEmpty()) { throw new Exception(STR); } } if (shpFileName == null) { throw new Exception(STR); } else { if (shpFileName.isEmpty()) { throw new Exception(STR); } } String path; String fileName; int loc = shpFileName.lastIndexOf(File.separatorChar); if (loc == -1) { throw new Exception(STR + File.separatorChar + STR); } else { path = shpFileName.substring(0, loc + 1); fileName = shpFileName.substring(loc + 1); } loc = fileName.lastIndexOf("."); if (loc == -1) { throw new Exception(STR); } String fileNameWithoutExtention = fileName.substring(0, loc); String dbfFileName = path + fileNameWithoutExtention + ".dbf"; int numRecords = 0; Envelope bounds = null; int fileLength = 0; double[] boundsArr = new double[4]; boundsArr[0] = Double.MAX_VALUE; boundsArr[1] = Double.MAX_VALUE; boundsArr[2] = -Double.MAX_VALUE; boundsArr[3] = -Double.MAX_VALUE; InputStream in1 = new FileInputStream(json); InputStreamReader inStream1 = new InputStreamReader(in1); BufferedReader buff1 = new BufferedReader(inStream1); JsonFactory jfactory = new JsonFactory(); GeometryParser geometryParser = new GeometryParser(); ShapeHandler handler = null; List<DbfFieldDef> fieldDefs = new ArrayList<DbfFieldDef>(); Map<String,Integer> attribNameToIndex = new HashMap<String,Integer>(); String line1 = null; boolean first1 = true; boolean flagFinished1 = false; while (true) { if (compressed) { if (first1) { byte[] header = new byte[7]; in1.read(header); first1 = false; } ByteArrayOutputStream global = new ByteArrayOutputStream(); while (true) { byte[] blockheader = new byte[7]; int size = in1.read(blockheader); if (size != 7) { flagFinished1 = true; break; } byte[] h = Arrays.copyOfRange(blockheader, 1, 7); if (new String(h).equals(STR)) { in1.read(); break; } int 
flagCompressed = blockheader[0]; int a = blockheader[1] & 0xFF; int b = blockheader[2] & 0xFF; int compressedSize = (a << 8) b; byte[] compressedData = new byte[compressedSize]; in1.read(compressedData); ByteArrayOutputStream local = new ByteArrayOutputStream( ); local.write(blockheader); if (flagCompressed == 0) { global.write( compressedData ); } else { local.write(compressedData); byte[] decompressed = ByteStreams.toByteArray(new SnappyInputStream(new ByteArrayInputStream(blockToStream(local.toByteArray())))); global.write(decompressed); } local.close(); } line1 = new String(global.toByteArray()); global.close(); } else { line1 = buff1.readLine(); if (line1 == null) break; } Geometry geometry; JsonParser jParser = jfactory.createJsonParser(line1); int countEndObject = 0; while (true) { JsonToken token = jParser.nextToken(); if (token==JsonToken.END_OBJECT) countEndObject++; if (countEndObject>1) break; String fieldname = jParser.getCurrentName(); if (STR.equals(fieldname)) { jParser.nextToken(); geometry = geometryParser.parseGeometry(jParser.getText()); CRS.convert(crsFrom, crsTo, geometry); Envelope envelope = geometry.getEnvelopeInternal(); if (envelope.getMinX() < boundsArr[0]) boundsArr[0] = envelope.getMinX(); if (envelope.getMinY() < boundsArr[1]) boundsArr[1] = envelope.getMinY(); if (envelope.getMaxX() > boundsArr[2]) boundsArr[2] = envelope.getMaxX(); if (envelope.getMaxY() > boundsArr[3]) boundsArr[3] = envelope.getMaxY(); if (handler == null) { handler = Shapefile.getShapeHandler(geometry,2); } fileLength=fileLength + handler.getLength(geometry); fileLength+=4; } if ((numRecords==0) && (STR.equals(fieldname))) { jParser.nextToken(); int idx = 0; while (jParser.nextToken() != JsonToken.END_OBJECT) { String columnName = jParser.getText(); boolean bool; if (names != null) { if (names.size() > 0) { if (keep) { bool = names.contains(columnName); } else { bool = !names.contains(columnName); } } else { bool = true; } } else { bool = true; } 
jParser.nextToken(); if (bool) { String value = jParser.getText().trim(); try { @SuppressWarnings(STR) Integer intValue = Integer.parseInt(value); fieldDefs.add(new DbfFieldDef(columnName, 'N', 16, 0)); } catch (NumberFormatException nfe) { try { @SuppressWarnings(STR) Double doubleValue = Double.parseDouble(value); fieldDefs.add(new DbfFieldDef(columnName, 'F', 33, 16)); } catch (NumberFormatException nfe2) { fieldDefs.add(new DbfFieldDef(columnName, 'C', 255, 0)); } } attribNameToIndex.put(columnName, idx); idx++; } } } } numRecords++; jParser.close(); if (flagFinished1) { break; } } buff1.close(); bounds = new Envelope(boundsArr[0], boundsArr[2], boundsArr[1], boundsArr[3]); DbfFileWriter dbf; dbf = new DbfFileWriter(dbfFileName); DbfFieldDef[] fields = new DbfFieldDef[fieldDefs.size()]; int countf = 0; for (DbfFieldDef f : fieldDefs) { fields[countf] = f; countf++; } dbf.writeHeader(fields, numRecords); OutputStream out2 = new FileOutputStream(shpFileName); EndianDataOutputStream shapeFile = new EndianDataOutputStream(out2); writeShapefileHeader(shapeFile, fileLength, bounds); String shxFileName = path + fileNameWithoutExtention + ".shx"; BufferedOutputStream out3 = new BufferedOutputStream(new FileOutputStream(shxFileName)); EndianDataOutputStream indexfile = new EndianDataOutputStream(out3); int indexLength = 0; indexLength = 50+(4*numRecords); writeShapefileIndexHeader(indexfile, indexLength, bounds); InputStream in = new FileInputStream(json); InputStreamReader inStream = new InputStreamReader(in); BufferedReader buff = new BufferedReader(inStream); int indexPos = 50; int indexLen = 0; int pos = 50; int count = 1; String line = null; boolean first = true; boolean flagFinished = false; while (true) { if (compressed) { if (first) { byte[] header = new byte[7]; in.read(header); first = false; } ByteArrayOutputStream global = new ByteArrayOutputStream(); while (true) { byte[] blockheader = new byte[7]; int size = in.read(blockheader); if (size != 7) { 
flagFinished = true; break; } byte[] h = Arrays.copyOfRange(blockheader, 1, 7); if (new String(h).equals(STR)) { in.read(); break; } int flagCompressed = blockheader[0]; int a = blockheader[1] & 0xFF; int b = blockheader[2] & 0xFF; int compressedSize = (a << 8) b; byte[] compressedData = new byte[compressedSize]; in.read(compressedData); ByteArrayOutputStream local = new ByteArrayOutputStream( ); local.write(blockheader); if (flagCompressed == 0) { global.write( compressedData ); } else { local.write(compressedData); byte[] decompressed = ByteStreams.toByteArray(new SnappyInputStream(new ByteArrayInputStream(blockToStream(local.toByteArray())))); global.write(decompressed); } local.close(); } line = new String(global.toByteArray()); global.close(); } else { line = buff.readLine(); if (line == null) break; } Vector DBFrow = new Vector(); DBFrow.setSize(fieldDefs.size()); Geometry geometry; JsonParser jParser = jfactory.createJsonParser(line); int countEndObject = 0; while (true) { JsonToken token = jParser.nextToken(); if (token==JsonToken.END_OBJECT) countEndObject++; if (countEndObject>1) break; String fieldname = jParser.getCurrentName(); if (STR.equals(fieldname)) { jParser.nextToken(); geometry = geometryParser.parseGeometry(jParser.getText()); CRS.convert(crsFrom, crsTo, geometry); if (handler == null) { handler = Shapefile.getShapeHandler(geometry,2); } indexLen = handler.getLength(geometry); indexfile.writeIntBE(indexPos); indexfile.writeIntBE(indexLen); indexPos = indexPos+indexLen+4; shapeFile.writeIntBE(count++); shapeFile.writeIntBE(handler.getLength(geometry)); pos=pos+4; handler.write(geometry,shapeFile); pos+=handler.getLength(geometry); } if (STR.equals(fieldname)) { jParser.nextToken(); while (jParser.nextToken() != JsonToken.END_OBJECT) { String columnName = jParser.getText(); boolean bool; if (names != null) { if (names.size() > 0) { if (keep) { bool = names.contains(columnName); } else { bool = !names.contains(columnName); } } else { bool = true; 
} } else { bool = true; } jParser.nextToken(); if (bool) { String value = jParser.getText().trim(); try { Integer intValue = Integer.parseInt(value); DBFrow.setElementAt(intValue, attribNameToIndex.get(columnName)); } catch (NumberFormatException nfe) { try { Double doubleValue = Double.parseDouble(value); DBFrow.setElementAt(doubleValue, attribNameToIndex.get(columnName)); } catch (NumberFormatException nfe2) { DBFrow.setElementAt(value, attribNameToIndex.get(columnName)); } } } } } } dbf.writeRecord(DBFrow); jParser.close(); if (flagFinished) { break; } } shapeFile.close(); indexfile.close(); dbf.close(); buff.close(); } catch (Exception e) { System.err.println(STR + e.getMessage()); e.printStackTrace(); } } | /**
* Converts from JSON to Shapefile.<br>
* @param input JSON file path<br>
* output shapefile path
*/ | Converts from JSON to Shapefile | JSONToShapefile | {
"repo_name": "prashant003/interimage-2",
"path": "interimage-geometry/src/main/java/br/puc_rio/ele/lvc/interimage/geometry/ShapefileConverter.java",
"license": "apache-2.0",
"size": 45685
} | [
"br.puc_rio.ele.lvc.interimage.common.CRS",
"br.puc_rio.ele.lvc.interimage.common.GeometryParser",
"com.google.common.io.ByteStreams",
"com.vividsolutions.jts.geom.Envelope",
"com.vividsolutions.jts.geom.Geometry",
"com.vividsolutions.jump.io.EndianDataOutputStream",
"java.io.BufferedOutputStream",
"java.io.BufferedReader",
"java.io.ByteArrayInputStream",
"java.io.ByteArrayOutputStream",
"java.io.File",
"java.io.FileInputStream",
"java.io.FileOutputStream",
"java.io.InputStream",
"java.io.InputStreamReader",
"java.io.OutputStream",
"java.util.ArrayList",
"java.util.Arrays",
"java.util.HashMap",
"java.util.List",
"java.util.Map",
"java.util.Vector",
"org.codehaus.jackson.JsonFactory",
"org.codehaus.jackson.JsonParser",
"org.codehaus.jackson.JsonToken",
"org.geotools.dbffile.DbfFieldDef",
"org.geotools.dbffile.DbfFileWriter",
"org.geotools.shapefile.ShapeHandler",
"org.geotools.shapefile.Shapefile",
"org.iq80.snappy.SnappyInputStream"
] | import br.puc_rio.ele.lvc.interimage.common.CRS; import br.puc_rio.ele.lvc.interimage.common.GeometryParser; import com.google.common.io.ByteStreams; import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jump.io.EndianDataOutputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Vector; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonParser; import org.codehaus.jackson.JsonToken; import org.geotools.dbffile.DbfFieldDef; import org.geotools.dbffile.DbfFileWriter; import org.geotools.shapefile.ShapeHandler; import org.geotools.shapefile.Shapefile; import org.iq80.snappy.SnappyInputStream; | import br.puc_rio.ele.lvc.interimage.common.*; import com.google.common.io.*; import com.vividsolutions.jts.geom.*; import com.vividsolutions.jump.io.*; import java.io.*; import java.util.*; import org.codehaus.jackson.*; import org.geotools.dbffile.*; import org.geotools.shapefile.*; import org.iq80.snappy.*; | [
"br.puc_rio.ele",
"com.google.common",
"com.vividsolutions.jts",
"com.vividsolutions.jump",
"java.io",
"java.util",
"org.codehaus.jackson",
"org.geotools.dbffile",
"org.geotools.shapefile",
"org.iq80.snappy"
] | br.puc_rio.ele; com.google.common; com.vividsolutions.jts; com.vividsolutions.jump; java.io; java.util; org.codehaus.jackson; org.geotools.dbffile; org.geotools.shapefile; org.iq80.snappy; | 2,887,777 |
/**
 * Returns a future that completes once the discovery topology version
 * reaches {@code awaitVer}.
 *
 * @param awaitVer Topology version to wait for.
 * @return Future completed with the topology version; already completed if
 *         the current version is at least {@code awaitVer}.
 */
public IgniteInternalFuture<Long> topologyFuture(final long awaitVer) {
    final long curVer = topologyVersion();

    // Fast path: the awaited version has already been reached.
    if (curVer >= awaitVer)
        return new GridFinishedFuture<>(curVer);

    // Slow path: register a future that is completed by discovery events.
    DiscoTopologyFuture waitFut = new DiscoTopologyFuture(ctx, awaitVer);

    waitFut.init();

    return waitFut;
} | IgniteInternalFuture<Long> function(final long awaitVer) { long topVer = topologyVersion(); if (topVer >= awaitVer) return new GridFinishedFuture<>(topVer); DiscoTopologyFuture fut = new DiscoTopologyFuture(ctx, awaitVer); fut.init(); return fut; } | /**
* Gets future that will be completed when current topology version becomes greater or equal to argument passed.
*
* @param awaitVer Topology version to await.
* @return Future.
*/ | Gets future that will be completed when current topology version becomes greater or equal to argument passed | topologyFuture | {
"repo_name": "leveyj/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java",
"license": "apache-2.0",
"size": 112502
} | [
"org.apache.ignite.internal.IgniteInternalFuture",
"org.apache.ignite.internal.util.future.GridFinishedFuture"
] | import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.util.future.GridFinishedFuture; | import org.apache.ignite.internal.*; import org.apache.ignite.internal.util.future.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,850,134 |
/**
 * Completes the given user future with an {@link IgniteCheckedException}
 * carrying the supplied message. A {@code null} future is silently ignored.
 *
 * @param userFut Future to complete, may be {@code null}.
 * @param errMsg Error message.
 */
private static void completeWithError(@Nullable GridFutureAdapter<Boolean> userFut, String errMsg) {
    if (userFut == null)
        return;

    userFut.onDone(new IgniteCheckedException(errMsg));
} | static void function(@Nullable GridFutureAdapter<Boolean> userFut, String errMsg) { if (userFut != null) userFut.onDone(new IgniteCheckedException(errMsg)); } | /**
* Complete user future with error.
*
* @param errMsg Error message.
*/ | Complete user future with error | completeWithError | {
"repo_name": "samaitra/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/processors/cache/WalStateManager.java",
"license": "apache-2.0",
"size": 42450
} | [
"org.apache.ignite.IgniteCheckedException",
"org.apache.ignite.internal.util.future.GridFutureAdapter",
"org.jetbrains.annotations.Nullable"
] | import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.jetbrains.annotations.Nullable; | import org.apache.ignite.*; import org.apache.ignite.internal.util.future.*; import org.jetbrains.annotations.*; | [
"org.apache.ignite",
"org.jetbrains.annotations"
] | org.apache.ignite; org.jetbrains.annotations; | 198,729 |
/**
 * Returns the detail content resource stored on the given request, or
 * {@code null} when the request is not for a content detail page.
 *
 * @param req the current request
 * @return the current detail content resource, or {@code null} if absent
 */
public static CmsResource getDetailResource(ServletRequest req) {
    // The detail page handler stashes the resource as a request attribute.
    Object attr = req.getAttribute(ATTR_DETAIL_CONTENT_RESOURCE);
    return (CmsResource) attr;
} | static CmsResource function(ServletRequest req) { return (CmsResource)req.getAttribute(ATTR_DETAIL_CONTENT_RESOURCE); } | /**
* Returns the current detail content resource, or <code>null</code> if this is not a request to a content detail page.<p>
*
* @param req the current request
*
* @return the current detail content resource, or <code>null</code> if this is not a request to a content detail page
*/ | Returns the current detail content resource, or <code>null</code> if this is not a request to a content detail page | getDetailResource | {
"repo_name": "mediaworx/opencms-core",
"path": "src/org/opencms/ade/detailpage/CmsDetailPageResourceHandler.java",
"license": "lgpl-2.1",
"size": 7388
} | [
"javax.servlet.ServletRequest",
"org.opencms.file.CmsResource"
] | import javax.servlet.ServletRequest; import org.opencms.file.CmsResource; | import javax.servlet.*; import org.opencms.file.*; | [
"javax.servlet",
"org.opencms.file"
] | javax.servlet; org.opencms.file; | 772,554 |
/**
 * Computes the number of bytes the given scalar object will occupy after
 * being copied by the garbage collector.
 *
 * @param fromObj the object about to be copied
 * @param type its RVM class
 * @return size in bytes of the copied object
 */
public static int bytesRequiredWhenCopied(Object fromObj, RVMClass type) {
    // Start from the instance size declared by the class.
    int bytes = type.getInstanceSize();

    if (ADDRESS_BASED_HASHING) {
        // Once an object's identity hash has been taken, the copy must
        // carry the hash code along with it.
        Word hashState = Magic.getWordAtOffset(fromObj, STATUS_OFFSET).and(HASH_STATE_MASK);

        if (hashState.NE(HASH_STATE_UNHASHED))
            bytes += HASHCODE_BYTES;
    }

    return bytes;
} | static int function(Object fromObj, RVMClass type) { int size = type.getInstanceSize(); if (ADDRESS_BASED_HASHING) { Word hashState = Magic.getWordAtOffset(fromObj, STATUS_OFFSET).and(HASH_STATE_MASK); if (hashState.NE(HASH_STATE_UNHASHED)) { size += HASHCODE_BYTES; } } return size; } | /**
* how many bytes are needed when the scalar object is copied by GC?
*/ | how many bytes are needed when the scalar object is copied by GC | bytesRequiredWhenCopied | {
"repo_name": "ut-osa/laminar",
"path": "jikesrvm-3.0.0/rvm/src/org/jikesrvm/objectmodel/JavaHeader.java",
"license": "bsd-3-clause",
"size": 33287
} | [
"org.jikesrvm.classloader.RVMClass",
"org.jikesrvm.runtime.Magic",
"org.vmmagic.unboxed.Word"
] | import org.jikesrvm.classloader.RVMClass; import org.jikesrvm.runtime.Magic; import org.vmmagic.unboxed.Word; | import org.jikesrvm.classloader.*; import org.jikesrvm.runtime.*; import org.vmmagic.unboxed.*; | [
"org.jikesrvm.classloader",
"org.jikesrvm.runtime",
"org.vmmagic.unboxed"
] | org.jikesrvm.classloader; org.jikesrvm.runtime; org.vmmagic.unboxed; | 928,877 |
/**
 * Builds a {@code location} DOM element describing this node's source
 * position (begin/end column, begin/end line, and file name) for XML export.
 *
 * @param doc document used as the element factory
 * @return the populated {@code location} element
 */
protected Element getLocationElement(Document doc) {
    Location loc = getLocation();

    // <column><begin>..</begin><end>..</end></column>
    Element beginCol = doc.createElement("begin");
    beginCol.appendChild(doc.createTextNode(Integer.toString(loc.beginColumn())));
    Element endCol = doc.createElement("end");
    endCol.appendChild(doc.createTextNode(Integer.toString(loc.endColumn())));
    Element column = doc.createElement("column");
    column.appendChild(beginCol);
    column.appendChild(endCol);

    // <line><begin>..</begin><end>..</end></line>
    Element beginLine = doc.createElement("begin");
    beginLine.appendChild(doc.createTextNode(Integer.toString(loc.beginLine())));
    Element endLine = doc.createElement("end");
    endLine.appendChild(doc.createTextNode(Integer.toString(loc.endLine())));
    Element line = doc.createElement("line");
    line.appendChild(beginLine);
    line.appendChild(endLine);

    // <filename>..</filename>
    Element filename = doc.createElement("filename");
    filename.appendChild(doc.createTextNode(stn.getFilename()));

    Element location = doc.createElement("location");
    location.appendChild(column);
    location.appendChild(line);
    location.appendChild(filename);
    return location;
} | Element function(Document doc) { Location loc = getLocation(); Element e = doc.createElement(STR); Element ecol = doc.createElement(STR); Element eline = doc.createElement("line"); Element fname = doc.createElement(STR); Element bl = doc.createElement("begin"); Element el = doc.createElement("end"); Element bc = doc.createElement("begin"); Element ec = doc.createElement("end"); bc.appendChild(doc.createTextNode(Integer.toString(loc.beginColumn()))); ec.appendChild(doc.createTextNode(Integer.toString(loc.endColumn()))); bl.appendChild(doc.createTextNode(Integer.toString(loc.beginLine()))); el.appendChild(doc.createTextNode(Integer.toString(loc.endLine()))); fname.appendChild(doc.createTextNode(stn.getFilename())); ecol.appendChild(bc); ecol.appendChild(ec); eline.appendChild(bl); eline.appendChild(el); e.appendChild(ecol); e.appendChild(eline); e.appendChild(fname); return e; } | /**
* August 2014 - TL
* returns location information for XML exporting as attributes to
* the element returned by getElement
*/ | August 2014 - TL returns location information for XML exporting as attributes to the element returned by getElement | getLocationElement | {
"repo_name": "tlaplus/tlaplus",
"path": "tlatools/org.lamport.tlatools/src/tla2sany/semantic/SemanticNode.java",
"license": "mit",
"size": 17522
} | [
"org.w3c.dom.Document",
"org.w3c.dom.Element"
] | import org.w3c.dom.Document; import org.w3c.dom.Element; | import org.w3c.dom.*; | [
"org.w3c.dom"
] | org.w3c.dom; | 1,452,679 |
/**
 * Executes the given query and returns only its first result.
 *
 * @param query query to execute
 * @param params query parameters
 * @return the first entity returned by the query, or {@code null} if the
 *         query produced no results
 */
protected T singleQuery(String query, Object[] params) {
    // Reuse the list-returning variant and keep just the first row.
    List<T> results = query(query, params);
    return (results == null || results.isEmpty()) ? null : results.get(0);
}
| T function(String query, Object[] params) { List<T> list = query(query, params); if(list != null && !list.isEmpty()) { return list.get(0); } return null; } | /**
* Executes the given query with the given parameters.
* @param query query to execute.
* @param params query parameters.
* @return Entity returned by the query (null if no result found).
*/ | Executes the given query with the given parameters | singleQuery | {
"repo_name": "alejandro-du/enterprise-app",
"path": "src/enterpriseapp/hibernate/DefaultHbnContainer.java",
"license": "agpl-3.0",
"size": 23889
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,203,533 |
/**
 * Returns the last candidate entry in the ordered set where uuid matches.
 *
 * @param uuid the uuid to match
 * @param orderByComparator the comparator to order the set by (optionally
 *        <code>null</code> for the natural order)
 * @return the last matching candidate entry
 * @throws com.liferay.micro.maintainance.candidate.exception.NoSuchEntryException
 *         if a matching candidate entry could not be found
 */
public static CandidateEntry findByUuid_Last(java.lang.String uuid,
    OrderByComparator<CandidateEntry> orderByComparator)
    throws com.liferay.micro.maintainance.candidate.exception.NoSuchEntryException {
    // Generated service-util wrapper: delegate to the persistence bean.
    return getPersistence().findByUuid_Last(uuid, orderByComparator);
} | static CandidateEntry function(java.lang.String uuid, OrderByComparator<CandidateEntry> orderByComparator) throws com.liferay.micro.maintainance.candidate.exception.NoSuchEntryException { return getPersistence().findByUuid_Last(uuid, orderByComparator); } | /**
* Returns the last candidate entry in the ordered set where uuid = ?.
*
* @param uuid the uuid
* @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
* @return the last matching candidate entry
* @throws NoSuchEntryException if a matching candidate entry could not be found
*/ | Returns the last candidate entry in the ordered set where uuid = ? | findByUuid_Last | {
"repo_name": "moltam89/OWXP",
"path": "modules/micro-maintainance-candidate/micro-maintainance-candidate-api/src/main/java/com/liferay/micro/maintainance/candidate/service/persistence/CandidateEntryUtil.java",
"license": "gpl-3.0",
"size": 103522
} | [
"com.liferay.micro.maintainance.candidate.model.CandidateEntry",
"com.liferay.portal.kernel.util.OrderByComparator"
] | import com.liferay.micro.maintainance.candidate.model.CandidateEntry; import com.liferay.portal.kernel.util.OrderByComparator; | import com.liferay.micro.maintainance.candidate.model.*; import com.liferay.portal.kernel.util.*; | [
"com.liferay.micro",
"com.liferay.portal"
] | com.liferay.micro; com.liferay.portal; | 1,684,243 |
/**
 * Returns whether the node exposes valid RangeInfo: a range where max is
 * strictly greater than min and the current value lies within [min, max].
 *
 * @param node The node to check, may be {@code null}.
 * @return Whether the node has valid RangeInfo.
 */
public static boolean hasValidRangeInfo(@Nullable AccessibilityNodeInfoCompat node) {
    if (node == null) {
        return false;
    }

    @Nullable final RangeInfoCompat rangeInfo = node.getRangeInfo();
    if (rangeInfo == null) {
        return false;
    }

    final float min = rangeInfo.getMin();
    final float max = rangeInfo.getMax();
    final float current = rangeInfo.getCurrent();

    // Degenerate or inverted ranges are invalid; the (max - min) form also
    // rejects NaN bounds, matching the original comparison exactly.
    if (!(max - min > 0.0f)) {
        return false;
    }
    return current >= min && current <= max;
} | static boolean function(@Nullable AccessibilityNodeInfoCompat node) { if (node == null) { return false; } @Nullable final RangeInfoCompat rangeInfo = node.getRangeInfo(); if (rangeInfo == null) { return false; } final float maxProgress = rangeInfo.getMax(); final float minProgress = rangeInfo.getMin(); final float currentProgress = rangeInfo.getCurrent(); final float diffProgress = maxProgress - minProgress; return (diffProgress > 0.0f) && (currentProgress >= minProgress) && (currentProgress <= maxProgress); } | /**
* Returns whether the node has valid RangeInfo.
*
* @param node The node to check.
* @return Whether the node has valid RangeInfo.
*/ | Returns whether the node has valid RangeInfo | hasValidRangeInfo | {
"repo_name": "google/talkback",
"path": "utils/src/main/java/com/google/android/accessibility/utils/AccessibilityNodeInfoUtils.java",
"license": "apache-2.0",
"size": 105305
} | [
"androidx.core.view.accessibility.AccessibilityNodeInfoCompat",
"org.checkerframework.checker.nullness.qual.Nullable"
] | import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; import org.checkerframework.checker.nullness.qual.Nullable; | import androidx.core.view.accessibility.*; import org.checkerframework.checker.nullness.qual.*; | [
"androidx.core",
"org.checkerframework.checker"
] | androidx.core; org.checkerframework.checker; | 1,400,594 |
/**
 * Note to include in the share email.
 *
 * @return the note text; may be {@code null} if none was set
 */
@Schema(example = "Just a note.", description = "Note to include in email.")
public String getNote() {
    return note;
} | @Schema(example = STR, description = STR) String function() { return note; } | /**
* Note to include in email.
* @return note
**/ | Note to include in email | getNote | {
"repo_name": "iterate-ch/cyberduck",
"path": "brick/src/main/java/ch/cyberduck/core/brick/io/swagger/client/model/IdShareBody.java",
"license": "gpl-3.0",
"size": 4118
} | [
"io.swagger.v3.oas.annotations.media.Schema"
] | import io.swagger.v3.oas.annotations.media.Schema; | import io.swagger.v3.oas.annotations.media.*; | [
"io.swagger.v3"
] | io.swagger.v3; | 1,311,027 |
/**
 * Paints this component: clears the canvas, draws the grid, the two status
 * strings, and finally all figures.
 *
 * @param g graphics context to draw into
 */
public void paintComponent(Graphics g) {
    final int width = getWidth();
    final int height = getHeight();

    // Clear to a white background.
    g.setColor(Color.white);
    g.fillRect(0, 0, width, height);

    drawGrid(width, height, g);

    // Status text in the top-left corner.
    g.setColor(Color.black);
    g.drawString(message, W, W);
    g.drawString(mouse, W, 2 * W);

    drawFigures(g);
} | void function(Graphics g) { int w = getWidth(); int h = getHeight(); g.setColor(Color.white); g.fillRect(0, 0, w, h); drawGrid(w, h, g); g.setColor(Color.black); g.drawString(message, W, W); g.drawString(mouse, W, 2*W); drawFigures(g); } | /**
* Paint this component.
*/ | Paint this component | paintComponent | {
"repo_name": "SanDisk-Open-Source/SSD_Dashboard",
"path": "uefi/gcc/gcc-4.6.3/libjava/classpath/examples/gnu/classpath/examples/CORBA/swing/x5/CanvasWorld.java",
"license": "gpl-2.0",
"size": 7424
} | [
"java.awt.Color",
"java.awt.Graphics"
] | import java.awt.Color; import java.awt.Graphics; | import java.awt.*; | [
"java.awt"
] | java.awt; | 2,317,860 |
/**
 * Loads a specific Feed from the database.
 *
 * @param feedId The ID of the Feed
 * @return The Feed or null if the Feed could not be found. The Feed's
 *         FeedItems will also be loaded from the database and the
 *         items-attribute will be set correctly.
 */
public static Feed getFeed(final long feedId) {
    PodDBAdapter adapter = PodDBAdapter.getInstance();
    adapter.open();
    try {
        return getFeed(feedId, adapter);
    } finally {
        // Close the adapter even when the lookup throws, so the underlying
        // database handle is never leaked (the original leaked it on error).
        adapter.close();
    }
} | static Feed function(final long feedId) { PodDBAdapter adapter = PodDBAdapter.getInstance(); adapter.open(); Feed result = getFeed(feedId, adapter); adapter.close(); return result; } | /**
* Loads a specific Feed from the database.
*
* @param feedId The ID of the Feed
* @return The Feed or null if the Feed could not be found. The Feeds FeedItems will also be loaded from the
* database and the items-attribute will be set correctly.
*/ | Loads a specific Feed from the database | getFeed | {
"repo_name": "TomHennen/AntennaPod",
"path": "core/src/main/java/de/danoeh/antennapod/core/storage/DBReader.java",
"license": "mit",
"size": 37122
} | [
"de.danoeh.antennapod.core.feed.Feed"
] | import de.danoeh.antennapod.core.feed.Feed; | import de.danoeh.antennapod.core.feed.*; | [
"de.danoeh.antennapod"
] | de.danoeh.antennapod; | 1,237,846 |
/**
 * Applies the requested XSLT stylesheet to the source document and streams
 * the transformation result to the HTTP response.
 *
 * @param style path of the stylesheet; compiled templates are cached via
 *        {@code tryCache}
 * @param source path of the document to transform
 * @param req current request; every parameter other than "style" and
 *        "source" is forwarded to the stylesheet as a string parameter
 * @param res response receiving the transformed output
 * @throws Exception if a required parameter is missing or the
 *         transformation fails
 */
private void apply(String style,
                   String source,
                   HttpServletRequest req,
                   HttpServletResponse res) throws Exception {
    // Validate parameters
    if (style == null) {
        throw new TransformerException("No style parameter supplied");
    }
    if (source == null) {
        throw new TransformerException("No source parameter supplied");
    }
    InputStream sourceStream = null;
    try {
        // Load the stylesheet (adding to cache if necessary)
        Templates pss = tryCache(style);
        Transformer transformer = pss.newTransformer();
        // Forward all remaining request parameters to the stylesheet.
        Enumeration<?> p = req.getParameterNames();
        while (p.hasMoreElements()) {
            String name = (String) p.nextElement();
            if (!(name.equals("style") || name.equals("source"))) {
                String value = req.getParameter(name);
                transformer.setParameter(name, new StringValue(value));
            }
        }
        // Start loading the document to be transformed
        sourceStream = getInputStream(source);
        // Set the appropriate output mime type; this must happen before the
        // transformer starts writing to the response output stream.
        String mime =
            pss.getOutputProperties()
                .getProperty(OutputKeys.MEDIA_TYPE);
        if (mime == null) {
            res.setContentType("text/html");
        } else {
            res.setContentType(mime);
        }
        // Transform
        StreamSource ss = new StreamSource(sourceStream);
        ss.setSystemId(source);
        transformer.transform(ss, new StreamResult(res.getOutputStream()));
    } finally {
        if (sourceStream != null) {
            try {
                sourceStream.close();
            } catch (Exception e) {
                // Best-effort close: failures here are deliberately ignored.
            }
        }
    }
} | void function(String style, String source, HttpServletRequest req, HttpServletResponse res) throws Exception { if (style == null) { throw new TransformerException(STR); } if (source == null) { throw new TransformerException(STR); } InputStream sourceStream = null; try { Templates pss = tryCache(style); Transformer transformer = pss.newTransformer(); Enumeration<?> p = req.getParameterNames(); while (p.hasMoreElements()) { String name = (String) p.nextElement(); if (!(name.equals("style") name.equals(STR))) { String value = req.getParameter(name); transformer.setParameter(name, new StringValue(value)); } } sourceStream = getInputStream(source); String mime = pss.getOutputProperties() .getProperty(OutputKeys.MEDIA_TYPE); if (mime == null) { res.setContentType(STR); } else { res.setContentType(mime); } StreamSource ss = new StreamSource(sourceStream); ss.setSystemId(source); transformer.transform(ss, new StreamResult(res.getOutputStream())); } finally { if (sourceStream != null) { try { sourceStream.close(); } catch (Exception e) { } } } } | /**
* Apply stylesheet to source document
*/ | Apply stylesheet to source document | apply | {
"repo_name": "andreasnef/fcrepo",
"path": "fcrepo-webapp/fcrepo-webapp-saxon/src/main/java/org/fcrepo/localservices/saxon/SaxonServlet.java",
"license": "apache-2.0",
"size": 12423
} | [
"java.io.InputStream",
"java.util.Enumeration",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"javax.xml.transform.OutputKeys",
"javax.xml.transform.Templates",
"javax.xml.transform.Transformer",
"javax.xml.transform.TransformerException",
"javax.xml.transform.stream.StreamResult",
"javax.xml.transform.stream.StreamSource",
"net.sf.saxon.value.StringValue"
] | import java.io.InputStream; import java.util.Enumeration; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.xml.transform.OutputKeys; import javax.xml.transform.Templates; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import net.sf.saxon.value.StringValue; | import java.io.*; import java.util.*; import javax.servlet.http.*; import javax.xml.transform.*; import javax.xml.transform.stream.*; import net.sf.saxon.value.*; | [
"java.io",
"java.util",
"javax.servlet",
"javax.xml",
"net.sf.saxon"
] | java.io; java.util; javax.servlet; javax.xml; net.sf.saxon; | 29,163 |
List<Pattern> patterns = new ArrayList<Pattern>();
for(int i = 0; i < type.length; i++)
patterns.add(new Pattern(color, type[i]));
return patterns;
}
| List<Pattern> patterns = new ArrayList<Pattern>(); for(int i = 0; i < type.length; i++) patterns.add(new Pattern(color, type[i])); return patterns; } | /**
* Return a list of patterns with the same color.
* @param color The generic color that will be used
* @param type The layers to be printed on the banner using the chosen color
 * @return The list of patterns, one per given layer, all using the chosen color
*/ | Return a list of patterns with the same color | getPatterns | {
"repo_name": "FlorianCassayre/BannersUtils",
"path": "src/main/java/me/cassayre/florian/BannersUtils/BannersUtils.java",
"license": "mpl-2.0",
"size": 20044
} | [
"java.util.ArrayList",
"java.util.List",
"org.bukkit.block.banner.Pattern"
] | import java.util.ArrayList; import java.util.List; import org.bukkit.block.banner.Pattern; | import java.util.*; import org.bukkit.block.banner.*; | [
"java.util",
"org.bukkit.block"
] | java.util; org.bukkit.block; | 2,404,566 |
/**
 * Sets the time in milliseconds since the epoch and recalculates the
 * internal split fields via {@code calculateSplit}.
 */
public void setLocalTime(long time)
{
    // If this is a local time zone date, just set the time
    if (_timeZone != _gmtTimeZone) {
        calculateSplit(time);
    }
    // If this is a GMT date, convert from local to GMT
    else {
        // Two-pass conversion: first split using the raw (non-DST) offset to
        // obtain approximate date fields, then recompute using the offset
        // valid on that date. NOTE(review): presumably this accounts for DST
        // on the target date -- confirm against calculateSplit's semantics.
        calculateSplit(time - _localTimeZone.getRawOffset());
        try {
            long offset = _localTimeZone.getOffset(GregorianCalendar.AD,
                                                   (int) _year,
                                                   (int) _month,
                                                   (int) _dayOfMonth + 1,
                                                   getDayOfWeek(),
                                                   (int) _timeOfDay);
            calculateSplit(time - offset);
        } catch (Exception e) {
            // A failed offset lookup is logged and ignored; the raw-offset
            // split performed above remains in effect.
            log.log(Level.FINE, e.toString(), e);
        }
    }
} | void function(long time) { if (_timeZone != _gmtTimeZone) { calculateSplit(time); } else { calculateSplit(time - _localTimeZone.getRawOffset()); try { long offset = _localTimeZone.getOffset(GregorianCalendar.AD, (int) _year, (int) _month, (int) _dayOfMonth + 1, getDayOfWeek(), (int) _timeOfDay); calculateSplit(time - offset); } catch (Exception e) { log.log(Level.FINE, e.toString(), e); } } } | /**
 * Sets the time in milliseconds since the epoch and calculates
* the internal variables.
*/ | Sets the time in milliseconds since the epoch and calculate the internal variables | setLocalTime | {
"repo_name": "moriyoshi/quercus-gae",
"path": "src/main/java/com/caucho/util/QDate.java",
"license": "gpl-2.0",
"size": 40473
} | [
"java.util.GregorianCalendar",
"java.util.logging.Level"
] | import java.util.GregorianCalendar; import java.util.logging.Level; | import java.util.*; import java.util.logging.*; | [
"java.util"
] | java.util; | 2,342,342 |
public void saveLegacyOIVImage(FSNamesystem source, String targetDir,
Canceler canceler) throws IOException {
FSImageCompression compression =
FSImageCompression.createCompression(conf);
long txid = getCorrectLastAppliedOrWrittenTxId();
SaveNamespaceContext ctx = new SaveNamespaceContext(source, txid,
canceler);
FSImageFormat.Saver saver = new FSImageFormat.Saver(ctx);
String imageFileName = NNStorage.getLegacyOIVImageFileName(txid);
File imageFile = new File(targetDir, imageFileName);
saver.save(imageFile, compression);
archivalManager.purgeOldLegacyOIVImages(targetDir, txid);
}
private class FSImageSaver implements Runnable {
private final SaveNamespaceContext context;
private final StorageDirectory sd;
private final NameNodeFile nnf;
public FSImageSaver(SaveNamespaceContext context, StorageDirectory sd,
NameNodeFile nnf) {
this.context = context;
this.sd = sd;
this.nnf = nnf;
} | void function(FSNamesystem source, String targetDir, Canceler canceler) throws IOException { FSImageCompression compression = FSImageCompression.createCompression(conf); long txid = getCorrectLastAppliedOrWrittenTxId(); SaveNamespaceContext ctx = new SaveNamespaceContext(source, txid, canceler); FSImageFormat.Saver saver = new FSImageFormat.Saver(ctx); String imageFileName = NNStorage.getLegacyOIVImageFileName(txid); File imageFile = new File(targetDir, imageFileName); saver.save(imageFile, compression); archivalManager.purgeOldLegacyOIVImages(targetDir, txid); } private class FSImageSaver implements Runnable { private final SaveNamespaceContext context; private final StorageDirectory sd; private final NameNodeFile nnf; public FSImageSaver(SaveNamespaceContext context, StorageDirectory sd, NameNodeFile nnf) { this.context = context; this.sd = sd; this.nnf = nnf; } | /**
* Save FSimage in the legacy format. This is not for NN consumption,
* but for tools like OIV.
*/ | Save FSimage in the legacy format. This is not for NN consumption, but for tools like OIV | saveLegacyOIVImage | {
"repo_name": "dennishuo/hadoop",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java",
"license": "apache-2.0",
"size": 57879
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.hdfs.server.common.Storage",
"org.apache.hadoop.hdfs.server.namenode.NNStorage",
"org.apache.hadoop.hdfs.util.Canceler"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.namenode.NNStorage; import org.apache.hadoop.hdfs.util.Canceler; | import java.io.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.hdfs.util.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 2,111,785 |
void addReaction(String messageId, IGuildEmoji guildEmoji); | void addReaction(String messageId, IGuildEmoji guildEmoji); | /**
* Add a guild emoji reaction to a message.
*
* @exception PermissionException
* <ul>
* <li>If the identity does not have {@code Read Message History} permission to access the message.</li>
* <li>If the identity does not have {@code Add Reactions} permission to add a brand new reaction to the message.</li>
* </ul>
* @exception org.alienideology.jcord.internal.exception.ErrorResponseException
* If the messages does not belong to this channel.
* @see ErrorResponse#UNKNOWN_MESSAGE
*
* @param messageId The message ID.
* @param guildEmoji The guild emoji
*/ | Add a guild emoji reaction to a message | addReaction | {
"repo_name": "AlienIdeology/J-Cord",
"path": "src/main/java/org/alienideology/jcord/handle/channel/IMessageChannel.java",
"license": "mit",
"size": 19586
} | [
"org.alienideology.jcord.handle.guild.IGuildEmoji"
] | import org.alienideology.jcord.handle.guild.IGuildEmoji; | import org.alienideology.jcord.handle.guild.*; | [
"org.alienideology.jcord"
] | org.alienideology.jcord; | 369,502 |
@Test
public void nestedMapGet() {
assertEquals(makePopulatedMessage().get("b.c"), java.lang.String.class);
} | void function() { assertEquals(makePopulatedMessage().get("b.c"), java.lang.String.class); } | /**
* Check getting a value from a nested map.
*/ | Check getting a value from a nested map | nestedMapGet | {
"repo_name": "defunct/verbiage",
"path": "src/test/java/com/goodworkalan/verbiage/MessageTest.java",
"license": "mit",
"size": 7908
} | [
"org.testng.Assert"
] | import org.testng.Assert; | import org.testng.*; | [
"org.testng"
] | org.testng; | 1,008,584 |
@Test( enabled = true )
public void testTomcatFailover() throws IOException, InterruptedException, HttpException {
final SessionIdFormat format = new SessionIdFormat();
final String key = "foo";
final String value = "bar";
final String sessionId1 = post( _httpClient, TC_PORT_1, null, key, value ).getSessionId();
assertEquals( format.extractJvmRoute( sessionId1 ), JVM_ROUTE_1 ); | @Test( enabled = true ) void function() throws IOException, InterruptedException, HttpException { final SessionIdFormat format = new SessionIdFormat(); final String key = "foo"; final String value = "bar"; final String sessionId1 = post( _httpClient, TC_PORT_1, null, key, value ).getSessionId(); assertEquals( format.extractJvmRoute( sessionId1 ), JVM_ROUTE_1 ); | /**
* Tests that when two tomcats are running and one tomcat fails the other
* tomcat can take over the session.
*
* @throws IOException
* @throws InterruptedException
* @throws HttpException
*/ | Tests that when two tomcats are running and one tomcat fails the other tomcat can take over the session | testTomcatFailover | {
"repo_name": "magro/memcached-session-manager",
"path": "core/src/test/java/de/javakaffee/web/msm/integration/TomcatFailoverIntegrationTest.java",
"license": "apache-2.0",
"size": 21105
} | [
"de.javakaffee.web.msm.SessionIdFormat",
"de.javakaffee.web.msm.integration.TestUtils",
"java.io.IOException",
"org.apache.http.HttpException",
"org.testng.Assert",
"org.testng.annotations.Test"
] | import de.javakaffee.web.msm.SessionIdFormat; import de.javakaffee.web.msm.integration.TestUtils; import java.io.IOException; import org.apache.http.HttpException; import org.testng.Assert; import org.testng.annotations.Test; | import de.javakaffee.web.msm.*; import de.javakaffee.web.msm.integration.*; import java.io.*; import org.apache.http.*; import org.testng.*; import org.testng.annotations.*; | [
"de.javakaffee.web",
"java.io",
"org.apache.http",
"org.testng",
"org.testng.annotations"
] | de.javakaffee.web; java.io; org.apache.http; org.testng; org.testng.annotations; | 2,814,579 |
public void putCourseId(@NonNull String courseId) {
putString(Analytics.Keys.COURSE_ID, courseId);
} | void function(@NonNull String courseId) { putString(Analytics.Keys.COURSE_ID, courseId); } | /**
* Put the course's ID in the {@link #bundle}.
*
* @param courseId The course's ID.
*/ | Put the course's ID in the <code>#bundle</code> | putCourseId | {
"repo_name": "edx/edx-app-android",
"path": "OpenEdXMobile/src/main/java/org/edx/mobile/module/analytics/FirebaseEvent.java",
"license": "apache-2.0",
"size": 9258
} | [
"androidx.annotation.NonNull"
] | import androidx.annotation.NonNull; | import androidx.annotation.*; | [
"androidx.annotation"
] | androidx.annotation; | 969,780 |
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<OrderItemResourceInner> listOrderItemsAtSubscriptionLevel(
String filter, String expand, String skipToken, Context context); | @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<OrderItemResourceInner> listOrderItemsAtSubscriptionLevel( String filter, String expand, String skipToken, Context context); | /**
* Lists order item at subscription level.
*
* @param filter $filter is supported to filter based on order id. Filter supports only equals operation.
* @param expand $expand is supported on device details, forward shipping details and reverse shipping details
* parameters. Each of these can be provided as a comma separated list. Device Details for order item provides
* details on the devices of the product, Forward and Reverse Shipping details provide forward and reverse
* shipping details respectively.
* @param skipToken $skipToken is supported on Get list of order items, which provides the next page in the list of
* order items.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of orderItems.
*/ | Lists order item at subscription level | listOrderItemsAtSubscriptionLevel | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/edgeorder/azure-resourcemanager-edgeorder/src/main/java/com/azure/resourcemanager/edgeorder/fluent/ResourceProvidersClient.java",
"license": "mit",
"size": 53832
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.PagedIterable",
"com.azure.core.util.Context",
"com.azure.resourcemanager.edgeorder.fluent.models.OrderItemResourceInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.core.util.Context; import com.azure.resourcemanager.edgeorder.fluent.models.OrderItemResourceInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.edgeorder.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 2,851,559 |
private VScrollTableRow getNextRow(VScrollTableRow row, int offset) {
final Iterator<Widget> it = scrollBody.iterator();
VScrollTableRow r = null;
while (it.hasNext()) {
r = (VScrollTableRow) it.next();
if (r == row) {
r = null;
while (offset >= 0 && it.hasNext()) {
r = (VScrollTableRow) it.next();
offset--;
}
return r;
}
}
return null;
} | VScrollTableRow function(VScrollTableRow row, int offset) { final Iterator<Widget> it = scrollBody.iterator(); VScrollTableRow r = null; while (it.hasNext()) { r = (VScrollTableRow) it.next(); if (r == row) { r = null; while (offset >= 0 && it.hasNext()) { r = (VScrollTableRow) it.next(); offset--; } return r; } } return null; } | /**
* Returns the next row to the given row
*
* @param row
* The row to calculate from
*
* @return The next row or null if no row exists
*/ | Returns the next row to the given row | getNextRow | {
"repo_name": "jdahlstrom/vaadin.react",
"path": "client/src/main/java/com/vaadin/client/ui/VScrollTable.java",
"license": "apache-2.0",
"size": 317883
} | [
"com.google.gwt.user.client.ui.Widget",
"com.vaadin.client.ui.VScrollTable",
"java.util.Iterator"
] | import com.google.gwt.user.client.ui.Widget; import com.vaadin.client.ui.VScrollTable; import java.util.Iterator; | import com.google.gwt.user.client.ui.*; import com.vaadin.client.ui.*; import java.util.*; | [
"com.google.gwt",
"com.vaadin.client",
"java.util"
] | com.google.gwt; com.vaadin.client; java.util; | 1,105,748 |
public PublicIpPrefixesClient getPublicIpPrefixes() {
return this.publicIpPrefixes;
}
private final RouteFiltersClient routeFilters; | PublicIpPrefixesClient function() { return this.publicIpPrefixes; } private final RouteFiltersClient routeFilters; | /**
* Gets the PublicIpPrefixesClient object to access its operations.
*
* @return the PublicIpPrefixesClient object.
*/ | Gets the PublicIpPrefixesClient object to access its operations | getPublicIpPrefixes | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanagerhybrid/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/implementation/NetworkManagementClientImpl.java",
"license": "mit",
"size": 60665
} | [
"com.azure.resourcemanager.network.fluent.PublicIpPrefixesClient",
"com.azure.resourcemanager.network.fluent.RouteFiltersClient"
] | import com.azure.resourcemanager.network.fluent.PublicIpPrefixesClient; import com.azure.resourcemanager.network.fluent.RouteFiltersClient; | import com.azure.resourcemanager.network.fluent.*; | [
"com.azure.resourcemanager"
] | com.azure.resourcemanager; | 1,107,371 |
boolean validateScanningDeviceHasAssignedAuthorRepresentedOrganizationId(DiagnosticChain diagnostics, Map<Object, Object> context);
| boolean validateScanningDeviceHasAssignedAuthorRepresentedOrganizationId(DiagnosticChain diagnostics, Map<Object, Object> context); | /**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* <!-- begin-model-doc -->
* self.assignedAuthor.representedOrganization.id->forAll(ident : datatypes::II | not ident.root.oclIsUndefined())
* @param diagnostics The chain of diagnostics to which problems are to be appended.
* @param context The cache of context-specific information.
* <!-- end-model-doc -->
* @model annotation="http://www.eclipse.org/uml2/1.1.0/GenModel body='self.assignedAuthor.representedOrganization.id->forAll(ident : datatypes::II | not ident.root.oclIsUndefined())'"
* @generated
*/ | self.assignedAuthor.representedOrganization.id->forAll(ident : datatypes::II | not ident.root.oclIsUndefined()) | validateScanningDeviceHasAssignedAuthorRepresentedOrganizationId | {
"repo_name": "drbgfc/mdht",
"path": "cda/deprecated/org.openhealthtools.mdht.uml.cda.ihe/src/org/openhealthtools/mdht/uml/cda/ihe/ScanningDevice.java",
"license": "epl-1.0",
"size": 6111
} | [
"java.util.Map",
"org.eclipse.emf.common.util.DiagnosticChain"
] | import java.util.Map; import org.eclipse.emf.common.util.DiagnosticChain; | import java.util.*; import org.eclipse.emf.common.util.*; | [
"java.util",
"org.eclipse.emf"
] | java.util; org.eclipse.emf; | 2,277,235 |
private CategorialClass getMs() {
return get("edu.wustl.catissuecore.domain.MolecularSpecimen", "MS_label");
}
| CategorialClass function() { return get(STR, STR); } | /**
* Mock Method for getting object for <code>edu.wustl.catissuecore.domain.MolecularSpecimen</code> class.
* @return CategorialClass
*/ | Mock Method for getting object for <code>edu.wustl.catissuecore.domain.MolecularSpecimen</code> class | getMs | {
"repo_name": "NCIP/cab2b",
"path": "software/cab2bWebApp/test/unit/java/edu/wustl/cab2bwebapp/bizlogic/executequery/CategoryToSpreadsheetTransformerTest.java",
"license": "bsd-3-clause",
"size": 13181
} | [
"edu.wustl.common.querysuite.metadata.category.CategorialClass"
] | import edu.wustl.common.querysuite.metadata.category.CategorialClass; | import edu.wustl.common.querysuite.metadata.category.*; | [
"edu.wustl.common"
] | edu.wustl.common; | 1,137,711 |
private void connect() throws VMConnectException {
final String connectorName = "com.sun.jdi.SocketAttach";
AttachingConnector connector = connector(connectorName);
if (connector == null) {
throw new VMConnectException(
String.format("Unable connect to target Java VM. Requested connector '%s' not found. ", connectorName));
}
Map<String, Connector.Argument> arguments = connector.defaultArguments();
arguments.get("hostname").setValue(host);
((Connector.IntegerArgument)arguments.get("port")).setValue(port);
int attempt = 0;
for (; ; ) {
try {
Thread.sleep(2000);
vm = connector.attach(arguments);
break;
} catch (UnknownHostException | IllegalConnectorArgumentsException e) {
throw new VMConnectException(e.getMessage(), e);
} catch (IOException e) {
LOG.error(e.getMessage(), e);
if (++attempt > 10) {
throw new VMConnectException(e.getMessage(), e);
}
try {
Thread.sleep(2000);
} catch (InterruptedException ignored) {
}
} catch (InterruptedException ignored) {
}
}
eventsCollector = new EventsCollector(vm.eventQueue(), this);
LOG.debug("Connect {}:{}", host, port);
} | void function() throws VMConnectException { final String connectorName = STR; AttachingConnector connector = connector(connectorName); if (connector == null) { throw new VMConnectException( String.format(STR, connectorName)); } Map<String, Connector.Argument> arguments = connector.defaultArguments(); arguments.get(STR).setValue(host); ((Connector.IntegerArgument)arguments.get("port")).setValue(port); int attempt = 0; for (; ; ) { try { Thread.sleep(2000); vm = connector.attach(arguments); break; } catch (UnknownHostException IllegalConnectorArgumentsException e) { throw new VMConnectException(e.getMessage(), e); } catch (IOException e) { LOG.error(e.getMessage(), e); if (++attempt > 10) { throw new VMConnectException(e.getMessage(), e); } try { Thread.sleep(2000); } catch (InterruptedException ignored) { } } catch (InterruptedException ignored) { } } eventsCollector = new EventsCollector(vm.eventQueue(), this); LOG.debug(STR, host, port); } | /**
* Attach to a JVM that is already running at specified host.
*
* @throws VMConnectException
* when connection to Java VM is not established
*/ | Attach to a JVM that is already running at specified host | connect | {
"repo_name": "codenvy/che-plugins",
"path": "plugin-java/che-plugin-java-ext-debugger-java-server/src/main/java/org/eclipse/che/ide/ext/java/jdi/server/Debugger.java",
"license": "epl-1.0",
"size": 36874
} | [
"com.sun.jdi.connect.AttachingConnector",
"com.sun.jdi.connect.Connector",
"com.sun.jdi.connect.IllegalConnectorArgumentsException",
"java.io.IOException",
"java.net.UnknownHostException",
"java.util.Map"
] | import com.sun.jdi.connect.AttachingConnector; import com.sun.jdi.connect.Connector; import com.sun.jdi.connect.IllegalConnectorArgumentsException; import java.io.IOException; import java.net.UnknownHostException; import java.util.Map; | import com.sun.jdi.connect.*; import java.io.*; import java.net.*; import java.util.*; | [
"com.sun.jdi",
"java.io",
"java.net",
"java.util"
] | com.sun.jdi; java.io; java.net; java.util; | 1,775,071 |
public void reset(final Vector3D p, final Vector3D normal) throws MathRuntimeException {
setNormal(normal);
originOffset = -p.dotProduct(w);
setFrame();
} | void function(final Vector3D p, final Vector3D normal) throws MathRuntimeException { setNormal(normal); originOffset = -p.dotProduct(w); setFrame(); } | /** Reset the instance as if built from a point and a normal.
* @param p point belonging to the plane
* @param normal normal direction to the plane
* @exception MathRuntimeException if the normal norm is too small
*/ | Reset the instance as if built from a point and a normal | reset | {
"repo_name": "sdinot/hipparchus",
"path": "hipparchus-geometry/src/main/java/org/hipparchus/geometry/euclidean/threed/Plane.java",
"license": "apache-2.0",
"size": 18856
} | [
"org.hipparchus.exception.MathRuntimeException"
] | import org.hipparchus.exception.MathRuntimeException; | import org.hipparchus.exception.*; | [
"org.hipparchus.exception"
] | org.hipparchus.exception; | 1,074,897 |
@Override
public String getText(Object object) {
String label = ((ColumnType4)object).getName();
return label == null || label.length() == 0 ?
getString("_UI_ColumnType4_type") :
getString("_UI_ColumnType4_type") + " " + label;
}
| String function(Object object) { String label = ((ColumnType4)object).getName(); return label == null label.length() == 0 ? getString(STR) : getString(STR) + " " + label; } | /**
* This returns the label text for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This returns the label text for the adapted class. | getText | {
"repo_name": "Treehopper/EclipseAugments",
"path": "liquibase-editor/eu.hohenegger.xsd.liquibase.ui/src-gen/org/liquibase/xml/ns/dbchangelog/provider/ColumnType4ItemProvider.java",
"license": "epl-1.0",
"size": 5675
} | [
"org.liquibase.xml.ns.dbchangelog.ColumnType4"
] | import org.liquibase.xml.ns.dbchangelog.ColumnType4; | import org.liquibase.xml.ns.dbchangelog.*; | [
"org.liquibase.xml"
] | org.liquibase.xml; | 1,506,823 |
public CalendarioPersistence getCalendarioPersistence() {
return calendarioPersistence;
} | CalendarioPersistence function() { return calendarioPersistence; } | /**
* Returns the calendario persistence.
*
* @return the calendario persistence
*/ | Returns the calendario persistence | getCalendarioPersistence | {
"repo_name": "RMarinDTI/CloubityRepo",
"path": "Servicio-portlet/docroot/WEB-INF/src/es/davinciti/liferay/service/base/ConnectionDataServiceBaseImpl.java",
"license": "unlicense",
"size": 52980
} | [
"es.davinciti.liferay.service.persistence.CalendarioPersistence"
] | import es.davinciti.liferay.service.persistence.CalendarioPersistence; | import es.davinciti.liferay.service.persistence.*; | [
"es.davinciti.liferay"
] | es.davinciti.liferay; | 248,104 |
void removeListener(RMQChannelListener listener); | void removeListener(RMQChannelListener listener); | /**
* Remove channel listener.
*
* @param listener the listener.
*/ | Remove channel listener | removeListener | {
"repo_name": "jenkinsci/rabbitmq-consumer-plugin",
"path": "src/main/java/org/jenkinsci/plugins/rabbitmqconsumer/publishers/PublishChannel.java",
"license": "mit",
"size": 3536
} | [
"org.jenkinsci.plugins.rabbitmqconsumer.listeners.RMQChannelListener"
] | import org.jenkinsci.plugins.rabbitmqconsumer.listeners.RMQChannelListener; | import org.jenkinsci.plugins.rabbitmqconsumer.listeners.*; | [
"org.jenkinsci.plugins"
] | org.jenkinsci.plugins; | 2,014,258 |
public static List<?> array2List(Object array) {
return Arrays.asList(ObjectUtil.toObjectArray(array));
} | static List<?> function(Object array) { return Arrays.asList(ObjectUtil.toObjectArray(array)); } | /**
* Convert the specified array to a list which contains all elements in the
* array
*
* @param array
* the specified array
* @return a list which contains all elements in the array
*/ | Convert the specified array to a list which contains all elements in the array | array2List | {
"repo_name": "elminsterjimmy/java",
"path": "Commons/src/main/java/com/elminster/common/util/CollectionUtil.java",
"license": "apache-2.0",
"size": 5946
} | [
"java.util.Arrays",
"java.util.List"
] | import java.util.Arrays; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,369,664 |
protected boolean isAuditEnabled(String propertyFile, String propertyName) {
boolean isEnabled = false;
try {
isEnabled = PropertyAccessor.getInstance().getPropertyBoolean(propertyFile, propertyName);
} catch (PropertyAccessException e) {
LOG.error("Error: Failed to retrieve " + propertyName + " from property file: " + propertyFile);
LOG.error(e.getMessage(), e);
}
return isEnabled;
} | boolean function(String propertyFile, String propertyName) { boolean isEnabled = false; try { isEnabled = PropertyAccessor.getInstance().getPropertyBoolean(propertyFile, propertyName); } catch (PropertyAccessException e) { LOG.error(STR + propertyName + STR + propertyFile); LOG.error(e.getMessage(), e); } return isEnabled; } | /**
* Determine if audit is enabled for the NwHIN interface
*
* @return Flag to indicate if audit logging is enabled for this interface
*/ | Determine if audit is enabled for the NwHIN interface | isAuditEnabled | {
"repo_name": "AurionProject/Aurion",
"path": "Product/Production/Services/DocumentSubmissionCore/src/main/java/gov/hhs/fha/nhinc/docsubmission/entity/deferred/response/OutboundDocSubmissionDeferredResponseStrategyImpl_g1.java",
"license": "bsd-3-clause",
"size": 5871
} | [
"gov.hhs.fha.nhinc.properties.PropertyAccessException",
"gov.hhs.fha.nhinc.properties.PropertyAccessor"
] | import gov.hhs.fha.nhinc.properties.PropertyAccessException; import gov.hhs.fha.nhinc.properties.PropertyAccessor; | import gov.hhs.fha.nhinc.properties.*; | [
"gov.hhs.fha"
] | gov.hhs.fha; | 244,169 |
protected String getParamsFromConf(List<String> confItems, String param, String defaultValue) {
for (Iterator<String> iterator = confItems.iterator(); iterator.hasNext();) {
String conf = iterator.next();
Matcher matcher = PARAM_CONFIG_PATTERN.matcher(conf);
if (matcher.find()) {
if (StringUtils.equalsIgnoreCase(matcher.group(1), param)) {
iterator.remove();
return matcher.group(2);
}
}
}
return defaultValue;
} | String function(List<String> confItems, String param, String defaultValue) { for (Iterator<String> iterator = confItems.iterator(); iterator.hasNext();) { String conf = iterator.next(); Matcher matcher = PARAM_CONFIG_PATTERN.matcher(conf); if (matcher.find()) { if (StringUtils.equalsIgnoreCase(matcher.group(1), param)) { iterator.remove(); return matcher.group(2); } } } return defaultValue; } | /**
* Looking for paramsName=value in config items list.
*
* @param confItems
* @param param
* param name to look for value
* @return value or defaultValue if property not found
*/ | Looking for paramsName=value in config items list | getParamsFromConf | {
"repo_name": "lewie/openhab",
"path": "bundles/binding/org.openhab.binding.fatekplc/src/main/java/org/openhab/binding/fatekplc/items/FatekPLCItem.java",
"license": "epl-1.0",
"size": 7234
} | [
"java.util.Iterator",
"java.util.List",
"java.util.regex.Matcher",
"org.apache.commons.lang.StringUtils"
] | import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import org.apache.commons.lang.StringUtils; | import java.util.*; import java.util.regex.*; import org.apache.commons.lang.*; | [
"java.util",
"org.apache.commons"
] | java.util; org.apache.commons; | 2,021,346 |
public HttpFactory getHttpFactory() {
return httpFactory;
} | HttpFactory function() { return httpFactory; } | /**
* Getter for httpFactory
*/ | Getter for httpFactory | getHttpFactory | {
"repo_name": "shafreenAnfar/wso2-axis2",
"path": "modules/transport/http/src/org/apache/axis2/transport/http/SimpleHTTPServer.java",
"license": "apache-2.0",
"size": 11256
} | [
"org.apache.axis2.transport.http.server.HttpFactory"
] | import org.apache.axis2.transport.http.server.HttpFactory; | import org.apache.axis2.transport.http.server.*; | [
"org.apache.axis2"
] | org.apache.axis2; | 591,102 |
public static HandlerList getHandlerList() {
return HANDLER;
}
| static HandlerList function() { return HANDLER; } | /**
* Gets all the event's handlers
*
* @return all the event's handlers
*/ | Gets all the event's handlers | getHandlerList | {
"repo_name": "VolumetricPixels/RockyPlugin",
"path": "src/main/java/com/volumetricpixels/rockyapi/event/player/PlayerLeaveArea.java",
"license": "lgpl-3.0",
"size": 2302
} | [
"org.bukkit.event.HandlerList"
] | import org.bukkit.event.HandlerList; | import org.bukkit.event.*; | [
"org.bukkit.event"
] | org.bukkit.event; | 1,593,008 |
void add(List<byte[]> add) throws DeliveryServiceException; | void add(List<byte[]> add) throws DeliveryServiceException; | /**
* Adds one batch to be processed by the service. Throws DeliveryServiceException if the batch cannot be added to the underlying storage. This is not the
* same as failing to be processed. Rather it is failure to be accepted as a candidate for eventual guaranteed delivery.
* @param add
*/ | Adds one batch to be processed by the service. Throws DeliveryServiceException if the batch cannot be added to the underlying storage. This is not the same as failing to be processed. Rather it is failure to be accepted as a candidate for eventual guaranteed delivery | add | {
"repo_name": "jivesoftware/filer",
"path": "queue-guaranteed-delivery/src/main/java/com/jivesoftware/os/filer/queue/guaranteed/delivery/GuaranteedDeliveryService.java",
"license": "apache-2.0",
"size": 1023
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 928,711 |
@Test
public void testCheckPortLocation() throws Exception {
SalNode[] snodes = {
new SalNode(1L),
new SalNode(123456L),
new SalNode(-1L),
};
Long[] portIds = {
null, 1L, 2L, 12345L, 4294967040L,
};
String[] portNames = {
null, "port-1", "port-2", "port-123", "a,b,c",
};
RpcErrorTag etag = RpcErrorTag.BAD_ELEMENT;
VtnErrorTag vtag = VtnErrorTag.BADREQUEST;
String msg = "Target port name or ID must be specified";
for (SalNode snode: snodes) {
long dpid = snode.getNodeNumber();
for (Long pnum: portIds) {
String id = (pnum == null) ? null : pnum.toString();
for (String name: portNames) {
if (id == null && name == null) {
try {
NodeUtils.checkPortLocation(snode, id, name);
unexpected();
} catch (RpcException e) {
assertEquals(etag, e.getErrorTag());
assertEquals(vtag, e.getVtnErrorTag());
assertEquals(msg, e.getMessage());
}
} else {
SalPort expected = (pnum == null)
? null
: new SalPort(dpid, pnum.longValue());
assertEquals(expected,
NodeUtils.checkPortLocation(snode, id,
name));
}
}
}
}
// Invalid port ID.
String[] badPortIds = {
"a", "bad port ID", "99999999999999999999999", "4294967041",
};
for (String portId: badPortIds) {
msg = "Invalid port ID: " + portId;
try {
NodeUtils.checkPortLocation(new SalNode(1L), portId, "port-1");
unexpected();
} catch (RpcException e) {
assertEquals(etag, e.getErrorTag());
assertEquals(vtag, e.getVtnErrorTag());
assertEquals(msg, e.getMessage());
}
}
// Empty port name.
msg = "Port name cannot be empty";
try {
NodeUtils.checkPortLocation(new SalNode(1L), "1", "");
unexpected();
} catch (RpcException e) {
assertEquals(etag, e.getErrorTag());
assertEquals(vtag, e.getVtnErrorTag());
assertEquals(msg, e.getMessage());
}
} | void function() throws Exception { SalNode[] snodes = { new SalNode(1L), new SalNode(123456L), new SalNode(-1L), }; Long[] portIds = { null, 1L, 2L, 12345L, 4294967040L, }; String[] portNames = { null, STR, STR, STR, "a,b,c", }; RpcErrorTag etag = RpcErrorTag.BAD_ELEMENT; VtnErrorTag vtag = VtnErrorTag.BADREQUEST; String msg = STR; for (SalNode snode: snodes) { long dpid = snode.getNodeNumber(); for (Long pnum: portIds) { String id = (pnum == null) ? null : pnum.toString(); for (String name: portNames) { if (id == null && name == null) { try { NodeUtils.checkPortLocation(snode, id, name); unexpected(); } catch (RpcException e) { assertEquals(etag, e.getErrorTag()); assertEquals(vtag, e.getVtnErrorTag()); assertEquals(msg, e.getMessage()); } } else { SalPort expected = (pnum == null) ? null : new SalPort(dpid, pnum.longValue()); assertEquals(expected, NodeUtils.checkPortLocation(snode, id, name)); } } } } String[] badPortIds = { "a", STR, STR, STR, }; for (String portId: badPortIds) { msg = STR + portId; try { NodeUtils.checkPortLocation(new SalNode(1L), portId, STR); unexpected(); } catch (RpcException e) { assertEquals(etag, e.getErrorTag()); assertEquals(vtag, e.getVtnErrorTag()); assertEquals(msg, e.getMessage()); } } msg = STR; try { NodeUtils.checkPortLocation(new SalNode(1L), "1", ""); unexpected(); } catch (RpcException e) { assertEquals(etag, e.getErrorTag()); assertEquals(vtag, e.getVtnErrorTag()); assertEquals(msg, e.getMessage()); } } | /**
* Test case for
* {@link NodeUtils#checkPortLocation(SalNode, String, String)}.
*
* @throws Exception An error occurred.
*/ | Test case for <code>NodeUtils#checkPortLocation(SalNode, String, String)</code> | testCheckPortLocation | {
"repo_name": "opendaylight/vtn",
"path": "manager/implementation/src/test/java/org/opendaylight/vtn/manager/internal/util/inventory/NodeUtilsTest.java",
"license": "epl-1.0",
"size": 17041
} | [
"org.opendaylight.vtn.manager.internal.util.rpc.RpcErrorTag",
"org.opendaylight.vtn.manager.internal.util.rpc.RpcException",
"org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.types.rev150209.VtnErrorTag"
] | import org.opendaylight.vtn.manager.internal.util.rpc.RpcErrorTag; import org.opendaylight.vtn.manager.internal.util.rpc.RpcException; import org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.types.rev150209.VtnErrorTag; | import org.opendaylight.vtn.manager.internal.util.rpc.*; import org.opendaylight.yang.gen.v1.urn.opendaylight.vtn.types.rev150209.*; | [
"org.opendaylight.vtn",
"org.opendaylight.yang"
] | org.opendaylight.vtn; org.opendaylight.yang; | 2,007,795 |
public ServiceFuture<AlertRuleResourceInner> getByResourceGroupAsync(String resourceGroupName, String ruleName, final ServiceCallback<AlertRuleResourceInner> serviceCallback) {
return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, ruleName), serviceCallback);
} | ServiceFuture<AlertRuleResourceInner> function(String resourceGroupName, String ruleName, final ServiceCallback<AlertRuleResourceInner> serviceCallback) { return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, ruleName), serviceCallback); } | /**
* Gets an alert rule.
*
* @param resourceGroupName The name of the resource group.
* @param ruleName The name of the rule.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/ | Gets an alert rule | getByResourceGroupAsync | {
"repo_name": "martinsawicki/azure-sdk-for-java",
"path": "azure-mgmt-insights/src/main/java/com/microsoft/azure/management/gallery/implementation/AlertRulesInner.java",
"license": "mit",
"size": 28641
} | [
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture"
] | import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 882,199 |
public void parseFromReader(Reader reader,
int startingLineNr)
throws IOException, XMLParseException {
this.charReadTooMuch = '\0';
this.reader = reader;
this.parserLineNr = startingLineNr;
for (;;) {
char ch = this.scanWhitespace();
if (ch != '<') {
throw this.expectedInput("<", ch);
}
ch = this.readChar();
if ((ch == '!') || (ch == '?')) {
this.skipSpecialTag(0);
} else {
this.unreadChar(ch);
this.scanElement(this);
return;
}
}
} | void function(Reader reader, int startingLineNr) throws IOException, XMLParseException { this.charReadTooMuch = '\0'; this.reader = reader; this.parserLineNr = startingLineNr; for (;;) { char ch = this.scanWhitespace(); if (ch != '<') { throw this.expectedInput("<", ch); } ch = this.readChar(); if ((ch == '!') (ch == '?')) { this.skipSpecialTag(0); } else { this.unreadChar(ch); this.scanElement(this); return; } } } | /**
* Reads one XML element from a java.io.Reader and parses it.
*
* @param reader
* The reader from which to retrieve the XML data.
* @param startingLineNr
* The line number of the first line in the data.
*
* <dl><dt><b>Preconditions:</b></dt><dd>
* <ul><li>{@code reader != null}</li>
* <li>{@code reader} is not closed</li>
* </ul></dd></dl>
*
* <dl><dt><b>Postconditions:</b></dt><dd>
* <ul><li>the state of the receiver is updated to reflect the XML element
* parsed from the reader</li>
* <li>the reader points to the first character following the last
* {@code '>'} character of the XML element</li>
* </ul></dd></dl>
*
* @throws java.io.IOException
* If an error occured while reading the input.
* @throws net.sourceforge.nanoxml.XMLParseException
* If an error occured while parsing the read data.
*/ | Reads one XML element from a java.io.Reader and parses it | parseFromReader | {
"repo_name": "GITNE/icedtea-web",
"path": "netx/net/sourceforge/nanoxml/XMLElement.java",
"license": "gpl-2.0",
"size": 43465
} | [
"java.io.IOException",
"java.io.Reader"
] | import java.io.IOException; import java.io.Reader; | import java.io.*; | [
"java.io"
] | java.io; | 2,056,659 |
void onSwitched(View view, int position);
}
public ViewFlow(Context context) {
super(context);
mSideBuffer = 3;
init();
}
public ViewFlow(Context context, int sideBuffer) {
super(context);
mSideBuffer = sideBuffer;
init();
}
public ViewFlow(Context context, AttributeSet attrs) {
super(context, attrs);
int[] attrArray = ResourceUtil.getStyleableArray(context, "ViewFlow");
TypedArray styledAttrs = context.obtainStyledAttributes(attrs,
attrArray);
int attrVal = ResourceUtil.getStyleable(context, "ViewFlow_sidebuffer");
mSideBuffer = styledAttrs.getInt(attrVal, 3);
init();
} | void onSwitched(View view, int position); } public ViewFlow(Context context) { super(context); mSideBuffer = 3; init(); } public ViewFlow(Context context, int sideBuffer) { super(context); mSideBuffer = sideBuffer; init(); } public ViewFlow(Context context, AttributeSet attrs) { super(context, attrs); int[] attrArray = ResourceUtil.getStyleableArray(context, STR); TypedArray styledAttrs = context.obtainStyledAttributes(attrs, attrArray); int attrVal = ResourceUtil.getStyleable(context, STR); mSideBuffer = styledAttrs.getInt(attrVal, 3); init(); } | /**
* This method is called when a new View has been scrolled to.
*
* @param view the {@link View} currently in focus.
* @param position The position in the adapter of the {@link View} currently
* in focus.
*/ | This method is called when a new View has been scrolled to | onSwitched | {
"repo_name": "ZhanJohn/AG_Modules",
"path": "ag_controls/src/main/java/com/ag/controls/viewflow/ViewFlow.java",
"license": "gpl-2.0",
"size": 29471
} | [
"android.content.Context",
"android.content.res.TypedArray",
"android.util.AttributeSet",
"android.view.View",
"com.ag.controls.common.util.ResourceUtil"
] | import android.content.Context; import android.content.res.TypedArray; import android.util.AttributeSet; import android.view.View; import com.ag.controls.common.util.ResourceUtil; | import android.content.*; import android.content.res.*; import android.util.*; import android.view.*; import com.ag.controls.common.util.*; | [
"android.content",
"android.util",
"android.view",
"com.ag.controls"
] | android.content; android.util; android.view; com.ag.controls; | 1,397,312 |
@Check
public void checkMIODomain_languagesEMsAndPlatforms(MIODomain iod)
{
if (iod.getLanguages().isEmpty() == false &&
iod.getSupportedPlatforms().isEmpty() == true)
{
error("At least one supported platform shall be defined if a language is selected",
mclevdomPackage.eINSTANCE.getMIODomain_SupportedPlatforms());
}
if (iod.getSupportedPlatforms().isEmpty() == false &&
iod.getLanguages().isEmpty() == true)
{
error("At least one language has to be selected if a supported platform is defined",
mclevdomPackage.eINSTANCE.getMIODomain_Languages());
}
}
| void function(MIODomain iod) { if (iod.getLanguages().isEmpty() == false && iod.getSupportedPlatforms().isEmpty() == true) { error(STR, mclevdomPackage.eINSTANCE.getMIODomain_SupportedPlatforms()); } if (iod.getSupportedPlatforms().isEmpty() == false && iod.getLanguages().isEmpty() == true) { error(STR, mclevdomPackage.eINSTANCE.getMIODomain_Languages()); } } | /**
* Checks that a language is defined by an IOD if and only if
* at least one supported platform is also defined.
* Implements Restriction TBD.
* @param iod the IOD to check.
*/ | Checks that a language is defined by an IOD if and only if at least one supported platform is also defined. Implements Restriction TBD | checkMIODomain_languagesEMsAndPlatforms | {
"repo_name": "parraman/micobs",
"path": "mclev/es.uah.aut.srg.micobs.mclev.editor.dom/src/es/uah/aut/srg/micobs/mclev/lang/validation/DOMJavaValidator.java",
"license": "epl-1.0",
"size": 40751
} | [
"es.uah.aut.srg.micobs.mclev.mclevdom.MIODomain"
] | import es.uah.aut.srg.micobs.mclev.mclevdom.MIODomain; | import es.uah.aut.srg.micobs.mclev.mclevdom.*; | [
"es.uah.aut"
] | es.uah.aut; | 2,457,258 |
boolean canCorrectMore(ModelNode operation);
/**
* Correct the operation, only called if {@link #canCorrectMore(ModelNode)} returned {@code true} | boolean canCorrectMore(ModelNode operation); /** * Correct the operation, only called if {@link #canCorrectMore(ModelNode)} returned {@code true} | /**
* Whether something can be corrected in the operation to make it pass.
* It is preferable to correct one attribute at a time.
*
* @param operation the operation to check
* @return {@code true} if expected to fail, {@code false} otherwise
*/ | Whether something can be corrected in the operation to make it pass. It is preferable to correct one attribute at a time | canCorrectMore | {
"repo_name": "aloubyansky/wildfly-core",
"path": "model-test/src/main/java/org/jboss/as/model/test/FailedOperationTransformationConfig.java",
"license": "lgpl-2.1",
"size": 30547
} | [
"org.jboss.dmr.ModelNode"
] | import org.jboss.dmr.ModelNode; | import org.jboss.dmr.*; | [
"org.jboss.dmr"
] | org.jboss.dmr; | 439,580 |
void setAlertOnRevokedCertificate(StatusAlert alertOnRevokedCertificate); | void setAlertOnRevokedCertificate(StatusAlert alertOnRevokedCertificate); | /**
* This method allows to change the behavior on revoked certificates (LT/LTA
* augmentation).
*
* Default : {@link ExceptionOnStatusAlert} - throw an exception.
*
* @param alertOnRevokedCertificate defines a behaviour in case of revoked
* certificate
*/ | This method allows to change the behavior on revoked certificates (LT/LTA augmentation). Default : <code>ExceptionOnStatusAlert</code> - throw an exception | setAlertOnRevokedCertificate | {
"repo_name": "esig/dss",
"path": "dss-document/src/main/java/eu/europa/esig/dss/validation/CertificateVerifier.java",
"license": "lgpl-2.1",
"size": 10704
} | [
"eu.europa.esig.dss.alert.StatusAlert"
] | import eu.europa.esig.dss.alert.StatusAlert; | import eu.europa.esig.dss.alert.*; | [
"eu.europa.esig"
] | eu.europa.esig; | 642,420 |
public TypeOrFeature[] typeOrFeatureArrayRemove(TypeOrFeature[] a, String s) {
TypeOrFeature[] result = new TypeOrFeature[a.length - 1];
for (int i = 0, j = 0; i < a.length; i++) {
if (!a[i].getName().equals(s)) {
// debug
if (j == a.length - 1) {
throw new InternalErrorCDE("feature or type not found: looking for " + s);
}
result[j++] = a[i];
}
}
return result;
} | TypeOrFeature[] function(TypeOrFeature[] a, String s) { TypeOrFeature[] result = new TypeOrFeature[a.length - 1]; for (int i = 0, j = 0; i < a.length; i++) { if (!a[i].getName().equals(s)) { if (j == a.length - 1) { throw new InternalErrorCDE(STR + s); } result[j++] = a[i]; } } return result; } | /**
* Type or feature array remove.
*
* @param a
* the a
* @param s
* the s
* @return the type or feature[]
*/ | Type or feature array remove | typeOrFeatureArrayRemove | {
"repo_name": "apache/uima-uimaj",
"path": "uimaj-ep-configurator/src/main/java/org/apache/uima/taeconfigurator/editors/ui/AbstractSection.java",
"license": "apache-2.0",
"size": 79829
} | [
"org.apache.uima.analysis_engine.TypeOrFeature",
"org.apache.uima.taeconfigurator.InternalErrorCDE"
] | import org.apache.uima.analysis_engine.TypeOrFeature; import org.apache.uima.taeconfigurator.InternalErrorCDE; | import org.apache.uima.analysis_engine.*; import org.apache.uima.taeconfigurator.*; | [
"org.apache.uima"
] | org.apache.uima; | 1,088,163 |
public NonOrganizationalRolodex getRolodex() {
return rolodex;
} | NonOrganizationalRolodex function() { return rolodex; } | /**
* Gets the rolodex attribute.
* @return Returns the rolodex.
*/ | Gets the rolodex attribute | getRolodex | {
"repo_name": "mukadder/kc",
"path": "coeus-impl/src/main/java/org/kuali/kra/institutionalproposal/contacts/InstitutionalProposalContact.java",
"license": "agpl-3.0",
"size": 14288
} | [
"org.kuali.coeus.common.framework.rolodex.NonOrganizationalRolodex"
] | import org.kuali.coeus.common.framework.rolodex.NonOrganizationalRolodex; | import org.kuali.coeus.common.framework.rolodex.*; | [
"org.kuali.coeus"
] | org.kuali.coeus; | 2,204,235 |
public static synchronized void destroy() {
if (CmsLog.INIT.isDebugEnabled()) {
trackOn();
}
try {
m_openCmsEmPool.close();
} catch (Exception e) {
// do nothing
}
if (m_factoryTable != null) {
Set<String> s = m_factoryTable.keySet();
EntityManagerFactory emf;
for (String f : s) {
emf = m_factoryTable.get(f);
if (emf != null) {
emf.close();
m_factoryTable.remove(f);
}
}
}
m_isInitialized = false;
} | static synchronized void function() { if (CmsLog.INIT.isDebugEnabled()) { trackOn(); } try { m_openCmsEmPool.close(); } catch (Exception e) { } if (m_factoryTable != null) { Set<String> s = m_factoryTable.keySet(); EntityManagerFactory emf; for (String f : s) { emf = m_factoryTable.get(f); if (emf != null) { emf.close(); m_factoryTable.remove(f); } } } m_isInitialized = false; } | /**
* Close all instances of EntityManagerFactory.
*/ | Close all instances of EntityManagerFactory | destroy | {
"repo_name": "serrapos/opencms-core",
"path": "src/org/opencms/db/jpa/CmsSqlManager.java",
"license": "lgpl-2.1",
"size": 33942
} | [
"java.util.Set",
"javax.persistence.EntityManagerFactory",
"org.opencms.main.CmsLog"
] | import java.util.Set; import javax.persistence.EntityManagerFactory; import org.opencms.main.CmsLog; | import java.util.*; import javax.persistence.*; import org.opencms.main.*; | [
"java.util",
"javax.persistence",
"org.opencms.main"
] | java.util; javax.persistence; org.opencms.main; | 2,423,505 |
public static void submit(Command command) {
if (command != null) {
ENGINE.submit(command);
}
} | static void function(Command command) { if (command != null) { ENGINE.submit(command); } } | /**
* Submit a {@link Command} to be executed by Strongback's internal scheduler.
*
* @param command the command to be submitted
* @see Configurator#useExecutionPeriod(long, TimeUnit)
*/ | Submit a <code>Command</code> to be executed by Strongback's internal scheduler | submit | {
"repo_name": "strongback/strongback-java",
"path": "strongback/src/org/strongback/Strongback.java",
"license": "mit",
"size": 62749
} | [
"org.strongback.command.Command"
] | import org.strongback.command.Command; | import org.strongback.command.*; | [
"org.strongback.command"
] | org.strongback.command; | 2,730,544 |
private List<HeteroFieldContainer> reAssembleCategories(List<List<HeteroFieldContainer>> input, int size)
{
List<HeteroFieldContainer> result = new ArrayList<HeteroFieldContainer>();
for (int i = 0 ; i < size ; ++i)
{
result.addAll(input.get(i));
}
return result;
} | List<HeteroFieldContainer> function(List<List<HeteroFieldContainer>> input, int size) { List<HeteroFieldContainer> result = new ArrayList<HeteroFieldContainer>(); for (int i = 0 ; i < size ; ++i) { result.addAll(input.get(i)); } return result; } | /**
* reassemble the categories into one list
* @param input List<List<String[]>> the list of catagories
* @param size the size of the shipping priority enum
* @return category sorted list of strings with previous sort order preserved
*/ | reassemble the categories into one list | reAssembleCategories | {
"repo_name": "jpchanson/TomoBay",
"path": "src/tomoBay/helpers/SortOrders.java",
"license": "gpl-3.0",
"size": 4125
} | [
"java.util.ArrayList",
"java.util.List"
] | import java.util.ArrayList; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 152,616 |
public Builder fixingRelativeTo(FxResetFixingRelativeTo fixingRelativeTo) {
JodaBeanUtils.notNull(fixingRelativeTo, "fixingRelativeTo");
this.fixingRelativeTo = fixingRelativeTo;
return this;
} | Builder function(FxResetFixingRelativeTo fixingRelativeTo) { JodaBeanUtils.notNull(fixingRelativeTo, STR); this.fixingRelativeTo = fixingRelativeTo; return this; } | /**
* Sets the base date that each FX reset fixing is made relative to, defaulted to 'PeriodStart'.
* <p>
* The FX reset fixing date is relative to either the start or end of each accrual period.
* @param fixingRelativeTo the new value, not null
* @return this, for chaining, not null
*/ | Sets the base date that each FX reset fixing is made relative to, defaulted to 'PeriodStart'. The FX reset fixing date is relative to either the start or end of each accrual period | fixingRelativeTo | {
"repo_name": "ChinaQuants/Strata",
"path": "modules/product/src/main/java/com/opengamma/strata/product/swap/FxResetCalculation.java",
"license": "apache-2.0",
"size": 22166
} | [
"org.joda.beans.JodaBeanUtils"
] | import org.joda.beans.JodaBeanUtils; | import org.joda.beans.*; | [
"org.joda.beans"
] | org.joda.beans; | 2,674,325 |
public final void addRangeChangeHandler(RangeChangeHandler handler) {
RangeChangeHandler.addHandler(this, "rangechange", handler);
} | final void function(RangeChangeHandler handler) { RangeChangeHandler.addHandler(this, STR, handler); } | /**
* Add a changerange handler. The changerange event is fired repeatedly when
* the the visible range is being changed by user interaction (moving or
* zooming), but not after a call to the setVisibleChartRange method. The
* new range can be retrieved by calling getVisibleChartRange method.
*
* @param handler
* A select handler
*/ | Add a changerange handler. The changerange event is fired repeatedly when the the visible range is being changed by user interaction (moving or zooming), but not after a call to the setVisibleChartRange method. The new range can be retrieved by calling getVisibleChartRange method | addRangeChangeHandler | {
"repo_name": "say2joe/fedkit",
"path": "js/graphing/chap-links-library/gwt/src/Timeline/src/com/chap/links/client/Timeline.java",
"license": "gpl-3.0",
"size": 41569
} | [
"com.chap.links.client.events.RangeChangeHandler"
] | import com.chap.links.client.events.RangeChangeHandler; | import com.chap.links.client.events.*; | [
"com.chap.links"
] | com.chap.links; | 373,519 |
public X509CRL generateX509CRL(
PrivateKey key,
String provider,
SecureRandom random)
throws NoSuchProviderException, SecurityException, SignatureException, InvalidKeyException
{
try
{
return generate(key, provider, random);
}
catch (NoSuchProviderException e)
{
throw e;
}
catch (SignatureException e)
{
throw e;
}
catch (InvalidKeyException e)
{
throw e;
}
catch (GeneralSecurityException e)
{
throw new SecurityException("exception: " + e);
}
} | X509CRL function( PrivateKey key, String provider, SecureRandom random) throws NoSuchProviderException, SecurityException, SignatureException, InvalidKeyException { try { return generate(key, provider, random); } catch (NoSuchProviderException e) { throw e; } catch (SignatureException e) { throw e; } catch (InvalidKeyException e) { throw e; } catch (GeneralSecurityException e) { throw new SecurityException(STR + e); } } | /**
* generate an X509 CRL, based on the current issuer and subject,
* using the passed in provider for the signing.
* @deprecated use generate()
*/ | generate an X509 CRL, based on the current issuer and subject, using the passed in provider for the signing | generateX509CRL | {
"repo_name": "ripple/ripple-lib-java",
"path": "ripple-bouncycastle/src/main/java/org/ripple/bouncycastle/x509/X509V2CRLGenerator.java",
"license": "isc",
"size": 13707
} | [
"java.security.GeneralSecurityException",
"java.security.InvalidKeyException",
"java.security.NoSuchProviderException",
"java.security.PrivateKey",
"java.security.SecureRandom",
"java.security.SignatureException"
] | import java.security.GeneralSecurityException; import java.security.InvalidKeyException; import java.security.NoSuchProviderException; import java.security.PrivateKey; import java.security.SecureRandom; import java.security.SignatureException; | import java.security.*; | [
"java.security"
] | java.security; | 2,505,127 |
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<ServiceInner> getAsync(String resourceGroupName, String mobileNetworkName, String serviceName) {
return getWithResponseAsync(resourceGroupName, mobileNetworkName, serviceName)
.flatMap(
(Response<ServiceInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
} | @ServiceMethod(returns = ReturnType.SINGLE) Mono<ServiceInner> function(String resourceGroupName, String mobileNetworkName, String serviceName) { return getWithResponseAsync(resourceGroupName, mobileNetworkName, serviceName) .flatMap( (Response<ServiceInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } | /**
* Gets information about the specified service.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param mobileNetworkName The name of the mobile network.
* @param serviceName The name of the service. You must not use any of the following reserved strings - `default`,
* `requested` or `service`.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return information about the specified service on successful completion of {@link Mono}.
*/ | Gets information about the specified service | getAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/mobilenetwork/azure-resourcemanager-mobilenetwork/src/main/java/com/azure/resourcemanager/mobilenetwork/implementation/ServicesClientImpl.java",
"license": "mit",
"size": 69296
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.http.rest.Response",
"com.azure.resourcemanager.mobilenetwork.fluent.models.ServiceInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.resourcemanager.mobilenetwork.fluent.models.ServiceInner; | import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.resourcemanager.mobilenetwork.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 328,630 |
protected long[] computeMaxParameterSetSizeAndBatchSize(int numBatchedArgs) throws SQLException {
synchronized (checkClosed().getConnectionMutex()) {
long sizeOfEntireBatch = 1 + + 4 + 1 + 4 ;
long maxSizeOfParameterSet = 0;
for (int i = 0; i < numBatchedArgs; i++) {
BindValue[] paramArg = ((BatchedBindValues) this.batchedArgs.get(i)).batchedParameterValues;
long sizeOfParameterSet = 0;
sizeOfParameterSet += (this.parameterCount + 7) / 8; // for isNull
sizeOfParameterSet += this.parameterCount * 2; // have to send types
for (int j = 0; j < this.parameterBindings.length; j++) {
if (!paramArg[j].isNull) {
long size = paramArg[j].getBoundLength();
if (paramArg[j].isLongData) {
if (size != -1) {
sizeOfParameterSet += size;
}
} else {
sizeOfParameterSet += size;
}
}
}
sizeOfEntireBatch += sizeOfParameterSet;
if (sizeOfParameterSet > maxSizeOfParameterSet) {
maxSizeOfParameterSet = sizeOfParameterSet;
}
}
return new long[] {maxSizeOfParameterSet, sizeOfEntireBatch};
}
} | long[] function(int numBatchedArgs) throws SQLException { synchronized (checkClosed().getConnectionMutex()) { long sizeOfEntireBatch = 1 + + 4 + 1 + 4 ; long maxSizeOfParameterSet = 0; for (int i = 0; i < numBatchedArgs; i++) { BindValue[] paramArg = ((BatchedBindValues) this.batchedArgs.get(i)).batchedParameterValues; long sizeOfParameterSet = 0; sizeOfParameterSet += (this.parameterCount + 7) / 8; sizeOfParameterSet += this.parameterCount * 2; for (int j = 0; j < this.parameterBindings.length; j++) { if (!paramArg[j].isNull) { long size = paramArg[j].getBoundLength(); if (paramArg[j].isLongData) { if (size != -1) { sizeOfParameterSet += size; } } else { sizeOfParameterSet += size; } } } sizeOfEntireBatch += sizeOfParameterSet; if (sizeOfParameterSet > maxSizeOfParameterSet) { maxSizeOfParameterSet = sizeOfParameterSet; } } return new long[] {maxSizeOfParameterSet, sizeOfEntireBatch}; } } | /**
* Computes the maximum parameter set size, and entire batch size given
* the number of arguments in the batch.
* @throws SQLException
*/ | Computes the maximum parameter set size, and entire batch size given the number of arguments in the batch | computeMaxParameterSetSizeAndBatchSize | {
"repo_name": "seadsystem/SchemaSpy",
"path": "src/com/mysql/jdbc/ServerPreparedStatement.java",
"license": "gpl-2.0",
"size": 87276
} | [
"java.sql.SQLException"
] | import java.sql.SQLException; | import java.sql.*; | [
"java.sql"
] | java.sql; | 1,396,628 |
protected void copy(org.omg.CORBA.portable.InputStream src,
org.omg.CORBA.portable.OutputStream dst)
{
switch (_kind) {
case TCKind._tk_null:
case TCKind._tk_void:
case TCKind._tk_native:
case TCKind._tk_abstract_interface:
break;
case TCKind._tk_short:
case TCKind._tk_ushort:
dst.write_short(src.read_short());
break;
case TCKind._tk_long:
case TCKind._tk_ulong:
dst.write_long(src.read_long());
break;
case TCKind._tk_float:
dst.write_float(src.read_float());
break;
case TCKind._tk_double:
dst.write_double(src.read_double());
break;
case TCKind._tk_longlong:
case TCKind._tk_ulonglong:
dst.write_longlong(src.read_longlong());
break;
case TCKind._tk_longdouble:
throw wrapper.tkLongDoubleNotSupported() ;
case TCKind._tk_boolean:
dst.write_boolean(src.read_boolean());
break;
case TCKind._tk_char:
dst.write_char(src.read_char());
break;
case TCKind._tk_wchar:
dst.write_wchar(src.read_wchar());
break;
case TCKind._tk_octet:
dst.write_octet(src.read_octet());
break;
case TCKind._tk_string:
{
String s;
s = src.read_string();
// make sure length bound in typecode is not violated
if ((_length != 0) && (s.length() > _length))
throw wrapper.badStringBounds( new Integer(s.length()),
new Integer(_length) ) ;
dst.write_string(s);
}
break;
case TCKind._tk_wstring:
{
String s;
s = src.read_wstring();
// make sure length bound in typecode is not violated
if ((_length != 0) && (s.length() > _length))
throw wrapper.badStringBounds( new Integer(s.length()),
new Integer(_length) ) ;
dst.write_wstring(s);
}
break;
case TCKind._tk_fixed:
{
dst.write_ushort(src.read_ushort());
dst.write_short(src.read_short());
}
break;
case TCKind._tk_any:
{
//Any tmp = new AnyImpl(_orb);
Any tmp = ((CDRInputStream)src).orb().create_any();
TypeCodeImpl t = new TypeCodeImpl((ORB)dst.orb());
t.read_value((org.omg.CORBA_2_3.portable.InputStream)src);
t.write_value((org.omg.CORBA_2_3.portable.OutputStream)dst);
tmp.read_value(src, t);
tmp.write_value(dst);
break;
}
case TCKind._tk_TypeCode:
{
dst.write_TypeCode(src.read_TypeCode());
break;
}
case TCKind._tk_Principal:
{
dst.write_Principal(src.read_Principal());
break;
}
case TCKind._tk_objref:
{
dst.write_Object(src.read_Object());
break;
}
case TCKind._tk_except:
// Copy repositoryId
dst.write_string(src.read_string());
// Fall into ...
// _REVISIT_ what about the inherited members of this values concrete base type?
case TCKind._tk_value:
case TCKind._tk_struct:
{
// copy each element, using the corresponding member type
for (int i=0; i < _memberTypes.length; i++) {
_memberTypes[i].copy(src, dst);
}
break;
}
case TCKind._tk_union:
{
Any tagValue = new AnyImpl( (ORB)src.orb());
switch (realType(_discriminator).kind().value()) {
case TCKind._tk_short:
{
short value = src.read_short();
tagValue.insert_short(value);
dst.write_short(value);
break;
}
case TCKind._tk_long:
{
int value = src.read_long();
tagValue.insert_long(value);
dst.write_long(value);
break;
}
case TCKind._tk_ushort:
{
short value = src.read_short();
tagValue.insert_ushort(value);
dst.write_short(value);
break;
}
case TCKind._tk_ulong:
{
int value = src.read_long();
tagValue.insert_ulong(value);
dst.write_long(value);
break;
}
case TCKind._tk_float:
{
float value = src.read_float();
tagValue.insert_float(value);
dst.write_float(value);
break;
}
case TCKind._tk_double:
{
double value = src.read_double();
tagValue.insert_double(value);
dst.write_double(value);
break;
}
case TCKind._tk_boolean:
{
boolean value = src.read_boolean();
tagValue.insert_boolean(value);
dst.write_boolean(value);
break;
}
case TCKind._tk_char:
{
char value = src.read_char();
tagValue.insert_char(value);
dst.write_char(value);
break;
}
case TCKind._tk_enum:
{
int value = src.read_long();
tagValue.type(_discriminator);
tagValue.insert_long(value);
dst.write_long(value);
break;
}
case TCKind._tk_longlong:
{
long value = src.read_longlong();
tagValue.insert_longlong(value);
dst.write_longlong(value);
break;
}
case TCKind._tk_ulonglong:
{
long value = src.read_longlong();
tagValue.insert_ulonglong(value);
dst.write_longlong(value);
break;
}
// _REVISIT_ figure out long double mapping
// case TCKind.tk_longdouble:
// {
// double value = src.read_double();
// tagValue.insert_longdouble(value);
// dst.putDouble(value);
// break;
//}
case TCKind._tk_wchar:
{
char value = src.read_wchar();
tagValue.insert_wchar(value);
dst.write_wchar(value);
break;
}
default:
throw wrapper.illegalUnionDiscriminatorType() ;
}
// using the value of the tag, find out the type of the value
// following.
int labelIndex;
for (labelIndex = 0; labelIndex < _unionLabels.length; labelIndex++) {
// use equality over anys
if (tagValue.equal(_unionLabels[labelIndex])) {
_memberTypes[labelIndex].copy(src, dst);
break;
}
}
if (labelIndex == _unionLabels.length) {
// check if label has not been found
if (_defaultIndex == -1)
// throw exception if default was not expected
throw wrapper.unexpectedUnionDefault() ;
else
// must be of the default branch type
_memberTypes[_defaultIndex].copy(src, dst);
}
break;
}
case TCKind._tk_enum:
dst.write_long(src.read_long());
break;
case TCKind._tk_sequence:
// get the length of the sequence
int seqLength = src.read_long();
// check for sequence bound violated
if ((_length != 0) && (seqLength > _length))
throw wrapper.badSequenceBounds( new Integer(seqLength),
new Integer(_length) ) ;
// write the length of the sequence
dst.write_long(seqLength);
// copy each element of the seq using content type
lazy_content_type(); // make sure it's resolved
for (int i=0; i < seqLength; i++)
_contentType.copy(src, dst);
break;
case TCKind._tk_array:
// copy each element of the array using content type
for (int i=0; i < _length; i++)
_contentType.copy(src, dst);
break;
case TCKind._tk_alias:
case TCKind._tk_value_box:
// follow the alias
_contentType.copy(src, dst);
break;
case tk_indirect:
// need to follow offset, get unmarshal typecode from that
// offset, and use that to do the copy
// Don't need to read type code before using it to do the copy.
// It should be fully usable.
indirectType().copy(src, dst);
break;
default:
throw wrapper.invalidTypecodeKindMarshal() ;
}
} | void function(org.omg.CORBA.portable.InputStream src, org.omg.CORBA.portable.OutputStream dst) { switch (_kind) { case TCKind._tk_null: case TCKind._tk_void: case TCKind._tk_native: case TCKind._tk_abstract_interface: break; case TCKind._tk_short: case TCKind._tk_ushort: dst.write_short(src.read_short()); break; case TCKind._tk_long: case TCKind._tk_ulong: dst.write_long(src.read_long()); break; case TCKind._tk_float: dst.write_float(src.read_float()); break; case TCKind._tk_double: dst.write_double(src.read_double()); break; case TCKind._tk_longlong: case TCKind._tk_ulonglong: dst.write_longlong(src.read_longlong()); break; case TCKind._tk_longdouble: throw wrapper.tkLongDoubleNotSupported() ; case TCKind._tk_boolean: dst.write_boolean(src.read_boolean()); break; case TCKind._tk_char: dst.write_char(src.read_char()); break; case TCKind._tk_wchar: dst.write_wchar(src.read_wchar()); break; case TCKind._tk_octet: dst.write_octet(src.read_octet()); break; case TCKind._tk_string: { String s; s = src.read_string(); if ((_length != 0) && (s.length() > _length)) throw wrapper.badStringBounds( new Integer(s.length()), new Integer(_length) ) ; dst.write_string(s); } break; case TCKind._tk_wstring: { String s; s = src.read_wstring(); if ((_length != 0) && (s.length() > _length)) throw wrapper.badStringBounds( new Integer(s.length()), new Integer(_length) ) ; dst.write_wstring(s); } break; case TCKind._tk_fixed: { dst.write_ushort(src.read_ushort()); dst.write_short(src.read_short()); } break; case TCKind._tk_any: { Any tmp = ((CDRInputStream)src).orb().create_any(); TypeCodeImpl t = new TypeCodeImpl((ORB)dst.orb()); t.read_value((org.omg.CORBA_2_3.portable.InputStream)src); t.write_value((org.omg.CORBA_2_3.portable.OutputStream)dst); tmp.read_value(src, t); tmp.write_value(dst); break; } case TCKind._tk_TypeCode: { dst.write_TypeCode(src.read_TypeCode()); break; } case TCKind._tk_Principal: { dst.write_Principal(src.read_Principal()); break; } case TCKind._tk_objref: { 
dst.write_Object(src.read_Object()); break; } case TCKind._tk_except: dst.write_string(src.read_string()); case TCKind._tk_value: case TCKind._tk_struct: { for (int i=0; i < _memberTypes.length; i++) { _memberTypes[i].copy(src, dst); } break; } case TCKind._tk_union: { Any tagValue = new AnyImpl( (ORB)src.orb()); switch (realType(_discriminator).kind().value()) { case TCKind._tk_short: { short value = src.read_short(); tagValue.insert_short(value); dst.write_short(value); break; } case TCKind._tk_long: { int value = src.read_long(); tagValue.insert_long(value); dst.write_long(value); break; } case TCKind._tk_ushort: { short value = src.read_short(); tagValue.insert_ushort(value); dst.write_short(value); break; } case TCKind._tk_ulong: { int value = src.read_long(); tagValue.insert_ulong(value); dst.write_long(value); break; } case TCKind._tk_float: { float value = src.read_float(); tagValue.insert_float(value); dst.write_float(value); break; } case TCKind._tk_double: { double value = src.read_double(); tagValue.insert_double(value); dst.write_double(value); break; } case TCKind._tk_boolean: { boolean value = src.read_boolean(); tagValue.insert_boolean(value); dst.write_boolean(value); break; } case TCKind._tk_char: { char value = src.read_char(); tagValue.insert_char(value); dst.write_char(value); break; } case TCKind._tk_enum: { int value = src.read_long(); tagValue.type(_discriminator); tagValue.insert_long(value); dst.write_long(value); break; } case TCKind._tk_longlong: { long value = src.read_longlong(); tagValue.insert_longlong(value); dst.write_longlong(value); break; } case TCKind._tk_ulonglong: { long value = src.read_longlong(); tagValue.insert_ulonglong(value); dst.write_longlong(value); break; } case TCKind._tk_wchar: { char value = src.read_wchar(); tagValue.insert_wchar(value); dst.write_wchar(value); break; } default: throw wrapper.illegalUnionDiscriminatorType() ; } int labelIndex; for (labelIndex = 0; labelIndex < _unionLabels.length; labelIndex++) 
{ if (tagValue.equal(_unionLabels[labelIndex])) { _memberTypes[labelIndex].copy(src, dst); break; } } if (labelIndex == _unionLabels.length) { if (_defaultIndex == -1) throw wrapper.unexpectedUnionDefault() ; else _memberTypes[_defaultIndex].copy(src, dst); } break; } case TCKind._tk_enum: dst.write_long(src.read_long()); break; case TCKind._tk_sequence: int seqLength = src.read_long(); if ((_length != 0) && (seqLength > _length)) throw wrapper.badSequenceBounds( new Integer(seqLength), new Integer(_length) ) ; dst.write_long(seqLength); lazy_content_type(); for (int i=0; i < seqLength; i++) _contentType.copy(src, dst); break; case TCKind._tk_array: for (int i=0; i < _length; i++) _contentType.copy(src, dst); break; case TCKind._tk_alias: case TCKind._tk_value_box: _contentType.copy(src, dst); break; case tk_indirect: indirectType().copy(src, dst); break; default: throw wrapper.invalidTypecodeKindMarshal() ; } } | /**
* This is not a copy of the TypeCodeImpl objects, but instead it
* copies the value this type code is representing.
* See AnyImpl read_value and write_value for usage.
* The state of this TypeCodeImpl instance isn't changed, only used
* by the Any to do the correct copy.
*/ | This is not a copy of the TypeCodeImpl objects, but instead it copies the value this type code is representing. See AnyImpl read_value and write_value for usage. The state of this TypeCodeImpl instance isn't changed, only used by the Any to do the correct copy | copy | {
"repo_name": "TheTypoMaster/Scaper",
"path": "openjdk/corba/src/share/classes/com/sun/corba/se/impl/corba/TypeCodeImpl.java",
"license": "gpl-2.0",
"size": 88299
} | [
"com.sun.corba.se.impl.encoding.CDRInputStream",
"org.omg.CORBA_2_3"
] | import com.sun.corba.se.impl.encoding.CDRInputStream; import org.omg.CORBA_2_3; | import com.sun.corba.se.impl.encoding.*; import org.omg.*; | [
"com.sun.corba",
"org.omg"
] | com.sun.corba; org.omg; | 1,046,683 |
@Test
public void testFindById() {
if (!DataFilter.class.isAssignableFrom(getController().getFilterClass())) {
LOG.warn("Controller [{}] doesn't support DataFilter. Find by id will not be tested.", getController().getClass());
return;
}
//
DTO dto = prepareDto();
//
DTO createdDto = createDto(dto);
// mock dto
createDto(prepareDto());
//
MultiValueMap<String, String> parameters = new LinkedMultiValueMap<>();
parameters.set(DataFilter.PARAMETER_ID, createdDto.getId().toString());
//
List<DTO> results = find(parameters);
//
Assert.assertEquals(1, results.size());
Assert.assertEquals(createdDto.getId(), results.get(0).getId());
//
// find quick alias
results = findQuick(parameters);
//
Assert.assertEquals(1, results.size());
Assert.assertEquals(createdDto.getId(), results.get(0).getId());
//
if (supportsAutocomplete()) {
results = autocomplete(parameters);
//
Assert.assertEquals(1, results.size());
Assert.assertEquals(createdDto.getId(), results.get(0).getId());
} else {
LOG.info("Controller [{}] doesn't support autocomplete method. Method will not be tested.", getController().getClass());
}
//
Assert.assertEquals(1, count(parameters));
}
| void function() { if (!DataFilter.class.isAssignableFrom(getController().getFilterClass())) { LOG.warn(STR, getController().getClass()); return; } createDto(prepareDto()); parameters.set(DataFilter.PARAMETER_ID, createdDto.getId().toString()); Assert.assertEquals(createdDto.getId(), results.get(0).getId()); results = findQuick(parameters); Assert.assertEquals(createdDto.getId(), results.get(0).getId()); results = autocomplete(parameters); Assert.assertEquals(createdDto.getId(), results.get(0).getId()); } else { LOG.info(STR, getController().getClass()); } } | /**
* Test search by id - supported by default, id DataFilter is used (see #toPedicates in services - has to call super implementation)
*
* @throws Exception
*/ | Test search by id - supported by default, id DataFilter is used (see #toPedicates in services - has to call super implementation) | testFindById | {
"repo_name": "bcvsolutions/CzechIdMng",
"path": "Realization/backend/core/core-test-api/src/main/java/eu/bcvsolutions/idm/core/api/rest/AbstractReadWriteDtoControllerRestTest.java",
"license": "mit",
"size": 55419
} | [
"eu.bcvsolutions.idm.core.api.dto.filter.DataFilter",
"org.junit.Assert",
"org.springframework.test.web.servlet.request.MockMvcRequestBuilders"
] | import eu.bcvsolutions.idm.core.api.dto.filter.DataFilter; import org.junit.Assert; import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; | import eu.bcvsolutions.idm.core.api.dto.filter.*; import org.junit.*; import org.springframework.test.web.servlet.request.*; | [
"eu.bcvsolutions.idm",
"org.junit",
"org.springframework.test"
] | eu.bcvsolutions.idm; org.junit; org.springframework.test; | 1,649,451 |
public static <T> org.hamcrest.core.AnyOf<T> anyOf(final Matcher<T> first, final Matcher<? super T> second) {
return org.hamcrest.core.AnyOf.<T>anyOf(first, second);
} | static <T> org.hamcrest.core.AnyOf<T> function(final Matcher<T> first, final Matcher<? super T> second) { return org.hamcrest.core.AnyOf.<T>anyOf(first, second); } | /**
* Creates a matcher that matches if the examined object matches <b>ANY</b> of the specified matchers.
* <p/>
* For example:
* <pre>assertThat("myValue", anyOf(startsWith("foo"), containsString("Val")))</pre>
*/ | Creates a matcher that matches if the examined object matches ANY of the specified matchers. For example: <code>assertThat("myValue", anyOf(startsWith("foo"), containsString("Val")))</code> | anyOf | {
"repo_name": "gv2011/util",
"path": "testutil/src/main/java/com/github/gv2011/testutil/Matchers.java",
"license": "mit",
"size": 65674
} | [
"org.hamcrest.Matcher"
] | import org.hamcrest.Matcher; | import org.hamcrest.*; | [
"org.hamcrest"
] | org.hamcrest; | 1,123,406 |
public static int intValue(Collection<Attribute> attrs) {
int val = 0;
for (Attribute attr : attrs) {
val += attr.val;
}
return val;
}
// Private
private int val;
private Attribute(int val) {
this.val = val;
}
} | static int function(Collection<Attribute> attrs) { int val = 0; for (Attribute attr : attrs) { val += attr.val; } return val; } private int val; private Attribute(int val) { this.val = val; } } | /**
* Converts a collection of attributes back into an int representation.
*/ | Converts a collection of attributes back into an int representation | intValue | {
"repo_name": "joval/jSAF",
"path": "src/jsaf/intf/windows/io/IWindowsFileInfo.java",
"license": "lgpl-2.1",
"size": 8102
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 898,051 |
public void notify(ObjectNode event) {
if (delegate != null) {
delegate.notify(event);
}
} | void function(ObjectNode event) { if (delegate != null) { delegate.notify(event); } } | /**
* Notifies the process monitor delegate with the specified event.
*
* @param event JSON event data
*/ | Notifies the process monitor delegate with the specified event | notify | {
"repo_name": "packet-tracker/onos",
"path": "utils/stc/src/main/java/org/onlab/stc/Monitor.java",
"license": "apache-2.0",
"size": 4609
} | [
"com.fasterxml.jackson.databind.node.ObjectNode"
] | import com.fasterxml.jackson.databind.node.ObjectNode; | import com.fasterxml.jackson.databind.node.*; | [
"com.fasterxml.jackson"
] | com.fasterxml.jackson; | 2,225,158 |
public void updateRemoteStoreDefList(List<StoreDefinition> storeDefs,
Collection<Integer> nodeIds) throws VoldemortException {
// Check for backwards compatibility
StoreDefinitionUtils.validateSchemasAsNeeded(storeDefs);
for(Integer nodeId: nodeIds) {
logger.info("Updating stores.xml for "
+ currentCluster.getNodeById(nodeId).getHost() + ":" + nodeId);
// get current version.
VectorClock oldClock = (VectorClock) metadataMgmtOps.getRemoteStoreDefList(nodeId)
.getVersion();
Versioned<String> value = new Versioned<String>(storeMapper.writeStoreList(storeDefs),
oldClock.incremented(nodeId, 1));
ByteArray keyBytes = new ByteArray(ByteUtils.getBytes(MetadataStore.STORES_KEY,
"UTF-8"));
Versioned<byte[]> valueBytes = new Versioned<byte[]>(ByteUtils.getBytes(value.getValue(),
"UTF-8"),
value.getVersion());
VAdminProto.VoldemortAdminRequest request = VAdminProto.VoldemortAdminRequest.newBuilder()
.setType(VAdminProto.AdminRequestType.UPDATE_STORE_DEFINITIONS)
.setUpdateMetadata(VAdminProto.UpdateMetadataRequest.newBuilder()
.setKey(ByteString.copyFrom(keyBytes.get()))
.setVersioned(ProtoUtils.encodeVersioned(valueBytes))
.build())
.build();
VAdminProto.UpdateMetadataResponse.Builder response = rpcOps.sendAndReceive(nodeId,
request,
VAdminProto.UpdateMetadataResponse.newBuilder());
if(response.hasError()) {
helperOps.throwException(response.getError());
}
}
} | void function(List<StoreDefinition> storeDefs, Collection<Integer> nodeIds) throws VoldemortException { StoreDefinitionUtils.validateSchemasAsNeeded(storeDefs); for(Integer nodeId: nodeIds) { logger.info(STR + currentCluster.getNodeById(nodeId).getHost() + ":" + nodeId); VectorClock oldClock = (VectorClock) metadataMgmtOps.getRemoteStoreDefList(nodeId) .getVersion(); Versioned<String> value = new Versioned<String>(storeMapper.writeStoreList(storeDefs), oldClock.incremented(nodeId, 1)); ByteArray keyBytes = new ByteArray(ByteUtils.getBytes(MetadataStore.STORES_KEY, "UTF-8")); Versioned<byte[]> valueBytes = new Versioned<byte[]>(ByteUtils.getBytes(value.getValue(), "UTF-8"), value.getVersion()); VAdminProto.VoldemortAdminRequest request = VAdminProto.VoldemortAdminRequest.newBuilder() .setType(VAdminProto.AdminRequestType.UPDATE_STORE_DEFINITIONS) .setUpdateMetadata(VAdminProto.UpdateMetadataRequest.newBuilder() .setKey(ByteString.copyFrom(keyBytes.get())) .setVersioned(ProtoUtils.encodeVersioned(valueBytes)) .build()) .build(); VAdminProto.UpdateMetadataResponse.Builder response = rpcOps.sendAndReceive(nodeId, request, VAdminProto.UpdateMetadataResponse.newBuilder()); if(response.hasError()) { helperOps.throwException(response.getError()); } } } | /**
* Update the store definitions on a list of remote nodes.
* <p>
*
* @param storeDefs The new store definition list
* @param nodeIds The node id of the machine
* @throws VoldemortException
*/ | Update the store definitions on a list of remote nodes. | updateRemoteStoreDefList | {
"repo_name": "cshaxu/voldemort",
"path": "src/java/voldemort/client/protocol/admin/AdminClient.java",
"license": "apache-2.0",
"size": 234723
} | [
"com.google.protobuf.ByteString",
"java.util.Collection",
"java.util.List"
] | import com.google.protobuf.ByteString; import java.util.Collection; import java.util.List; | import com.google.protobuf.*; import java.util.*; | [
"com.google.protobuf",
"java.util"
] | com.google.protobuf; java.util; | 1,860,357 |
public Builder putAllMetadata(Map<String, String> map) {
if (this.metadata == null) {
this.metadata = new HashMap<>();
}
this.metadata.putAll(map);
return this;
} | Builder function(Map<String, String> map) { if (this.metadata == null) { this.metadata = new HashMap<>(); } this.metadata.putAll(map); return this; } | /**
* Add all map key/value pairs to `metadata` map. A map is initialized for the first
* `put/putAll` call, and subsequent calls add additional key/value pairs to the original map.
* See {@link SessionCreateParams.SubscriptionData#metadata} for the field documentation.
*/ | Add all map key/value pairs to `metadata` map. A map is initialized for the first `put/putAll` call, and subsequent calls add additional key/value pairs to the original map. See <code>SessionCreateParams.SubscriptionData#metadata</code> for the field documentation | putAllMetadata | {
"repo_name": "stripe/stripe-java",
"path": "src/main/java/com/stripe/param/checkout/SessionCreateParams.java",
"license": "mit",
"size": 222478
} | [
"java.util.HashMap",
"java.util.Map"
] | import java.util.HashMap; import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,385,494 |
@Override
protected void doActionPerformed(ActionEvent e) {
String path;
path = GUIHelper.showInputDialog(
m_State, "Please enter the full name of the actor (e.g., 'Flow.Sequence.Display'):",
null, "Enter actor path", null, 40, 4);
if (path == null)
return;
m_State.getCurrentPanel().getTree().locateAndDisplay(path, true);
} | void function(ActionEvent e) { String path; path = GUIHelper.showInputDialog( m_State, STR, null, STR, null, 40, 4); if (path == null) return; m_State.getCurrentPanel().getTree().locateAndDisplay(path, true); } | /**
* Invoked when an action occurs.
*/ | Invoked when an action occurs | doActionPerformed | {
"repo_name": "waikato-datamining/adams-base",
"path": "adams-core/src/main/java/adams/gui/flow/menu/EditLocateActor.java",
"license": "gpl-3.0",
"size": 1962
} | [
"java.awt.event.ActionEvent"
] | import java.awt.event.ActionEvent; | import java.awt.event.*; | [
"java.awt"
] | java.awt; | 724,403 |
private void saveProfile()
{
if (verifyInput())
{
if (mProfile != null)
{
updateProfileData();
mDataSource.updateVpnProfile(mProfile);
}
else
{
mProfile = new VpnProfile();
updateProfileData();
mDataSource.insertProfile(mProfile);
}
setResult(RESULT_OK, new Intent().putExtra(VpnProfileDataSource.KEY_ID, mProfile.getId()));
finish();
}
} | void function() { if (verifyInput()) { if (mProfile != null) { updateProfileData(); mDataSource.updateVpnProfile(mProfile); } else { mProfile = new VpnProfile(); updateProfileData(); mDataSource.insertProfile(mProfile); } setResult(RESULT_OK, new Intent().putExtra(VpnProfileDataSource.KEY_ID, mProfile.getId())); finish(); } } | /**
* Save or update the profile depending on whether we actually have a
* profile object or not (this was created in updateProfileData)
*/ | Save or update the profile depending on whether we actually have a profile object or not (this was created in updateProfileData) | saveProfile | {
"repo_name": "vapana/client",
"path": "src/frontends/android/src/org/strongswan/android/ui/VpnProfileDetailActivity.java",
"license": "gpl-2.0",
"size": 15530
} | [
"android.content.Intent",
"org.strongswan.android.data.VpnProfile",
"org.strongswan.android.data.VpnProfileDataSource"
] | import android.content.Intent; import org.strongswan.android.data.VpnProfile; import org.strongswan.android.data.VpnProfileDataSource; | import android.content.*; import org.strongswan.android.data.*; | [
"android.content",
"org.strongswan.android"
] | android.content; org.strongswan.android; | 328,936 |
public void testMappings() {
XMLDescriptor xDesc = ((org.eclipse.persistence.jaxb.JAXBContext) jaxbContext).getXMLContext().getDescriptor(new QName("QuoteRequest"));
assertNotNull("No descriptor was generated for CustomQuoteRequest.", xDesc);
int currencyPairCodeCount = 0;
int dateCount = 0;
Vector<DatabaseMapping> mappings = xDesc.getMappings();
for (int i=0; i < mappings.size(); i++) {
DatabaseMapping mapping = mappings.get(i);
if (mapping.getAttributeName().equals("currencyPairCode")) {
currencyPairCodeCount++;
// check user-set properties
Map props = mapping.getProperties();
assertNotNull("No user-defined properties exist on the mapping for [currencyPairCode]", props);
assertTrue("Expected [2] user-defined properties, but there were [" + props.size() + "]", props.size() == 2);
if (mapping.getField().getName().equals("QuoteReq/Instrmt/@Sym")) {
// verify value-types
assertTrue("Expected value-type [String] for key [1] but was [" + props.get("1").getClass().getName() + "]", props.get("1") instanceof String);
assertTrue("Expected value-type [Integer] for key [2] but was [" + props.get("2").getClass().getName() + "]", props.get("2") instanceof Integer);
// verify values
assertTrue("Expected property value [A] for key [" + 1 + "] but was [" + props.get("1") + "]", "A".equals(props.get("1")));
assertTrue("Expected property value [66] for key [" + 2 + "] but was [" + props.get("2") + "]", 66 == (Integer) props.get("2"));
} else {
// assume "QuoteReq/Leg/@Sym"
// verify value-types
assertTrue("Expected value-type [String] for key [2] but was [" + props.get("1").getClass().getName() + "]", props.get("1") instanceof String);
assertTrue("Expected value-type [Double] for key [1] but was [" + props.get("2").getClass().getName() + "]", props.get("2") instanceof Double);
// verify values
assertTrue("Expected property value [B] for key [" + 1 + "] but was [" + props.get("1") + "]", "B".equals(props.get("1")));
assertTrue("Expected property value [9.9] for key [" + 2 + "] but was [" + props.get("2") + "]", 9.9 == (Double) props.get("2"));
}
} else if (mapping.getAttributeName().equals("date")) {
dateCount++;
}
}
assertTrue("Expected [2] mappings for attribute [currencyPairCode], but was [" + currencyPairCodeCount + "]", currencyPairCodeCount == 2);
assertTrue("Expected [3] mappings for attribute [date], but was [" + dateCount + "]", dateCount == 3);
} | void function() { XMLDescriptor xDesc = ((org.eclipse.persistence.jaxb.JAXBContext) jaxbContext).getXMLContext().getDescriptor(new QName(STR)); assertNotNull(STR, xDesc); int currencyPairCodeCount = 0; int dateCount = 0; Vector<DatabaseMapping> mappings = xDesc.getMappings(); for (int i=0; i < mappings.size(); i++) { DatabaseMapping mapping = mappings.get(i); if (mapping.getAttributeName().equals(STR)) { currencyPairCodeCount++; Map props = mapping.getProperties(); assertNotNull(STR, props); assertTrue(STR + props.size() + "]", props.size() == 2); if (mapping.getField().getName().equals(STR)) { assertTrue(STR + props.get("1").getClass().getName() + "]", props.get("1") instanceof String); assertTrue(STR + props.get("2").getClass().getName() + "]", props.get("2") instanceof Integer); assertTrue(STR + 1 + STR + props.get("1") + "]", "A".equals(props.get("1"))); assertTrue(STR + 2 + STR + props.get("2") + "]", 66 == (Integer) props.get("2")); } else { assertTrue(STR + props.get("1").getClass().getName() + "]", props.get("1") instanceof String); assertTrue(STR + props.get("2").getClass().getName() + "]", props.get("2") instanceof Double); assertTrue(STR + 1 + STR + props.get("1") + "]", "B".equals(props.get("1"))); assertTrue(STR + 2 + STR + props.get("2") + "]", 9.9 == (Double) props.get("2")); } } else if (mapping.getAttributeName().equals("date")) { dateCount++; } } assertTrue(STR + currencyPairCodeCount + "]", currencyPairCodeCount == 2); assertTrue(STR + dateCount + "]", dateCount == 3); } | /**
* We expect two mappings for 'currencyPairCode'. We will verify mapping count,
* user-set properties, etc.
*
* Positive test.
*
*/ | We expect two mappings for 'currencyPairCode'. We will verify mapping count, user-set properties, etc. Positive test | testMappings | {
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/externalizedmetadata/mappings/multiple/MultipleMappingPerFieldTestCases.java",
"license": "epl-1.0",
"size": 7125
} | [
"java.util.Map",
"java.util.Vector",
"javax.xml.namespace.QName",
"org.eclipse.persistence.mappings.DatabaseMapping",
"org.eclipse.persistence.oxm.XMLDescriptor"
] | import java.util.Map; import java.util.Vector; import javax.xml.namespace.QName; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.oxm.XMLDescriptor; | import java.util.*; import javax.xml.namespace.*; import org.eclipse.persistence.mappings.*; import org.eclipse.persistence.oxm.*; | [
"java.util",
"javax.xml",
"org.eclipse.persistence"
] | java.util; javax.xml; org.eclipse.persistence; | 1,811,696 |
public void deselectByVisibleText(String text) {
List<WebElement> options = element.findElements(By.xpath(
".//option[normalize-space(.) = " + Quotes.escape(text) + "]"));
for (WebElement option : options) {
if (option.isSelected()) {
option.click();
}
}
} | void function(String text) { List<WebElement> options = element.findElements(By.xpath( ". for (WebElement option : options) { if (option.isSelected()) { option.click(); } } } | /**
* Deselect all options that display text matching the argument. That is, when given "Bar" this
* would deselect an option like:
*
* <option value="foo">Bar</option>
*
* @param text The visible text to match against
* @throws NoSuchElementException If no matching option elements are found
*/ | Deselect all options that display text matching the argument. That is, when given "Bar" this would deselect an option like: <option value="foo">Bar</option> | deselectByVisibleText | {
"repo_name": "sevaseva/selenium",
"path": "java/client/src/org/openqa/selenium/support/ui/Select.java",
"license": "apache-2.0",
"size": 8864
} | [
"java.util.List",
"org.openqa.selenium.By",
"org.openqa.selenium.WebElement"
] | import java.util.List; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; | import java.util.*; import org.openqa.selenium.*; | [
"java.util",
"org.openqa.selenium"
] | java.util; org.openqa.selenium; | 576,711 |
public static Map<VIFMetrics, VIFMetrics.Record> getAllRecords(Connection c) throws
BadServerResponse,
XenAPIException,
XmlRpcException {
String method_call = "VIF_metrics.get_all_records";
String session = c.getSessionReference();
Object[] method_params = {Marshalling.toXMLRPC(session)};
Map response = c.dispatch(method_call, method_params);
Object result = response.get("Value");
return Types.toMapOfVIFMetricsVIFMetricsRecord(result);
} | static Map<VIFMetrics, VIFMetrics.Record> function(Connection c) throws BadServerResponse, XenAPIException, XmlRpcException { String method_call = STR; String session = c.getSessionReference(); Object[] method_params = {Marshalling.toXMLRPC(session)}; Map response = c.dispatch(method_call, method_params); Object result = response.get("Value"); return Types.toMapOfVIFMetricsVIFMetricsRecord(result); } | /**
* Return a map of VIF_metrics references to VIF_metrics records for all VIF_metrics instances known to the system.
*
* @return records of all objects
*/ | Return a map of VIF_metrics references to VIF_metrics records for all VIF_metrics instances known to the system | getAllRecords | {
"repo_name": "cinderella/incubator-cloudstack",
"path": "deps/XenServerJava/com/xensource/xenapi/VIFMetrics.java",
"license": "apache-2.0",
"size": 11585
} | [
"com.xensource.xenapi.Types",
"java.util.Map",
"org.apache.xmlrpc.XmlRpcException"
] | import com.xensource.xenapi.Types; import java.util.Map; import org.apache.xmlrpc.XmlRpcException; | import com.xensource.xenapi.*; import java.util.*; import org.apache.xmlrpc.*; | [
"com.xensource.xenapi",
"java.util",
"org.apache.xmlrpc"
] | com.xensource.xenapi; java.util; org.apache.xmlrpc; | 1,499,125 |
public static double getDecimalYear(Calendar calendar) {
return calendar.get(Calendar.YEAR)
+ (double) calendar.get(Calendar.DAY_OF_YEAR) / calendar.getActualMaximum(Calendar.DAY_OF_YEAR);
} | static double function(Calendar calendar) { return calendar.get(Calendar.YEAR) + (double) calendar.get(Calendar.DAY_OF_YEAR) / calendar.getActualMaximum(Calendar.DAY_OF_YEAR); } | /**
* Returns the year of the calendar object as a decimal value.
*/ | Returns the year of the calendar object as a decimal value | getDecimalYear | {
"repo_name": "AchimHentschel/smarthome",
"path": "extensions/binding/org.eclipse.smarthome.binding.astro/src/main/java/org/eclipse/smarthome/binding/astro/internal/util/DateTimeUtils.java",
"license": "epl-1.0",
"size": 8088
} | [
"java.util.Calendar"
] | import java.util.Calendar; | import java.util.*; | [
"java.util"
] | java.util; | 675,299 |
private static String printTree(DetailAST ast) {
final StringBuilder messageBuilder = new StringBuilder();
DetailAST node = ast;
while (node != null) {
messageBuilder.append(getIndentation(node))
.append(getNodeInfo(node))
.append(LINE_SEPARATOR)
.append(printTree(node.getFirstChild()));
node = node.getNextSibling();
}
return messageBuilder.toString();
} | static String function(DetailAST ast) { final StringBuilder messageBuilder = new StringBuilder(); DetailAST node = ast; while (node != null) { messageBuilder.append(getIndentation(node)) .append(getNodeInfo(node)) .append(LINE_SEPARATOR) .append(printTree(node.getFirstChild())); node = node.getNextSibling(); } return messageBuilder.toString(); } | /**
* Print AST.
* @param ast the root AST node.
* @return string AST.
*/ | Print AST | printTree | {
"repo_name": "bansalayush/checkstyle",
"path": "src/main/java/com/puppycrawl/tools/checkstyle/AstTreeStringPrinter.java",
"license": "lgpl-2.1",
"size": 9873
} | [
"com.puppycrawl.tools.checkstyle.api.DetailAST"
] | import com.puppycrawl.tools.checkstyle.api.DetailAST; | import com.puppycrawl.tools.checkstyle.api.*; | [
"com.puppycrawl.tools"
] | com.puppycrawl.tools; | 2,510,254 |
public Model setVersion(String version) {
this.version = version;
if (version == null) {
root.removeChild("version");
} else if (!root.hasSingleChild("version")) {
root.insertChild(
createElement("version", version),
afterAnyOf("artifactId", "groupId", "parent", "modelVersion").or(inTheBegin()));
} else {
tree.updateText("/project/version", version);
}
return this;
} | Model function(String version) { this.version = version; if (version == null) { root.removeChild(STR); } else if (!root.hasSingleChild(STR)) { root.insertChild( createElement(STR, version), afterAnyOf(STR, STR, STR, STR).or(inTheBegin())); } else { tree.updateText(STR, version); } return this; } | /**
* Sets the current version of the artifact produced by this project.
*
* <p>If {@code version} is {@code null} then it will be remove from model as well as from xml.
*
* @param version new project version
* @return this model instance
*/ | Sets the current version of the artifact produced by this project. If version is null then it will be remove from model as well as from xml | setVersion | {
"repo_name": "TypeFox/che",
"path": "plugins/plugin-maven/che-plugin-maven-tools/src/main/java/org/eclipse/che/ide/maven/tools/Model.java",
"license": "epl-1.0",
"size": 38042
} | [
"org.eclipse.che.commons.xml.NewElement",
"org.eclipse.che.commons.xml.XMLTreeLocation"
] | import org.eclipse.che.commons.xml.NewElement; import org.eclipse.che.commons.xml.XMLTreeLocation; | import org.eclipse.che.commons.xml.*; | [
"org.eclipse.che"
] | org.eclipse.che; | 4,718 |
public List loadGraphCollection(String dirName, FilenameFilter filter) {
File dir = new File(dirName);
if (!dir.isDirectory()) {
throw new FatalException("Parameter dirName must be a directory");
}
String[] files = dir.list(filter);
List graphCollection = new ArrayList();
for (int i = 0; i < files.length; i++) {
String currentFile = dirName + File.separatorChar + files[i];
GraphMLFile graphmlFile = new GraphMLFile(mFileHandler);
Graph graph = graphmlFile.load(currentFile);
//System.out.println("Graph loaded with " + graph.numVertices() + " nodes and " + graph.numEdges() + " edges.");
graphCollection.add(graph);
}
return graphCollection;
} | List function(String dirName, FilenameFilter filter) { File dir = new File(dirName); if (!dir.isDirectory()) { throw new FatalException(STR); } String[] files = dir.list(filter); List graphCollection = new ArrayList(); for (int i = 0; i < files.length; i++) { String currentFile = dirName + File.separatorChar + files[i]; GraphMLFile graphmlFile = new GraphMLFile(mFileHandler); Graph graph = graphmlFile.load(currentFile); graphCollection.add(graph); } return graphCollection; } | /**
* Loads in a list of graphs whose corresponding filenames pass the file filter and are located in the
* specified directory
* @param dirName the directory containing the set of files that are to be screened through the file filter
* @param filter the file filter
* @return a list of graphs
*/ | Loads in a list of graphs whose corresponding filenames pass the file filter and are located in the specified directory | loadGraphCollection | {
"repo_name": "markus1978/clickwatch",
"path": "external/edu.uci.ics.jung/src/edu/uci/ics/jung/io/GraphMLFile.java",
"license": "apache-2.0",
"size": 10869
} | [
"edu.uci.ics.jung.exceptions.FatalException",
"edu.uci.ics.jung.graph.Graph",
"java.io.File",
"java.io.FilenameFilter",
"java.util.ArrayList",
"java.util.List"
] | import edu.uci.ics.jung.exceptions.FatalException; import edu.uci.ics.jung.graph.Graph; import java.io.File; import java.io.FilenameFilter; import java.util.ArrayList; import java.util.List; | import edu.uci.ics.jung.exceptions.*; import edu.uci.ics.jung.graph.*; import java.io.*; import java.util.*; | [
"edu.uci.ics",
"java.io",
"java.util"
] | edu.uci.ics; java.io; java.util; | 42,591 |
private Map<String, GuacamoleConfiguration> getFilteredAuthorizedConfigurations(AuthenticatedUser authenticatedUser)
throws GuacamoleException {
// Pull cached configurations, if any
if (authenticatedUser instanceof UserFilesAuthenticationProvider.UserFilesAuthenticatedUser && authenticatedUser.getAuthenticationProvider() == this) {
return ((UserFilesAuthenticationProvider.UserFilesAuthenticatedUser) authenticatedUser).getAuthorizedConfigurations();
}
// Otherwise, pull using credentials
return getFilteredAuthorizedConfigurations(authenticatedUser.getCredentials());
} | Map<String, GuacamoleConfiguration> function(AuthenticatedUser authenticatedUser) throws GuacamoleException { if (authenticatedUser instanceof UserFilesAuthenticationProvider.UserFilesAuthenticatedUser && authenticatedUser.getAuthenticationProvider() == this) { return ((UserFilesAuthenticationProvider.UserFilesAuthenticatedUser) authenticatedUser).getAuthorizedConfigurations(); } return getFilteredAuthorizedConfigurations(authenticatedUser.getCredentials()); } | /**
* Given a user who has already been authenticated, returns a Map
* containing all configurations for which that user is authorized,
* filtering those configurations using a TokenFilter and the standard
* credential tokens (like ${GUAC_USERNAME} and ${GUAC_PASSWORD}). The keys
* of this Map are Strings which uniquely identify each configuration.
*
* @param authenticatedUser
* The user whose authorized configurations are to be retrieved.
*
* @return
* A Map of all configurations authorized for use by the given user, or
* null if the user is not authorized to use any configurations.
*
* @throws GuacamoleException
* If an error occurs while retrieving configurations.
*/ | Given a user who has already been authenticated, returns a Map containing all configurations for which that user is authorized, filtering those configurations using a TokenFilter and the standard credential tokens (like ${GUAC_USERNAME} and ${GUAC_PASSWORD}). The keys of this Map are Strings which uniquely identify each configuration | getFilteredAuthorizedConfigurations | {
"repo_name": "GreenRover/guacamole-auth-userfiles",
"path": "src/main/java/net/sourceforge/guacamole/net/auth/userfiles/UserFilesAuthenticationProvider.java",
"license": "mit",
"size": 17623
} | [
"java.util.Map",
"org.apache.guacamole.GuacamoleException",
"org.apache.guacamole.net.auth.AuthenticatedUser",
"org.apache.guacamole.protocol.GuacamoleConfiguration"
] | import java.util.Map; import org.apache.guacamole.GuacamoleException; import org.apache.guacamole.net.auth.AuthenticatedUser; import org.apache.guacamole.protocol.GuacamoleConfiguration; | import java.util.*; import org.apache.guacamole.*; import org.apache.guacamole.net.auth.*; import org.apache.guacamole.protocol.*; | [
"java.util",
"org.apache.guacamole"
] | java.util; org.apache.guacamole; | 1,429,336 |
Map<Long,Payload> readPayloadByExecutionIds(Long... executionIds); | Map<Long,Payload> readPayloadByExecutionIds(Long... executionIds); | /**
*
* get the payloads for requested execution ids
*
* @param executionIds the execution ids to get payload for
* @return a map of the execution id and its payload
*/ | get the payloads for requested execution ids | readPayloadByExecutionIds | {
"repo_name": "orius123/slite",
"path": "engine/queue/score-queue-api/src/main/java/org/openscore/engine/queue/services/ExecutionQueueService.java",
"license": "apache-2.0",
"size": 3377
} | [
"java.util.Map",
"org.openscore.engine.queue.entities.Payload"
] | import java.util.Map; import org.openscore.engine.queue.entities.Payload; | import java.util.*; import org.openscore.engine.queue.entities.*; | [
"java.util",
"org.openscore.engine"
] | java.util; org.openscore.engine; | 370,396 |
public Map<String, Slice> getActiveSlicesMap() {
return activeSlices;
} | Map<String, Slice> function() { return activeSlices; } | /**
* Get the map of active slices (sliceName->Slice) for this collection.
*/ | Get the map of active slices (sliceName->Slice) for this collection | getActiveSlicesMap | {
"repo_name": "williamchengit/TestRepo",
"path": "solr/solrj/src/java/org/apache/solr/common/cloud/DocCollection.java",
"license": "apache-2.0",
"size": 3543
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,892,466 |
@ConditionalOnClass({Pcap.class, Inet4Address.class, Context.class})
//@ConditionalOnBean({Pcap.Builder.class, Inet4Address.class})
@Bean(CONTEXT_BEAN_NAME)
public Context context(@Qualifier(PCAP_BUILDER_BEAN_NAME) Pcap.Builder builder,
@Qualifier(NETMASK_BEAN_NAME) Inet4Address netmask) {
switch (properties.getPcapType()) {
case DEAD:
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Opening pcap dead handler : {}", builder);
}
builder.pcapType(Pcap.PcapType.DEAD);
break;
case OFFLINE:
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Opening pcap offline hadler : {}", builder);
}
builder.pcapType(Pcap.PcapType.OFFLINE);
break;
default:
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Opening pcap live hadler : {}", builder);
}
builder.pcapType(Pcap.PcapType.LIVE);
break;
}
Application.run(applicationName, applicationDisplayName, applicationVersion, builder);
Context context = Application.getApplicationContext();
if (properties.getFilter() != null) {
if (context.pcapCompile(properties.getFilter(),
properties.getBpfCompileMode(),
netmask.toInt()) == PcapCode.PCAP_OK) {
if (context.pcapSetFilter() != PcapCode.PCAP_OK) {
if (LOGGER.isWarnEnabled()) {
LOGGER.warn(context.pcapGetErr());
}
} else {
LOGGER.debug("Filter \'{}\' has been applied.", this.properties.getFilter());
}
} else {
if (LOGGER.isWarnEnabled()) {
LOGGER.warn(context.pcapGetErr());
}
}
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("No filter has been applied.");
}
}
return context;
} | @ConditionalOnClass({Pcap.class, Inet4Address.class, Context.class}) @Bean(CONTEXT_BEAN_NAME) Context function(@Qualifier(PCAP_BUILDER_BEAN_NAME) Pcap.Builder builder, @Qualifier(NETMASK_BEAN_NAME) Inet4Address netmask) { switch (properties.getPcapType()) { case DEAD: if (LOGGER.isDebugEnabled()) { LOGGER.debug(STR, builder); } builder.pcapType(Pcap.PcapType.DEAD); break; case OFFLINE: if (LOGGER.isDebugEnabled()) { LOGGER.debug(STR, builder); } builder.pcapType(Pcap.PcapType.OFFLINE); break; default: if (LOGGER.isDebugEnabled()) { LOGGER.debug(STR, builder); } builder.pcapType(Pcap.PcapType.LIVE); break; } Application.run(applicationName, applicationDisplayName, applicationVersion, builder); Context context = Application.getApplicationContext(); if (properties.getFilter() != null) { if (context.pcapCompile(properties.getFilter(), properties.getBpfCompileMode(), netmask.toInt()) == PcapCode.PCAP_OK) { if (context.pcapSetFilter() != PcapCode.PCAP_OK) { if (LOGGER.isWarnEnabled()) { LOGGER.warn(context.pcapGetErr()); } } else { LOGGER.debug(STR, this.properties.getFilter()); } } else { if (LOGGER.isWarnEnabled()) { LOGGER.warn(context.pcapGetErr()); } } } else { if (LOGGER.isDebugEnabled()) { LOGGER.debug(STR); } } return context; } | /**
* Jxnet application context.
* @param builder pcap builder.
* @param netmask netmask.
* @return returns application context.
*/ | Jxnet application context | context | {
"repo_name": "ardikars/Jxpcap",
"path": "jxnet-spring-boot-autoconfigure/src/main/java/com/ardikars/jxnet/spring/boot/autoconfigure/JxnetAutoConfiguration.java",
"license": "lgpl-3.0",
"size": 14577
} | [
"com.ardikars.common.net.Inet4Address",
"com.ardikars.jxnet.Pcap",
"com.ardikars.jxnet.PcapCode",
"com.ardikars.jxnet.context.Application",
"com.ardikars.jxnet.context.Context",
"org.springframework.beans.factory.annotation.Qualifier",
"org.springframework.boot.autoconfigure.condition.ConditionalOnClass",
"org.springframework.context.annotation.Bean"
] | import com.ardikars.common.net.Inet4Address; import com.ardikars.jxnet.Pcap; import com.ardikars.jxnet.PcapCode; import com.ardikars.jxnet.context.Application; import com.ardikars.jxnet.context.Context; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.context.annotation.Bean; | import com.ardikars.common.net.*; import com.ardikars.jxnet.*; import com.ardikars.jxnet.context.*; import org.springframework.beans.factory.annotation.*; import org.springframework.boot.autoconfigure.condition.*; import org.springframework.context.annotation.*; | [
"com.ardikars.common",
"com.ardikars.jxnet",
"org.springframework.beans",
"org.springframework.boot",
"org.springframework.context"
] | com.ardikars.common; com.ardikars.jxnet; org.springframework.beans; org.springframework.boot; org.springframework.context; | 437,429 |
YangNode node = manager.getDataModel("src/test/resources/ContainerSubStatementMust.yang");
YangModule yangNode = (YangModule) node;
assertThat(yangNode.getName(), is("Test"));
YangContainer yangContainer = (YangContainer) yangNode.getChild();
assertThat(yangContainer.getName(), is("interface"));
String expectedConstraint = "ifType != 'ethernet' or (ifType = 'ethernet' and ifMTU = 1500)";
List<YangMust> mustConstraintList = yangContainer.getListOfMust();
assertThat(mustConstraintList.iterator().next().getConstraint(), is(expectedConstraint));
} | YangNode node = manager.getDataModel(STR); YangModule yangNode = (YangModule) node; assertThat(yangNode.getName(), is("Test")); YangContainer yangContainer = (YangContainer) yangNode.getChild(); assertThat(yangContainer.getName(), is(STR)); String expectedConstraint = STR; List<YangMust> mustConstraintList = yangContainer.getListOfMust(); assertThat(mustConstraintList.iterator().next().getConstraint(), is(expectedConstraint)); } | /**
* Checks if must listener updates the data model.
*/ | Checks if must listener updates the data model | processContainerSubStatementMust | {
"repo_name": "VinodKumarS-Huawei/ietf96yang",
"path": "utils/yangutils/plugin/src/test/java/org/onosproject/yangutils/parser/impl/listeners/MustListenerTest.java",
"license": "apache-2.0",
"size": 2860
} | [
"java.util.List",
"org.hamcrest.core.Is",
"org.junit.Assert",
"org.junit.Test",
"org.onosproject.yangutils.datamodel.YangContainer",
"org.onosproject.yangutils.datamodel.YangModule",
"org.onosproject.yangutils.datamodel.YangMust",
"org.onosproject.yangutils.datamodel.YangNode"
] | import java.util.List; import org.hamcrest.core.Is; import org.junit.Assert; import org.junit.Test; import org.onosproject.yangutils.datamodel.YangContainer; import org.onosproject.yangutils.datamodel.YangModule; import org.onosproject.yangutils.datamodel.YangMust; import org.onosproject.yangutils.datamodel.YangNode; | import java.util.*; import org.hamcrest.core.*; import org.junit.*; import org.onosproject.yangutils.datamodel.*; | [
"java.util",
"org.hamcrest.core",
"org.junit",
"org.onosproject.yangutils"
] | java.util; org.hamcrest.core; org.junit; org.onosproject.yangutils; | 2,573,414 |
public BaseSliderView image(File file){
if(mUrl != null || mRes != 0){
throw new IllegalStateException("Call multi image function," +
"you only have permission to call it once");
}
mFile = file;
return this;
} | BaseSliderView function(File file){ if(mUrl != null mRes != 0){ throw new IllegalStateException(STR + STR); } mFile = file; return this; } | /**
* set a file as a image that will to load
* @param file
* @return
*/ | set a file as a image that will to load | image | {
"repo_name": "cymcsg/UltimateAndroid",
"path": "deprecated/UltimateAndroidNormal/UltimateAndroidUi/src/com/marshalchen/common/uimodule/slider/SliderTypes/BaseSliderView.java",
"license": "apache-2.0",
"size": 7321
} | [
"java.io.File"
] | import java.io.File; | import java.io.*; | [
"java.io"
] | java.io; | 1,967,265 |
switch ( exportType ) {
case PlainTextExportDialog.TYPE_PLAIN_OUTPUT: {
return new TextFilePrinterDriver( out, charPerInch, linesPerInch );
}
case PlainTextExportDialog.TYPE_IBM_OUTPUT: {
return new IBMCompatiblePrinterDriver( out, charPerInch, linesPerInch );
}
case PlainTextExportDialog.TYPE_EPSON9_OUTPUT: {
final Epson9PinPrinterDriver driver = new Epson9PinPrinterDriver( out, charPerInch, linesPerInch, printer );
applyFallbackEncoding( driver );
return driver;
}
case PlainTextExportDialog.TYPE_EPSON24_OUTPUT: {
final Epson24PinPrinterDriver driver = new Epson24PinPrinterDriver( out, charPerInch, linesPerInch, printer );
applyFallbackEncoding( driver );
return driver;
}
default:
throw new IllegalArgumentException();
}
} | switch ( exportType ) { case PlainTextExportDialog.TYPE_PLAIN_OUTPUT: { return new TextFilePrinterDriver( out, charPerInch, linesPerInch ); } case PlainTextExportDialog.TYPE_IBM_OUTPUT: { return new IBMCompatiblePrinterDriver( out, charPerInch, linesPerInch ); } case PlainTextExportDialog.TYPE_EPSON9_OUTPUT: { final Epson9PinPrinterDriver driver = new Epson9PinPrinterDriver( out, charPerInch, linesPerInch, printer ); applyFallbackEncoding( driver ); return driver; } case PlainTextExportDialog.TYPE_EPSON24_OUTPUT: { final Epson24PinPrinterDriver driver = new Epson24PinPrinterDriver( out, charPerInch, linesPerInch, printer ); applyFallbackEncoding( driver ); return driver; } default: throw new IllegalArgumentException(); } } | /**
* Returns the printer command set for the given report and export type.
*
* @param out
* the output stream.
* @return The printer command set.
*/ | Returns the printer command set for the given report and export type | getPrinterCommandSet | {
"repo_name": "mbatchelor/pentaho-reporting",
"path": "engine/core/src/main/java/org/pentaho/reporting/engine/classic/core/modules/gui/plaintext/PlainTextExportTask.java",
"license": "lgpl-2.1",
"size": 10733
} | [
"org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.Epson24PinPrinterDriver",
"org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.Epson9PinPrinterDriver",
"org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.IBMCompatiblePrinterDriver",
"org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.TextFilePrinterDriver"
] | import org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.Epson24PinPrinterDriver; import org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.Epson9PinPrinterDriver; import org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.IBMCompatiblePrinterDriver; import org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.TextFilePrinterDriver; | import org.pentaho.reporting.engine.classic.core.modules.output.pageable.plaintext.driver.*; | [
"org.pentaho.reporting"
] | org.pentaho.reporting; | 1,557,191 |
EClass getTraceOption_(); | EClass getTraceOption_(); | /**
* Returns the meta object for class '{@link cruise.umple.umple.TraceOption_ <em>Trace Option </em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for class '<em>Trace Option </em>'.
* @see cruise.umple.umple.TraceOption_
* @generated
*/ | Returns the meta object for class '<code>cruise.umple.umple.TraceOption_ Trace Option </code>'. | getTraceOption_ | {
"repo_name": "ahmedvc/umple",
"path": "cruise.umple.xtext/src-gen/cruise/umple/umple/UmplePackage.java",
"license": "mit",
"size": 485842
} | [
"org.eclipse.emf.ecore.EClass"
] | import org.eclipse.emf.ecore.EClass; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 392,388 |
public static void removePlayer(final Player player) {
if (player != null) {
final String name = player.getName();
MockStendlRPWorld.get().remove(player.getID());
MockStendhalRPRuleProcessor.get().getOnlinePlayers().remove(player);
if (name != null) {
WordList.getInstance().unregisterSubjectName(name);
}
}
} | static void function(final Player player) { if (player != null) { final String name = player.getName(); MockStendlRPWorld.get().remove(player.getID()); MockStendhalRPRuleProcessor.get().getOnlinePlayers().remove(player); if (name != null) { WordList.getInstance().unregisterSubjectName(name); } } } | /**
* Remove a player from world and rule processor.
*
* @param player
*/ | Remove a player from world and rule processor | removePlayer | {
"repo_name": "AntumDeluge/arianne-stendhal",
"path": "tests/utilities/PlayerTestHelper.java",
"license": "gpl-2.0",
"size": 10785
} | [
"games.stendhal.common.parser.WordList",
"games.stendhal.server.entity.player.Player",
"games.stendhal.server.maps.MockStendhalRPRuleProcessor",
"games.stendhal.server.maps.MockStendlRPWorld"
] | import games.stendhal.common.parser.WordList; import games.stendhal.server.entity.player.Player; import games.stendhal.server.maps.MockStendhalRPRuleProcessor; import games.stendhal.server.maps.MockStendlRPWorld; | import games.stendhal.common.parser.*; import games.stendhal.server.entity.player.*; import games.stendhal.server.maps.*; | [
"games.stendhal.common",
"games.stendhal.server"
] | games.stendhal.common; games.stendhal.server; | 1,954,928 |
@Override
public List<TQueryRepositoryBean> loadByCategory(Integer categoryID) {
List<TQueryRepository> queries = null;
Criteria criteria = new Criteria();
criteria.add(CATEGORYKEY, categoryID);
try {
queries = doSelect(criteria);
} catch (TorqueException e) {
LOGGER.error("Loading the queries by category " + categoryID +
" failed with " + e.getMessage(), e);
}
return convertTorqueListToBeanList(queries);
}
| List<TQueryRepositoryBean> function(Integer categoryID) { List<TQueryRepository> queries = null; Criteria criteria = new Criteria(); criteria.add(CATEGORYKEY, categoryID); try { queries = doSelect(criteria); } catch (TorqueException e) { LOGGER.error(STR + categoryID + STR + e.getMessage(), e); } return convertTorqueListToBeanList(queries); } | /**
* Loads the queries for a category
* @param categoryID
* @return
*/ | Loads the queries for a category | loadByCategory | {
"repo_name": "trackplus/Genji",
"path": "src/main/java/com/aurel/track/persist/TQueryRepositoryPeer.java",
"license": "gpl-3.0",
"size": 17976
} | [
"com.aurel.track.beans.TQueryRepositoryBean",
"java.util.List",
"org.apache.torque.TorqueException",
"org.apache.torque.util.Criteria"
] | import com.aurel.track.beans.TQueryRepositoryBean; import java.util.List; import org.apache.torque.TorqueException; import org.apache.torque.util.Criteria; | import com.aurel.track.beans.*; import java.util.*; import org.apache.torque.*; import org.apache.torque.util.*; | [
"com.aurel.track",
"java.util",
"org.apache.torque"
] | com.aurel.track; java.util; org.apache.torque; | 2,047,998 |
public PacketCaptureInner withFilters(List<PacketCaptureFilter> filters) {
this.filters = filters;
return this;
} | PacketCaptureInner function(List<PacketCaptureFilter> filters) { this.filters = filters; return this; } | /**
* Set a list of packet capture filters.
*
* @param filters the filters value to set
* @return the PacketCaptureInner object itself.
*/ | Set a list of packet capture filters | withFilters | {
"repo_name": "navalev/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_09_01/src/main/java/com/microsoft/azure/management/network/v2019_09_01/implementation/PacketCaptureInner.java",
"license": "mit",
"size": 5288
} | [
"com.microsoft.azure.management.network.v2019_09_01.PacketCaptureFilter",
"java.util.List"
] | import com.microsoft.azure.management.network.v2019_09_01.PacketCaptureFilter; import java.util.List; | import com.microsoft.azure.management.network.v2019_09_01.*; import java.util.*; | [
"com.microsoft.azure",
"java.util"
] | com.microsoft.azure; java.util; | 839,449 |
public static void dumpTestLog(String logFileName) throws IOException {
Path logPath = LoggingUtil.getInServerLogPath(logFileName);
dumpTestLog(logPath);
} | static void function(String logFileName) throws IOException { Path logPath = LoggingUtil.getInServerLogPath(logFileName); dumpTestLog(logPath); } | /**
* Helper method to dump the contents of a log to stdout.
* @param logFileName the name of the log file
*/ | Helper method to dump the contents of a log to stdout | dumpTestLog | {
"repo_name": "jstourac/wildfly",
"path": "testsuite/shared/src/main/java/org/jboss/as/test/shared/util/LoggingUtil.java",
"license": "lgpl-2.1",
"size": 6530
} | [
"java.io.IOException",
"java.nio.file.Path"
] | import java.io.IOException; import java.nio.file.Path; | import java.io.*; import java.nio.file.*; | [
"java.io",
"java.nio"
] | java.io; java.nio; | 1,560,707 |
protected int nextInStartTag() throws IOException, XMLException {
switch (current) {
case 0x9:
case 0xA:
case 0xD:
case 0x20:
do {
nextChar();
} while (current != -1 && XMLUtilities.isXMLSpace((char)current));
return LexicalUnits.S;
case '/':
if (nextChar() != '>') {
throw createXMLException("malformed.tag.end");
}
nextChar();
context = (--depth == 0) ? TOP_LEVEL_CONTEXT : CONTENT_CONTEXT;
return LexicalUnits.EMPTY_ELEMENT_END;
case '>':
nextChar();
context = CONTENT_CONTEXT;
return LexicalUnits.END_CHAR;
case '=':
nextChar();
return LexicalUnits.EQ;
case '"':
attrDelimiter = '"';
nextChar();
for (;;) {
switch (current) {
case '"':
nextChar();
return LexicalUnits.STRING;
case '&':
context = ATTRIBUTE_VALUE_CONTEXT;
return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
case '<':
throw createXMLException("invalid.character");
case -1:
throw createXMLException("unexpected.eof");
}
nextChar();
}
case '\'':
attrDelimiter = '\'';
nextChar();
for (;;) {
switch (current) {
case '\'':
nextChar();
return LexicalUnits.STRING;
case '&':
context = ATTRIBUTE_VALUE_CONTEXT;
return LexicalUnits.FIRST_ATTRIBUTE_FRAGMENT;
case '<':
throw createXMLException("invalid.character");
case -1:
throw createXMLException("unexpected.eof");
}
nextChar();
}
default:
return readName(LexicalUnits.NAME);
}
} | int function() throws IOException, XMLException { switch (current) { case 0x9: case 0xA: case 0xD: case 0x20: do { nextChar(); } while (current != -1 && XMLUtilities.isXMLSpace((char)current)); return LexicalUnits.S; case '/': if (nextChar() != '>') { throw createXMLException(STR); } nextChar(); context = (--depth == 0) ? TOP_LEVEL_CONTEXT : CONTENT_CONTEXT; return LexicalUnits.EMPTY_ELEMENT_END; case '>': nextChar(); context = CONTENT_CONTEXT; return LexicalUnits.END_CHAR; case '=': nextChar(); return LexicalUnits.EQ; case 'STR'; nextChar(); for (;;) { switch (current) { case 'STRinvalid.characterSTRunexpected.eofSTRinvalid.characterSTRunexpected.eof"); } nextChar(); } default: return readName(LexicalUnits.NAME); } } | /**
* Returns the next lexical unit in the context of a start tag.
*/ | Returns the next lexical unit in the context of a start tag | nextInStartTag | {
"repo_name": "apache/batik",
"path": "batik-xml/src/main/java/org/apache/batik/xml/XMLScanner.java",
"license": "apache-2.0",
"size": 60774
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 562,660 |
public EntityManager getCurrentSession(); | EntityManager function(); | /**
* Gets the current database session
* @return database session
*/ | Gets the current database session | getCurrentSession | {
"repo_name": "hrh5775/LibraryManager",
"path": "Persistence/src/main/java/at/team2/database_wrapper/interfaces/Session.java",
"license": "gpl-3.0",
"size": 660
} | [
"javax.persistence.EntityManager"
] | import javax.persistence.EntityManager; | import javax.persistence.*; | [
"javax.persistence"
] | javax.persistence; | 2,275,409 |
public GrpcServiceBuilder supportedSerializationFormats(SerializationFormat... formats) {
return supportedSerializationFormats(ImmutableSet.copyOf(requireNonNull(formats, "formats")));
} | GrpcServiceBuilder function(SerializationFormat... formats) { return supportedSerializationFormats(ImmutableSet.copyOf(requireNonNull(formats, STR))); } | /**
* Sets the {@link SerializationFormat}s supported by this server. If not set, defaults to supporting binary
* protobuf formats. Enabling JSON can be useful, e.g., when migrating existing JSON services to gRPC.
*/ | Sets the <code>SerializationFormat</code>s supported by this server. If not set, defaults to supporting binary protobuf formats. Enabling JSON can be useful, e.g., when migrating existing JSON services to gRPC | supportedSerializationFormats | {
"repo_name": "jonefeewang/armeria",
"path": "grpc/src/main/java/com/linecorp/armeria/server/grpc/GrpcServiceBuilder.java",
"license": "apache-2.0",
"size": 8151
} | [
"com.google.common.collect.ImmutableSet",
"com.linecorp.armeria.common.SerializationFormat"
] | import com.google.common.collect.ImmutableSet; import com.linecorp.armeria.common.SerializationFormat; | import com.google.common.collect.*; import com.linecorp.armeria.common.*; | [
"com.google.common",
"com.linecorp.armeria"
] | com.google.common; com.linecorp.armeria; | 2,823,056 |
public static QueryContinuousRequest decodeTuple(final ByteBuffer encodedPackage) throws PacketEncodeException, IOException {
final short sequenceNumber = NetworkPackageDecoder.getRequestIDFromRequestPackage(encodedPackage);
final boolean decodeResult = NetworkPackageDecoder.validateRequestPackageHeader(encodedPackage, NetworkConst.REQUEST_TYPE_QUERY);
if(decodeResult == false) {
throw new PacketEncodeException("Unable to decode package");
}
final byte queryType = encodedPackage.get();
if(queryType != NetworkConst.REQUEST_QUERY_CONTINUOUS_BBOX) {
throw new PacketEncodeException("Wrong query type: " + queryType + " required type is: " + NetworkConst.REQUEST_QUERY_CONTINUOUS_BBOX);
}
// 1 unused byte
encodedPackage.get();
final int queryPlanLength = encodedPackage.getInt();
final byte[] queryPlanBytes = new byte[queryPlanLength];
encodedPackage.get(queryPlanBytes, 0, queryPlanBytes.length);
final String queryPlanString = new String(queryPlanBytes);
if(encodedPackage.remaining() != 0) {
throw new PacketEncodeException("Some bytes are left after decoding: " + encodedPackage.remaining());
}
final RoutingHeader routingHeader = NetworkPackageDecoder.getRoutingHeaderFromRequestPackage(encodedPackage);
try {
final ContinuousQueryPlan queryPlan = ContinuousQueryPlanSerializer.fromJSON(queryPlanString);
return new QueryContinuousRequest(sequenceNumber, routingHeader, queryPlan);
} catch (BBoxDBException e) {
throw new PacketEncodeException(e);
}
} | static QueryContinuousRequest function(final ByteBuffer encodedPackage) throws PacketEncodeException, IOException { final short sequenceNumber = NetworkPackageDecoder.getRequestIDFromRequestPackage(encodedPackage); final boolean decodeResult = NetworkPackageDecoder.validateRequestPackageHeader(encodedPackage, NetworkConst.REQUEST_TYPE_QUERY); if(decodeResult == false) { throw new PacketEncodeException(STR); } final byte queryType = encodedPackage.get(); if(queryType != NetworkConst.REQUEST_QUERY_CONTINUOUS_BBOX) { throw new PacketEncodeException(STR + queryType + STR + NetworkConst.REQUEST_QUERY_CONTINUOUS_BBOX); } encodedPackage.get(); final int queryPlanLength = encodedPackage.getInt(); final byte[] queryPlanBytes = new byte[queryPlanLength]; encodedPackage.get(queryPlanBytes, 0, queryPlanBytes.length); final String queryPlanString = new String(queryPlanBytes); if(encodedPackage.remaining() != 0) { throw new PacketEncodeException(STR + encodedPackage.remaining()); } final RoutingHeader routingHeader = NetworkPackageDecoder.getRoutingHeaderFromRequestPackage(encodedPackage); try { final ContinuousQueryPlan queryPlan = ContinuousQueryPlanSerializer.fromJSON(queryPlanString); return new QueryContinuousRequest(sequenceNumber, routingHeader, queryPlan); } catch (BBoxDBException e) { throw new PacketEncodeException(e); } } | /**
* Decode the encoded package into a object
*
* @param encodedPackage
* @return
* @throws PacketEncodeException
* @throws IOException
*/ | Decode the encoded package into a object | decodeTuple | {
"repo_name": "jnidzwetzki/bboxdb",
"path": "bboxdb-server/src/main/java/org/bboxdb/network/packets/request/QueryContinuousRequest.java",
"license": "apache-2.0",
"size": 4606
} | [
"java.io.IOException",
"java.nio.ByteBuffer",
"org.bboxdb.misc.BBoxDBException",
"org.bboxdb.network.NetworkConst",
"org.bboxdb.network.NetworkPackageDecoder",
"org.bboxdb.network.packets.PacketEncodeException",
"org.bboxdb.network.routing.RoutingHeader",
"org.bboxdb.query.ContinuousQueryPlan",
"org.bboxdb.query.ContinuousQueryPlanSerializer"
] | import java.io.IOException; import java.nio.ByteBuffer; import org.bboxdb.misc.BBoxDBException; import org.bboxdb.network.NetworkConst; import org.bboxdb.network.NetworkPackageDecoder; import org.bboxdb.network.packets.PacketEncodeException; import org.bboxdb.network.routing.RoutingHeader; import org.bboxdb.query.ContinuousQueryPlan; import org.bboxdb.query.ContinuousQueryPlanSerializer; | import java.io.*; import java.nio.*; import org.bboxdb.misc.*; import org.bboxdb.network.*; import org.bboxdb.network.packets.*; import org.bboxdb.network.routing.*; import org.bboxdb.query.*; | [
"java.io",
"java.nio",
"org.bboxdb.misc",
"org.bboxdb.network",
"org.bboxdb.query"
] | java.io; java.nio; org.bboxdb.misc; org.bboxdb.network; org.bboxdb.query; | 1,463,661 |
@RequestMapping(value = "/{groupId}/users", method = RequestMethod.GET)
public ResponseEntity<Set<User>> findUsersOfGroup(@PathVariable Integer groupId) {
try {
Set<User> groupUsersSet = this.service.getUsersOfGroup(groupId);
return new ResponseEntity<Set<User>>(groupUsersSet, HttpStatus.OK);
} catch (Exception e) {
LOG.error("Error finding group with id " + groupId + ": "
+ e.getMessage());
return new ResponseEntity<Set<User>>(HttpStatus.NOT_FOUND);
}
} | @RequestMapping(value = STR, method = RequestMethod.GET) ResponseEntity<Set<User>> function(@PathVariable Integer groupId) { try { Set<User> groupUsersSet = this.service.getUsersOfGroup(groupId); return new ResponseEntity<Set<User>>(groupUsersSet, HttpStatus.OK); } catch (Exception e) { LOG.error(STR + groupId + STR + e.getMessage()); return new ResponseEntity<Set<User>>(HttpStatus.NOT_FOUND); } } | /**
* Get the users of a specific group.
*
* @param groupId
* @return
*/ | Get the users of a specific group | findUsersOfGroup | {
"repo_name": "buehner/shogun2",
"path": "src/shogun-core-main/src/main/java/de/terrestris/shoguncore/rest/UserGroupRestController.java",
"license": "apache-2.0",
"size": 2455
} | [
"de.terrestris.shoguncore.model.User",
"java.util.Set",
"org.springframework.http.HttpStatus",
"org.springframework.http.ResponseEntity",
"org.springframework.web.bind.annotation.PathVariable",
"org.springframework.web.bind.annotation.RequestMapping",
"org.springframework.web.bind.annotation.RequestMethod"
] | import de.terrestris.shoguncore.model.User; import java.util.Set; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; | import de.terrestris.shoguncore.model.*; import java.util.*; import org.springframework.http.*; import org.springframework.web.bind.annotation.*; | [
"de.terrestris.shoguncore",
"java.util",
"org.springframework.http",
"org.springframework.web"
] | de.terrestris.shoguncore; java.util; org.springframework.http; org.springframework.web; | 558,049 |
List<SpaceInstanceStatistics> getTimelineFromTimestamp( long fromTimestamp ); | List<SpaceInstanceStatistics> getTimelineFromTimestamp( long fromTimestamp ); | /**
* Returns the timeline (from newest to oldest) history statistics, including this one.
* @param fromTimestamp brings statistics starting from this timestamp ( not included )
* @since 9.6
*/ | Returns the timeline (from newest to oldest) history statistics, including this one | getTimelineFromTimestamp | {
"repo_name": "Gigaspaces/xap-openspaces",
"path": "src/main/java/org/openspaces/admin/space/SpaceInstanceStatistics.java",
"license": "apache-2.0",
"size": 4477
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 800,865 |
default Optional<Boolean> doesReduceMotion() {
return Optional.ofNullable(toSafeBoolean(getCapability(REDUCE_MOTION_OPTION)));
} | default Optional<Boolean> doesReduceMotion() { return Optional.ofNullable(toSafeBoolean(getCapability(REDUCE_MOTION_OPTION))); } | /**
* Get whether to reduce motion accessibility preference.
*
* @return True or false.
*/ | Get whether to reduce motion accessibility preference | doesReduceMotion | {
"repo_name": "appium/java-client",
"path": "src/main/java/io/appium/java_client/ios/options/simulator/SupportsReduceMotionOption.java",
"license": "apache-2.0",
"size": 2064
} | [
"java.util.Optional"
] | import java.util.Optional; | import java.util.*; | [
"java.util"
] | java.util; | 853,051 |
public DataNode setUnit_cell_volumeScalar(Double unit_cell_volume); | DataNode function(Double unit_cell_volume); | /**
* Volume of the unit cell
* <p>
* <b>Type:</b> NX_FLOAT
* <b>Units:</b> NX_VOLUME
* <b>Dimensions:</b> 1: n_comp;
* </p>
*
* @param unit_cell_volume the unit_cell_volume
*/ | Volume of the unit cell Type: NX_FLOAT Units: NX_VOLUME Dimensions: 1: n_comp; | setUnit_cell_volumeScalar | {
"repo_name": "xen-0/dawnsci",
"path": "org.eclipse.dawnsci.nexus/autogen/org/eclipse/dawnsci/nexus/NXsample.java",
"license": "epl-1.0",
"size": 49075
} | [
"org.eclipse.dawnsci.analysis.api.tree.DataNode"
] | import org.eclipse.dawnsci.analysis.api.tree.DataNode; | import org.eclipse.dawnsci.analysis.api.tree.*; | [
"org.eclipse.dawnsci"
] | org.eclipse.dawnsci; | 1,097,820 |
public static NativeObject jsFunction_getOpenAPIDefinitionResource(Context cx, Scriptable thisObj,
Object[] args, Function funObj) throws APIManagementException, ScriptException {
if (args == null || args.length == 0) {
handleException("Invalid number of input parameters.");
}
NativeObject apiData = (NativeObject) args[0];
String provider = String.valueOf(apiData.get("provider", apiData));
String name = (String) apiData.get("apiName", apiData);
String version = (String) apiData.get("version", apiData);
if (provider != null) {
provider = APIUtil.replaceEmailDomain(provider);
}
provider = (provider != null ? provider.trim() : null);
name = (name != null ? name.trim() : null);
version = (version != null ? version.trim() : null);
APIIdentifier apiId = new APIIdentifier(provider, name, version);
boolean isTenantFlowStarted = false;
String apiJSON = null;
try {
String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(provider));
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
APIProvider apiProvider = getAPIProvider(thisObj);
if (apiProvider.getAPI(apiId) == null) {
return null;
}
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
int tenantId;
UserRegistry registry;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain);
registry = registryService.getGovernanceSystemRegistry(tenantId);
apiJSON = OASParserUtil.getAPIDefinition(apiId, registry); //apiProvider.getSwagger12Definition(apiId);
} catch (RegistryException e) {
handleException("Error when create registry instance ", e);
} catch (UserStoreException e) {
handleException("Error while reading tenant information ", e);
}
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
NativeObject row = new NativeObject();
row.put("swagger", row, apiJSON);
return row;
} | static NativeObject function(Context cx, Scriptable thisObj, Object[] args, Function funObj) throws APIManagementException, ScriptException { if (args == null args.length == 0) { handleException(STR); } NativeObject apiData = (NativeObject) args[0]; String provider = String.valueOf(apiData.get(STR, apiData)); String name = (String) apiData.get(STR, apiData); String version = (String) apiData.get(STR, apiData); if (provider != null) { provider = APIUtil.replaceEmailDomain(provider); } provider = (provider != null ? provider.trim() : null); name = (name != null ? name.trim() : null); version = (version != null ? version.trim() : null); APIIdentifier apiId = new APIIdentifier(provider, name, version); boolean isTenantFlowStarted = false; String apiJSON = null; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(provider)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } APIProvider apiProvider = getAPIProvider(thisObj); if (apiProvider.getAPI(apiId) == null) { return null; } RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService(); int tenantId; UserRegistry registry; try { tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain); registry = registryService.getGovernanceSystemRegistry(tenantId); apiJSON = OASParserUtil.getAPIDefinition(apiId, registry); } catch (RegistryException e) { handleException(STR, e); } catch (UserStoreException e) { handleException(STR, e); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } NativeObject row = new NativeObject(); row.put(STR, row, apiJSON); return row; } | /**
* Returns the Swagger12 definition //todo this actually returns swagger v2.0, create a new method
*
* @param cx
* @param thisObj
* @param args
* @param funObj
* @return
* @throws APIManagementException
* @throws ScriptException
*/ | Returns the Swagger12 definition //todo this actually returns swagger v2.0, create a new method | jsFunction_getOpenAPIDefinitionResource | {
"repo_name": "nuwand/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.hostobjects/src/main/java/org/wso2/carbon/apimgt/hostobjects/APIProviderHostObject.java",
"license": "apache-2.0",
"size": 236142
} | [
"org.jaggeryjs.scriptengine.exceptions.ScriptException",
"org.mozilla.javascript.Context",
"org.mozilla.javascript.Function",
"org.mozilla.javascript.NativeObject",
"org.mozilla.javascript.Scriptable",
"org.wso2.carbon.apimgt.api.APIManagementException",
"org.wso2.carbon.apimgt.api.APIProvider",
"org.wso2.carbon.apimgt.api.model.APIIdentifier",
"org.wso2.carbon.apimgt.hostobjects.internal.ServiceReferenceHolder",
"org.wso2.carbon.apimgt.impl.definitions.OASParserUtil",
"org.wso2.carbon.apimgt.impl.utils.APIUtil",
"org.wso2.carbon.context.PrivilegedCarbonContext",
"org.wso2.carbon.registry.core.exceptions.RegistryException",
"org.wso2.carbon.registry.core.service.RegistryService",
"org.wso2.carbon.registry.core.session.UserRegistry",
"org.wso2.carbon.user.api.UserStoreException",
"org.wso2.carbon.utils.multitenancy.MultitenantConstants",
"org.wso2.carbon.utils.multitenancy.MultitenantUtils"
] | import org.jaggeryjs.scriptengine.exceptions.ScriptException; import org.mozilla.javascript.Context; import org.mozilla.javascript.Function; import org.mozilla.javascript.NativeObject; import org.mozilla.javascript.Scriptable; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.hostobjects.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; | import org.jaggeryjs.scriptengine.exceptions.*; import org.mozilla.javascript.*; import org.wso2.carbon.apimgt.api.*; import org.wso2.carbon.apimgt.api.model.*; import org.wso2.carbon.apimgt.hostobjects.internal.*; import org.wso2.carbon.apimgt.impl.definitions.*; import org.wso2.carbon.apimgt.impl.utils.*; import org.wso2.carbon.context.*; import org.wso2.carbon.registry.core.exceptions.*; import org.wso2.carbon.registry.core.service.*; import org.wso2.carbon.registry.core.session.*; import org.wso2.carbon.user.api.*; import org.wso2.carbon.utils.multitenancy.*; | [
"org.jaggeryjs.scriptengine",
"org.mozilla.javascript",
"org.wso2.carbon"
] | org.jaggeryjs.scriptengine; org.mozilla.javascript; org.wso2.carbon; | 2,236,553 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.