method (string, 13-441k chars) | clean_method (string, 7-313k chars) | doc (string, 17-17.3k chars) | comment (string, 3-1.42k chars) | method_name (string, 1-273 chars) | extra (dict) | imports (sequence) | imports_info (string, 19-34.8k chars) | cluster_imports_info (string, 15-3.66k chars) | libraries (sequence) | libraries_info (string, 6-661 chars) | id (int64, 0-2.92M)
---|---|---|---|---|---|---|---|---|---|---|---|
public static UnlockObjectTask tryLockDSOnly(DeploymentSpec ds) throws VmidcBrokerInvalidRequestException,
InterruptedException {
return lockObject(new LockObjectReference(ds.getId(), ds.getName(), ObjectType.DEPLOYMENT_SPEC),
LockType.WRITE_LOCK, true);
} | static UnlockObjectTask function(DeploymentSpec ds) throws VmidcBrokerInvalidRequestException, InterruptedException { return lockObject(new LockObjectReference(ds.getId(), ds.getName(), ObjectType.DEPLOYMENT_SPEC), LockType.WRITE_LOCK, true); } | /**
* Gets a write lock on the DS only. Throws exception if lock cannot be obtained
*/ | Gets a write lock on the DS only. Throws exception if lock cannot be obtained | tryLockDSOnly | {
"repo_name": "arvindn05/osc-core",
"path": "osc-server/src/main/java/org/osc/core/broker/service/LockUtil.java",
"license": "apache-2.0",
"size": 11738
} | [
"org.osc.core.broker.job.lock.LockObjectReference",
"org.osc.core.broker.job.lock.LockRequest",
"org.osc.core.broker.model.entities.virtualization.openstack.DeploymentSpec",
"org.osc.core.broker.service.exceptions.VmidcBrokerInvalidRequestException",
"org.osc.core.broker.service.tasks.conformance.UnlockObjectTask"
] | import org.osc.core.broker.job.lock.LockObjectReference; import org.osc.core.broker.job.lock.LockRequest; import org.osc.core.broker.model.entities.virtualization.openstack.DeploymentSpec; import org.osc.core.broker.service.exceptions.VmidcBrokerInvalidRequestException; import org.osc.core.broker.service.tasks.conformance.UnlockObjectTask; | import org.osc.core.broker.job.lock.*; import org.osc.core.broker.model.entities.virtualization.openstack.*; import org.osc.core.broker.service.exceptions.*; import org.osc.core.broker.service.tasks.conformance.*; | [
"org.osc.core"
] | org.osc.core; | 2,377,764 |
public static CSVReader getReader(File file) throws IOException
{
return new CSVReader(Files.newReader(file, Charset.defaultCharset()), CSVParser.DEFAULT_SEPARATOR, CSVParser.DEFAULT_QUOTE_CHARACTER, CSVParser.NULL_CHARACTER, 1, false);
} | static CSVReader function(File file) throws IOException { return new CSVReader(Files.newReader(file, Charset.defaultCharset()), CSVParser.DEFAULT_SEPARATOR, CSVParser.DEFAULT_QUOTE_CHARACTER, CSVParser.NULL_CHARACTER, 1, false); } | /**
 * This method constructs, configures and returns a CSV reader instance to be used to read MCP CSV files.
 * @param file File to read
 * @return a configured CSVReader
 * @throws IOException Propagated from opening the file
 */ | This method constructs, configures and returns a CSV reader instance to be used to read MCP CSV files | getReader | {
"repo_name": "kenzierocks/ForgeGradle",
"path": "src/main/java/net/minecraftforge/gradle/common/Constants.java",
"license": "lgpl-2.1",
"size": 20777
} | [
"au.com.bytecode.opencsv.CSVParser",
"au.com.bytecode.opencsv.CSVReader",
"com.google.common.io.Files",
"java.io.File",
"java.io.IOException",
"java.nio.charset.Charset"
] | import au.com.bytecode.opencsv.CSVParser; import au.com.bytecode.opencsv.CSVReader; import com.google.common.io.Files; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; | import au.com.bytecode.opencsv.*; import com.google.common.io.*; import java.io.*; import java.nio.charset.*; | [
"au.com.bytecode",
"com.google.common",
"java.io",
"java.nio"
] | au.com.bytecode; com.google.common; java.io; java.nio; | 2,893,491 |
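
A minimal usage sketch for the reader above; it assumes the enclosing `Constants` class (package inferred from the row's `path` field) and opencsv 2.x are on the classpath, and the file path is illustrative only:

```java
import au.com.bytecode.opencsv.CSVReader;
import java.io.File;
import java.io.IOException;
import net.minecraftforge.gradle.common.Constants; // package inferred from the row's "path" field

public class CsvDump {
    public static void main(String[] args) throws IOException {
        File file = new File("methods.csv"); // illustrative path, not taken from the dataset row
        CSVReader reader = Constants.getReader(file); // the "1" argument above skips the header line
        try {
            String[] fields;
            while ((fields = reader.readNext()) != null) { // opencsv returns null at end of input
                System.out.println(String.join(",", fields));
            }
        } finally {
            reader.close();
        }
    }
}
```
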
@Test
public void testChangeDefaultGroup() throws Exception {
// Create 2 groups and add a user
String uuid1 = UUID.randomUUID().toString();
ExperimenterGroup g1 = new ExperimenterGroupI();
g1.setName(omero.rtypes.rstring(uuid1));
g1.setLdap(omero.rtypes.rbool(false));
g1.getDetails().setPermissions(new PermissionsI("rw----"));
String uuid2 = UUID.randomUUID().toString();
ExperimenterGroup g2 = new ExperimenterGroupI();
g2.setName(omero.rtypes.rstring(uuid2));
g2.setLdap(omero.rtypes.rbool(false));
g2.getDetails().setPermissions(new PermissionsI("rw----"));
IAdminPrx svc = root.getSession().getAdminService();
IQueryPrx query = root.getSession().getQueryService();
long id1 = svc.createGroup(g1);
long id2 = svc.createGroup(g2);
ParametersI p = new ParametersI();
p.addId(id1);
ExperimenterGroup eg1 = (ExperimenterGroup) query.findByQuery(
"select distinct g from ExperimenterGroup g where g.id = :id",
p);
p = new ParametersI();
p.addId(id2);
ExperimenterGroup eg2 = (ExperimenterGroup) query.findByQuery(
"select distinct g from ExperimenterGroup g where g.id = :id",
p);
Experimenter e = createExperimenterI(uuid1, "user", "user");
List<ExperimenterGroup> groups = new ArrayList<ExperimenterGroup>();
// method tested elsewhere
ExperimenterGroup userGroup = svc.lookupGroup(roles.userGroupName);
groups.add(eg1);
groups.add(eg2);
groups.add(userGroup);
long id = svc.createExperimenter(e, eg1, groups);
e = svc.lookupExperimenter(uuid1);
List<GroupExperimenterMap> links = e.copyGroupExperimenterMap();
Assert.assertEquals(groups.get(0).getId().getValue(), eg1.getId().getValue());
svc.setDefaultGroup(e, eg2);
e = svc.lookupExperimenter(uuid1);
links = e.copyGroupExperimenterMap();
groups = new ArrayList<ExperimenterGroup>();
for (GroupExperimenterMap link : links) {
groups.add(link.getParent());
}
Assert.assertEquals(groups.get(0).getId().getValue(), eg2.getId().getValue());
} | void function() throws Exception { String uuid1 = UUID.randomUUID().toString(); ExperimenterGroup g1 = new ExperimenterGroupI(); g1.setName(omero.rtypes.rstring(uuid1)); g1.setLdap(omero.rtypes.rbool(false)); g1.getDetails().setPermissions(new PermissionsI(STR)); String uuid2 = UUID.randomUUID().toString(); ExperimenterGroup g2 = new ExperimenterGroupI(); g2.setName(omero.rtypes.rstring(uuid2)); g2.setLdap(omero.rtypes.rbool(false)); g2.getDetails().setPermissions(new PermissionsI(STR)); IAdminPrx svc = root.getSession().getAdminService(); IQueryPrx query = root.getSession().getQueryService(); long id1 = svc.createGroup(g1); long id2 = svc.createGroup(g2); ParametersI p = new ParametersI(); p.addId(id1); ExperimenterGroup eg1 = (ExperimenterGroup) query.findByQuery( STR, p); p = new ParametersI(); p.addId(id2); ExperimenterGroup eg2 = (ExperimenterGroup) query.findByQuery( STR, p); Experimenter e = createExperimenterI(uuid1, "user", "user"); List<ExperimenterGroup> groups = new ArrayList<ExperimenterGroup>(); ExperimenterGroup userGroup = svc.lookupGroup(roles.userGroupName); groups.add(eg1); groups.add(eg2); groups.add(userGroup); long id = svc.createExperimenter(e, eg1, groups); e = svc.lookupExperimenter(uuid1); List<GroupExperimenterMap> links = e.copyGroupExperimenterMap(); Assert.assertEquals(groups.get(0).getId().getValue(), eg1.getId().getValue()); svc.setDefaultGroup(e, eg2); e = svc.lookupExperimenter(uuid1); links = e.copyGroupExperimenterMap(); groups = new ArrayList<ExperimenterGroup>(); for (GroupExperimenterMap link : links) { groups.add(link.getParent()); } Assert.assertEquals(groups.get(0).getId().getValue(), eg2.getId().getValue()); } | /**
* Tests the default group of an experimenter.
*
* @throws Exception
* Thrown if an error occurred.
*/ | Tests the default group of an experimenter | testChangeDefaultGroup | {
"repo_name": "simleo/openmicroscopy",
"path": "components/tools/OmeroJava/test/integration/AdminServiceTest.java",
"license": "gpl-2.0",
"size": 85916
} | [
"java.util.ArrayList",
"java.util.List",
"java.util.UUID",
"org.testng.Assert"
] | import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.testng.Assert; | import java.util.*; import org.testng.*; | [
"java.util",
"org.testng"
] | java.util; org.testng; | 2,247,121 |
@Override
final Geometry tryMergePolylines(Object next, final Iterator<?> polylines) {
if (!(next instanceof MultiPath || next instanceof Point)) {
return null;
}
final Polyline path = new Polyline();
boolean lineTo = false;
for (;; next = polylines.next()) {
if (next != null) {
if (next instanceof Point) {
final double x = ((Point) next).getX();
final double y = ((Point) next).getY();
if (Double.isNaN(x) || Double.isNaN(y)) {
lineTo = false;
} else if (lineTo) {
path.lineTo(x, y);
} else {
path.startPath(x, y);
lineTo = true;
}
} else {
path.add((MultiPath) next, false);
lineTo = false;
}
}
if (!polylines.hasNext()) { // Should be part of the 'for' instruction, but we need
break; // to skip this condition during the first iteration.
}
}
return path;
} | final Geometry tryMergePolylines(Object next, final Iterator<?> polylines) { if (!(next instanceof MultiPath next instanceof Point)) { return null; } final Polyline path = new Polyline(); boolean lineTo = false; for (;; next = polylines.next()) { if (next != null) { if (next instanceof Point) { final double x = ((Point) next).getX(); final double y = ((Point) next).getY(); if (Double.isNaN(x) Double.isNaN(y)) { lineTo = false; } else if (lineTo) { path.lineTo(x, y); } else { path.startPath(x, y); lineTo = true; } } else { path.add((MultiPath) next, false); lineTo = false; } } if (!polylines.hasNext()) { break; } } return path; } | /**
* Merges a sequence of points or paths if the first instance is an implementation of this library.
*
* @throws ClassCastException if an element in the iterator is not a JTS geometry.
*/ | Merges a sequence of points or paths if the first instance is an implementation of this library | tryMergePolylines | {
"repo_name": "Geomatys/sis",
"path": "core/sis-feature/src/main/java/org/apache/sis/internal/feature/ESRI.java",
"license": "apache-2.0",
"size": 7044
} | [
"com.esri.core.geometry.Geometry",
"com.esri.core.geometry.MultiPath",
"com.esri.core.geometry.Point",
"com.esri.core.geometry.Polyline",
"java.util.Iterator"
] | import com.esri.core.geometry.Geometry; import com.esri.core.geometry.MultiPath; import com.esri.core.geometry.Point; import com.esri.core.geometry.Polyline; import java.util.Iterator; | import com.esri.core.geometry.*; import java.util.*; | [
"com.esri.core",
"java.util"
] | com.esri.core; java.util; | 2,043,568 |
public static TimeClassBreaksAger create(Info[] infos, TimeUnits timeUnits) {
return _create(Util.objectArrayToJSO(infos), timeUnits.getValue());
}
| static TimeClassBreaksAger function(Info[] infos, TimeUnits timeUnits) { return _create(Util.objectArrayToJSO(infos), timeUnits.getValue()); } | /**
 * Creates a new TimeClassBreaksAgerObject with the specified time breaks information.
 *
 * @param infos - Each element in the array is an object that describes the class breaks information.
 * @param timeUnits - The unit in which the minimum and maximum break values are measured. Default is days.
 * @return TimeClassBreaksAger
 */ | Creates a new TimeClassBreaksAgerObject with the specified time breaks information | create | {
"repo_name": "CSTARS/gwt-esri",
"path": "src/main/java/edu/ucdavis/cstars/client/renderer/TimeClassBreaksAger.java",
"license": "lgpl-3.0",
"size": 3005
} | [
"edu.ucdavis.cstars.client.Util"
] | import edu.ucdavis.cstars.client.Util; | import edu.ucdavis.cstars.client.*; | [
"edu.ucdavis.cstars"
] | edu.ucdavis.cstars; | 2,838,544 |
int putStringUtf8(int index, String value, ByteOrder byteOrder, int maxEncodedSize); | int putStringUtf8(int index, String value, ByteOrder byteOrder, int maxEncodedSize); | /**
 * Encode a String as UTF-8 bytes in the buffer with a length prefix, with a maximum encoded size check.
*
* @param index at which the String should be encoded.
* @param value of the String to be encoded.
* @param byteOrder for the length prefix.
* @param maxEncodedSize to be checked before writing to the buffer.
* @return the number of bytes put to the buffer.
* @throws java.lang.IllegalArgumentException if the encoded bytes are greater than maxEncodedSize.
 */ | Encode a String as UTF-8 bytes in the buffer with a length prefix, with a maximum encoded size check | putStringUtf8 | {
"repo_name": "tbrooks8/Agrona",
"path": "agrona/src/main/java/org/agrona/MutableDirectBuffer.java",
"license": "apache-2.0",
"size": 11365
} | [
"java.nio.ByteOrder"
] | import java.nio.ByteOrder; | import java.nio.*; | [
"java.nio"
] | java.nio; | 26,536 |
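
A short round-trip sketch of the length-prefixed contract described above. It assumes Agrona's `ExpandableArrayBuffer` as the `MutableDirectBuffer` implementation; the 16-byte cap is an arbitrary illustration:

```java
import java.nio.ByteOrder;
import org.agrona.ExpandableArrayBuffer;
import org.agrona.MutableDirectBuffer;

public class PutStringUtf8Demo {
    public static void main(String[] args) {
        MutableDirectBuffer buffer = new ExpandableArrayBuffer(64);
        // Writes a 4-byte length prefix followed by the UTF-8 bytes; an
        // IllegalArgumentException is thrown if the encoding exceeds 16 bytes.
        int bytesPut = buffer.putStringUtf8(0, "hello", ByteOrder.LITTLE_ENDIAN, 16);
        String roundTripped = buffer.getStringUtf8(0, ByteOrder.LITTLE_ENDIAN);
        System.out.println(bytesPut + " bytes -> " + roundTripped); // 9 bytes -> hello
    }
}
```
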
protected DoubleCallable makeStringCallable(ExpressionEvaluator evaluator) {
final Callable<String> funcEvaluator = evaluator.getStringFunction();
try {
final String valueLeft = evaluator.isConstant() ? funcEvaluator.call() : "";
if (evaluator.isConstant()) {
final double result = compute(valueLeft);
return new DoubleCallable() { | DoubleCallable function(ExpressionEvaluator evaluator) { final Callable<String> funcEvaluator = evaluator.getStringFunction(); try { final String valueLeft = evaluator.isConstant() ? funcEvaluator.call() : ""; if (evaluator.isConstant()) { final double result = compute(valueLeft); return new DoubleCallable() { | /**
* Builds a DoubleCallable from left and right using {@link #compute(String, String)}, where
* constant child results are evaluated.
*
* @param left
* the left input
* @param right
* the right input
* @return the resulting DoubleCallable
*/ | Builds a DoubleCallable from left and right using <code>#compute(String, String)</code>, where constant child results are evaluated | makeStringCallable | {
"repo_name": "aborg0/rapidminer-studio",
"path": "src/main/java/com/rapidminer/tools/expression/internal/function/text/Abstract1StringInputIntegerOutputFunction.java",
"license": "agpl-3.0",
"size": 4434
} | [
"com.rapidminer.tools.expression.DoubleCallable",
"com.rapidminer.tools.expression.ExpressionEvaluator",
"java.util.concurrent.Callable"
] | import com.rapidminer.tools.expression.DoubleCallable; import com.rapidminer.tools.expression.ExpressionEvaluator; import java.util.concurrent.Callable; | import com.rapidminer.tools.expression.*; import java.util.concurrent.*; | [
"com.rapidminer.tools",
"java.util"
] | com.rapidminer.tools; java.util; | 1,294,926 |
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (Build.VERSION.SDK_INT < 11) {
mTransform.set(canvas.getMatrix());
mTransform.preRotate(mRotation, mPivot.x, mPivot.y);
canvas.setMatrix(mTransform);
}
onGraphDraw(canvas);
} | void function(Canvas canvas) { super.onDraw(canvas); if (Build.VERSION.SDK_INT < 11) { mTransform.set(canvas.getMatrix()); mTransform.preRotate(mRotation, mPivot.x, mPivot.y); canvas.setMatrix(mTransform); } onGraphDraw(canvas); } | /**
* Implement this to do your drawing.
*
* @param canvas the canvas on which the background will be drawn
*/ | Implement this to do your drawing | onDraw | {
"repo_name": "0359xiaodong/EazeGraph",
"path": "EazeGraphLibrary/src/main/java/org/eazegraph/lib/charts/BaseChart.java",
"license": "apache-2.0",
"size": 18812
} | [
"android.graphics.Canvas",
"android.os.Build"
] | import android.graphics.Canvas; import android.os.Build; | import android.graphics.*; import android.os.*; | [
"android.graphics",
"android.os"
] | android.graphics; android.os; | 798,322 |
public IntegraType getIntegraType() {
return this.integraType;
}
| IntegraType function() { return this.integraType; } | /**
* Returns type of Integra connected to the module.
*
* @return Integra type
*/ | Returns type of Integra connected to the module | getIntegraType | {
"repo_name": "kreutpet/openhab",
"path": "bundles/binding/org.openhab.binding.satel/src/main/java/org/openhab/binding/satel/internal/protocol/SatelModule.java",
"license": "epl-1.0",
"size": 17685
} | [
"org.openhab.binding.satel.internal.types.IntegraType"
] | import org.openhab.binding.satel.internal.types.IntegraType; | import org.openhab.binding.satel.internal.types.*; | [
"org.openhab.binding"
] | org.openhab.binding; | 874,253 |
public void acceptNotification(ByteBuffer payload, Exception err); | void function(ByteBuffer payload, Exception err); | /**
* Accept notification.
*
* @param payload Notification payload or {@code null} if there is no payload.
* @param err Error.
*/ | Accept notification | acceptNotification | {
"repo_name": "ascherbakoff/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/internal/client/thin/NotificationListener.java",
"license": "apache-2.0",
"size": 1385
} | [
"java.nio.ByteBuffer"
] | import java.nio.ByteBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 1,440,003 |
public synchronized void seek(long position) throws IOException {
in.seek(position);
blockCount = 0;
blockStart = position;
vin = DecoderFactory.get().binaryDecoder(in, vin);
} | synchronized void function(long position) throws IOException { in.seek(position); blockCount = 0; blockStart = position; vin = DecoderFactory.get().binaryDecoder(in, vin); } | /**
* Move to the specified synchronization point, as returned by
* {@link DataFileWriter#sync()}.
*/ | Move to the specified synchronization point, as returned by <code>DataFileWriter#sync()</code> | seek | {
"repo_name": "apache/avro",
"path": "lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java",
"license": "apache-2.0",
"size": 6891
} | [
"java.io.IOException",
"org.apache.avro.io.DecoderFactory"
] | import java.io.IOException; import org.apache.avro.io.DecoderFactory; | import java.io.*; import org.apache.avro.io.*; | [
"java.io",
"org.apache.avro"
] | java.io; org.apache.avro; | 1,700,257 |
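
The `sync()`/`seek()` pairing referenced above is easier to see end to end; this is a sketch using the current generic `DataFileWriter`/`DataFileReader` API rather than the legacy `DataFileReader12` class in this row, and it assumes a writable local path:

```java
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class SyncSeekDemo {
    public static void main(String[] args) throws IOException {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":[{\"name\":\"n\",\"type\":\"int\"}]}");
        File file = new File("demo.avro"); // illustrative path

        long syncPoint;
        try (DataFileWriter<GenericRecord> writer =
                 new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema))) {
            writer.create(schema, file);
            GenericRecord r = new GenericData.Record(schema);
            r.put("n", 1);
            writer.append(r);
            syncPoint = writer.sync();   // remember this block boundary
            r.put("n", 2);
            writer.append(r);
        }

        try (DataFileReader<GenericRecord> reader =
                 new DataFileReader<>(file, new GenericDatumReader<GenericRecord>(schema))) {
            reader.seek(syncPoint);              // jump straight to the remembered block
            System.out.println(reader.next());   // {"n": 2}
        }
    }
}
```
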
public static Uncollect create(
RelTraitSet traitSet,
RelNode input,
boolean withOrdinality,
List<String> itemAliases) {
final RelOptCluster cluster = input.getCluster();
return new Uncollect(cluster, traitSet, input, withOrdinality, itemAliases);
}
//~ Methods ---------------------------------------------------------------- | static Uncollect function( RelTraitSet traitSet, RelNode input, boolean withOrdinality, List<String> itemAliases) { final RelOptCluster cluster = input.getCluster(); return new Uncollect(cluster, traitSet, input, withOrdinality, itemAliases); } | /**
* Creates an Uncollect.
*
* <p>Each field of the input relational expression must be an array or
* multiset.
*
* @param traitSet Trait set
* @param input Input relational expression
* @param withOrdinality Whether output should contain an ORDINALITY column
* @param itemAliases Aliases for the operand items
*/ | Creates an Uncollect. Each field of the input relational expression must be an array or multiset | create | {
"repo_name": "googleinterns/calcite",
"path": "core/src/main/java/org/apache/calcite/rel/core/Uncollect.java",
"license": "apache-2.0",
"size": 7196
} | [
"java.util.List",
"org.apache.calcite.plan.RelOptCluster",
"org.apache.calcite.plan.RelTraitSet",
"org.apache.calcite.rel.RelNode"
] | import java.util.List; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelNode; | import java.util.*; import org.apache.calcite.plan.*; import org.apache.calcite.rel.*; | [
"java.util",
"org.apache.calcite"
] | java.util; org.apache.calcite; | 389,770 |
ChangePlanItemStateBuilder childInstanceTaskVariables(String planItemDefinitionId, Map<String, Object> variables); | ChangePlanItemStateBuilder childInstanceTaskVariables(String planItemDefinitionId, Map<String, Object> variables); | /**
* Set the case variable that should be set as part of the change process or case task state action.
*/ | Set the case variable that should be set as part of the change process or case task state action | childInstanceTaskVariables | {
"repo_name": "dbmalkovsky/flowable-engine",
"path": "modules/flowable-cmmn-api/src/main/java/org/flowable/cmmn/api/runtime/ChangePlanItemStateBuilder.java",
"license": "apache-2.0",
"size": 3968
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 1,382,721 |
protected void handleLogging(Exception e) {
if (logCategory != null) {
if (categoryLogger == null) {
// init category logger
categoryLogger = LogManager.getLogger(logCategory);
}
doLog(categoryLogger, e);
} else {
doLog(LOG, e);
}
} | void function(Exception e) { if (logCategory != null) { if (categoryLogger == null) { categoryLogger = LogManager.getLogger(logCategory); } doLog(categoryLogger, e); } else { doLog(LOG, e); } } | /**
* Handles the logging of the exception.
*
* @param e the exception to log.
*/ | Handles the logging of the exception | handleLogging | {
"repo_name": "Ile2/struts2-showcase-demo",
"path": "src/core/src/main/java/com/opensymphony/xwork2/interceptor/ExceptionMappingInterceptor.java",
"license": "apache-2.0",
"size": 12278
} | [
"org.apache.logging.log4j.LogManager"
] | import org.apache.logging.log4j.LogManager; | import org.apache.logging.log4j.*; | [
"org.apache.logging"
] | org.apache.logging; | 1,567,677 |
private static int findClosingParenToLeft(IDocument document, int position, String partitioning) {
final char CLOSING_PAREN= ')';
try {
if (position < 1)
return position;
int nonWS= firstNonWhitespaceBackward(document, position - 1, partitioning, -1);
if (nonWS != -1 && document.getChar(nonWS) == CLOSING_PAREN)
return nonWS;
} catch (BadLocationException e1) {
}
return position;
} | static int function(IDocument document, int position, String partitioning) { final char CLOSING_PAREN= ')'; try { if (position < 1) return position; int nonWS= firstNonWhitespaceBackward(document, position - 1, partitioning, -1); if (nonWS != -1 && document.getChar(nonWS) == CLOSING_PAREN) return nonWS; } catch (BadLocationException e1) { } return position; } | /**
* Finds a closing parenthesis to the left of <code>position</code> in document, where that parenthesis is only
* separated by whitespace from <code>position</code>. If no such parenthesis can be found, <code>position</code> is returned.
*
* @param document the document being modified
* @param position the first character position in <code>document</code> to be considered
* @param partitioning the document partitioning
* @return the position of a closing parenthesis left to <code>position</code> separated only by whitespace, or <code>position</code> if no parenthesis can be found
*/ | Finds a closing parenthesis to the left of <code>position</code> in document, where that parenthesis is only separated by whitespace from <code>position</code>. If no such parenthesis can be found, <code>position</code> is returned | findClosingParenToLeft | {
"repo_name": "dhuebner/che",
"path": "plugins/plugin-java/che-plugin-java-ext-jdt/org-eclipse-jdt-ui/src/main/java/org/eclipse/jdt/internal/ui/text/java/SmartSemicolonAutoEditStrategy.java",
"license": "epl-1.0",
"size": 39789
} | [
"org.eclipse.jface.text.BadLocationException",
"org.eclipse.jface.text.IDocument"
] | import org.eclipse.jface.text.BadLocationException; import org.eclipse.jface.text.IDocument; | import org.eclipse.jface.text.*; | [
"org.eclipse.jface"
] | org.eclipse.jface; | 1,107,236 |
//endregion
//region > description (property)
public static class DescriptionDomainEvent extends PropertyDomainEvent<String> { }
@javax.jdo.annotations.Column(allowsNull="false", length=100)
@Property(
domainEvent = DescriptionDomainEvent.class,
regexPattern = "\\w[@&:\\-\\,\\.\\+ \\w]*",
editing = Editing.ENABLED
)
@Getter @Setter
private String description;
//endregion
//region > dueBy (property), Calendarable impl
public static class DueByDomainEvent extends PropertyDomainEvent<LocalDate> { }
@javax.jdo.annotations.Persistent(defaultFetchGroup="true")
@javax.jdo.annotations.Column(allowsNull="true")
@Property(
domainEvent = DueByDomainEvent.class
)
@Getter @Setter
private LocalDate dueBy; | public static class DescriptionDomainEvent extends PropertyDomainEvent<String> { } @javax.jdo.annotations.Column(allowsNull="false", length=100) @Property( domainEvent = DescriptionDomainEvent.class, regexPattern = STR, editing = Editing.ENABLED ) @Getter private String description; public static class DueByDomainEvent extends PropertyDomainEvent<LocalDate> { } @javax.jdo.annotations.Persistent(defaultFetchGroup="true") @javax.jdo.annotations.Column(allowsNull="true") @Property( domainEvent = DueByDomainEvent.class ) @Getter private LocalDate dueBy; | /**
* Provides a strikethrough for "done" items, see <code>application.css</code>.
*/ | Provides a strikethrough for "done" items, see <code>application.css</code> | cssClass | {
"repo_name": "isisaddons/isis-app-todoapp",
"path": "dom/src/main/java/todoapp/dom/todoitem/ToDoItem.java",
"license": "apache-2.0",
"size": 31810
} | [
"org.apache.isis.applib.annotation.Editing",
"org.apache.isis.applib.annotation.Property",
"org.joda.time.LocalDate"
] | import org.apache.isis.applib.annotation.Editing; import org.apache.isis.applib.annotation.Property; import org.joda.time.LocalDate; | import org.apache.isis.applib.annotation.*; import org.joda.time.*; | [
"org.apache.isis",
"org.joda.time"
] | org.apache.isis; org.joda.time; | 753,319 |
protected Collection<String> getInitialObjectNames() {
if (initialObjectNames == null) {
initialObjectNames = new ArrayList<String>();
for (EClassifier eClassifier : makingOfPackage.getEClassifiers()) {
if (eClassifier instanceof EClass) {
EClass eClass = (EClass)eClassifier;
if (!eClass.isAbstract()) {
initialObjectNames.add(eClass.getName());
}
}
}
Collections.sort(initialObjectNames, CommonPlugin.INSTANCE.getComparator());
}
return initialObjectNames;
}
| Collection<String> function() { if (initialObjectNames == null) { initialObjectNames = new ArrayList<String>(); for (EClassifier eClassifier : makingOfPackage.getEClassifiers()) { if (eClassifier instanceof EClass) { EClass eClass = (EClass)eClassifier; if (!eClass.isAbstract()) { initialObjectNames.add(eClass.getName()); } } } Collections.sort(initialObjectNames, CommonPlugin.INSTANCE.getComparator()); } return initialObjectNames; } | /**
* Returns the names of the types that can be created as the root object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | Returns the names of the types that can be created as the root object. | getInitialObjectNames | {
"repo_name": "ObeoNetwork/Conference-Designer",
"path": "plugins/fr.obeo.conference.editor/src/conference/makingOf/presentation/MakingOfModelWizard.java",
"license": "epl-1.0",
"size": 18473
} | [
"java.util.ArrayList",
"java.util.Collection",
"java.util.Collections",
"org.eclipse.emf.common.CommonPlugin",
"org.eclipse.emf.ecore.EClass",
"org.eclipse.emf.ecore.EClassifier"
] | import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import org.eclipse.emf.common.CommonPlugin; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EClassifier; | import java.util.*; import org.eclipse.emf.common.*; import org.eclipse.emf.ecore.*; | [
"java.util",
"org.eclipse.emf"
] | java.util; org.eclipse.emf; | 2,860,431 |
protected void removeColumns(Collection<String> removeColumnNames, String tableName) {
if (removeColumnNames != null && !removeColumnNames.isEmpty()) {
execute(getRemoveColumnSQLs(removeColumnNames, tableName), mDb);
}
} | void function(Collection<String> removeColumnNames, String tableName) { if (removeColumnNames != null && !removeColumnNames.isEmpty()) { execute(getRemoveColumnSQLs(removeColumnNames, tableName), mDb); } } | /**
* When some fields are removed from class, the table should synchronize the
* changes by removing the corresponding columns.
*
* @param removeColumnNames
* The column names that need to remove.
* @param tableName
* The table name to remove columns from.
*/ | When some fields are removed from class, the table should synchronize the changes by removing the corresponding columns | removeColumns | {
"repo_name": "weatherfish/LitePal",
"path": "litepal/src/main/java/org/litepal/tablemanager/AssociationUpdater.java",
"license": "apache-2.0",
"size": 18649
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 420,321 |
public static void setCSS2FontProperties(CSS2FontProperties fontProperties,
CSSElementContext context) {
if (fontProperties == null)
return;
context.setData(CSS2FONT_KEY, fontProperties);
}
| static void function(CSS2FontProperties fontProperties, CSSElementContext context) { if (fontProperties == null) return; context.setData(CSS2FONT_KEY, fontProperties); } | /**
* Set {@link CSS2FontProperties} <code>fontProperties</code> into
* {@link CSSElementContext} context.
*
* @param fontProperties
* @param context
*/ | Set <code>CSS2FontProperties</code> <code>fontProperties</code> into <code>CSSElementContext</code> context | setCSS2FontProperties | {
"repo_name": "bdaum/zoraPD",
"path": "com.bdaum.zoom.css/src/org/akrogen/tkui/css/core/css2/CSS2FontPropertiesHelpers.java",
"license": "gpl-2.0",
"size": 5477
} | [
"org.akrogen.tkui.css.core.dom.properties.css2.CSS2FontProperties",
"org.akrogen.tkui.css.core.engine.CSSElementContext"
] | import org.akrogen.tkui.css.core.dom.properties.css2.CSS2FontProperties; import org.akrogen.tkui.css.core.engine.CSSElementContext; | import org.akrogen.tkui.css.core.dom.properties.css2.*; import org.akrogen.tkui.css.core.engine.*; | [
"org.akrogen.tkui"
] | org.akrogen.tkui; | 206,333 |
public void insertTop(final ByteBuffer buf, final long lastModifiedTime, final boolean isChunkable);
| void function(final ByteBuffer buf, final long lastModifiedTime, final boolean isChunkable); | /**
* Add a new ByteBuffer to the top of this set.
* @param buf the buffer to add
* @param lastModifiedTime the last modified date of the data, or 0 to ignore
* @param isChunkable true if this is a body buffer, and can be chunked
*/ | Add a new ByteBuffer to the top of this set | insertTop | {
"repo_name": "oriontribunal/CoffeeMud",
"path": "com/planet_ink/coffee_web/interfaces/DataBuffers.java",
"license": "apache-2.0",
"size": 4668
} | [
"java.nio.ByteBuffer"
] | import java.nio.ByteBuffer; | import java.nio.*; | [
"java.nio"
] | java.nio; | 386,864 |
public Glyph dynaPick(Camera c) {
if (c == null) {
return null;
}
refToCam4DynaPick = c;
List<Glyph> drawnGlyphs = c.getOwningSpace().getDrawnGlyphs(c.getIndex());
Glyph selectedGlyph = null;
        // initialized at -1 because we don't have any easy way to compute some sort of "initial" distance for comparison
// when == 0, means that the cursor's hotspot is actually inside the glyph
// if > 0 at the end of the loop, dynaspot intersects at least one glyph (but cursor hotspot is not inside any glyph)
// if == -1, nothing is intersected by the dynaspot area
double distanceToSelectedGlyph = -1;
Glyph g;
int gumIndex = -1;
int cgumIndex = -1;
double unprojectedDSRadius = ((c.focal + c.altitude) / c.focal) * dynaSpotRadius;
dynawnes[0] = cursor.vx - unprojectedDSRadius; // west bound
dynawnes[1] = cursor.vy + unprojectedDSRadius; // north bound
dynawnes[2] = cursor.vx + unprojectedDSRadius; // east bound
dynawnes[3] = cursor.vy - unprojectedDSRadius; // south bound
dynaspotVSshape.setFrame(dynawnes[0], dynawnes[3], 2 * unprojectedDSRadius, 2 * unprojectedDSRadius);
synchronized (drawnGlyphs) {
for (int i = 0; i < drawnGlyphs.size(); i++) {
g = drawnGlyphs.get(i);
if (!g.isSensitive()) {
continue;
}
// check if cursor hotspot is inside glyph
// if hotspot in several glyphs, selected glyph will be the last glyph entered (according to glyphsUnderMouse)
cgumIndex = Utils.indexOfGlyph(cursor.getPicker().pickedGlyphs, g, cursor.getPicker().maxIndex + 1);
if (cgumIndex > -1) {
if (cgumIndex > gumIndex) {
gumIndex = cgumIndex;
selectedGlyph = g;
distanceToSelectedGlyph = 0;
}
gida.put(g, null);
} // if cursor hotspot is not inside the glyph, check bounding boxes (Glyph's and DynaSpot's),
                // if they do intersect, perform a finer-grain check with Areas
else if (g.visibleInRegion(dynawnes[0], dynawnes[1], dynawnes[2], dynawnes[3], c.getIndex())
&& g.visibleInDisc(cursor.vx, cursor.vy, unprojectedDSRadius, dynaspotVSshape, c.getIndex(), cursor.jpx, cursor.jpy, dynaSpotRadius)) {
// glyph intersects dynaspot area
gida.put(g, null);
double d = Math.sqrt(Math.pow(g.vx - cursor.vx, 2) + Math.pow(g.vy - cursor.vy, 2));
if (distanceToSelectedGlyph == -1 || d < distanceToSelectedGlyph) {
selectedGlyph = g;
distanceToSelectedGlyph = d;
}
} else {
// glyph does not intersect dynaspot area
if (gida.containsKey(g)) {
gida.remove(g);
if (sl != null) {
sl.glyphSelected(g, false);
}
}
}
}
}
if (selectedGlyph != null && sl != null) {
sl.glyphSelected(selectedGlyph, true);
}
if (lastDynaPicked != null && selectedGlyph != lastDynaPicked && sl != null) {
sl.glyphSelected(lastDynaPicked, false);
}
lastDynaPicked = selectedGlyph;
return selectedGlyph;
} | Glyph function(Camera c) { if (c == null) { return null; } refToCam4DynaPick = c; List<Glyph> drawnGlyphs = c.getOwningSpace().getDrawnGlyphs(c.getIndex()); Glyph selectedGlyph = null; double distanceToSelectedGlyph = -1; Glyph g; int gumIndex = -1; int cgumIndex = -1; double unprojectedDSRadius = ((c.focal + c.altitude) / c.focal) * dynaSpotRadius; dynawnes[0] = cursor.vx - unprojectedDSRadius; dynawnes[1] = cursor.vy + unprojectedDSRadius; dynawnes[2] = cursor.vx + unprojectedDSRadius; dynawnes[3] = cursor.vy - unprojectedDSRadius; dynaspotVSshape.setFrame(dynawnes[0], dynawnes[3], 2 * unprojectedDSRadius, 2 * unprojectedDSRadius); synchronized (drawnGlyphs) { for (int i = 0; i < drawnGlyphs.size(); i++) { g = drawnGlyphs.get(i); if (!g.isSensitive()) { continue; } cgumIndex = Utils.indexOfGlyph(cursor.getPicker().pickedGlyphs, g, cursor.getPicker().maxIndex + 1); if (cgumIndex > -1) { if (cgumIndex > gumIndex) { gumIndex = cgumIndex; selectedGlyph = g; distanceToSelectedGlyph = 0; } gida.put(g, null); } else if (g.visibleInRegion(dynawnes[0], dynawnes[1], dynawnes[2], dynawnes[3], c.getIndex()) && g.visibleInDisc(cursor.vx, cursor.vy, unprojectedDSRadius, dynaspotVSshape, c.getIndex(), cursor.jpx, cursor.jpy, dynaSpotRadius)) { gida.put(g, null); double d = Math.sqrt(Math.pow(g.vx - cursor.vx, 2) + Math.pow(g.vy - cursor.vy, 2)); if (distanceToSelectedGlyph == -1 d < distanceToSelectedGlyph) { selectedGlyph = g; distanceToSelectedGlyph = d; } } else { if (gida.containsKey(g)) { gida.remove(g); if (sl != null) { sl.glyphSelected(g, false); } } } } } if (selectedGlyph != null && sl != null) { sl.glyphSelected(selectedGlyph, true); } if (lastDynaPicked != null && selectedGlyph != lastDynaPicked && sl != null) { sl.glyphSelected(lastDynaPicked, false); } lastDynaPicked = selectedGlyph; return selectedGlyph; } | /**
* Compute the list of glyphs picked by the DynaSpot cursor. The best picked
* glyph is returned. See <a
* href="http://zvtm.sourceforge.net/doc/dynaspot.html">http://zvtm.sourceforge.net/doc/dynaspot.html</a>
* for more detail.
*
* @return null if the dynaspot cursor does not pick anything.
* @see #dynaPick()
*/ | Compute the list of glyphs picked by the DynaSpot cursor. The best picked glyph is returned. See HREF for more detail | dynaPick | {
"repo_name": "sharwell/zgrnbviewer",
"path": "org-tvl-netbeans-zgrviewer/src/fr/inria/zvtm/engine/DynaPicker.java",
"license": "lgpl-3.0",
"size": 16532
} | [
"fr.inria.zvtm.glyphs.Glyph",
"java.util.List"
] | import fr.inria.zvtm.glyphs.Glyph; import java.util.List; | import fr.inria.zvtm.glyphs.*; import java.util.*; | [
"fr.inria.zvtm",
"java.util"
] | fr.inria.zvtm; java.util; | 2,247,415 |
public void addCollectionAction() {
SwingUtilities.invokeLater(() -> {
String name = JOptionPane.showInputDialog("Please enter the collection title");
if (name != null) {
name = name.trim();
}
CollectionListModel model = mainFrame.getCollectionListModel();
if (!collection.contains(name)) {
UploadCollection uc = new UploadCollection(name, "", "");
collection.addCollection(name, uc);
model.addElement(name);
model.sort();
}
});
} | void function() { SwingUtilities.invokeLater(() -> { String name = JOptionPane.showInputDialog(STR); if (name != null) { name = name.trim(); } CollectionListModel model = mainFrame.getCollectionListModel(); if (!collection.contains(name)) { UploadCollection uc = new UploadCollection(name, STR"); collection.addCollection(name, uc); model.addElement(name); model.sort(); } }); } | /**
* Asks for a collection name and adds a new collection with that name.
*/ | Asks for a collection name and adds a new collection with that name | addCollectionAction | {
"repo_name": "thetmk/UploadManager",
"path": "src/main/java/com/tmk/uploadmanager/control/MainController.java",
"license": "mit",
"size": 16441
} | [
"com.tmk.uploadmanager.model.CollectionListModel",
"com.tmk.uploadmanager.model.UploadCollection",
"javax.swing.JOptionPane",
"javax.swing.SwingUtilities"
] | import com.tmk.uploadmanager.model.CollectionListModel; import com.tmk.uploadmanager.model.UploadCollection; import javax.swing.JOptionPane; import javax.swing.SwingUtilities; | import com.tmk.uploadmanager.model.*; import javax.swing.*; | [
"com.tmk.uploadmanager",
"javax.swing"
] | com.tmk.uploadmanager; javax.swing; | 460,710 |
Criteria criteria = getSession().createCriteria(OrderDTO.class)
.createAlias("orderStatus", "s")
.add(Restrictions.eq("s.id", Constants.ORDER_STATUS_ACTIVE))
.add(Restrictions.eq("deleted", 0))
.createAlias("baseUserByUserId", "u")
.add(Restrictions.eq("u.id", userId))
.add(Restrictions.eq("orderPeriod", period))
.addOrder(Order.asc("id"))
.setMaxResults(1);
return findFirst(criteria);
}
| Criteria criteria = getSession().createCriteria(OrderDTO.class) .createAlias(STR, "s") .add(Restrictions.eq("s.id", Constants.ORDER_STATUS_ACTIVE)) .add(Restrictions.eq(STR, 0)) .createAlias(STR, "u") .add(Restrictions.eq("u.id", userId)) .add(Restrictions.eq(STR, period)) .addOrder(Order.asc("id")) .setMaxResults(1); return findFirst(criteria); } | /**
* Returns the newest active order for the given user id and period.
*
* @param userId user id
* @param period period
* @return newest active order for user and period.
*/ | Returns the newest active order for the given user id and period | findByUserAndPeriod | {
"repo_name": "rahith/jbilling_3.0.2",
"path": "src/java/com/sapienter/jbilling/server/order/db/OrderDAS.java",
"license": "agpl-3.0",
"size": 11004
} | [
"com.sapienter.jbilling.server.util.Constants",
"org.hibernate.Criteria",
"org.hibernate.criterion.Order",
"org.hibernate.criterion.Restrictions"
] | import com.sapienter.jbilling.server.util.Constants; import org.hibernate.Criteria; import org.hibernate.criterion.Order; import org.hibernate.criterion.Restrictions; | import com.sapienter.jbilling.server.util.*; import org.hibernate.*; import org.hibernate.criterion.*; | [
"com.sapienter.jbilling",
"org.hibernate",
"org.hibernate.criterion"
] | com.sapienter.jbilling; org.hibernate; org.hibernate.criterion; | 1,707,285 |
@JavaHandler
InputStream asInputStream() throws RuntimeException; | InputStream asInputStream() throws RuntimeException; | /**
* Returns an open {@link InputStream} for reading from this file
*/ | Returns an open <code>InputStream</code> for reading from this file | asInputStream | {
"repo_name": "mareknovotny/windup",
"path": "graph/api/src/main/java/org/jboss/windup/graph/model/resource/FileModel.java",
"license": "epl-1.0",
"size": 9407
} | [
"java.io.InputStream"
] | import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,533,604 |
private void launchUrlFromExternalApp(String url, String expectedUrl, String appId,
boolean createNewTab, Bundle extras, boolean firstParty) throws InterruptedException {
final Intent intent = new Intent(Intent.ACTION_VIEW);
if (appId != null) {
intent.putExtra(Browser.EXTRA_APPLICATION_ID, appId);
}
if (createNewTab) {
intent.putExtra(Browser.EXTRA_CREATE_NEW_TAB, true);
}
intent.setData(Uri.parse(url));
if (extras != null) intent.putExtras(extras);
if (firstParty) {
Context context = getInstrumentation().getTargetContext();
intent.setPackage(context.getPackageName());
IntentHandler.addTrustedIntentExtras(intent, context);
} | void function(String url, String expectedUrl, String appId, boolean createNewTab, Bundle extras, boolean firstParty) throws InterruptedException { final Intent intent = new Intent(Intent.ACTION_VIEW); if (appId != null) { intent.putExtra(Browser.EXTRA_APPLICATION_ID, appId); } if (createNewTab) { intent.putExtra(Browser.EXTRA_CREATE_NEW_TAB, true); } intent.setData(Uri.parse(url)); if (extras != null) intent.putExtras(extras); if (firstParty) { Context context = getInstrumentation().getTargetContext(); intent.setPackage(context.getPackageName()); IntentHandler.addTrustedIntentExtras(intent, context); } | /**
* Launch the specified URL as if it was triggered by an external application with id appId.
* Returns when the URL has been navigated to.
* @throws InterruptedException
*/ | Launch the specified URL as if it was triggered by an external application with id appId. Returns when the URL has been navigated to | launchUrlFromExternalApp | {
"repo_name": "danakj/chromium",
"path": "chrome/android/javatests/src/org/chromium/chrome/browser/TabsOpenedFromExternalAppTest.java",
"license": "bsd-3-clause",
"size": 31481
} | [
"android.content.Context",
"android.content.Intent",
"android.net.Uri",
"android.os.Bundle",
"android.provider.Browser"
] | import android.content.Context; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.provider.Browser; | import android.content.*; import android.net.*; import android.os.*; import android.provider.*; | [
"android.content",
"android.net",
"android.os",
"android.provider"
] | android.content; android.net; android.os; android.provider; | 1,501,150 |
public void testConstrDoubleMinus01() {
double a = -1.E-1;
int aScale = 55;
BigInteger bA = new BigInteger("-1000000000000000055511151231257827021181583404541015625");
BigDecimal aNumber = new BigDecimal(a);
assertEquals("incorrect value", bA, aNumber.unscaledValue());
assertEquals("incorrect scale", aScale, aNumber.scale());
} | void function() { double a = -1.E-1; int aScale = 55; BigInteger bA = new BigInteger(STR); BigDecimal aNumber = new BigDecimal(a); assertEquals(STR, bA, aNumber.unscaledValue()); assertEquals(STR, aScale, aNumber.scale()); } | /**
* new BigDecimal(-0.1)
*/ | new BigDecimal(-0.1) | testConstrDoubleMinus01 | {
"repo_name": "JSDemos/android-sdk-20",
"path": "src/org/apache/harmony/tests/java/math/BigDecimalConstructorsTest.java",
"license": "apache-2.0",
"size": 25724
} | [
"java.math.BigDecimal",
"java.math.BigInteger"
] | import java.math.BigDecimal; import java.math.BigInteger; | import java.math.*; | [
"java.math"
] | java.math; | 2,882,661 |
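
The 55-digit expected value in this test follows from the fact that the `double` literal -0.1 is really the nearest binary64 value; a quick sketch contrasting the `double` constructor tested above with `BigDecimal.valueOf`:

```java
import java.math.BigDecimal;

public class DoubleConstructorDemo {
    public static void main(String[] args) {
        // Exact decimal expansion of the binary64 value nearest to -0.1:
        // -0.1000000000000000055511151231257827021181583404541015625 (scale 55)
        System.out.println(new BigDecimal(-0.1));
        // Shortest-decimal form via Double.toString: -0.1
        System.out.println(BigDecimal.valueOf(-0.1));
    }
}
```
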
protected static List<String> getExcludedNames( List<String> sourcePaths, String[] subpackagesList,
String[] excludedPackages )
{
List<String> excludedNames = new ArrayList<String>();
for ( String path : sourcePaths )
{
for ( int j = 0; j < subpackagesList.length; j++ )
{
List<String> excludes = getExcludedPackages( path, excludedPackages );
excludedNames.addAll( excludes );
}
}
return excludedNames;
}
/**
* Copy from {@link org.apache.maven.project.MavenProject#getCompileArtifacts()} | static List<String> function( List<String> sourcePaths, String[] subpackagesList, String[] excludedPackages ) { List<String> excludedNames = new ArrayList<String>(); for ( String path : sourcePaths ) { for ( int j = 0; j < subpackagesList.length; j++ ) { List<String> excludes = getExcludedPackages( path, excludedPackages ); excludedNames.addAll( excludes ); } } return excludedNames; } /** * Copy from {@link org.apache.maven.project.MavenProject#getCompileArtifacts()} | /**
* Method that gets all the source files to be excluded from the javadoc on the given
* source paths.
*
* @param sourcePaths the path to the source files
* @param subpackagesList list of subpackages to be included in the javadoc
* @param excludedPackages the package names to be excluded in the javadoc
* @return a List of the source files to be excluded in the generated javadoc
*/ | Method that gets all the source files to be excluded from the javadoc on the given source paths | getExcludedNames | {
"repo_name": "dmlloyd/maven-plugins",
"path": "maven-javadoc-plugin/src/main/java/org/apache/maven/plugin/javadoc/JavadocUtil.java",
"license": "apache-2.0",
"size": 64004
} | [
"java.util.ArrayList",
"java.util.List",
"org.apache.maven.project.MavenProject"
] | import java.util.ArrayList; import java.util.List; import org.apache.maven.project.MavenProject; | import java.util.*; import org.apache.maven.project.*; | [
"java.util",
"org.apache.maven"
] | java.util; org.apache.maven; | 2,212,937 |
private void callCreate(JSONObject in, JSONObject out) {
int ticks;
if (in.containsKey("ticks")) {
ticks = Integer.parseInt(in.get("ticks").toString());
} else {
ticks = Integer.parseInt(SettingsParser.settings.get("default_ticks").toString());
}
GameThread newthread = new GameThread(ticks, false);
this.threads.put("" + this.threads.size(), newthread);
out.put("manager_id", "" + (this.threads.size() - 1));
success(out);
} | void function(JSONObject in, JSONObject out) { int ticks; if (in.containsKey("ticks")) { ticks = Integer.parseInt(in.get("ticks").toString()); } else { ticks = Integer.parseInt(SettingsParser.settings.get(STR).toString()); } GameThread newthread = new GameThread(ticks, false); this.threads.put(STRmanager_idSTR" + (this.threads.size() - 1)); success(out); } | /**
* Create a gameManager, returns manager_id.
*
* @param in
* @param out
*/ | Create a gameManager, returns manager_id | callCreate | {
"repo_name": "jgke/mapserver",
"path": "src/main/java/fi/paivola/mapserver/core/WSServer.java",
"license": "mit",
"size": 12388
} | [
"org.json.simple.JSONObject"
] | import org.json.simple.JSONObject; | import org.json.simple.*; | [
"org.json.simple"
] | org.json.simple; | 1,703,545 |
void spawnParticles(ParticleEffect particleEffect, Vector3d position, int radius); | void spawnParticles(ParticleEffect particleEffect, Vector3d position, int radius); | /**
* Spawn a {@link ParticleEffect} at a given position.
* All players within a given radius around the position will see the
* particles.
*
* @param particleEffect The particle effect to spawn
* @param position The position at which to spawn the particle effect
* @param radius The radius around the position where the particles can be
* seen by players
*/ | Spawn a <code>ParticleEffect</code> at a given position. All players within a given radius around the position will see the particles | spawnParticles | {
"repo_name": "modwizcode/SpongeAPI",
"path": "src/main/java/org/spongepowered/api/effect/Viewer.java",
"license": "mit",
"size": 6631
} | [
"com.flowpowered.math.vector.Vector3d",
"org.spongepowered.api.effect.particle.ParticleEffect"
] | import com.flowpowered.math.vector.Vector3d; import org.spongepowered.api.effect.particle.ParticleEffect; | import com.flowpowered.math.vector.*; import org.spongepowered.api.effect.particle.*; | [
"com.flowpowered.math",
"org.spongepowered.api"
] | com.flowpowered.math; org.spongepowered.api; | 174,614 |
public RuntimeJobExecution createJobStartExecution(final WSJobInstance jobInstance,
IJobXMLSource jslSource,
Properties jobParameters,
long executionId) throws JobStartException {
// TODO - redesign to avoid cast?
final JobInstanceEntity jobInstanceImpl = (JobInstanceEntity) jobInstance;
long instanceId = jobInstance.getInstanceId();
ModelNavigator<JSLJob> navigator = createFirstExecution(jobInstanceImpl, jslSource, jobParameters);
JobExecutionEntity jobExec = null;
try {
jobExec = getPersistenceManagerService().getJobExecutionMostRecent(instanceId);
// Check to make sure the executionId belongs to the most recent execution.
// If not, a restart may have occurred. So, fail the start.
// Also check to make sure a stop did not come in while the start was on the queue.
BatchStatus currentBatchStatus = jobExec.getBatchStatus();
if (jobExec.getExecutionId() != executionId ||
currentBatchStatus.equals(BatchStatus.STOPPING) ||
currentBatchStatus.equals(BatchStatus.STOPPED)) {
throw new JobStartException();
}
} catch (IllegalStateException ie) {
// If no execution exists, request came from old dispatch path.
jobExec = getPersistenceManagerService().createJobExecution(instanceId, jobParameters, new Date());
BatchEventsPublisher eventsPublisher = batchKernelImpl.getBatchEventsPublisher();
if (eventsPublisher != null) {
String correlationId = getCorrelationId(jobParameters);
eventsPublisher.publishJobExecutionEvent(jobExec, BatchEventsPublisher.TOPIC_EXECUTION_STARTING, correlationId);
}
}
TopLevelNameInstanceExecutionInfo topLevelInfo = new TopLevelNameInstanceExecutionInfo(jobInstanceImpl.getJobName(), instanceId, jobExec.getExecutionId());
return new RuntimeJobExecution(topLevelInfo, jobParameters, navigator);
} | RuntimeJobExecution function(final WSJobInstance jobInstance, IJobXMLSource jslSource, Properties jobParameters, long executionId) throws JobStartException { final JobInstanceEntity jobInstanceImpl = (JobInstanceEntity) jobInstance; long instanceId = jobInstance.getInstanceId(); ModelNavigator<JSLJob> navigator = createFirstExecution(jobInstanceImpl, jslSource, jobParameters); JobExecutionEntity jobExec = null; try { jobExec = getPersistenceManagerService().getJobExecutionMostRecent(instanceId); BatchStatus currentBatchStatus = jobExec.getBatchStatus(); if (jobExec.getExecutionId() != executionId currentBatchStatus.equals(BatchStatus.STOPPING) currentBatchStatus.equals(BatchStatus.STOPPED)) { throw new JobStartException(); } } catch (IllegalStateException ie) { jobExec = getPersistenceManagerService().createJobExecution(instanceId, jobParameters, new Date()); BatchEventsPublisher eventsPublisher = batchKernelImpl.getBatchEventsPublisher(); if (eventsPublisher != null) { String correlationId = getCorrelationId(jobParameters); eventsPublisher.publishJobExecutionEvent(jobExec, BatchEventsPublisher.TOPIC_EXECUTION_STARTING, correlationId); } } TopLevelNameInstanceExecutionInfo topLevelInfo = new TopLevelNameInstanceExecutionInfo(jobInstanceImpl.getJobName(), instanceId, jobExec.getExecutionId()); return new RuntimeJobExecution(topLevelInfo, jobParameters, navigator); } | /**
* Note: this method is called on the job submission thread.
*
* Updates the jobinstance record with the jobXML and jobname (jobid in jobxml).
*
* @return a new RuntimeJobExecution record, ready to be dispatched.
*/ | Note: this method is called on the job submission thread. Updates the jobinstance record with the jobXML and jobname (jobid in jobxml) | createJobStartExecution | {
"repo_name": "OpenLiberty/open-liberty",
"path": "dev/com.ibm.jbatch.container/src/com/ibm/jbatch/container/execution/impl/JobExecutionHelper.java",
"license": "epl-1.0",
"size": 12889
} | [
"com.ibm.jbatch.container.navigator.ModelNavigator",
"com.ibm.jbatch.container.persistence.jpa.JobExecutionEntity",
"com.ibm.jbatch.container.persistence.jpa.JobInstanceEntity",
"com.ibm.jbatch.container.services.IJobXMLSource",
"com.ibm.jbatch.container.ws.TopLevelNameInstanceExecutionInfo",
"com.ibm.jbatch.container.ws.WSJobInstance",
"com.ibm.jbatch.container.ws.events.BatchEventsPublisher",
"com.ibm.jbatch.jsl.model.JSLJob",
"java.util.Date",
"java.util.Properties",
"javax.batch.operations.JobStartException",
"javax.batch.runtime.BatchStatus"
] | import com.ibm.jbatch.container.navigator.ModelNavigator; import com.ibm.jbatch.container.persistence.jpa.JobExecutionEntity; import com.ibm.jbatch.container.persistence.jpa.JobInstanceEntity; import com.ibm.jbatch.container.services.IJobXMLSource; import com.ibm.jbatch.container.ws.TopLevelNameInstanceExecutionInfo; import com.ibm.jbatch.container.ws.WSJobInstance; import com.ibm.jbatch.container.ws.events.BatchEventsPublisher; import com.ibm.jbatch.jsl.model.JSLJob; import java.util.Date; import java.util.Properties; import javax.batch.operations.JobStartException; import javax.batch.runtime.BatchStatus; | import com.ibm.jbatch.container.navigator.*; import com.ibm.jbatch.container.persistence.jpa.*; import com.ibm.jbatch.container.services.*; import com.ibm.jbatch.container.ws.*; import com.ibm.jbatch.container.ws.events.*; import com.ibm.jbatch.jsl.model.*; import java.util.*; import javax.batch.operations.*; import javax.batch.runtime.*; | [
"com.ibm.jbatch",
"java.util",
"javax.batch"
] | com.ibm.jbatch; java.util; javax.batch; | 1,634,625 |
public List<ConnectorTaskId> tasks(String connectorName) {
if (inconsistentConnectors.contains(connectorName))
return Collections.emptyList();
Integer numTasks = connectorTaskCounts.get(connectorName);
if (numTasks == null)
return Collections.emptyList();
List<ConnectorTaskId> taskIds = new ArrayList<>();
for (int taskIndex = 0; taskIndex < numTasks; taskIndex++) {
ConnectorTaskId taskId = new ConnectorTaskId(connectorName, taskIndex);
taskIds.add(taskId);
}
return taskIds;
} | List<ConnectorTaskId> function(String connectorName) { if (inconsistentConnectors.contains(connectorName)) return Collections.emptyList(); Integer numTasks = connectorTaskCounts.get(connectorName); if (numTasks == null) return Collections.emptyList(); List<ConnectorTaskId> taskIds = new ArrayList<>(); for (int taskIndex = 0; taskIndex < numTasks; taskIndex++) { ConnectorTaskId taskId = new ConnectorTaskId(connectorName, taskIndex); taskIds.add(taskId); } return taskIds; } | /**
* Get the current set of task IDs for the specified connector.
* @param connectorName the name of the connector to look up task configs for
* @return the current set of connector task IDs
*/ | Get the current set of task IDs for the specified connector | tasks | {
"repo_name": "sslavic/kafka",
"path": "connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/ClusterConfigState.java",
"license": "apache-2.0",
"size": 11728
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.apache.kafka.connect.util.ConnectorTaskId"
] | import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.apache.kafka.connect.util.ConnectorTaskId; | import java.util.*; import org.apache.kafka.connect.util.*; | [
"java.util",
"org.apache.kafka"
] | java.util; org.apache.kafka; | 2,437,972 |
public Map<String, Integer> getElementsWithIDs() {
return m_idAttributes;
} | Map<String, Integer> function() { return m_idAttributes; } | /**
* Return the attributes map.
* @return the attributes map.
*/ | Return the attributes map | getElementsWithIDs | {
"repo_name": "shun634501730/java_source_cn",
"path": "src_en/com/sun/org/apache/xalan/internal/xsltc/dom/SAXImpl.java",
"license": "apache-2.0",
"size": 61260
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 111,652 |
public Integer addBanner(Map params) throws XmlRpcException {
return (Integer) execute(ADD_BANNER_METHOD, params);
} | Integer function(Map params) throws XmlRpcException { return (Integer) execute(ADD_BANNER_METHOD, params); } | /**
* Adds the banner.
*
* @param params the params
*
* @return the integer
*
* @throws XmlRpcException the xml rpc exception
*/ | Adds the banner | addBanner | {
"repo_name": "AdRiverSoftware/AdServerProjectWebnock",
"path": "lib/xmlrpc/java/openx-api-v2/ApacheLib3/org/openads/proxy/BannerService.java",
"license": "gpl-2.0",
"size": 7973
} | [
"java.util.Map",
"org.apache.xmlrpc.XmlRpcException"
] | import java.util.Map; import org.apache.xmlrpc.XmlRpcException; | import java.util.*; import org.apache.xmlrpc.*; | [
"java.util",
"org.apache.xmlrpc"
] | java.util; org.apache.xmlrpc; | 783,489 |
private static int getStatusValue(final INaviInstruction startInstruction,
final String trackedRegister, final CInstructionResult result) {
if (startInstruction == result.getInstruction()) {
return 0;
} else if (result.undefinesAll()) {
return 1;
} else if (result.getUndefinedRegisters().contains(trackedRegister)) {
return 2;
} else if (result.undefinesSome()) {
return 3;
} else {
return 4;
}
} | static int function(final INaviInstruction startInstruction, final String trackedRegister, final CInstructionResult result) { if (startInstruction == result.getInstruction()) { return 0; } else if (result.undefinesAll()) { return 1; } else if (result.getUndefinedRegisters().contains(trackedRegister)) { return 2; } else if (result.undefinesSome()) { return 3; } else { return 4; } } | /**
* Converts an instruction result into an integer number that represents its status.
*
* @param startInstruction The start instruction of the register tracking operation.
* @param trackedRegister The register tracked by the register tracking operation.
* @param result The instruction result to convert.
* @return A numerical value that represents the type of the instruction result.
*/ | Converts an instruction result into an integer number that represents its status | getStatusValue | {
"repo_name": "AmesianX/binnavi",
"path": "src/main/java/com/google/security/zynamics/binnavi/Gui/GraphWindows/BottomPanel/RegisterTracker/CResultColumnWrapper.java",
"license": "apache-2.0",
"size": 4000
} | [
"com.google.security.zynamics.binnavi.disassembly.INaviInstruction"
] | import com.google.security.zynamics.binnavi.disassembly.INaviInstruction; | import com.google.security.zynamics.binnavi.disassembly.*; | [
"com.google.security"
] | com.google.security; | 2,863,954 |
public static boolean isWithinPastHour(Date time1, Date time2) {
Calendar calendarTime1 = Calendar.getInstance(SystemParams.TIME_ZONE);
calendarTime1.setTime(time1);
Calendar calendarTime2 = Calendar.getInstance(SystemParams.TIME_ZONE);
calendarTime2.setTime(time2);
long time1Millis = calendarTime1.getTimeInMillis();
long time2Millis = calendarTime2.getTimeInMillis();
long differenceBetweenNowAndCal = (time2Millis - time1Millis) / (60 * 60 * 1000);
return differenceBetweenNowAndCal == 0 && calendarTime2.after(calendarTime1);
} | static boolean function(Date time1, Date time2) { Calendar calendarTime1 = Calendar.getInstance(SystemParams.TIME_ZONE); calendarTime1.setTime(time1); Calendar calendarTime2 = Calendar.getInstance(SystemParams.TIME_ZONE); calendarTime2.setTime(time2); long time1Millis = calendarTime1.getTimeInMillis(); long time2Millis = calendarTime2.getTimeInMillis(); long differenceBetweenNowAndCal = (time2Millis - time1Millis) / (60 * 60 * 1000); return differenceBetweenNowAndCal == 0 && calendarTime2.after(calendarTime1); } | /**
 * Returns true if {@code time1} falls within the past hour of {@code time2},
 * that is, at most one hour before time2 but strictly earlier than time2.
* Precision is at millisecond level.
 */ | Returns true if time1 falls within the past hour of time2, that is, at most one hour before time2 but strictly earlier than time2. Precision is at millisecond level | isWithinPastHour | {
"repo_name": "thenaesh/teammates",
"path": "src/main/java/teammates/common/util/TimeHelper.java",
"license": "gpl-2.0",
"size": 17332
} | [
"java.util.Calendar",
"java.util.Date"
] | import java.util.Calendar; import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 2,377,159 |
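A small sketch of the window semantics documented above, using plain java.util.Calendar values (made-up times; TimeHelper is assumed to be importable):

    Calendar base = Calendar.getInstance();
    Calendar thirtyMinutesEarlier = (Calendar) base.clone();
    thirtyMinutesEarlier.add(Calendar.MINUTE, -30);
    // true: 30 minutes before 'base' lies within the past hour of 'base'
    boolean inWindow = TimeHelper.isWithinPastHour(thirtyMinutesEarlier.getTime(), base.getTime());
    // false: the first argument must be earlier than the second
    boolean reversed = TimeHelper.isWithinPastHour(base.getTime(), thirtyMinutesEarlier.getTime());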
public static TextArea factoryTextArea(String pCaption, String pInputPrompt) {
return factoryTextArea(pCaption, pInputPrompt, 1024);
}
| static TextArea function(String pCaption, String pInputPrompt) { return factoryTextArea(pCaption, pInputPrompt, 1024); } | /**
* Create a text area with the provided properties. The size is set to
* SizeFull and the text area is set to 'immediate' (show changed
* immediately to the user). The max. string length for the returned area is
* 1024 characters.
*
* @param pCaption The caption
* @param pInputPrompt The input prompt shown if no value is set
*
* @return The text area
*/ | Create a text area with the provided properties. The size is set to SizeFull and the text area is set to 'immediate' (show changed immediately to the user). The max. string length for the returned area is 1024 characters | factoryTextArea | {
"repo_name": "kit-data-manager/base",
"path": "UserInterface/UICommons7/src/main/java/edu/kit/dama/ui/commons/util/UIUtils7.java",
"license": "apache-2.0",
"size": 43027
} | [
"com.vaadin.ui.TextArea"
] | import com.vaadin.ui.TextArea; | import com.vaadin.ui.*; | [
"com.vaadin.ui"
] | com.vaadin.ui; | 437,861 |
public XObject operate(XObject left, XObject right)
throws javax.xml.transform.TransformerException
{
return new XNumber(left.num() % right.num());
}
| XObject function(XObject left, XObject right) throws javax.xml.transform.TransformerException { return new XNumber(left.num() % right.num()); } | /**
* Apply the operation to two operands, and return the result.
*
*
* @param left non-null reference to the evaluated left operand.
* @param right non-null reference to the evaluated right operand.
*
* @return non-null reference to the XObject that represents the result of the operation.
*
* @throws javax.xml.transform.TransformerException
*/ | Apply the operation to two operands, and return the result | operate | {
"repo_name": "srnsw/xena",
"path": "xena/ext/src/xalan-j_2_7_1/src/org/apache/xpath/operations/Mod.java",
"license": "gpl-3.0",
"size": 2148
} | [
"org.apache.xpath.objects.XNumber",
"org.apache.xpath.objects.XObject"
] | import org.apache.xpath.objects.XNumber; import org.apache.xpath.objects.XObject; | import org.apache.xpath.objects.*; | [
"org.apache.xpath"
] | org.apache.xpath; | 240,127 |
protected void _fireTreeNodesRemoved(Object parent, int[] itemIndexes, Object[] items)
{
Object[] path = this.getPath(parent);
TreeModelEvent event = new TreeModelEvent(this, path, itemIndexes, items);
for (TreeModelListener listener : this._listeners)
listener.treeNodesRemoved(event);
}
| void function(Object parent, int[] itemIndexes, Object[] items) { Object[] path = this.getPath(parent); TreeModelEvent event = new TreeModelEvent(this, path, itemIndexes, items); for (TreeModelListener listener : this._listeners) listener.treeNodesRemoved(event); } | /**
* Fires a 'tree nodes removed' event for any
* interested listeners.
*
* @param parent The parent of the inserted nodes
* @param itemIndexes An array of indices indicating
* which items were removed
*/ | Fires a 'tree nodes removed' event for any interested listeners | _fireTreeNodesRemoved | {
"repo_name": "goc9000/UniArchive",
"path": "src/uniarchive/widgets/ArchiveGroupsView.java",
"license": "gpl-3.0",
"size": 55106
} | [
"javax.swing.event.TreeModelEvent",
"javax.swing.event.TreeModelListener"
] | import javax.swing.event.TreeModelEvent; import javax.swing.event.TreeModelListener; | import javax.swing.event.*; | [
"javax.swing"
] | javax.swing; | 502,400 |
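For context, this is the listener side of the event fired above; it is a generic Swing TreeModelListener sketch, not code taken from ArchiveGroupsView:

    TreeModelListener listener = new TreeModelListener() {
        @Override public void treeNodesRemoved(TreeModelEvent e) {
            System.out.println("Removed " + e.getChildren().length + " node(s) under " + e.getTreePath());
        }
        @Override public void treeNodesInserted(TreeModelEvent e) { }
        @Override public void treeNodesChanged(TreeModelEvent e) { }
        @Override public void treeStructureChanged(TreeModelEvent e) { }
    };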
private void setupResponse(ByteArrayOutputStream response, Call call, Throwable t, String error)
throws IOException {
if (response != null) response.reset();
call.setResponse(null, null, t, error);
} | void function(ByteArrayOutputStream response, Call call, Throwable t, String error) throws IOException { if (response != null) response.reset(); call.setResponse(null, null, t, error); } | /**
* Setup response for the IPC Call.
*
* @param response buffer to serialize the response into
* @param call {@link Call} to which we are setting up the response
* @param error error message, if the call failed
* @param t
* @throws IOException
*/ | Setup response for the IPC Call | setupResponse | {
"repo_name": "francisliu/hbase_namespace",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java",
"license": "apache-2.0",
"size": 96031
} | [
"java.io.ByteArrayOutputStream",
"java.io.IOException"
] | import java.io.ByteArrayOutputStream; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,128,837 |
public static void log(@NotNull CharSequence tag, @Nullable CharSequence message) {
if (StagProcessor.DEBUG) {
//noinspection UseOfSystemOutOrSystemErr
System.out.println(TAG + ":" + tag + ": " + message);
}
} | static void function(@NotNull CharSequence tag, @Nullable CharSequence message) { if (StagProcessor.DEBUG) { System.out.println(TAG + ":" + tag + STR + message); } } | /**
* Log the provided message with an additional log tag.
*
* @param tag the tag to add to the log.
* @param message the message to log.
*/ | Log the provided message with an additional log tag | log | {
"repo_name": "Flipkart/stag-java",
"path": "stag-library-compiler/src/main/java/com/vimeo/stag/processor/utils/DebugLog.java",
"license": "mit",
"size": 2409
} | [
"com.vimeo.stag.processor.StagProcessor",
"org.jetbrains.annotations.NotNull",
"org.jetbrains.annotations.Nullable"
] | import com.vimeo.stag.processor.StagProcessor; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; | import com.vimeo.stag.processor.*; import org.jetbrains.annotations.*; | [
"com.vimeo.stag",
"org.jetbrains.annotations"
] | com.vimeo.stag; org.jetbrains.annotations; | 2,351,216 |
private void assertMonitorNotifyAllEvent(final MonitorNotifyAllEvent record) {
Assert.assertEquals("'timestamp' value assertion failed.", record.getTimestamp(), PROPERTY_TIMESTAMP);
Assert.assertEquals("'traceId' value assertion failed.", record.getTraceId(), PROPERTY_TRACE_ID);
Assert.assertEquals("'orderIndex' value assertion failed.", record.getOrderIndex(), PROPERTY_ORDER_INDEX);
Assert.assertEquals("'lockId' value assertion failed.", record.getLockId(), PROPERTY_LOCK_ID);
}
| void function(final MonitorNotifyAllEvent record) { Assert.assertEquals(STR, record.getTimestamp(), PROPERTY_TIMESTAMP); Assert.assertEquals(STR, record.getTraceId(), PROPERTY_TRACE_ID); Assert.assertEquals(STR, record.getOrderIndex(), PROPERTY_ORDER_INDEX); Assert.assertEquals(STR, record.getLockId(), PROPERTY_LOCK_ID); } | /**
* Assertions for MonitorNotifyAllEvent.
*/ | Assertions for MonitorNotifyAllEvent | assertMonitorNotifyAllEvent | {
"repo_name": "HaStr/kieker",
"path": "kieker-common/test-gen/kieker/test/common/junit/api/flow/trace/concurrency/monitor/TestMonitorNotifyAllEventPropertyOrder.java",
"license": "apache-2.0",
"size": 5605
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 1,051,263 |
private DrawerLayout.LayoutParams getDrawerParams() {
DrawerLayout.LayoutParams params = (DrawerLayout.LayoutParams) mScrimInsetsFrameLayout.getLayoutParams();
int width = getResources().getDisplayMetrics().widthPixels;
boolean tablet = Utils.isTablet(this);
int actionBarSize = Utils.getActionBarHeight(this);
if (Utils.getScreenOrientation(this) == Configuration.ORIENTATION_LANDSCAPE) {
params.width = width / 2;
if (tablet)
params.width -= actionBarSize + (35 * getResources().getDisplayMetrics().density);
} else params.width = tablet ? width / 2 : width - actionBarSize;
return params;
} | DrawerLayout.LayoutParams function() { DrawerLayout.LayoutParams params = (DrawerLayout.LayoutParams) mScrimInsetsFrameLayout.getLayoutParams(); int width = getResources().getDisplayMetrics().widthPixels; boolean tablet = Utils.isTablet(this); int actionBarSize = Utils.getActionBarHeight(this); if (Utils.getScreenOrientation(this) == Configuration.ORIENTATION_LANDSCAPE) { params.width = width / 2; if (tablet) params.width -= actionBarSize + (35 * getResources().getDisplayMetrics().density); } else params.width = tablet ? width / 2 : width - actionBarSize; return params; } | /**
* A function to calculate the width of the Navigation Drawer
* Phones and Tablets have different sizes
*
* @return the LayoutParams for the Drawer
*/ | A function to calculate the width of the Navigation Drawer Phones and Tablets have different sizes | getDrawerParams | {
"repo_name": "Eliminater74/BlackBox-Toolkit",
"path": "app/src/main/java/com/kunalkene1797/blackboxkit/MainActivity.java",
"license": "apache-2.0",
"size": 17197
} | [
"android.content.res.Configuration",
"android.support.v4.widget.DrawerLayout",
"com.kunalkene1797.blackboxkit.utils.Utils"
] | import android.content.res.Configuration; import android.support.v4.widget.DrawerLayout; import com.kunalkene1797.blackboxkit.utils.Utils; | import android.content.res.*; import android.support.v4.widget.*; import com.kunalkene1797.blackboxkit.utils.*; | [
"android.content",
"android.support",
"com.kunalkene1797.blackboxkit"
] | android.content; android.support; com.kunalkene1797.blackboxkit; | 2,093,113 |
boolean pushAggregation(Aggregation aggregation); | boolean pushAggregation(Aggregation aggregation); | /**
* Pushes down Aggregation to datasource. The order of the datasource scan output columns should
* be: grouping columns, aggregate columns (in the same order as the aggregate functions in
* the given Aggregation).
*
* @return true if the aggregation can be pushed down to datasource, false otherwise.
*/ | Pushes down Aggregation to datasource. The order of the datasource scan output columns should be: grouping columns, aggregate columns (in the same order as the aggregate functions in the given Aggregation) | pushAggregation | {
"repo_name": "jiangxb1987/spark",
"path": "sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/SupportsPushDownAggregates.java",
"license": "apache-2.0",
"size": 2536
} | [
"org.apache.spark.sql.connector.expressions.Aggregation"
] | import org.apache.spark.sql.connector.expressions.Aggregation; | import org.apache.spark.sql.connector.expressions.*; | [
"org.apache.spark"
] | org.apache.spark; | 363,132 |
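A hedged sketch of how a connector might honour the contract documented above; the class, the `pushed` field, and the capability hook are invented for illustration and are not part of the Spark API:

    import org.apache.spark.sql.connector.expressions.Aggregation;
    import org.apache.spark.sql.connector.read.SupportsPushDownAggregates;

    // Abstract on purpose: the remaining ScanBuilder methods are out of scope for this sketch.
    abstract class PushdownAwareScanBuilder implements SupportsPushDownAggregates {
        private Aggregation pushed;   // remembered so the scan can emit the pushed-down columns

        @Override
        public boolean pushAggregation(Aggregation aggregation) {
            if (!canComputeNatively(aggregation)) {   // hypothetical capability check
                return false;                         // Spark then aggregates on its own side
            }
            this.pushed = aggregation;
            // The scan output must now be: grouping columns first, then aggregate columns,
            // in the same order as the aggregate functions in 'aggregation'.
            return true;
        }

        // Placeholder decision hook for this sketch.
        protected abstract boolean canComputeNatively(Aggregation aggregation);
    }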
private static Charset extractCharset(Metadata headers) {
String contentType = headers.get(GrpcUtil.CONTENT_TYPE_KEY);
if (contentType != null) {
String[] split = contentType.split("charset=");
try {
return Charset.forName(split[split.length - 1].trim());
} catch (Exception t) {
// Ignore and assume UTF-8
}
}
return Charsets.UTF_8;
} | static Charset function(Metadata headers) { String contentType = headers.get(GrpcUtil.CONTENT_TYPE_KEY); if (contentType != null) { String[] split = contentType.split(STR); try { return Charset.forName(split[split.length - 1].trim()); } catch (Exception t) { } } return Charsets.UTF_8; } | /**
* Inspect the raw metadata and figure out what charset is being used.
*/ | Inspect the raw metadata and figure out what charset is being used | extractCharset | {
"repo_name": "wrwg/grpc-java",
"path": "core/src/main/java/io/grpc/internal/Http2ClientStream.java",
"license": "bsd-3-clause",
"size": 8813
} | [
"com.google.common.base.Charsets",
"io.grpc.Metadata",
"java.nio.charset.Charset"
] | import com.google.common.base.Charsets; import io.grpc.Metadata; import java.nio.charset.Charset; | import com.google.common.base.*; import io.grpc.*; import java.nio.charset.*; | [
"com.google.common",
"io.grpc",
"java.nio"
] | com.google.common; io.grpc; java.nio; | 233,618 |
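A standalone illustration of the parsing rule described above (take whatever follows the last "charset=", fall back to UTF-8); it does not use the gRPC Metadata class:

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    String contentType = "application/grpc; charset=ISO-8859-1";   // made-up header value
    String[] split = contentType.split("charset=");
    Charset cs;
    try {
        cs = Charset.forName(split[split.length - 1].trim());
    } catch (Exception e) {
        cs = StandardCharsets.UTF_8;   // same fallback as the method (Guava's Charsets.UTF_8)
    }
    System.out.println(cs);   // prints ISO-8859-1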
private List<KnownApplicationInfo> getAllBookmarksOfChrome() {
String columnTitle = "title";
String columnURL = "url";
String columnFavicon = "favicon";
String selectionBookmark = "bookmark";
ContentResolver resolver = getActivity().getContentResolver();
Uri uri = Uri.parse(getString(R.string.chrome_bookmark_provider_url));
Cursor c = resolver.query(uri, new String[] { columnTitle, columnURL, columnFavicon }, selectionBookmark, null,
null);
List<KnownApplicationInfo> bookmarks = new ArrayList<>();
if (c.moveToFirst()) {
do {
final int titleIndex = c.getColumnIndex(columnTitle);
final int urlIndex = c.getColumnIndex(columnURL);
final int iconIndex = c.getColumnIndex(columnFavicon);
if (titleIndex < 0 || urlIndex < 0 || iconIndex < 0) {
continue;
}
final String title = c.getString(titleIndex);
final String url = c.getString(urlIndex);
final byte[] icon = c.getBlob(iconIndex);
KnownApplicationInfo info = new BookmarkInfo(title, url, icon);
if (info.getOrigin() != null) {
bookmarks.add(info);
}
} while (c.moveToNext());
}
c.close();
return bookmarks;
} | List<KnownApplicationInfo> function() { String columnTitle = "title"; String columnURL = "url"; String columnFavicon = STR; String selectionBookmark = STR; ContentResolver resolver = getActivity().getContentResolver(); Uri uri = Uri.parse(getString(R.string.chrome_bookmark_provider_url)); Cursor c = resolver.query(uri, new String[] { columnTitle, columnURL, columnFavicon }, selectionBookmark, null, null); List<KnownApplicationInfo> bookmarks = new ArrayList<>(); if (c.moveToFirst()) { do { final int titleIndex = c.getColumnIndex(columnTitle); final int urlIndex = c.getColumnIndex(columnURL); final int iconIndex = c.getColumnIndex(columnFavicon); if (titleIndex < 0 urlIndex < 0 iconIndex < 0) { continue; } final String title = c.getString(titleIndex); final String url = c.getString(urlIndex); final byte[] icon = c.getBlob(iconIndex); KnownApplicationInfo info = new BookmarkInfo(title, url, icon); if (info.getOrigin() != null) { bookmarks.add(info); } } while (c.moveToNext()); } c.close(); return bookmarks; } | /**
* Gets all bookmarks of Chrome for Android.
*
* This method works properly only in SDK version 22 or below.
*
* @return all bookmarks of Chrome for Android
*/ | Gets all bookmarks of Chrome for Android. This method works properly only in SDK version 22 or below | getAllBookmarksOfChrome | {
"repo_name": "TakayukiHoshi1984/DeviceConnect-Android",
"path": "dConnectManager/dConnectManager/dconnect-manager-app/src/main/java/org/deviceconnect/android/manager/setting/AllowlistFragment.java",
"license": "mit",
"size": 25579
} | [
"android.content.ContentResolver",
"android.database.Cursor",
"android.net.Uri",
"java.util.ArrayList",
"java.util.List"
] | import android.content.ContentResolver; import android.database.Cursor; import android.net.Uri; import java.util.ArrayList; import java.util.List; | import android.content.*; import android.database.*; import android.net.*; import java.util.*; | [
"android.content",
"android.database",
"android.net",
"java.util"
] | android.content; android.database; android.net; java.util; | 1,226,896 |
private int getSaveFile() throws IOException {
JFileChooser chooser = new JFileChooser();
chooser.setFileFilter(getFileFilter());
if (file != null && ! FileSystemUtil.isChildOf( FileSystem.settings().getLocalCacheDir(), file ) ) {
chooser.setSelectedFile(file);
}
int r = chooser.showSaveDialog(editor);
if (r == JFileChooser.APPROVE_OPTION) {
file = chooser.getSelectedFile();
if (!(file.toString().endsWith(".jy") || file.toString().endsWith(".py") || file.toString().endsWith(".jyds"))) {
file = new File(file.toString() + ".jy");
//TODO NOW: .jyds
}
}
return r;
} | int function() throws IOException { JFileChooser chooser = new JFileChooser(); chooser.setFileFilter(getFileFilter()); if (file != null && ! FileSystemUtil.isChildOf( FileSystem.settings().getLocalCacheDir(), file ) ) { chooser.setSelectedFile(file); } int r = chooser.showSaveDialog(editor); if (r == JFileChooser.APPROVE_OPTION) { file = chooser.getSelectedFile(); if (!(file.toString().endsWith(".jy") file.toString().endsWith(".py") file.toString().endsWith(".jyds"))) { file = new File(file.toString() + ".jy"); } } return r; } | /**
* show the save as dialog and return the result. This was recently public, but
* no one appears to be using it and it seems like private is more appropriate.
* @return the result of showSaveDialog, e.g. JFileChooser.APPROVE_OPTION, etc.
* @throws IOException
*/ | show the save as dialog and return the result. This was recently public, but no one appears to be using it and it seems like private is more appropriate | getSaveFile | {
"repo_name": "autoplot/app",
"path": "JythonSupport/src/org/autoplot/jythonsupport/ui/ScriptPanelSupport.java",
"license": "gpl-2.0",
"size": 10152
} | [
"java.io.File",
"java.io.IOException",
"javax.swing.JFileChooser",
"org.autoplot.datasource.FileSystemUtil",
"org.das2.util.filesystem.FileSystem"
] | import java.io.File; import java.io.IOException; import javax.swing.JFileChooser; import org.autoplot.datasource.FileSystemUtil; import org.das2.util.filesystem.FileSystem; | import java.io.*; import javax.swing.*; import org.autoplot.datasource.*; import org.das2.util.filesystem.*; | [
"java.io",
"javax.swing",
"org.autoplot.datasource",
"org.das2.util"
] | java.io; javax.swing; org.autoplot.datasource; org.das2.util; | 1,230,810 |
public YangString getNaptrFlagsValue() throws JNCException {
return (YangString)getValue("naptr-flags");
} | YangString function() throws JNCException { return (YangString)getValue(STR); } | /**
* Gets the value for child leaf "naptr-flags".
* @return The value of the leaf.
*/ | Gets the value for child leaf "naptr-flags" | getNaptrFlagsValue | {
"repo_name": "jnpr-shinma/yangfile",
"path": "hitel/src/hctaEpc/mmeSgsn/dns/dynamicCacheOper/Naptr.java",
"license": "apache-2.0",
"size": 21066
} | [
"com.tailf.jnc.YangString"
] | import com.tailf.jnc.YangString; | import com.tailf.jnc.*; | [
"com.tailf.jnc"
] | com.tailf.jnc; | 405,845 |
public CallTarget getCallTarget() {
return getMethodVersion().getCallTarget();
} | CallTarget function() { return getMethodVersion().getCallTarget(); } | /**
* Ensure any callTarget is called immediately before a BCI is advanced, or it could violate the
* specs on class init.
*/ | Ensure any callTarget is called immediately before a BCI is advanced, or it could violate the specs on class init | getCallTarget | {
"repo_name": "smarr/Truffle",
"path": "espresso/src/com.oracle.truffle.espresso/src/com/oracle/truffle/espresso/impl/Method.java",
"license": "gpl-2.0",
"size": 64397
} | [
"com.oracle.truffle.api.CallTarget"
] | import com.oracle.truffle.api.CallTarget; | import com.oracle.truffle.api.*; | [
"com.oracle.truffle"
] | com.oracle.truffle; | 366,727 |
public void deinitialize() {
((NodeEventTarget) eventTarget).removeEventListenerNS
(eventNamespaceURI, eventType, this, false);
}
// EventListener ///////////////////////////////////////////////////////// | void function() { ((NodeEventTarget) eventTarget).removeEventListenerNS (eventNamespaceURI, eventType, this, false); } | /**
* Deinitializes this timing specifier by removing any event listeners.
*/ | Deinitializes this timing specifier by removing any event listeners | deinitialize | {
"repo_name": "shyamalschandra/flex-sdk",
"path": "modules/thirdparty/batik/sources/org/apache/flex/forks/batik/anim/timing/EventbaseTimingSpecifier.java",
"license": "apache-2.0",
"size": 4109
} | [
"org.apache.flex.forks.batik.dom.events.NodeEventTarget"
] | import org.apache.flex.forks.batik.dom.events.NodeEventTarget; | import org.apache.flex.forks.batik.dom.events.*; | [
"org.apache.flex"
] | org.apache.flex; | 2,128,785 |
Struct sct=new StructImpl(Struct.TYPE_LINKED);
if(test==null)test=new HashSet();
// Fields
Field[] fields = clazz.getFields();
Field field;
for(int i=0;i<fields.length;i++){
field=fields[i];
if(obj!=null || (field.getModifiers()&Modifier.STATIC)>0)
try {
sct.setEL(field.getName(), testRecusrion(test,field.get(obj)));
} catch (Exception e) {
e.printStackTrace();
}
}
if(obj !=null){
// setters
Method[] setters=Reflector.getSetters(clazz);
for(int i=0;i<setters.length;i++){
sct.setEL(setters[i].getName().substring(3), NULL);
}
// getters
Method[] getters=Reflector.getGetters(clazz);
for(int i=0;i<getters.length;i++){
try {
sct.setEL(getters[i].getName().substring(3), testRecusrion(test,getters[i].invoke(obj, ArrayUtil.OBJECT_EMPTY)));
}
catch (Exception e) {}
}
}
test.add(clazz);
_serializeStruct(pc,test,sct, sb, serializeQueryByColumns, true,done);
}
| Struct sct=new StructImpl(Struct.TYPE_LINKED); if(test==null)test=new HashSet(); Field[] fields = clazz.getFields(); Field field; for(int i=0;i<fields.length;i++){ field=fields[i]; if(obj!=null (field.getModifiers()&Modifier.STATIC)>0) try { sct.setEL(field.getName(), testRecusrion(test,field.get(obj))); } catch (Exception e) { e.printStackTrace(); } } if(obj !=null){ Method[] setters=Reflector.getSetters(clazz); for(int i=0;i<setters.length;i++){ sct.setEL(setters[i].getName().substring(3), NULL); } Method[] getters=Reflector.getGetters(clazz); for(int i=0;i<getters.length;i++){ try { sct.setEL(getters[i].getName().substring(3), testRecusrion(test,getters[i].invoke(obj, ArrayUtil.OBJECT_EMPTY))); } catch (Exception e) {} } } test.add(clazz); _serializeStruct(pc,test,sct, sb, serializeQueryByColumns, true,done); } | /**
* serialize Serializable class
* @param serializable
* @param sb
* @param serializeQueryByColumns
* @param done
* @throws ConverterException
*/ | serialize Serializable class | _serializeClass | {
"repo_name": "paulklinkenberg/Lucee4",
"path": "lucee-java/lucee-core/src/lucee/runtime/converter/JSONConverter.java",
"license": "lgpl-2.1",
"size": 20288
} | [
"java.lang.reflect.Field",
"java.lang.reflect.Method",
"java.lang.reflect.Modifier",
"java.util.HashSet"
] | import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.HashSet; | import java.lang.reflect.*; import java.util.*; | [
"java.lang",
"java.util"
] | java.lang; java.util; | 1,213,182 |
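A stripped-down, generic illustration of the getter-reflection idea used above; it is not Lucee-specific and simply dumps the results of zero-argument getXxx() methods into a map:

    import java.lang.reflect.Method;
    import java.util.HashMap;
    import java.util.Map;

    final class GetterDump {
        // Same shape of check as Reflector.getGetters(): public getXxx() with no arguments.
        static Map<String, Object> gettersToMap(Object bean) throws Exception {
            Map<String, Object> out = new HashMap<>();
            for (Method m : bean.getClass().getMethods()) {
                if (m.getName().startsWith("get") && m.getParameterCount() == 0
                        && !m.getName().equals("getClass")) {
                    out.put(m.getName().substring(3), m.invoke(bean));
                }
            }
            return out;
        }
    }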
public Map<String,FieldsOperon> getEntityOperon(List<String> ids, List<String> fields) throws IOException, JsonClientException {
List<Object> args = new ArrayList<Object>();
args.add(ids);
args.add(fields);
TypeReference<List<Map<String,FieldsOperon>>> retType = new TypeReference<List<Map<String,FieldsOperon>>>() {};
List<Map<String,FieldsOperon>> res = caller.jsonrpcCall("CDMI_EntityAPI.get_entity_Operon", args, retType, true, false);
return res.get(0);
} | Map<String,FieldsOperon> function(List<String> ids, List<String> fields) throws IOException, JsonClientException { List<Object> args = new ArrayList<Object>(); args.add(ids); args.add(fields); TypeReference<List<Map<String,FieldsOperon>>> retType = new TypeReference<List<Map<String,FieldsOperon>>>() {}; List<Map<String,FieldsOperon>> res = caller.jsonrpcCall(STR, args, retType, true, false); return res.get(0); } | /**
* <p>Original spec-file function name: get_entity_Operon</p>
* <pre>
* It has the following fields:
* =over 4
* =back
* </pre>
* @param ids instance of list of String
* @param fields instance of list of String
* @return instance of mapping from String to type {@link us.kbase.cdmientityapi.FieldsOperon FieldsOperon} (original type "fields_Operon")
* @throws IOException if an IO exception occurs
* @throws JsonClientException if a JSON RPC exception occurs
*/ | Original spec-file function name: get_entity_Operon <code> It has the following fields: =over 4 =back </code> | getEntityOperon | {
"repo_name": "kbase/trees",
"path": "src/us/kbase/cdmientityapi/CDMIEntityAPIClient.java",
"license": "mit",
"size": 869221
} | [
"com.fasterxml.jackson.core.type.TypeReference",
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"java.util.Map",
"us.kbase.common.service.JsonClientException"
] | import com.fasterxml.jackson.core.type.TypeReference; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import us.kbase.common.service.JsonClientException; | import com.fasterxml.jackson.core.type.*; import java.io.*; import java.util.*; import us.kbase.common.service.*; | [
"com.fasterxml.jackson",
"java.io",
"java.util",
"us.kbase.common"
] | com.fasterxml.jackson; java.io; java.util; us.kbase.common; | 1,967,442 |
private void verifyAndAssignRoot()
throws InterruptedException, IOException, KeeperException {
long timeout = this.server.getConfiguration().
getLong("hbase.catalog.verification.timeout", 1000);
if (!this.server.getCatalogTracker().verifyRootRegionLocation(timeout)) {
this.services.getAssignmentManager().assignRoot();
} else if (serverName.equals(server.getCatalogTracker().getRootLocation())) {
throw new IOException("-ROOT- is onlined on the dead server "
+ serverName);
} else {
LOG.info("Skip assigning -ROOT-, because it is online on the "
+ server.getCatalogTracker().getRootLocation());
}
} | void function() throws InterruptedException, IOException, KeeperException { long timeout = this.server.getConfiguration(). getLong(STR, 1000); if (!this.server.getCatalogTracker().verifyRootRegionLocation(timeout)) { this.services.getAssignmentManager().assignRoot(); } else if (serverName.equals(server.getCatalogTracker().getRootLocation())) { throw new IOException(STR + serverName); } else { LOG.info(STR + server.getCatalogTracker().getRootLocation()); } } | /**
 * Before assigning the ROOT region, ensure it hasn't
 * already been assigned elsewhere.
 * <p>
 * Under some scenarios, the ROOT region can be opened twice, so it may appear online
 * on two regionservers at the same time.
 * If the ROOT region has already been assigned, the operation can be canceled.
* @throws InterruptedException
* @throws IOException
* @throws KeeperException
 */ | Before assigning the ROOT region, ensure it hasn't already been assigned elsewhere. Under some scenarios, the ROOT region can be opened twice, so it may appear online on two regionservers at the same time. If the ROOT region has already been assigned, the operation can be canceled | verifyAndAssignRoot | {
"repo_name": "zqxjjj/NobidaBase",
"path": "target/hbase-0.94.9/hbase-0.94.9/src/main/java/org/apache/hadoop/hbase/master/handler/MetaServerShutdownHandler.java",
"license": "apache-2.0",
"size": 7239
} | [
"java.io.IOException",
"org.apache.zookeeper.KeeperException"
] | import java.io.IOException; import org.apache.zookeeper.KeeperException; | import java.io.*; import org.apache.zookeeper.*; | [
"java.io",
"org.apache.zookeeper"
] | java.io; org.apache.zookeeper; | 2,310,331 |
public IMediaGenerator getSource()
{
return mSource;
} | IMediaGenerator function() { return mSource; } | /**
* Implementation of {@link IEvent#getSource()}.
*/ | Implementation of <code>IEvent#getSource()</code> | getSource | {
"repo_name": "luisvt/xuggle-xuggler",
"path": "src/com/xuggle/mediatool/event/AEventMixin.java",
"license": "gpl-3.0",
"size": 1523
} | [
"com.xuggle.mediatool.IMediaGenerator"
] | import com.xuggle.mediatool.IMediaGenerator; | import com.xuggle.mediatool.*; | [
"com.xuggle.mediatool"
] | com.xuggle.mediatool; | 493,367 |
public final List<AnalystClassItem> getAnalyzedClassMembers() {
final List<String> sorted = new ArrayList<String>();
sorted.addAll(this.classMap.keySet());
Collections.sort(sorted);
final List<AnalystClassItem> result = new ArrayList<AnalystClassItem>();
for (final String str : sorted) {
result.add(this.classMap.get(str));
}
return result;
} | final List<AnalystClassItem> function() { final List<String> sorted = new ArrayList<String>(); sorted.addAll(this.classMap.keySet()); Collections.sort(sorted); final List<AnalystClassItem> result = new ArrayList<AnalystClassItem>(); for (final String str : sorted) { result.add(this.classMap.get(str)); } return result; } | /**
* Get the class members.
* @return The class members.
*/ | Get the class members | getAnalyzedClassMembers | {
"repo_name": "larhoy/SentimentProjectV2",
"path": "SentimentAnalysisV2/encog-core-3.1.0/src/main/java/org/encog/app/analyst/analyze/AnalyzedField.java",
"license": "mit",
"size": 6688
} | [
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"org.encog.app.analyst.script.AnalystClassItem"
] | import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.encog.app.analyst.script.AnalystClassItem; | import java.util.*; import org.encog.app.analyst.script.*; | [
"java.util",
"org.encog.app"
] | java.util; org.encog.app; | 2,322,647 |
public final ExecRow getLastRow()
throws StandardException
{
throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "last");
} | final ExecRow function() throws StandardException { throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "last"); } | /**
* Returns the last row from the query, and returns NULL when there
* are no rows.
*
* @return The last row, or NULL if no rows.
*
* @exception StandardException Thrown on failure
* @see Row
*/ | Returns the last row from the query, and returns NULL when there are no rows | getLastRow | {
"repo_name": "SnappyDataInc/snappy-store",
"path": "gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/execute/NoRowsResultSetImpl.java",
"license": "apache-2.0",
"size": 23363
} | [
"com.pivotal.gemfirexd.internal.iapi.error.StandardException",
"com.pivotal.gemfirexd.internal.iapi.reference.SQLState",
"com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow"
] | import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.reference.SQLState; import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow; | import com.pivotal.gemfirexd.internal.iapi.error.*; import com.pivotal.gemfirexd.internal.iapi.reference.*; import com.pivotal.gemfirexd.internal.iapi.sql.execute.*; | [
"com.pivotal.gemfirexd"
] | com.pivotal.gemfirexd; | 1,064,482 |
public void unloadDocument(Document doc) {
return;
}
protected List<Document> supportList = null;
protected class VerboseList extends AbstractList implements Serializable {
VerboseList() {
data = new ArrayList();
} | void function(Document doc) { return; } protected List<Document> supportList = null; protected class VerboseList extends AbstractList implements Serializable { VerboseList() { data = new ArrayList(); } | /**
* This method does not make sense for transient corpora, so it does
* nothing.
*/ | This method does not make sense for transient corpora, so it does nothing | unloadDocument | {
"repo_name": "liuhongchao/GATE_Developer_7.0",
"path": "src/gate/corpora/CorpusImpl.java",
"license": "lgpl-3.0",
"size": 30225
} | [
"java.io.Serializable",
"java.util.AbstractList",
"java.util.ArrayList",
"java.util.List"
] | import java.io.Serializable; import java.util.AbstractList; import java.util.ArrayList; import java.util.List; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 2,785,550 |
ServerLocator addIncomingInterceptor(Interceptor interceptor); | ServerLocator addIncomingInterceptor(Interceptor interceptor); | /**
* Adds an interceptor which will be executed <em>after packets are received from the server</em>.
*
* @param interceptor an Interceptor
* @return this ServerLocator
*/ | Adds an interceptor which will be executed after packets are received from the server | addIncomingInterceptor | {
"repo_name": "mnovak1/activemq-artemis",
"path": "artemis-core-client/src/main/java/org/apache/activemq/artemis/api/core/client/ServerLocator.java",
"license": "apache-2.0",
"size": 30404
} | [
"org.apache.activemq.artemis.api.core.Interceptor"
] | import org.apache.activemq.artemis.api.core.Interceptor; | import org.apache.activemq.artemis.api.core.*; | [
"org.apache.activemq"
] | org.apache.activemq; | 323,657 |
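A minimal usage sketch, assuming an already-built ServerLocator named `locator` and Artemis's single-method Interceptor interface (so a lambda is enough):

    // Logs every packet received from the server; returning true lets processing continue.
    Interceptor loggingInterceptor = (packet, connection) -> {
        System.out.println("Received packet type " + packet.getType());
        return true;
    };
    locator.addIncomingInterceptor(loggingInterceptor);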
public static void deleteProductSpecificTenantData(String dataSourceName, String tableName, int tenantId) {
try {
TenantDataDeletionUtil.deleteProductSpecificTenantData(((DataSource) InitialContext.doLookup(dataSourceName)).
getConnection(), tableName, tenantId);
} catch (Exception e) {
throw new RuntimeException("Error in looking up data source: " + e.getMessage(), e);
}
} | static void function(String dataSourceName, String tableName, int tenantId) { try { TenantDataDeletionUtil.deleteProductSpecificTenantData(((DataSource) InitialContext.doLookup(dataSourceName)). getConnection(), tableName, tenantId); } catch (Exception e) { throw new RuntimeException(STR + e.getMessage(), e); } } | /**
* Delete tenant data specific to product from database.
*
* @param dataSourceName
* @param tableName
* @param tenantId
*/ | Delete tenant data specific to product from database | deleteProductSpecificTenantData | {
"repo_name": "GayanM/carbon-multitenancy",
"path": "components/tenant-mgt/org.wso2.carbon.tenant.mgt/src/main/java/org/wso2/carbon/tenant/mgt/util/TenantMgtUtil.java",
"license": "apache-2.0",
"size": 21423
} | [
"javax.naming.InitialContext",
"javax.sql.DataSource"
] | import javax.naming.InitialContext; import javax.sql.DataSource; | import javax.naming.*; import javax.sql.*; | [
"javax.naming",
"javax.sql"
] | javax.naming; javax.sql; | 1,716,678 |
public Vector<BandWidth> getBandwidths(boolean create) {
return bandwidthFields;
} | Vector<BandWidth> function(boolean create) { return bandwidthFields; } | /**
* Returns the Bandwidth of the specified type.
*
* @param create type of the Bandwidth to return
* @return the Bandwidth or null if undefined
*/ | Returns the Bandwidth of the specified type | getBandwidths | {
"repo_name": "darkmi/rtspproxy",
"path": "src/main/java/gov/nist/javax/sdp/MediaDescriptionImpl.java",
"license": "gpl-2.0",
"size": 18115
} | [
"java.util.Vector",
"javax.sdp.BandWidth"
] | import java.util.Vector; import javax.sdp.BandWidth; | import java.util.*; import javax.sdp.*; | [
"java.util",
"javax.sdp"
] | java.util; javax.sdp; | 319,520 |
@SuppressLint("NewApi")
protected TextView createDefaultTabView(Context context) {
TextView textView = new TextView(context);
textView.setGravity(Gravity.CENTER);
textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP);
textView.setTypeface(Typeface.DEFAULT_BOLD);
textView.setTextColor(Color.WHITE);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
// If we're running on Honeycomb or newer, then we can use the Theme's
// selectableItemBackground to ensure that the View has a pressed state
TypedValue outValue = new TypedValue();
getContext().getTheme().resolveAttribute(android.R.attr.selectableItemBackground,
outValue, true);
textView.setBackgroundResource(outValue.resourceId);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
// If we're running on ICS or newer, enable all-caps to match the Action Bar tab style
textView.setAllCaps(true);
}
int padding = (int) (TAB_VIEW_PADDING_DIPS * getResources().getDisplayMetrics().density);
textView.setPadding(padding, padding, padding, padding);
return textView;
} | @SuppressLint(STR) TextView function(Context context) { TextView textView = new TextView(context); textView.setGravity(Gravity.CENTER); textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP); textView.setTypeface(Typeface.DEFAULT_BOLD); textView.setTextColor(Color.WHITE); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { TypedValue outValue = new TypedValue(); getContext().getTheme().resolveAttribute(android.R.attr.selectableItemBackground, outValue, true); textView.setBackgroundResource(outValue.resourceId); } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { textView.setAllCaps(true); } int padding = (int) (TAB_VIEW_PADDING_DIPS * getResources().getDisplayMetrics().density); textView.setPadding(padding, padding, padding, padding); return textView; } | /**
* Create a default view to be used for tabs. This is called if a custom tab view is not set via
* {@link #setCustomTabView(int, int)}.
*/ | Create a default view to be used for tabs. This is called if a custom tab view is not set via <code>#setCustomTabView(int, int)</code> | createDefaultTabView | {
"repo_name": "kunalsaini141014/gomusic",
"path": "src/com/gomusic/app/helpers/view/SlidingTabLayout.java",
"license": "gpl-2.0",
"size": 11624
} | [
"android.annotation.SuppressLint",
"android.content.Context",
"android.graphics.Color",
"android.graphics.Typeface",
"android.os.Build",
"android.util.TypedValue",
"android.view.Gravity",
"android.widget.TextView"
] | import android.annotation.SuppressLint; import android.content.Context; import android.graphics.Color; import android.graphics.Typeface; import android.os.Build; import android.util.TypedValue; import android.view.Gravity; import android.widget.TextView; | import android.annotation.*; import android.content.*; import android.graphics.*; import android.os.*; import android.util.*; import android.view.*; import android.widget.*; | [
"android.annotation",
"android.content",
"android.graphics",
"android.os",
"android.util",
"android.view",
"android.widget"
] | android.annotation; android.content; android.graphics; android.os; android.util; android.view; android.widget; | 1,469,997 |
@Column(name="type")
public String getType() {
return this.type;
} | @Column(name="type") String function() { return this.type; } | /**
* standard getter for the attribute type.
*
* @return the type of the resource
*/ | standard getter for the attribute type | getType | {
"repo_name": "LemoProject/lemo2",
"path": "src/main/java/de/lemo/dms/db/mapping/ResourceMining.java",
"license": "gpl-3.0",
"size": 8461
} | [
"javax.persistence.Column"
] | import javax.persistence.Column; | import javax.persistence.*; | [
"javax.persistence"
] | javax.persistence; | 2,106,000 |
public Collection<ApplicationDescriptor> getApplications()
{
return applicationMap.values();
}
| Collection<ApplicationDescriptor> function() { return applicationMap.values(); } | /**
* Gets the applications.
*
* @return the applications
*/ | Gets the applications | getApplications | {
"repo_name": "xframium/xframium-java",
"path": "framework/src/org/xframium/application/ApplicationRegistry.java",
"license": "gpl-3.0",
"size": 4290
} | [
"java.util.Collection"
] | import java.util.Collection; | import java.util.*; | [
"java.util"
] | java.util; | 2,425,619 |
public void writeToUrlSpec(String urlSpec) throws IOException; | void function(String urlSpec) throws IOException; | /**
* Write DataAdaptor to the specified url
*/ | Write DataAdaptor to the specified url | writeToUrlSpec | {
"repo_name": "EuropeanSpallationSource/openxal",
"path": "core/src/main/java/xal/tools/data/FileDataAdaptor.java",
"license": "bsd-3-clause",
"size": 2435
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,591,420 |
void cleanOldLogs(String log, boolean inclusive, ReplicationSourceInterface source) {
String logPrefix = AbstractFSWALProvider.getWALPrefixFromWALName(log);
if (source.isRecovered()) {
NavigableSet<String> wals = walsByIdRecoveredQueues.get(source.getQueueId()).get(logPrefix);
if (wals != null) {
NavigableSet<String> walsToRemove = wals.headSet(log, inclusive);
if (walsToRemove.isEmpty()) {
return;
}
cleanOldLogs(walsToRemove, source);
walsToRemove.clear();
}
} else {
NavigableSet<String> wals;
NavigableSet<String> walsToRemove;
// synchronized on walsById to avoid race with preLogRoll
synchronized (this.walsById) {
wals = walsById.get(source.getQueueId()).get(logPrefix);
if (wals == null) {
return;
}
walsToRemove = wals.headSet(log, inclusive);
if (walsToRemove.isEmpty()) {
return;
}
walsToRemove = new TreeSet<>(walsToRemove);
}
// cleanOldLogs may spend some time, especially for sync replication where we may want to
// remove remote wals as the remote cluster may have already been down, so we do it outside
// the lock to avoid block preLogRoll
cleanOldLogs(walsToRemove, source);
// now let's remove the files in the set
synchronized (this.walsById) {
wals.removeAll(walsToRemove);
}
}
} | void cleanOldLogs(String log, boolean inclusive, ReplicationSourceInterface source) { String logPrefix = AbstractFSWALProvider.getWALPrefixFromWALName(log); if (source.isRecovered()) { NavigableSet<String> wals = walsByIdRecoveredQueues.get(source.getQueueId()).get(logPrefix); if (wals != null) { NavigableSet<String> walsToRemove = wals.headSet(log, inclusive); if (walsToRemove.isEmpty()) { return; } cleanOldLogs(walsToRemove, source); walsToRemove.clear(); } } else { NavigableSet<String> wals; NavigableSet<String> walsToRemove; synchronized (this.walsById) { wals = walsById.get(source.getQueueId()).get(logPrefix); if (wals == null) { return; } walsToRemove = wals.headSet(log, inclusive); if (walsToRemove.isEmpty()) { return; } walsToRemove = new TreeSet<>(walsToRemove); } cleanOldLogs(walsToRemove, source); synchronized (this.walsById) { wals.removeAll(walsToRemove); } } } | /**
* Cleans a log file and all older logs from replication queue. Called when we are sure that a log
* file is closed and has no more entries.
* @param log Path to the log
* @param inclusive whether we should also remove the given log file
* @param source the replication source
*/ | Cleans a log file and all older logs from replication queue. Called when we are sure that a log file is closed and has no more entries | cleanOldLogs | {
"repo_name": "mahak/hbase",
"path": "hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java",
"license": "apache-2.0",
"size": 45220
} | [
"java.util.NavigableSet",
"java.util.TreeSet",
"org.apache.hadoop.hbase.wal.AbstractFSWALProvider"
] | import java.util.NavigableSet; import java.util.TreeSet; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; | import java.util.*; import org.apache.hadoop.hbase.wal.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 438,623 |
public PortletRenderResult doRenderMarkup(IPortletWindowId portletWindowId, HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, PortletOutputHandler portletOutputHandler) throws IOException; | PortletRenderResult function(IPortletWindowId portletWindowId, HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, PortletOutputHandler portletOutputHandler) throws IOException; | /**
* Executes a render for the body of a portlet, handles all the request and response setup and teardown
*
* @param portletWindowId Portlet to target with the render
* @param httpServletRequest The portal's request
* @param httpServletResponse The portal's response (nothing will be written to the response)
* @param portletOutputHandler The output handler to write to
* @throws AuthorizationException if the requesting user lacks permission to invoke
* the portlet window (e.g. due to its having a forbidden portlet mode)
*/ | Executes a render for the body of a portlet, handles all the request and response setup and teardown | doRenderMarkup | {
"repo_name": "apetro/uPortal",
"path": "uportal-war/src/main/java/org/apereo/portal/portlet/rendering/IPortletRenderer.java",
"license": "apache-2.0",
"size": 8417
} | [
"java.io.IOException",
"javax.servlet.http.HttpServletRequest",
"javax.servlet.http.HttpServletResponse",
"org.apereo.portal.portlet.om.IPortletWindowId"
] | import java.io.IOException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apereo.portal.portlet.om.IPortletWindowId; | import java.io.*; import javax.servlet.http.*; import org.apereo.portal.portlet.om.*; | [
"java.io",
"javax.servlet",
"org.apereo.portal"
] | java.io; javax.servlet; org.apereo.portal; | 2,479,229 |
public int increasesCyclomaticComplexityBy(AbstractProduction production); | int function(AbstractProduction production); | /**
* This method is implemented in language packs for determining whether the
* cyclomatic complexity is to be increased or not.
*
* @param production
* is a {@link AbstractProduction} of the language to be checked.
* @return An integer is returned about how much the complexity is to be
* increased. 0 is to be returned in case no complexity is to be
* changed.
*/ | This method is implemented in language packs for determining whether the cyclomatic complexity is to be increased or not | increasesCyclomaticComplexityBy | {
"repo_name": "PureSolTechnologies/Purifinity",
"path": "analysis/api/evaluation.api/src/main/java/com/puresoltechnologies/purifinity/evaluation/api/LanguageDependedMcCabeMetric.java",
"license": "agpl-3.0",
"size": 1055
} | [
"com.puresoltechnologies.parsers.ust.AbstractProduction"
] | import com.puresoltechnologies.parsers.ust.AbstractProduction; | import com.puresoltechnologies.parsers.ust.*; | [
"com.puresoltechnologies.parsers"
] | com.puresoltechnologies.parsers; | 2,215,096 |
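A hedged sketch of what a language pack might return; the production-name accessor and the names checked are assumptions for illustration, not taken from a real Purifinity language pack:

    public int increasesCyclomaticComplexityBy(AbstractProduction production) {
        String name = production.getName();   // assumed accessor; the real API may differ
        switch (name) {
            case "ifStatement":
            case "whileStatement":
            case "forStatement":
            case "caseLabel":
                return 1;   // each adds one independent path
            default:
                return 0;   // no change to complexity
        }
    }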
public SubResource natGateway() {
return this.natGateway;
} | SubResource function() { return this.natGateway; } | /**
* Get nat gateway associated with this subnet.
*
* @return the natGateway value
*/ | Get nat gateway associated with this subnet | natGateway | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2020_05_01/src/main/java/com/microsoft/azure/management/network/v2020_05_01/implementation/SubnetInner.java",
"license": "mit",
"size": 14317
} | [
"com.microsoft.azure.SubResource"
] | import com.microsoft.azure.SubResource; | import com.microsoft.azure.*; | [
"com.microsoft.azure"
] | com.microsoft.azure; | 1,312,957 |
public void assertDate(String expected) {
assertEquals(expected, date);
} | void function(String expected) { assertEquals(expected, date); } | /**
* Assert email date
*
* @param expected
* The expected date
*/ | Assert email date | assertDate | {
"repo_name": "Tournia/tournia-site",
"path": "seleniumTest/src/com/isbtplanner/Mail.java",
"license": "gpl-3.0",
"size": 3238
} | [
"org.testng.AssertJUnit"
] | import org.testng.AssertJUnit; | import org.testng.*; | [
"org.testng"
] | org.testng; | 1,032,620 |
private ArrayList<String> scanDir(String path) {
ArrayList<String> result = new ArrayList<>();
File dir_path = new File(path);
File[] list = dir_path.listFiles();
if (list.length == 0) {
return result;
}
for (File file:list) {
if (file.isDirectory() == false || file.canRead() == false) {
continue;
}
result.add(file.getName());
}
return result;
} | ArrayList<String> function(String path) { ArrayList<String> result = new ArrayList<>(); File dir_path = new File(path); File[] list = dir_path.listFiles(); if (list.length == 0) { return result; } for (File file:list) { if (file.isDirectory() == false file.canRead() == false) { continue; } result.add(file.getName()); } return result; } | /**
 * Return the names of the readable directories contained in a given path
* @param path
* @return ArrayList<String>
 */ | Return the names of the readable directories contained in a given path | scanDir | {
"repo_name": "adrian-tilita/ezDuplicateFileFinder",
"path": "src/layout/helper/component/DirectoryDataRetrieval.java",
"license": "mit",
"size": 5357
} | [
"java.io.File",
"java.util.ArrayList"
] | import java.io.File; import java.util.ArrayList; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 2,047,673 |
// the key is the unit
private HashMap<String, ImportsMaps> importedTypes = new HashMap<String, ImportsMaps>(); // one per unit
public void addToImportsMaps(String type, String asName, String pkg, Object metaArgs) {
String unitKey = ParseUnit.mkPackageName(getCurrPath()) + "." + ParseUnit.mkUnitName(getCurrPath());
setDebugMode(false);
if (isDebugMode()) {
String dbgStr = "";
if (metaArgs instanceof ListNode) {
dbgStr = ", meta args ";
String comma = "";
@SuppressWarnings("unchecked")
ListNode<BaseNode> l = (ListNode<BaseNode>) metaArgs;
for (BaseNode b : l.getElems()) {
dbgStr += comma;
if (b.getType() != pollenParser.NIL)
dbgStr += b.getText() + "." + b.getChild(0).getText();
else
dbgStr += "<none, use default>";
comma = ", ";
}
}
System.out.println("addToImportsMaps(): in unit " + unitKey + " add 'import " + pkg + "." + type + " as " + asName + "'" + dbgStr);
}
if (!importedTypes.containsKey(unitKey))
importedTypes.put(unitKey, new ImportsMaps());
Tree t = (Tree) metaArgs;
importedTypes.get(unitKey).addTypeNamesToPackageMaps(type, asName, pkg, t);
}
| HashMap<String, ImportsMaps> importedTypes = new HashMap<String, ImportsMaps>(); public void function(String type, String asName, String pkg, Object metaArgs) { String unitKey = ParseUnit.mkPackageName(getCurrPath()) + "." + ParseUnit.mkUnitName(getCurrPath()); setDebugMode(false); if (isDebugMode()) { String dbgStr = ""; if (metaArgs instanceof ListNode) { dbgStr = STR; String comma = ""; @SuppressWarnings(STR) ListNode<BaseNode> l = (ListNode<BaseNode>) metaArgs; for (BaseNode b : l.getElems()) { dbgStr += comma; if (b.getType() != pollenParser.NIL) dbgStr += b.getText() + "." + b.getChild(0).getText(); else dbgStr += STR; comma = ", "; } } System.out.println(STR + unitKey + STR + pkg + "." + type + STR + asName + "'" + dbgStr); } if (!importedTypes.containsKey(unitKey)) importedTypes.put(unitKey, new ImportsMaps()); Tree t = (Tree) metaArgs; importedTypes.get(unitKey).addTypeNamesToPackageMaps(type, asName, pkg, t); } | /**
* Supports fixups to meta parameters, see nested class ImportsMaps.
* @param type
* @param asName
* @param pkg
*/ | Supports fixups to meta parameters, see nested class ImportsMaps | addToImportsMaps | {
"repo_name": "amaret/pollen",
"path": "translator/src/main/java/com/amaret/pollen/parser/ParseUnit.java",
"license": "gpl-2.0",
"size": 52373
} | [
"java.util.HashMap",
"org.antlr.runtime.tree.Tree"
] | import java.util.HashMap; import org.antlr.runtime.tree.Tree; | import java.util.*; import org.antlr.runtime.tree.*; | [
"java.util",
"org.antlr.runtime"
] | java.util; org.antlr.runtime; | 2,813,295 |
private void voidAllergy(Allergy allergy) {
allergy.setVoided(true);
allergy.setVoidedBy(Context.getAuthenticatedUser());
allergy.setDateVoided(new Date());
allergy.setVoidReason("Voided by API");
dao.saveAllergy(allergy);
}
| void function(Allergy allergy) { allergy.setVoided(true); allergy.setVoidedBy(Context.getAuthenticatedUser()); allergy.setDateVoided(new Date()); allergy.setVoidReason(STR); dao.saveAllergy(allergy); } | /**
* Voids a given allergy
*
* @param allergy the allergy to void
*/ | Voids a given allergy | voidAllergy | {
"repo_name": "vinayvenu/openmrs-core",
"path": "api/src/main/java/org/openmrs/api/impl/PatientServiceImpl.java",
"license": "mpl-2.0",
"size": 59211
} | [
"java.util.Date",
"org.openmrs.Allergy",
"org.openmrs.api.context.Context"
] | import java.util.Date; import org.openmrs.Allergy; import org.openmrs.api.context.Context; | import java.util.*; import org.openmrs.*; import org.openmrs.api.context.*; | [
"java.util",
"org.openmrs",
"org.openmrs.api"
] | java.util; org.openmrs; org.openmrs.api; | 440,526 |
@Test(expected=IllegalArgumentException.class)
public void testInvalidEvaluate() {
function.f(Vector.of(1.0, 2.0, 3.0));
} | @Test(expected=IllegalArgumentException.class) void function() { function.f(Vector.of(1.0, 2.0, 3.0)); } | /**
* Test argument with invalid dimension.
*/ | Test argument with invalid dimension | testInvalidEvaluate | {
"repo_name": "krharrison/cilib",
"path": "library/src/test/java/net/sourceforge/cilib/functions/continuous/unconstrained/MatyasTest.java",
"license": "gpl-3.0",
"size": 1203
} | [
"net.sourceforge.cilib.type.types.container.Vector",
"org.junit.Test"
] | import net.sourceforge.cilib.type.types.container.Vector; import org.junit.Test; | import net.sourceforge.cilib.type.types.container.*; import org.junit.*; | [
"net.sourceforge.cilib",
"org.junit"
] | net.sourceforge.cilib; org.junit; | 1,192,525 |
public static <C> Collection<C> unmodifiableCollection(final Collection<? extends C> collection) {
return UnmodifiableCollection.unmodifiableCollection(collection);
} | static <C> Collection<C> function(final Collection<? extends C> collection) { return UnmodifiableCollection.unmodifiableCollection(collection); } | /**
* Returns an unmodifiable collection backed by the given collection.
* <p>
* This method uses the implementation in the decorators subpackage.
*
* @param <C> the type of object the {@link Collection} contains
* @param collection the collection to make unmodifiable, must not be null
* @return an unmodifiable collection backed by the given collection
* @throws IllegalArgumentException if the collection is null
*/ | Returns an unmodifiable collection backed by the given collection. This method uses the implementation in the decorators subpackage | unmodifiableCollection | {
"repo_name": "gonmarques/commons-collections",
"path": "src/main/java/org/apache/commons/collections4/CollectionUtils.java",
"license": "apache-2.0",
"size": 92921
} | [
"java.util.Collection",
"org.apache.commons.collections4.collection.UnmodifiableCollection"
] | import java.util.Collection; import org.apache.commons.collections4.collection.UnmodifiableCollection; | import java.util.*; import org.apache.commons.collections4.collection.*; | [
"java.util",
"org.apache.commons"
] | java.util; org.apache.commons; | 2,546,407 |
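A short usage sketch of the decorator behaviour described above:

    import java.util.ArrayList;
    import java.util.Collection;
    import org.apache.commons.collections4.CollectionUtils;

    Collection<String> base = new ArrayList<>();
    base.add("a");
    Collection<String> readOnly = CollectionUtils.unmodifiableCollection(base);
    base.add("b");                        // writes to 'base' show through the decorator
    System.out.println(readOnly.size());  // 2
    // readOnly.add("c");                 // would throw UnsupportedOperationException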
@Nullable
protected final <V> V callConvert(Converter<V> externalConverter, Object value) {
ConverterTracker tracker = new ConverterTracker(this, value, _tracker);
return externalConverter.convert(tracker);
}
private static class ConverterTracker {
private ConverterTracker _parentContainer;
private Converter<?> _parentConverter;
private Set<Converter<?>> _converters;
private Object _value;
ConverterTracker(Converter<?> parentConverter, @Nullable Object value) {
this(parentConverter, value, null);
}
ConverterTracker(Converter<?> parentConverter, @Nullable Object value,
@Nullable ConverterTracker container) {
PreCon.notNull(parentConverter);
_parentContainer = container;
_value = value;
addConverter(parentConverter);
} | final <V> V function(Converter<V> externalConverter, Object value) { ConverterTracker tracker = new ConverterTracker(this, value, _tracker); return externalConverter.convert(tracker); } private static class ConverterTracker { private ConverterTracker _parentContainer; private Converter<?> _parentConverter; private Set<Converter<?>> _converters; private Object _value; ConverterTracker(Converter<?> parentConverter, @Nullable Object value) { this(parentConverter, value, null); } ConverterTracker(Converter<?> parentConverter, @Nullable Object value, @Nullable ConverterTracker container) { PreCon.notNull(parentConverter); _parentContainer = container; _value = value; addConverter(parentConverter); } | /**
* Invoke to convert a value using another converter.
*
* <p>Prevents infinite loops by tracking which converters have already been used.</p>
*
* @param externalConverter The external converter to use.
* @param value The value to convert.
*
* @param <V> The value type.
*
* @return The converted value or null if failed.
*/ | Invoke to convert a value using another converter. Prevents infinite loops by tracking which converters have already been used | callConvert | {
"repo_name": "JCThePants/NucleusFramework",
"path": "src/com/jcwhatever/nucleus/utils/converters/Converter.java",
"license": "mit",
"size": 5751
} | [
"com.jcwhatever.nucleus.utils.PreCon",
"java.util.Set",
"javax.annotation.Nullable"
] | import com.jcwhatever.nucleus.utils.PreCon; import java.util.Set; import javax.annotation.Nullable; | import com.jcwhatever.nucleus.utils.*; import java.util.*; import javax.annotation.*; | [
"com.jcwhatever.nucleus",
"java.util",
"javax.annotation"
] | com.jcwhatever.nucleus; java.util; javax.annotation; | 2,782,740 |
List<HiveObjectPrivilege> listPrincipalTableColumnGrants(
String principalName, PrincipalType principalType, String catName, String dbName,
String tableName, String columnName); | List<HiveObjectPrivilege> listPrincipalTableColumnGrants( String principalName, PrincipalType principalType, String catName, String dbName, String tableName, String columnName); | /**
* For a given principal name and type, list the Table Grants
* @param principalName principal name
* @param principalType type
* @param catName catalog name
* @param dbName database name
* @param tableName table name
* @param columnName column name
* @return list of privileges for that principal on the specified database.
*/ | For a given principal name and type, list the Table Grants | listPrincipalTableColumnGrants | {
"repo_name": "sankarh/hive",
"path": "standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java",
"license": "apache-2.0",
"size": 95486
} | [
"java.util.List",
"org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege",
"org.apache.hadoop.hive.metastore.api.PrincipalType"
] | import java.util.List; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.PrincipalType; | import java.util.*; import org.apache.hadoop.hive.metastore.api.*; | [
"java.util",
"org.apache.hadoop"
] | java.util; org.apache.hadoop; | 1,884,278 |
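A hedged sketch of calling the `listPrincipalTableColumnGrants` signature documented in the record above; the `RawStore` instance and the catalog/database/table/column names are placeholder assumptions.

```java
import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.PrincipalType;

public class ColumnGrantLookup {
    // Returns the column-level grants held by a user principal; all object
    // coordinates below are illustrative placeholders.
    public static List<HiveObjectPrivilege> grantsFor(RawStore rawStore, String user) {
        return rawStore.listPrincipalTableColumnGrants(
                user, PrincipalType.USER, "hive", "default", "orders", "amount");
    }
}
```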
@Override
public void flush() throws IOException {
super.flush();
this.branch.flush();
} | void function() throws IOException { super.flush(); this.branch.flush(); } | /**
* Flushes both streams.
* @throws IOException if an I/O error occurs
*/ | Flushes both streams | flush | {
"repo_name": "stereokrauts/stereoscope",
"path": "org.apache.commons.io/src/main/java/org/apache/commons/io/output/TeeOutputStream.java",
"license": "gpl-2.0",
"size": 3362
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 430,432 |
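A small sketch of the `TeeOutputStream.flush` behaviour described above, assuming Commons IO is available; both underlying streams are in-memory buffers here purely for illustration.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeFlushExample {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream primary = new ByteArrayOutputStream();
        ByteArrayOutputStream branch = new ByteArrayOutputStream();
        try (TeeOutputStream tee = new TeeOutputStream(primary, branch)) {
            tee.write("hello".getBytes(StandardCharsets.UTF_8));
            tee.flush(); // flushes the primary stream, then the branch
        }
        System.out.println(primary + " / " + branch);
    }
}
```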
@Test
public void getWorkerNoneEligibleAfterCache() {
List<BlockWorkerInfo> workerInfoList = new ArrayList<>();
workerInfoList.add(new BlockWorkerInfo(new WorkerNetAddress().setHost("worker1")
.setRpcPort(PORT).setDataPort(PORT).setWebPort(PORT), Constants.GB, 0));
RoundRobinPolicy policy = new RoundRobinPolicy(ConfigurationTestUtils.defaults());
GetWorkerOptions options = GetWorkerOptions.defaults().setBlockWorkerInfos(workerInfoList)
.setBlockInfo(new BlockInfo().setLength((long) Constants.MB));
assertNotNull(policy.getWorker(options));
options.setBlockWorkerInfos(new ArrayList<>());
assertNull(policy.getWorker(options));
} | void function() { List<BlockWorkerInfo> workerInfoList = new ArrayList<>(); workerInfoList.add(new BlockWorkerInfo(new WorkerNetAddress().setHost(STR) .setRpcPort(PORT).setDataPort(PORT).setWebPort(PORT), Constants.GB, 0)); RoundRobinPolicy policy = new RoundRobinPolicy(ConfigurationTestUtils.defaults()); GetWorkerOptions options = GetWorkerOptions.defaults().setBlockWorkerInfos(workerInfoList) .setBlockInfo(new BlockInfo().setLength((long) Constants.MB)); assertNotNull(policy.getWorker(options)); options.setBlockWorkerInfos(new ArrayList<>()); assertNull(policy.getWorker(options)); } | /**
* Tests that no workers are returned when subsequent calls to the policy have no eligible
* workers.
*/ | Tests that no workers are returned when subsequent calls to the policy have no eligible workers | getWorkerNoneEligibleAfterCache | {
"repo_name": "wwjiang007/alluxio",
"path": "core/client/fs/src/test/java/alluxio/client/block/policy/RoundRobinPolicyTest.java",
"license": "apache-2.0",
"size": 4099
} | [
"java.util.ArrayList",
"java.util.List",
"org.junit.Assert"
] | import java.util.ArrayList; import java.util.List; import org.junit.Assert; | import java.util.*; import org.junit.*; | [
"java.util",
"org.junit"
] | java.util; org.junit; | 2,148,952 |
public void testDoneFlag() throws Exception {
Configuration conf = new XConfiguration();
String appPath = getTestCaseFileUri("coordinator.xml");
String appXml = "<coordinator-app name=\"NAME\" frequency=\"${coord:days(1)}\" start=\"2009-02-01T01:00Z\" end="
+ "\"2009-02-01T02:00Z\" timezone=\"UTC\" "
+ "xmlns=\"uri:oozie:coordinator:0.1\"> <controls> <timeout>10</timeout> <concurrency>2</concurrency> "
+ "<execution>LIFO</execution> </controls> <datasets> "
+ "<dataset name=\"local_a\" frequency=\"${coord:days(1)}\" initial-instance=\"2009-02-01T01:00Z\" "
+ "timezone=\"UTC\"> <uri-template>" + getTestCaseFileUri("workflows/${YEAR}/${DAY}") + "</uri-template> "
+ "</dataset>"
+ "</datasets> <input-events> "
+ "<data-in name=\"A\" dataset=\"local_a\"> <instance>${coord:current(0)}</instance> </data-in> "
+ "</input-events> "
+ "<action> <workflow> <app-path>hdfs:///tmp/workflows2/</app-path> "
+ "<configuration> <property> <name>inputA</name> <value>${coord:dataIn('A')}</value> </property> "
+ "</configuration> </workflow> </action> </coordinator-app>";
writeToFile(appXml, appPath);
conf.set(OozieClient.COORDINATOR_APP_PATH, appPath);
conf.set(OozieClient.USER_NAME, getTestUser()); | void function() throws Exception { Configuration conf = new XConfiguration(); String appPath = getTestCaseFileUri(STR); String appXml = STRNAME\STR${coord:days(1)}\STR2009-02-01T01:00Z\STR + "\"2009-02-01T02:00Z\STRUTC\" " + STRuri:oozie:coordinator:0.1\STR + STR + STRlocal_a\STR${coord:days(1)}\STR2009-02-01T01:00Z\" " + STRUTC\STR + getTestCaseFileUri(STR) + STR + STR + STR + STRA\STRlocal_a\STR + STR + STR<configuration> <property> <name>inputA</name> <value>${coord:dataIn('A')}</value> </property> STR</configuration> </workflow> </action> </coordinator-app>"; writeToFile(appXml, appPath); conf.set(OozieClient.COORDINATOR_APP_PATH, appPath); conf.set(OozieClient.USER_NAME, getTestUser()); | /**
* Test Missing Dependencies with No Done Flag in Schema
*
* @throws Exception
*/ | Test Missing Dependencies with No Done Flag in Schema | testDoneFlag | {
"repo_name": "cbaenziger/oozie",
"path": "core/src/test/java/org/apache/oozie/TestCoordinatorEngine.java",
"license": "apache-2.0",
"size": 24560
} | [
"org.apache.hadoop.conf.Configuration",
"org.apache.oozie.client.OozieClient",
"org.apache.oozie.util.XConfiguration"
] | import org.apache.hadoop.conf.Configuration; import org.apache.oozie.client.OozieClient; import org.apache.oozie.util.XConfiguration; | import org.apache.hadoop.conf.*; import org.apache.oozie.client.*; import org.apache.oozie.util.*; | [
"org.apache.hadoop",
"org.apache.oozie"
] | org.apache.hadoop; org.apache.oozie; | 2,648,821 |
@Override
public String autocreateJSON(ContextualisedStorage root,
CSPRequestCredentials creds,
CSPRequestCache cache,
String filePath,
JSONObject jsonObject,
JSONObject restrictions) throws ExistException, UnimplementedException, UnderlyingStorageException {
try {
ReturnedURL url = null;
Document doc = null;
//used by userroles and permroles as they have complex urls
if(r.hasPrimaryField()){
//XXX test if works: need to delete first before create/update
// deleteJSON(root,creds,cache,filePath);
for(String section : r.getServicesRecordPathKeys()) {
doc=XmlJsonConversion.convertToXml(r,jsonObject,section,"POST");
String path = r.getServicesURL();
path = path.replace("*", getSubCsid(jsonObject,r.getPrimaryField()));
String restrictedPath = getRestrictedPath(path, restrictions, null);
deleteJSON(root,creds,cache,restrictedPath);
url = conn.getURL(RequestMethod.POST, restrictedPath, doc, creds, cache);
}
}
else{
String restrictedPath = getRestrictedPath(r.getServicesURL(), restrictions, null);
url = autoCreateSub(creds, cache, jsonObject, doc, restrictedPath, r); // REM - We need a way to send query params (restrictions) on POST and UPDATE
}
// create related sub records?
//I am developing this.. it might not work...
for(FieldSet fs : r.getAllSubRecords("POST")){
Record sr = fs.usesRecordId();
//sr.getID()
if(sr.isType("authority")){
//need to use code from configuredVocabStorage
}
else{
String savePath = url.getURL() + "/" + sr.getServicesURL();
if(fs instanceof Field){//get the fields form inline XXX untested - might not work...
JSONObject subdata = new JSONObject();
//loop thr jsonObject and find the fields I need
for(FieldSet subfs: sr.getAllFieldTopLevel("POST")){
String key = subfs.getID();
if(jsonObject.has(key)){
subdata.put(key, jsonObject.get(key));
}
}
subautocreateJSON(root,creds,cache,sr,subdata,savePath);
}
else if(fs instanceof Group){//JSONObject
if(jsonObject.has(fs.getID())){
Object subdata = jsonObject.get(fs.getID());
if(subdata instanceof JSONObject){
JSONObject subrecord = (JSONObject)subdata;
subautocreateJSON(root,creds,cache,sr,subrecord,savePath);
}
}
}
else{//JSONArray
if(jsonObject.has(fs.getID())){
Object subdata = jsonObject.get(fs.getID());
if(subdata instanceof JSONArray){
JSONArray subarray = (JSONArray)subdata;
for(int i=0;i<subarray.length();i++) {
JSONObject subrecord = subarray.getJSONObject(i);
subautocreateJSON(root,creds,cache,sr,subrecord,savePath);
}
}
}
}
}
}
return url.getURLTail();
} catch (ConnectionException e) {
String msg = e.getMessage();
if(e.getStatus() == 403){ //permissions error
msg += " permissions error";
}
throw new UnderlyingStorageException(msg,e.getStatus(), e.getUrl(),e);
} catch (UnderlyingStorageException e) {
throw e; // REM - CSPACE-5632: Need to catch and rethrow this exception type to prevent throwing an "UnimplementedException" exception below.
} catch (Exception e) {
throw new UnimplementedException("JSONException",e);
}
} | String function(ContextualisedStorage root, CSPRequestCredentials creds, CSPRequestCache cache, String filePath, JSONObject jsonObject, JSONObject restrictions) throws ExistException, UnimplementedException, UnderlyingStorageException { try { ReturnedURL url = null; Document doc = null; if(r.hasPrimaryField()){ for(String section : r.getServicesRecordPathKeys()) { doc=XmlJsonConversion.convertToXml(r,jsonObject,section,"POST"); String path = r.getServicesURL(); path = path.replace("*", getSubCsid(jsonObject,r.getPrimaryField())); String restrictedPath = getRestrictedPath(path, restrictions, null); deleteJSON(root,creds,cache,restrictedPath); url = conn.getURL(RequestMethod.POST, restrictedPath, doc, creds, cache); } } else{ String restrictedPath = getRestrictedPath(r.getServicesURL(), restrictions, null); url = autoCreateSub(creds, cache, jsonObject, doc, restrictedPath, r); } for(FieldSet fs : r.getAllSubRecords("POST")){ Record sr = fs.usesRecordId(); if(sr.isType(STR)){ } else{ String savePath = url.getURL() + "/" + sr.getServicesURL(); if(fs instanceof Field){ JSONObject subdata = new JSONObject(); for(FieldSet subfs: sr.getAllFieldTopLevel("POST")){ String key = subfs.getID(); if(jsonObject.has(key)){ subdata.put(key, jsonObject.get(key)); } } subautocreateJSON(root,creds,cache,sr,subdata,savePath); } else if(fs instanceof Group){ if(jsonObject.has(fs.getID())){ Object subdata = jsonObject.get(fs.getID()); if(subdata instanceof JSONObject){ JSONObject subrecord = (JSONObject)subdata; subautocreateJSON(root,creds,cache,sr,subrecord,savePath); } } } else{ if(jsonObject.has(fs.getID())){ Object subdata = jsonObject.get(fs.getID()); if(subdata instanceof JSONArray){ JSONArray subarray = (JSONArray)subdata; for(int i=0;i<subarray.length();i++) { JSONObject subrecord = subarray.getJSONObject(i); subautocreateJSON(root,creds,cache,sr,subrecord,savePath); } } } } } } return url.getURLTail(); } catch (ConnectionException e) { String msg = e.getMessage(); if(e.getStatus() == 403){ msg += STR; } throw new UnderlyingStorageException(msg,e.getStatus(), e.getUrl(),e); } catch (UnderlyingStorageException e) { throw e; } catch (Exception e) { throw new UnimplementedException(STR,e); } } | /**
* Convert the JSON from the UI Layer into XML for the Service layer while using the XML structure from cspace-config.xml
* Send the XML through to the Service Layer to store it in the database
* The Service Layer returns a url to the object we just stored.
* @param {ContextualisedStorage} root
* @param {CSPRequestCredentials} creds
* @param {CSPRequestCache} cache
* @param {String} filePath part of the path to the Service URL (containing the type of object)
* @param {JSONObject} jsonObject The JSON string coming in from the UI Layer, containing the object to be stored
* @return {String} csid The id of the object in the database
*/ | Convert the JSON from the UI Layer into XML for the Service layer while using the XML structure from cspace-config.xml Send the XML through to the Service Layer to store it in the database The Service Layer returns a url to the object we just stored | autocreateJSON | {
"repo_name": "cherryhill/collectionspace-application",
"path": "cspi-services/src/main/java/org/collectionspace/chain/csp/persistence/services/GenericStorage.java",
"license": "apache-2.0",
"size": 93300
} | [
"org.collectionspace.chain.csp.persistence.services.connection.ConnectionException",
"org.collectionspace.chain.csp.persistence.services.connection.RequestMethod",
"org.collectionspace.chain.csp.persistence.services.connection.ReturnedURL",
"org.collectionspace.chain.csp.schema.Field",
"org.collectionspace.chain.csp.schema.FieldSet",
"org.collectionspace.chain.csp.schema.Group",
"org.collectionspace.chain.csp.schema.Record",
"org.collectionspace.csp.api.core.CSPRequestCache",
"org.collectionspace.csp.api.core.CSPRequestCredentials",
"org.collectionspace.csp.api.persistence.ExistException",
"org.collectionspace.csp.api.persistence.UnderlyingStorageException",
"org.collectionspace.csp.api.persistence.UnimplementedException",
"org.collectionspace.csp.helper.persistence.ContextualisedStorage",
"org.dom4j.Document",
"org.json.JSONArray",
"org.json.JSONObject"
] | import org.collectionspace.chain.csp.persistence.services.connection.ConnectionException; import org.collectionspace.chain.csp.persistence.services.connection.RequestMethod; import org.collectionspace.chain.csp.persistence.services.connection.ReturnedURL; import org.collectionspace.chain.csp.schema.Field; import org.collectionspace.chain.csp.schema.FieldSet; import org.collectionspace.chain.csp.schema.Group; import org.collectionspace.chain.csp.schema.Record; import org.collectionspace.csp.api.core.CSPRequestCache; import org.collectionspace.csp.api.core.CSPRequestCredentials; import org.collectionspace.csp.api.persistence.ExistException; import org.collectionspace.csp.api.persistence.UnderlyingStorageException; import org.collectionspace.csp.api.persistence.UnimplementedException; import org.collectionspace.csp.helper.persistence.ContextualisedStorage; import org.dom4j.Document; import org.json.JSONArray; import org.json.JSONObject; | import org.collectionspace.chain.csp.persistence.services.connection.*; import org.collectionspace.chain.csp.schema.*; import org.collectionspace.csp.api.core.*; import org.collectionspace.csp.api.persistence.*; import org.collectionspace.csp.helper.persistence.*; import org.dom4j.*; import org.json.*; | [
"org.collectionspace.chain",
"org.collectionspace.csp",
"org.dom4j",
"org.json"
] | org.collectionspace.chain; org.collectionspace.csp; org.dom4j; org.json; | 1,472,961 |
BlockState getBlock(Vector3i position); | BlockState getBlock(Vector3i position); | /**
* Get a representation of the block at the given position.
*
* @param position The position
* @return The block
* @throws PositionOutOfBoundsException If the position is outside of the
* bounds of the volume
*/ | Get a representation of the block at the given position | getBlock | {
"repo_name": "caseif/SpongeAPI",
"path": "src/main/java/org/spongepowered/api/world/extent/BlockVolume.java",
"license": "mit",
"size": 5946
} | [
"com.flowpowered.math.vector.Vector3i",
"org.spongepowered.api.block.BlockState"
] | import com.flowpowered.math.vector.Vector3i; import org.spongepowered.api.block.BlockState; | import com.flowpowered.math.vector.*; import org.spongepowered.api.block.*; | [
"com.flowpowered.math",
"org.spongepowered.api"
] | com.flowpowered.math; org.spongepowered.api; | 694,958 |
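A usage sketch for `BlockVolume.getBlock` as documented above, assuming a SpongeAPI `World` (which is a `BlockVolume`) obtained elsewhere; the coordinates are illustrative, and an out-of-bounds position would raise `PositionOutOfBoundsException`.

```java
import com.flowpowered.math.vector.Vector3i;
import org.spongepowered.api.block.BlockState;
import org.spongepowered.api.world.World;

public class BlockReadExample {
    // Reads the block state at the given coordinates of an already-loaded world.
    public static BlockState blockAt(World world, int x, int y, int z) {
        Vector3i position = new Vector3i(x, y, z);
        return world.getBlock(position);
    }
}
```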
private ParquetTableMetadata_v3 getParquetTableMetadata(List<FileStatus> fileStatuses)
throws IOException {
ParquetTableMetadata_v3 tableMetadata = new ParquetTableMetadata_v3(SUPPORTED_VERSIONS.last().toString(),
DrillVersionInfo.getVersion());
List<ParquetFileMetadata_v3> fileMetadataList = getParquetFileMetadata_v3(tableMetadata, fileStatuses);
tableMetadata.files = fileMetadataList;
tableMetadata.directories = new ArrayList<String>();
return tableMetadata;
} | ParquetTableMetadata_v3 function(List<FileStatus> fileStatuses) throws IOException { ParquetTableMetadata_v3 tableMetadata = new ParquetTableMetadata_v3(SUPPORTED_VERSIONS.last().toString(), DrillVersionInfo.getVersion()); List<ParquetFileMetadata_v3> fileMetadataList = getParquetFileMetadata_v3(tableMetadata, fileStatuses); tableMetadata.files = fileMetadataList; tableMetadata.directories = new ArrayList<String>(); return tableMetadata; } | /**
* Get the parquet metadata for a list of parquet files
*
* @param fileStatuses List of file statuses
* @return parquet table metadata object
* @throws IOException if parquet file metadata can't be obtained
*/ | Get the parquet metadata for a list of parquet files | getParquetTableMetadata | {
"repo_name": "KulykRoman/drill",
"path": "exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java",
"license": "apache-2.0",
"size": 72268
} | [
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.apache.drill.common.util.DrillVersionInfo",
"org.apache.hadoop.fs.FileStatus"
] | import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.drill.common.util.DrillVersionInfo; import org.apache.hadoop.fs.FileStatus; | import java.io.*; import java.util.*; import org.apache.drill.common.util.*; import org.apache.hadoop.fs.*; | [
"java.io",
"java.util",
"org.apache.drill",
"org.apache.hadoop"
] | java.io; java.util; org.apache.drill; org.apache.hadoop; | 216,133 |
@SafeVarargs
public static final ParameterSet<String> newParameterSet(List<? extends ParameterData<String, ? extends Object>> parameters,
boolean generateHelpParam, String helpParamName, String... helpParamAliases) {
List<ParameterData<String, ? extends Object>> paramsCopy = new ArrayList<>(parameters);
ParameterSet<String> paramSet = null;
if(generateHelpParam == true && helpParamName != null) {
List<String> aliases = helpParamAliases != null ? Arrays.asList(helpParamAliases) : null;
StringBuilder sb = new StringBuilder("\t'" + helpParamName + "'" + orParamAliasesToString(",", aliases) +
" - displays this help message\n");
for(ParameterData<String, ?> param : parameters) {
sb.append("\t" + parameterInfo(param) + "\n");
}
sb.append("\n");
final String helpMsg = sb.toString();
paramSet = new ParameterSet<String>(parameters, true, helpParamName, helpMsg, helpParamAliases);
}
else {
paramSet = new ParameterSet<>(paramsCopy);
}
return paramSet;
} | static final ParameterSet<String> function(List<? extends ParameterData<String, ? extends Object>> parameters, boolean generateHelpParam, String helpParamName, String... helpParamAliases) { List<ParameterData<String, ? extends Object>> paramsCopy = new ArrayList<>(parameters); ParameterSet<String> paramSet = null; if(generateHelpParam == true && helpParamName != null) { List<String> aliases = helpParamAliases != null ? Arrays.asList(helpParamAliases) : null; StringBuilder sb = new StringBuilder("\t'" + helpParamName + "'" + orParamAliasesToString(",", aliases) + STR); for(ParameterData<String, ?> param : parameters) { sb.append("\t" + parameterInfo(param) + "\n"); } sb.append("\n"); final String helpMsg = sb.toString(); paramSet = new ParameterSet<String>(parameters, true, helpParamName, helpMsg, helpParamAliases); } else { paramSet = new ParameterSet<>(paramsCopy); } return paramSet; } | /** Create a new parameter set with the given parameters and generate a help parameter
* @param parameters the list of parameters
* @param generateHelpParam true to generate a help parameter
* @param helpParamName the name of the help parameter
* @param helpParamAliases aliases for the name of the help parameter
* @return a {@link ParameterSet} that contains {@code parameters} and a help parameter containing
* information about all of the parameters
*/ | Create a new parameter set with the given parameters and generate a help parameter | newParameterSet | {
"repo_name": "TeamworkGuy2/JParameter",
"path": "src/twg2/cli/ParameterSet.java",
"license": "mit",
"size": 12586
} | [
"java.util.ArrayList",
"java.util.Arrays",
"java.util.List"
] | import java.util.ArrayList; import java.util.Arrays; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,184,459 |
@Override
public void dispatchTo(final Object listener) {
final MouseMotionListener mouseMotionListener = (MouseMotionListener)listener;
switch (getID()) {
case PSwingMouseEvent.MOUSE_DRAGGED: {
mouseMotionListener.mouseDragged(this);
break;
}
case PSwingMouseEvent.MOUSE_MOVED: {
mouseMotionListener.mouseMoved(this);
break;
}
default: {
throw new RuntimeException("ZMouseMotionEvent with bad ID"); // NOI18N
}
}
} | void function(final Object listener) { final MouseMotionListener mouseMotionListener = (MouseMotionListener)listener; switch (getID()) { case PSwingMouseEvent.MOUSE_DRAGGED: { mouseMotionListener.mouseDragged(this); break; } case PSwingMouseEvent.MOUSE_MOVED: { mouseMotionListener.mouseMoved(this); break; } default: { throw new RuntimeException(STR); } } } | /**
 * Calls appropriate method on the listener based on this event's ID.
*
* @param listener DOCUMENT ME!
*
* @throws RuntimeException DOCUMENT ME!
 */ | Calls appropriate method on the listener based on this event's ID | dispatchTo | {
"repo_name": "cismet/cismap-commons",
"path": "src/main/java/pswing/PSwingMouseMotionEvent.java",
"license": "lgpl-3.0",
"size": 3035
} | [
"java.awt.event.MouseMotionListener"
] | import java.awt.event.MouseMotionListener; | import java.awt.event.*; | [
"java.awt"
] | java.awt; | 1,072,457 |
public FileChecksum getFileChecksum(Path f, final long length)
throws IOException {
return null;
} | FileChecksum function(Path f, final long length) throws IOException { return null; } | /**
* Get the checksum of a file, from the beginning of the file till the
* specific length.
* @param f The file path
* @param length The length of the file range for checksum calculation
* @return The file checksum.
*/ | Get the checksum of a file, from the beginning of the file till the specific length | getFileChecksum | {
"repo_name": "joyghosh/hadoop",
"path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java",
"license": "gpl-3.0",
"size": 116427
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 1,307,806 |
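A hedged sketch of requesting a range checksum through the public `FileSystem` API described above; the configuration, path, and length are assumptions, and a `null` return simply means the underlying filesystem does not expose checksums.

```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RangeChecksumExample {
    // Checksums the first "length" bytes of the file; may return null.
    public static FileChecksum checksumOfPrefix(Configuration conf, String file, long length)
            throws IOException {
        FileSystem fs = FileSystem.get(conf);
        return fs.getFileChecksum(new Path(file), length);
    }
}
```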
public void setActivityRecognitionClient(ActivityRecognitionClient client) {
mActivityRecognitionClient = client;
} | void function(ActivityRecognitionClient client) { mActivityRecognitionClient = client; } | /**
* Set the global activity recognition client
* @param client An ActivityRecognitionClient object
*/ | Set the global activity recognition client | setActivityRecognitionClient | {
"repo_name": "mrsf/IAU_PSS_1415",
"path": "app/src/main/java/pt/ipleiria/estg/meicm/iaupss/estgparking/activityrecognition/DetectionRemover.java",
"license": "gpl-2.0",
"size": 7935
} | [
"com.google.android.gms.location.ActivityRecognitionClient"
] | import com.google.android.gms.location.ActivityRecognitionClient; | import com.google.android.gms.location.*; | [
"com.google.android"
] | com.google.android; | 1,141,094 |
protected final void clearReadPending() {
if (isRegistered()) {
EventLoop eventLoop = eventLoop();
if (eventLoop.inEventLoop()) {
clearReadPending0();
} else {
eventLoop.execute(clearReadPendingRunnable);
}
} else {
// Best effort if we are not registered yet clear readPending. This happens during channel initialization.
// NB: We only set the boolean field instead of calling clearReadPending0(), because the SelectionKey is
// not set yet so it would produce an assertion failure.
readPending = false;
}
} | final void function() { if (isRegistered()) { EventLoop eventLoop = eventLoop(); if (eventLoop.inEventLoop()) { clearReadPending0(); } else { eventLoop.execute(clearReadPendingRunnable); } } else { readPending = false; } } | /**
* Set read pending to {@code false}.
*/ | Set read pending to false | clearReadPending | {
"repo_name": "johnou/netty",
"path": "transport/src/main/java/io/netty/channel/nio/AbstractNioChannel.java",
"license": "apache-2.0",
"size": 18647
} | [
"io.netty.channel.EventLoop"
] | import io.netty.channel.EventLoop; | import io.netty.channel.*; | [
"io.netty.channel"
] | io.netty.channel; | 187,449 |
ServiceResponse<TaskMsg> rs;
TaskMsg msg;
try {
this.validateRequest(rq);
this.setContext(rq.getContext());
msg = this.produceTask(rq.getRequestMessage());
if ((msg == null)) {
super.getLogger().warning("No response message defined.");
} else {
super.cleanServiceMessage(msg);
}
rs = new ServiceResponse<TaskMsg>(rq.getContext());
rs.setResponseMessage(msg);
return rs;
} catch (ProduceException e) {
super.getLogger().error(e);
throw e;
} catch (NabuccoException e) {
super.getLogger().error(e);
ProduceException wrappedException = new ProduceException(e);
throw wrappedException;
} catch (Exception e) {
super.getLogger().error(e);
throw new ProduceException("Error during service invocation.", e);
}
}
| ServiceResponse<TaskMsg> rs; TaskMsg msg; try { this.validateRequest(rq); this.setContext(rq.getContext()); msg = this.produceTask(rq.getRequestMessage()); if ((msg == null)) { super.getLogger().warning(STR); } else { super.cleanServiceMessage(msg); } rs = new ServiceResponse<TaskMsg>(rq.getContext()); rs.setResponseMessage(msg); return rs; } catch (ProduceException e) { super.getLogger().error(e); throw e; } catch (NabuccoException e) { super.getLogger().error(e); ProduceException wrappedException = new ProduceException(e); throw wrappedException; } catch (Exception e) { super.getLogger().error(e); throw new ProduceException(STR, e); } } | /**
* Invokes the service handler method.
*
* @param rq the ServiceRequest<TaskMsg>.
* @return the ServiceResponse<TaskMsg>.
* @throws ProduceException
*/ | Invokes the service handler method | invoke | {
"repo_name": "NABUCCO/org.nabucco.framework.workflow",
"path": "org.nabucco.framework.workflow.impl.service/src/main/gen/org/nabucco/framework/workflow/impl/service/datatype/produce/ProduceTaskServiceHandler.java",
"license": "epl-1.0",
"size": 3675
} | [
"org.nabucco.framework.base.facade.exception.NabuccoException",
"org.nabucco.framework.base.facade.exception.service.ProduceException",
"org.nabucco.framework.base.facade.message.ServiceResponse",
"org.nabucco.framework.workflow.facade.message.datatype.task.TaskMsg"
] | import org.nabucco.framework.base.facade.exception.NabuccoException; import org.nabucco.framework.base.facade.exception.service.ProduceException; import org.nabucco.framework.base.facade.message.ServiceResponse; import org.nabucco.framework.workflow.facade.message.datatype.task.TaskMsg; | import org.nabucco.framework.base.facade.exception.*; import org.nabucco.framework.base.facade.exception.service.*; import org.nabucco.framework.base.facade.message.*; import org.nabucco.framework.workflow.facade.message.datatype.task.*; | [
"org.nabucco.framework"
] | org.nabucco.framework; | 393,293 |
EReference getStartMainFuelCurve_StartupModel(); | EReference getStartMainFuelCurve_StartupModel(); | /**
* Returns the meta object for the reference '{@link CIM.IEC61970.Generation.Production.StartMainFuelCurve#getStartupModel <em>Startup Model</em>}'.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return the meta object for the reference '<em>Startup Model</em>'.
* @see CIM.IEC61970.Generation.Production.StartMainFuelCurve#getStartupModel()
* @see #getStartMainFuelCurve()
* @generated
*/ | Returns the meta object for the reference '<code>CIM.IEC61970.Generation.Production.StartMainFuelCurve#getStartupModel Startup Model</code>'. | getStartMainFuelCurve_StartupModel | {
"repo_name": "georghinkel/ttc2017smartGrids",
"path": "solutions/ModelJoin/src/main/java/CIM/IEC61970/Generation/Production/ProductionPackage.java",
"license": "mit",
"size": 499866
} | [
"org.eclipse.emf.ecore.EReference"
] | import org.eclipse.emf.ecore.EReference; | import org.eclipse.emf.ecore.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 806,738 |
@Override
public Response importOpenAPIDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
String additionalProperties, String inlineApiDefinition,
MessageContext messageContext) {
// validate 'additionalProperties' json
if (StringUtils.isBlank(additionalProperties)) {
RestApiUtil.handleBadRequest("'additionalProperties' is required and should not be null", log);
}
// Convert the 'additionalProperties' json into an APIDTO object
ObjectMapper objectMapper = new ObjectMapper();
APIDTO apiDTOFromProperties;
try {
apiDTOFromProperties = objectMapper.readValue(additionalProperties, APIDTO.class);
} catch (IOException e) {
throw RestApiUtil.buildBadRequestException("Error while parsing 'additionalProperties'", e);
}
// Import the API and Definition
try {
APIDTO createdApiDTO = importOpenAPIDefinition(fileInputStream, url, inlineApiDefinition,
apiDTOFromProperties, fileDetail, null);
if (createdApiDTO != null) {
// This URI used to set the location header of the POST response
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + apiDTOFromProperties.getProvider() + "-" +
apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
} | Response function(InputStream fileInputStream, Attachment fileDetail, String url, String additionalProperties, String inlineApiDefinition, MessageContext messageContext) { if (StringUtils.isBlank(additionalProperties)) { RestApiUtil.handleBadRequest(STR, log); } ObjectMapper objectMapper = new ObjectMapper(); APIDTO apiDTOFromProperties; try { apiDTOFromProperties = objectMapper.readValue(additionalProperties, APIDTO.class); } catch (IOException e) { throw RestApiUtil.buildBadRequestException(STR, e); } try { APIDTO createdApiDTO = importOpenAPIDefinition(fileInputStream, url, inlineApiDefinition, apiDTOFromProperties, fileDetail, null); if (createdApiDTO != null) { URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId()); return Response.created(createdApiUri).entity(createdApiDTO).build(); } } catch (URISyntaxException e) { String errorMessage = STR + apiDTOFromProperties.getProvider() + "-" + apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion(); RestApiUtil.handleInternalServerError(errorMessage, e, log); } return null; } | /**
* Importing an OpenAPI definition and create an API
*
* @param fileInputStream InputStream for the provided file
* @param fileDetail File meta-data
* @param url URL of the OpenAPI definition
* @param additionalProperties API object (json) including additional properties like name, version, context
* @param inlineApiDefinition Swagger API definition String
* @param messageContext CXF message context
* @return API Import using OpenAPI definition response
*/ | Importing an OpenAPI definition and create an API | importOpenAPIDefinition | {
"repo_name": "uvindra/carbon-apimgt",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ApisApiServiceImpl.java",
"license": "apache-2.0",
"size": 274405
} | [
"com.fasterxml.jackson.databind.ObjectMapper",
"java.io.IOException",
"java.io.InputStream",
"java.net.URISyntaxException",
"javax.ws.rs.core.Response",
"org.apache.commons.lang3.StringUtils",
"org.apache.cxf.jaxrs.ext.MessageContext",
"org.apache.cxf.jaxrs.ext.multipart.Attachment",
"org.wso2.carbon.apimgt.rest.api.common.RestApiConstants",
"org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil"
] | import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import javax.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; import org.apache.cxf.jaxrs.ext.MessageContext; import org.apache.cxf.jaxrs.ext.multipart.Attachment; import org.wso2.carbon.apimgt.rest.api.common.RestApiConstants; import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil; | import com.fasterxml.jackson.databind.*; import java.io.*; import java.net.*; import javax.ws.rs.core.*; import org.apache.commons.lang3.*; import org.apache.cxf.jaxrs.ext.*; import org.apache.cxf.jaxrs.ext.multipart.*; import org.wso2.carbon.apimgt.rest.api.common.*; import org.wso2.carbon.apimgt.rest.api.util.utils.*; | [
"com.fasterxml.jackson",
"java.io",
"java.net",
"javax.ws",
"org.apache.commons",
"org.apache.cxf",
"org.wso2.carbon"
] | com.fasterxml.jackson; java.io; java.net; javax.ws; org.apache.commons; org.apache.cxf; org.wso2.carbon; | 198,096 |
public static void msgShort(Context context, String msg) {
Toaster.msgShort(context, msg);
} | static void function(Context context, String msg) { Toaster.msgShort(context, msg); } | /**
* Description: Short toast message
* Notes: A proxy method to the Toaster class. This also takes care of using the Toaster's Singleton.
*/ | Description: Short toast message Notes: A proxy method to the Toaster class. This also takes care of using the Toaster's Singleton | msgShort | {
"repo_name": "5GSD/AIMSICDL",
"path": "AIMSICD/src/main/java/zz/aimsicd/lite/utils/Helpers.java",
"license": "gpl-3.0",
"size": 15540
} | [
"android.content.Context"
] | import android.content.Context; | import android.content.*; | [
"android.content"
] | android.content; | 1,871,311 |
public String list(){
return NavigationResults.LIST;
} | String function(){ return NavigationResults.LIST; } | /**
* Go back to beans list
* @return forward to LIST page
*/ | Go back to beans list | list | {
"repo_name": "autentia/TNTConcept",
"path": "tntconcept-web/src/main/java/com/autentia/tnt/bean/account/AccountBean.java",
"license": "gpl-3.0",
"size": 14682
} | [
"com.autentia.tnt.bean.NavigationResults"
] | import com.autentia.tnt.bean.NavigationResults; | import com.autentia.tnt.bean.*; | [
"com.autentia.tnt"
] | com.autentia.tnt; | 2,378,407 |
void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener); | void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener); | /**
* Clears the search contexts associated with specified scroll ids.
*/ | Clears the search contexts associated with specified scroll ids | clearScroll | {
"repo_name": "nomoa/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/client/Client.java",
"license": "apache-2.0",
"size": 17919
} | [
"org.elasticsearch.action.ActionListener",
"org.elasticsearch.action.search.ClearScrollRequest",
"org.elasticsearch.action.search.ClearScrollResponse"
] | import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; | import org.elasticsearch.action.*; import org.elasticsearch.action.search.*; | [
"org.elasticsearch.action"
] | org.elasticsearch.action; | 2,361,971 |
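A sketch of clearing a scroll context through the transport `Client` interface above, assuming an Elasticsearch 5.x-era client where `ActionListener.onFailure` takes an `Exception`; the scroll id would come from a previous search response.

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.client.Client;

public class ClearScrollExample {
    public static void clear(Client client, String scrollId) {
        ClearScrollRequest request = new ClearScrollRequest();
        request.addScrollId(scrollId);
        client.clearScroll(request, new ActionListener<ClearScrollResponse>() {
            @Override
            public void onResponse(ClearScrollResponse response) {
                // scroll context released
            }

            @Override
            public void onFailure(Exception e) {
                // log or retry as appropriate
            }
        });
    }
}
```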
public DataLoader<K, V> prime(K key, V value) {
Object cacheKey = getCacheKey(key);
if (!futureCache.containsKey(cacheKey)) {
futureCache.set(cacheKey, Future.succeededFuture(value));
}
return this;
} | DataLoader<K, V> function(K key, V value) { Object cacheKey = getCacheKey(key); if (!futureCache.containsKey(cacheKey)) { futureCache.set(cacheKey, Future.succeededFuture(value)); } return this; } | /**
* Primes the cache with the given key and value.
*
* @param key the key
* @param value the value
* @return the data loader for fluent coding
*/ | Primes the cache with the given key and value | prime | {
"repo_name": "engagingspaces/vertx-dataloader",
"path": "src/main/java/io/engagingspaces/vertx/dataloader/DataLoader.java",
"license": "apache-2.0",
"size": 9073
} | [
"io.vertx.core.Future"
] | import io.vertx.core.Future; | import io.vertx.core.*; | [
"io.vertx.core"
] | io.vertx.core; | 574,459 |
@Theory(nullsAccepted = false)
@edu.umd.cs.findbugs.annotations.SuppressWarnings("EC_NULL_ARG")
public final void equalsReturnFalseOnNull(Object x) {
Assert.assertThat(x.equals(null), CoreMatchers.is(false));
} | @Theory(nullsAccepted = false) @edu.umd.cs.findbugs.annotations.SuppressWarnings(STR) final void function(Object x) { Assert.assertThat(x.equals(null), CoreMatchers.is(false)); } | /**
* Check {@link Object#equals(Object)} always returns false on null: for any non-null reference
* value {@code x}, {@code x.equals(null)} should return false. Test to cover short cut exit for
* checking equals against null object.
*
* @param x primary object instance.
*/ | Check <code>Object#equals(Object)</code> always returns false on null: for any non-null reference value x, x.equals(null) should return false. Test to cover short cut exit for checking equals against null object | equalsReturnFalseOnNull | {
"repo_name": "gwynlavin/jactors-junit",
"path": "src/main/java/org/jactors/junit/theory/ObjectTheory.java",
"license": "mit",
"size": 11071
} | [
"org.hamcrest.CoreMatchers",
"org.junit.Assert",
"org.junit.experimental.theories.Theory"
] | import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.experimental.theories.Theory; | import org.hamcrest.*; import org.junit.*; import org.junit.experimental.theories.*; | [
"org.hamcrest",
"org.junit",
"org.junit.experimental"
] | org.hamcrest; org.junit; org.junit.experimental; | 1,399,611 |
public static List<String> linesOf(File file, Charset charset) {
return Files.linesOf(file, charset);
} | static List<String> function(File file, Charset charset) { return Files.linesOf(file, charset); } | /**
* Loads the text content of a file into a list of strings, each string corresponding to a line.
* The line endings are either \n, \r or \r\n.
*
* @param file the file.
* @param charset the character set to use.
* @return the content of the file.
* @throws NullPointerException if the given charset is {@code null}.
* @throws RuntimeIOException if an I/O exception occurs.
*/ | Loads the text content of a file into a list of strings, each string corresponding to a line. The line endings are either \n, \r or \r\n | linesOf | {
"repo_name": "mariuszs/assertj-core",
"path": "src/main/java/org/assertj/core/api/Assertions.java",
"license": "apache-2.0",
"size": 57618
} | [
"java.io.File",
"java.nio.charset.Charset",
"java.util.List",
"org.assertj.core.util.Files"
] | import java.io.File; import java.nio.charset.Charset; import java.util.List; import org.assertj.core.util.Files; | import java.io.*; import java.nio.charset.*; import java.util.*; import org.assertj.core.util.*; | [
"java.io",
"java.nio",
"java.util",
"org.assertj.core"
] | java.io; java.nio; java.util; org.assertj.core; | 298,528 |
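A short sketch of the `Assertions.linesOf(File, Charset)` helper documented above, assuming AssertJ core is on the test classpath; the file name is illustrative.

```java
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.assertj.core.api.Assertions;

public class LinesOfExample {
    public static void main(String[] args) {
        File file = new File("notes.txt"); // placeholder path
        List<String> lines = Assertions.linesOf(file, StandardCharsets.UTF_8);
        Assertions.assertThat(lines).isNotEmpty();
    }
}
```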
void setInvalid() {
updateLoader = null;
if (jar != null) {
try {
jar.close();
} catch (IOException ioe) {
}
jar = null;
}
isStream = false;
} | void setInvalid() { updateLoader = null; if (jar != null) { try { jar.close(); } catch (IOException ioe) { } jar = null; } isStream = false; } | /**
 * Set this loader to be invalid so that it will not
* resolve any classes or resources.
*
 */ | Set this loader to be invalid so that it will not resolve any classes or resources | setInvalid | {
"repo_name": "scnakandala/derby",
"path": "java/engine/org/apache/derby/impl/services/reflect/JarLoader.java",
"license": "apache-2.0",
"size": 15691
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,452,030 |
private void conn_ann_inn_success(CubeMessage msg)
{
// Validate the message
InetSocketAddress addr = (InetSocketAddress) validateMsg(msg, innStates);
if (null == addr)
return;
// Authenticate the source
INNState innState = innStates.get(addr);
if (!innState.ann.equals(msg.getSrc()))
{
reply(msg, Type.INVALID_MSG, msg.getType());
return;
}
// Close the client socket
quietClose(innState.chan);
// Inform the other ANNs that they can flush state
innState.state = Type.CONN_INN_GEN_CLEANUP;
while (!innState.able.equals(BigInteger.ZERO))
{
int link = innState.able.getLowestSetBit();
innState.able = innState.able.clearBit(link);
unicastSend(new CubeMessage(cubeState.addr, new CubeAddress(Integer.toString(link)), innState.state, addr));
}
// Clean up my own state
annStates.remove(addr);
innStates.remove(addr);
}
| void function(CubeMessage msg) { InetSocketAddress addr = (InetSocketAddress) validateMsg(msg, innStates); if (null == addr) return; INNState innState = innStates.get(addr); if (!innState.ann.equals(msg.getSrc())) { reply(msg, Type.INVALID_MSG, msg.getType()); return; } quietClose(innState.chan); innState.state = Type.CONN_INN_GEN_CLEANUP; while (!innState.able.equals(BigInteger.ZERO)) { int link = innState.able.getLowestSetBit(); innState.able = innState.able.clearBit(link); unicastSend(new CubeMessage(cubeState.addr, new CubeAddress(Integer.toString(link)), innState.state, addr)); } annStates.remove(addr); innStates.remove(addr); } | /**
* INN must respond to an indication of successful address negotiation from an ANN.
*
* Algorithm: Close the client SocketChannel
*/ | INN must respond to an indication of successful address negotiation from an ANN. Algorithm: Close the client SocketChannel | conn_ann_inn_success | {
"repo_name": "deblau/hyper",
"path": "Hyper/src/hyper/CubeProtocol.java",
"license": "gpl-2.0",
"size": 70120
} | [
"java.math.BigInteger",
"java.net.InetSocketAddress"
] | import java.math.BigInteger; import java.net.InetSocketAddress; | import java.math.*; import java.net.*; | [
"java.math",
"java.net"
] | java.math; java.net; | 1,824,773 |
private String expandParameterList(String query, String name, TypedValue typedList, Map namedParamsCopy) {
Collection vals = (Collection) typedList.getValue();
Type type = typedList.getType();
boolean isJpaPositionalParam = parameterMetadata.getNamedParameterDescriptor( name ).isJpaStyle();
String paramPrefix = isJpaPositionalParam ? "?" : ParserHelper.HQL_VARIABLE_PREFIX;
String placeholder =
new StringBuffer( paramPrefix.length() + name.length() )
.append( paramPrefix ).append( name )
.toString();
if ( query == null ) {
return query;
}
int loc = query.indexOf( placeholder );
if ( loc < 0 ) {
return query;
}
String beforePlaceholder = query.substring( 0, loc );
String afterPlaceholder = query.substring( loc + placeholder.length() );
// check if placeholder is already immediately enclosed in parentheses
// (ignoring whitespace)
boolean isEnclosedInParens =
StringHelper.getLastNonWhitespaceCharacter( beforePlaceholder ) == '(' &&
StringHelper.getFirstNonWhitespaceCharacter( afterPlaceholder ) == ')';
if ( vals.size() == 1 && isEnclosedInParens ) {
// short-circuit for performance when only 1 value and the
// placeholder is already enclosed in parentheses...
namedParamsCopy.put( name, new TypedValue( type, vals.iterator().next(), session.getEntityMode() ) );
return query;
}
StringBuffer list = new StringBuffer( 16 );
Iterator iter = vals.iterator();
int i = 0;
while ( iter.hasNext() ) {
String alias = ( isJpaPositionalParam ? 'x' + name : name ) + i++ + '_';
namedParamsCopy.put( alias, new TypedValue( type, iter.next(), session.getEntityMode() ) );
list.append( ParserHelper.HQL_VARIABLE_PREFIX ).append( alias );
if ( iter.hasNext() ) {
list.append( ", " );
}
}
return StringHelper.replace(
beforePlaceholder,
afterPlaceholder,
placeholder.toString(),
list.toString(),
true,
true
);
} | String function(String query, String name, TypedValue typedList, Map namedParamsCopy) { Collection vals = (Collection) typedList.getValue(); Type type = typedList.getType(); boolean isJpaPositionalParam = parameterMetadata.getNamedParameterDescriptor( name ).isJpaStyle(); String paramPrefix = isJpaPositionalParam ? "?" : ParserHelper.HQL_VARIABLE_PREFIX; String placeholder = new StringBuffer( paramPrefix.length() + name.length() ) .append( paramPrefix ).append( name ) .toString(); if ( query == null ) { return query; } int loc = query.indexOf( placeholder ); if ( loc < 0 ) { return query; } String beforePlaceholder = query.substring( 0, loc ); String afterPlaceholder = query.substring( loc + placeholder.length() ); boolean isEnclosedInParens = StringHelper.getLastNonWhitespaceCharacter( beforePlaceholder ) == '(' && StringHelper.getFirstNonWhitespaceCharacter( afterPlaceholder ) == ')'; if ( vals.size() == 1 && isEnclosedInParens ) { namedParamsCopy.put( name, new TypedValue( type, vals.iterator().next(), session.getEntityMode() ) ); return query; } StringBuffer list = new StringBuffer( 16 ); Iterator iter = vals.iterator(); int i = 0; while ( iter.hasNext() ) { String alias = ( isJpaPositionalParam ? 'x' + name : name ) + i++ + '_'; namedParamsCopy.put( alias, new TypedValue( type, iter.next(), session.getEntityMode() ) ); list.append( ParserHelper.HQL_VARIABLE_PREFIX ).append( alias ); if ( iter.hasNext() ) { list.append( STR ); } } return StringHelper.replace( beforePlaceholder, afterPlaceholder, placeholder.toString(), list.toString(), true, true ); } | /**
* Warning: adds new parameters to the argument by side-effect, as well as
* mutating the query string!
*/ | Warning: adds new parameters to the argument by side-effect, as well as mutating the query string | expandParameterList | {
"repo_name": "codeApeFromChina/resource",
"path": "frame_packages/java_libs/hibernate-distribution-3.6.10.Final/project/hibernate-core/src/main/java/org/hibernate/impl/AbstractQueryImpl.java",
"license": "unlicense",
"size": 27584
} | [
"java.util.Collection",
"java.util.Iterator",
"java.util.Map",
"org.hibernate.engine.TypedValue",
"org.hibernate.hql.classic.ParserHelper",
"org.hibernate.type.Type",
"org.hibernate.util.StringHelper"
] | import java.util.Collection; import java.util.Iterator; import java.util.Map; import org.hibernate.engine.TypedValue; import org.hibernate.hql.classic.ParserHelper; import org.hibernate.type.Type; import org.hibernate.util.StringHelper; | import java.util.*; import org.hibernate.engine.*; import org.hibernate.hql.classic.*; import org.hibernate.type.*; import org.hibernate.util.*; | [
"java.util",
"org.hibernate.engine",
"org.hibernate.hql",
"org.hibernate.type",
"org.hibernate.util"
] | java.util; org.hibernate.engine; org.hibernate.hql; org.hibernate.type; org.hibernate.util; | 2,534,374 |
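The record above covers Hibernate's internal parameter-list expansion; below is a hedged sketch of the public API it serves, assuming a Hibernate 3.x `Session` and an `Account` entity (both assumptions). The internal method rewrites the `:ids` placeholder into generated `:ids0_`, `:ids1_`, ... aliases.

```java
import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;

public class ParameterListExample {
    // "Account" is a placeholder entity name; setParameterList binds a collection
    // that the internal expansion turns into individual generated aliases.
    @SuppressWarnings("unchecked")
    public static List<Object> findByIds(Session session, List<Long> ids) {
        Query query = session.createQuery("from Account a where a.id in (:ids)");
        query.setParameterList("ids", ids);
        return query.list();
    }
}
```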
private boolean isReplicationFailover(Properties p) {
return isTrue(p, Attribute.REPLICATION_FAILOVER);
} | boolean function(Properties p) { return isTrue(p, Attribute.REPLICATION_FAILOVER); } | /**
* used to verify if the failover attribute has been set.
*
* @param p The attribute set.
* @return true if the failover attribute has been set.
* false otherwise.
*/ | used to verify if the failover attribute has been set | isReplicationFailover | {
"repo_name": "trejkaz/derby",
"path": "java/engine/org/apache/derby/impl/jdbc/EmbedConnection.java",
"license": "apache-2.0",
"size": 142130
} | [
"java.util.Properties",
"org.apache.derby.iapi.reference.Attribute"
] | import java.util.Properties; import org.apache.derby.iapi.reference.Attribute; | import java.util.*; import org.apache.derby.iapi.reference.*; | [
"java.util",
"org.apache.derby"
] | java.util; org.apache.derby; | 1,463,595 |
@VisibleForTesting
static AndroidPlatformTarget createFromDefaultDirectoryStructure(
String name,
AndroidDirectoryResolver androidDirectoryResolver,
String platformDirectoryPath,
Set<Path> additionalJarPaths,
Optional<Path> aaptOverride) {
Path androidSdkDir = androidDirectoryResolver.getSdkOrThrow();
if (!androidSdkDir.isAbsolute()) {
throw new HumanReadableException(
"Path to Android SDK must be absolute but was: %s.",
androidSdkDir);
}
Path platformDirectory = androidSdkDir.resolve(platformDirectoryPath);
Path androidJar = platformDirectory.resolve("android.jar");
// Add any libraries found in the optional directory under the Android SDK directory. These
// go at the head of the bootclasspath before any additional jars.
File optionalDirectory = platformDirectory.resolve("optional").toFile();
if (optionalDirectory.exists() &&
optionalDirectory.isDirectory()) {
String[] optionalDirList = optionalDirectory.list(new AddonFilter());
if (optionalDirList != null) {
Arrays.sort(optionalDirList);
ImmutableSet.Builder<Path> additionalJars = ImmutableSet.builder();
for (String file : optionalDirList) {
additionalJars.add(optionalDirectory.toPath().resolve(file));
}
additionalJars.addAll(additionalJarPaths);
additionalJarPaths = additionalJars.build();
}
}
LinkedList<Path> bootclasspathEntries = Lists.newLinkedList(additionalJarPaths);
// Make sure android.jar is at the front of the bootclasspath.
bootclasspathEntries.addFirst(androidJar);
// This is the directory under the Android SDK directory that contains the dx script, jack,
// jill, and binaries.
Path buildToolsDir = androidDirectoryResolver.getBuildToolsOrThrow();
// This is the directory under the Android SDK directory that contains the aapt, aidl, and
// zipalign binaries. Before Android SDK Build-tools 23.0.0_rc1, this was the same as
// buildToolsDir above.
Path buildToolsBinDir;
if (buildToolsDir.resolve("bin").toFile().exists()) {
// Android SDK Build-tools >= 23.0.0_rc1 have executables under a new bin directory.
buildToolsBinDir = buildToolsDir.resolve("bin");
} else {
// Android SDK Build-tools < 23.0.0_rc1 have executables under the build-tools directory.
buildToolsBinDir = buildToolsDir;
}
Path zipAlignExecutable = androidSdkDir.resolve("tools/zipalign").toAbsolutePath();
if (!zipAlignExecutable.toFile().exists()) {
// Android SDK Build-tools >= 19.1.0 have zipalign under the build-tools directory.
zipAlignExecutable =
androidSdkDir.resolve(buildToolsBinDir).resolve("zipalign").toAbsolutePath();
}
Path androidFrameworkIdlFile = platformDirectory.resolve("framework.aidl");
Path proguardJar = androidSdkDir.resolve("tools/proguard/lib/proguard.jar");
Path proguardConfig = androidSdkDir.resolve("tools/proguard/proguard-android.txt");
Path optimizedProguardConfig =
androidSdkDir.resolve("tools/proguard/proguard-android-optimize.txt");
return new AndroidPlatformTarget(
name,
androidJar.toAbsolutePath(),
bootclasspathEntries,
aaptOverride.or(androidSdkDir.resolve(buildToolsBinDir).resolve("aapt").toAbsolutePath()),
androidSdkDir.resolve("platform-tools/adb").toAbsolutePath(),
androidSdkDir.resolve(buildToolsBinDir).resolve("aidl").toAbsolutePath(),
zipAlignExecutable,
buildToolsDir.resolve(
Platform.detect() == Platform.WINDOWS ? "dx.bat" : "dx").toAbsolutePath(),
androidFrameworkIdlFile,
proguardJar,
proguardConfig,
optimizedProguardConfig,
androidDirectoryResolver);
}
private static class AndroidWithGoogleApisFactory implements Factory {
private static final String API_DIR_SUFFIX = "(?:-([0-9]+))*"; | static AndroidPlatformTarget createFromDefaultDirectoryStructure( String name, AndroidDirectoryResolver androidDirectoryResolver, String platformDirectoryPath, Set<Path> additionalJarPaths, Optional<Path> aaptOverride) { Path androidSdkDir = androidDirectoryResolver.getSdkOrThrow(); if (!androidSdkDir.isAbsolute()) { throw new HumanReadableException( STR, androidSdkDir); } Path platformDirectory = androidSdkDir.resolve(platformDirectoryPath); Path androidJar = platformDirectory.resolve(STR); File optionalDirectory = platformDirectory.resolve(STR).toFile(); if (optionalDirectory.exists() && optionalDirectory.isDirectory()) { String[] optionalDirList = optionalDirectory.list(new AddonFilter()); if (optionalDirList != null) { Arrays.sort(optionalDirList); ImmutableSet.Builder<Path> additionalJars = ImmutableSet.builder(); for (String file : optionalDirList) { additionalJars.add(optionalDirectory.toPath().resolve(file)); } additionalJars.addAll(additionalJarPaths); additionalJarPaths = additionalJars.build(); } } LinkedList<Path> bootclasspathEntries = Lists.newLinkedList(additionalJarPaths); bootclasspathEntries.addFirst(androidJar); Path buildToolsDir = androidDirectoryResolver.getBuildToolsOrThrow(); Path buildToolsBinDir; if (buildToolsDir.resolve("bin").toFile().exists()) { buildToolsBinDir = buildToolsDir.resolve("bin"); } else { buildToolsBinDir = buildToolsDir; } Path zipAlignExecutable = androidSdkDir.resolve(STR).toAbsolutePath(); if (!zipAlignExecutable.toFile().exists()) { zipAlignExecutable = androidSdkDir.resolve(buildToolsBinDir).resolve(STR).toAbsolutePath(); } Path androidFrameworkIdlFile = platformDirectory.resolve(STR); Path proguardJar = androidSdkDir.resolve(STR); Path proguardConfig = androidSdkDir.resolve(STR); Path optimizedProguardConfig = androidSdkDir.resolve(STR); return new AndroidPlatformTarget( name, androidJar.toAbsolutePath(), bootclasspathEntries, aaptOverride.or(androidSdkDir.resolve(buildToolsBinDir).resolve("aapt").toAbsolutePath()), androidSdkDir.resolve(STR).toAbsolutePath(), androidSdkDir.resolve(buildToolsBinDir).resolve("aidl").toAbsolutePath(), zipAlignExecutable, buildToolsDir.resolve( Platform.detect() == Platform.WINDOWS ? STR : "dx").toAbsolutePath(), androidFrameworkIdlFile, proguardJar, proguardConfig, optimizedProguardConfig, androidDirectoryResolver); } private static class AndroidWithGoogleApisFactory implements Factory { private static final String API_DIR_SUFFIX = STR; | /**
* Given the path to the Android SDK as well as the platform path within the Android SDK,
* find all the files needed to create the {@link AndroidPlatformTarget}, assuming that the
* organization of the Android SDK conforms to the ordinary directory structure.
*/ | Given the path to the Android SDK as well as the platform path within the Android SDK, find all the files needed to create the <code>AndroidPlatformTarget</code>, assuming that the organization of the Android SDK conforms to the ordinary directory structure | createFromDefaultDirectoryStructure | {
"repo_name": "raviagarwal7/buck",
"path": "src/com/facebook/buck/android/AndroidPlatformTarget.java",
"license": "apache-2.0",
"size": 14244
} | [
"com.facebook.buck.util.HumanReadableException",
"com.facebook.buck.util.environment.Platform",
"com.google.common.base.Optional",
"com.google.common.collect.ImmutableSet",
"com.google.common.collect.Lists",
"java.io.File",
"java.nio.file.Path",
"java.util.Arrays",
"java.util.LinkedList",
"java.util.Set"
] | import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.environment.Platform; import com.google.common.base.Optional; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.File; import java.nio.file.Path; import java.util.Arrays; import java.util.LinkedList; import java.util.Set; | import com.facebook.buck.util.*; import com.facebook.buck.util.environment.*; import com.google.common.base.*; import com.google.common.collect.*; import java.io.*; import java.nio.file.*; import java.util.*; | [
"com.facebook.buck",
"com.google.common",
"java.io",
"java.nio",
"java.util"
] | com.facebook.buck; com.google.common; java.io; java.nio; java.util; | 2,399,045 |