Dataset columns (name, type, and min-to-max string length or value range as shown in the viewer):

method: string, lengths 13 to 441k
clean_method: string, lengths 7 to 313k
doc: string, lengths 17 to 17.3k
comment: string, lengths 3 to 1.42k
method_name: string, lengths 1 to 273
extra: dict
imports: sequence
imports_info: string, lengths 19 to 34.8k
cluster_imports_info: string, lengths 15 to 3.66k
libraries: sequence
libraries_info: string, lengths 6 to 661
id: int64, values 0 to 2.92M
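For orientation, rows with this schema could be loaded and inspected along the following lines. This is a minimal sketch assuming the Hugging Face datasets library is used; the dataset ID "org/java-methods-with-docs" is a placeholder, not the real repository name, since the preview above does not identify it.

from datasets import load_dataset

# Hypothetical dataset ID; substitute the actual repository name.
ds = load_dataset("org/java-methods-with-docs", split="train")

# Each row carries the columns listed above.
row = ds[0]
print(row["method_name"])   # e.g. "innerModel"
print(row["comment"])       # short natural-language summary of the method
print(row["libraries"])     # top-level packages the method depends on

The sample rows below follow the column order of the schema: method, clean_method, doc, comment, method_name, extra, imports, imports_info, cluster_imports_info, libraries, libraries_info, id.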
MonitoredResourceInner innerModel();
MonitoredResourceInner innerModel();
/** * Gets the inner com.azure.resourcemanager.datadog.fluent.models.MonitoredResourceInner object. * * @return the inner object. */
Gets the inner com.azure.resourcemanager.datadog.fluent.models.MonitoredResourceInner object
innerModel
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/datadog/azure-resourcemanager-datadog/src/main/java/com/azure/resourcemanager/datadog/models/MonitoredResource.java", "license": "mit", "size": 1562 }
[ "com.azure.resourcemanager.datadog.fluent.models.MonitoredResourceInner" ]
import com.azure.resourcemanager.datadog.fluent.models.MonitoredResourceInner;
import com.azure.resourcemanager.datadog.fluent.models.*;
[ "com.azure.resourcemanager" ]
com.azure.resourcemanager;
2,461,486
private String getShortItemId(TreeItem item) { // show much of component id can we skip? (to minimize the length of // javascript being sent) final int skip = getMarkupId().length() + 1; // the length of id of // tree and '_'. return item.getMarkupId().substring(skip); } private final static ResourceReference JAVASCRIPT = new JavascriptResourceReference( AbstractTree.class, "res/tree.js");
String function(TreeItem item) { final int skip = getMarkupId().length() + 1; return item.getMarkupId().substring(skip); } private final static ResourceReference JAVASCRIPT = new JavascriptResourceReference( AbstractTree.class, STR);
/** * returns the short version of item id (just the number part). * * @param item * The tree item * @return The id */
returns the short version of item id (just the number part)
getShortItemId
{ "repo_name": "astubbs/wicket.get-portals2", "path": "wicket/src/main/java/org/apache/wicket/markup/html/tree/AbstractTree.java", "license": "apache-2.0", "size": 38944 }
[ "org.apache.wicket.ResourceReference", "org.apache.wicket.markup.html.resources.JavascriptResourceReference" ]
import org.apache.wicket.ResourceReference; import org.apache.wicket.markup.html.resources.JavascriptResourceReference;
import org.apache.wicket.*; import org.apache.wicket.markup.html.resources.*;
[ "org.apache.wicket" ]
org.apache.wicket;
1,988,195
public RestOperationResponseHeaderDefinition header(String name) { if (headers == null) { headers = new ArrayList<>(); } RestOperationResponseHeaderDefinition header = new RestOperationResponseHeaderDefinition(this); header.setName(name); headers.add(header); return header; }
RestOperationResponseHeaderDefinition function(String name) { if (headers == null) { headers = new ArrayList<>(); } RestOperationResponseHeaderDefinition header = new RestOperationResponseHeaderDefinition(this); header.setName(name); headers.add(header); return header; }
/** * Adds a response header */
Adds a response header
header
{ "repo_name": "nikhilvibhav/camel", "path": "core/camel-core-model/src/main/java/org/apache/camel/model/rest/RestOperationResponseMsgDefinition.java", "license": "apache-2.0", "size": 5133 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
772,764
void addApplicationAssociatedAPI(CompositeAPI api) throws APIMgtDAOException;
void addApplicationAssociatedAPI(CompositeAPI api) throws APIMgtDAOException;
/** * Create API that is associated with an Application. This is specifically required to support the creation of * Composite APIs which are always associated with a specific Application. * * @param api The {@link API} object to be added * @throws APIMgtDAOException if error occurs while accessing data layer * */
Create API that is associated with an Application. This is specifically required to support the creation of Composite APIs which are always associated with a specific Application
addApplicationAssociatedAPI
{ "repo_name": "sambaheerathan/carbon-apimgt", "path": "components/apimgt/org.wso2.carbon.apimgt.core/src/main/java/org/wso2/carbon/apimgt/core/dao/ApiDAO.java", "license": "apache-2.0", "size": 28726 }
[ "org.wso2.carbon.apimgt.core.exception.APIMgtDAOException", "org.wso2.carbon.apimgt.core.models.CompositeAPI" ]
import org.wso2.carbon.apimgt.core.exception.APIMgtDAOException; import org.wso2.carbon.apimgt.core.models.CompositeAPI;
import org.wso2.carbon.apimgt.core.exception.*; import org.wso2.carbon.apimgt.core.models.*;
[ "org.wso2.carbon" ]
org.wso2.carbon;
1,131,551
@Override public final void emit(final String line) { if (stats != null) { stats.add(line.substring(0, line.length() - 1)); // strip the '\n' } else { buf.append(line); } } } final class SerializerCollector extends StatsCollector { final boolean canonical; final List<IncomingDataPoint> dps; public SerializerCollector(final String prefix, final List<IncomingDataPoint> dps, final boolean canonical) { super(prefix); this.dps = dps; this.canonical = canonical; }
final void function(final String line) { if (stats != null) { stats.add(line.substring(0, line.length() - 1)); } else { buf.append(line); } } } final class SerializerCollector extends StatsCollector { final boolean canonical; final List<IncomingDataPoint> dps; public SerializerCollector(final String prefix, final List<IncomingDataPoint> dps, final boolean canonical) { super(prefix); this.dps = dps; this.canonical = canonical; }
/** * Called by the {@link #record} method after a source writes a statistic. */
Called by the <code>#record</code> method after a source writes a statistic
emit
{ "repo_name": "turn/opentsdb", "path": "src/tsd/StatsRpc.java", "license": "gpl-3.0", "size": 7509 }
[ "java.util.List", "net.opentsdb.core.IncomingDataPoint", "net.opentsdb.stats.StatsCollector" ]
import java.util.List; import net.opentsdb.core.IncomingDataPoint; import net.opentsdb.stats.StatsCollector;
import java.util.*; import net.opentsdb.core.*; import net.opentsdb.stats.*;
[ "java.util", "net.opentsdb.core", "net.opentsdb.stats" ]
java.util; net.opentsdb.core; net.opentsdb.stats;
2,482,277
protected void setDefaultRequestHeaders(HttpMethod httpMethod) { // Method left empty here, but allows subclasses to override }
void function(HttpMethod httpMethod) { }
/** * Set any default request headers to include * * @param httpMethod the HttpMethod used for the request */
Set any default request headers to include
setDefaultRequestHeaders
{ "repo_name": "llllewicki/jmeter-diff", "path": "src/protocol/http/org/apache/jmeter/protocol/http/sampler/HTTPHC3Impl.java", "license": "apache-2.0", "size": 52002 }
[ "org.apache.commons.httpclient.HttpMethod" ]
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.*;
[ "org.apache.commons" ]
org.apache.commons;
1,956,510
EClass getTImport();
EClass getTImport();
/** * Returns the meta object for class '{@link org.wso2.developerstudio.eclipse.humantask.model.ht.TImport <em>TImport</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>TImport</em>'. * @see org.wso2.developerstudio.eclipse.humantask.model.ht.TImport * @generated */
Returns the meta object for class '<code>org.wso2.developerstudio.eclipse.humantask.model.ht.TImport TImport</code>'.
getTImport
{ "repo_name": "chanakaudaya/developer-studio", "path": "humantask/org.wso2.tools.humantask.model/src/org/wso2/carbonstudio/eclipse/humantask/model/ht/HTPackage.java", "license": "apache-2.0", "size": 247810 }
[ "org.eclipse.emf.ecore.EClass" ]
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,563,433
public List<WSRPConsumer> findByCompanyId(long companyId) throws SystemException { return findByCompanyId(companyId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null); }
List<WSRPConsumer> function(long companyId) throws SystemException { return findByCompanyId(companyId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null); }
/** * Returns all the w s r p consumers where companyId = &#63;. * * @param companyId the company ID * @return the matching w s r p consumers * @throws SystemException if a system exception occurred */
Returns all the w s r p consumers where companyId = &#63;
findByCompanyId
{ "repo_name": "inbloom/datastore-portal", "path": "portlets/wsrp-portlet/docroot/WEB-INF/src/com/liferay/wsrp/service/persistence/WSRPConsumerPersistenceImpl.java", "license": "apache-2.0", "size": 59684 }
[ "com.liferay.portal.kernel.dao.orm.QueryUtil", "com.liferay.portal.kernel.exception.SystemException", "com.liferay.wsrp.model.WSRPConsumer", "java.util.List" ]
import com.liferay.portal.kernel.dao.orm.QueryUtil; import com.liferay.portal.kernel.exception.SystemException; import com.liferay.wsrp.model.WSRPConsumer; import java.util.List;
import com.liferay.portal.kernel.dao.orm.*; import com.liferay.portal.kernel.exception.*; import com.liferay.wsrp.model.*; import java.util.*;
[ "com.liferay.portal", "com.liferay.wsrp", "java.util" ]
com.liferay.portal; com.liferay.wsrp; java.util;
764,090
public File getOutputFile() { return m_OutputFile; }
File function() { return m_OutputFile; }
/** * Get the value of OutputFile. * * @return Value of OutputFile. */
Get the value of OutputFile
getOutputFile
{ "repo_name": "williamClanton/singularity", "path": "weka/src/main/java/weka/experiment/CrossValidationResultProducer.java", "license": "mit", "size": 26169 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
719,191
@NonNull public AboutBuilder setVersionNameAsAppSubTitle() { try { return setAppTitle(context.getString(com.vansuita.materialabout.R.string.version, getPackageInfo().versionName)); } catch (PackageManager.NameNotFoundException e) { return setAppTitle(R.string.error); } }
AboutBuilder function() { try { return setAppTitle(context.getString(com.vansuita.materialabout.R.string.version, getPackageInfo().versionName)); } catch (PackageManager.NameNotFoundException e) { return setAppTitle(R.string.error); } }
/** * Displays the app version below the app name * * @return the same {@link AboutBuilder} instance */
Displays the app version below the app name
setVersionNameAsAppSubTitle
{ "repo_name": "carvaldo/MaterialAbout", "path": "library/src/main/java/com/vansuita/materialabout/builder/AboutBuilder.java", "license": "mit", "size": 61220 }
[ "android.content.pm.PackageManager" ]
import android.content.pm.PackageManager;
import android.content.pm.*;
[ "android.content" ]
android.content;
1,679,209
private boolean parseArguments(String[] args) { Options options = makeOptions(); CommandLine cli; try { CommandLineParser parser = new GnuParser(); cli = parser.parse(options, args); } catch (ParseException e) { LOG.warn("options parsing failed: " + e.getMessage()); new HelpFormatter().printHelp("...", options); return false; } if (cli.hasOption("help")) { new HelpFormatter().printHelp("...", options); return false; } if (cli.getArgs().length > 0) { for (String arg : cli.getArgs()) { System.err.println("Unrecognized option: " + arg); new HelpFormatter().printHelp("...", options); return false; } } // MR noMR = cli.hasOption("nomr"); numNodeManagers = intArgument(cli, "nodemanagers", 1); rmPort = intArgument(cli, "rmport", 0); jhsPort = intArgument(cli, "jhsport", 0); fs = cli.getOptionValue("namenode"); // HDFS noDFS = cli.hasOption("nodfs"); numDataNodes = intArgument(cli, "datanodes", 1); nnPort = intArgument(cli, "nnport", 0); nnHttpPort = intArgument(cli, "nnhttpport", 0); dfsOpts = cli.hasOption("format") ? StartupOption.FORMAT : StartupOption.REGULAR; // Runner writeDetails = cli.getOptionValue("writeDetails"); writeConfig = cli.getOptionValue("writeConfig"); // General conf = new JobConf(); updateConfiguration(conf, cli.getOptionValues("D")); return true; }
boolean function(String[] args) { Options options = makeOptions(); CommandLine cli; try { CommandLineParser parser = new GnuParser(); cli = parser.parse(options, args); } catch (ParseException e) { LOG.warn(STR + e.getMessage()); new HelpFormatter().printHelp("...", options); return false; } if (cli.hasOption("help")) { new HelpFormatter().printHelp("...", options); return false; } if (cli.getArgs().length > 0) { for (String arg : cli.getArgs()) { System.err.println(STR + arg); new HelpFormatter().printHelp("...", options); return false; } } noMR = cli.hasOption("nomr"); numNodeManagers = intArgument(cli, STR, 1); rmPort = intArgument(cli, STR, 0); jhsPort = intArgument(cli, STR, 0); fs = cli.getOptionValue(STR); noDFS = cli.hasOption("nodfs"); numDataNodes = intArgument(cli, STR, 1); nnPort = intArgument(cli, STR, 0); nnHttpPort = intArgument(cli, STR, 0); dfsOpts = cli.hasOption(STR) ? StartupOption.FORMAT : StartupOption.REGULAR; writeDetails = cli.getOptionValue(STR); writeConfig = cli.getOptionValue(STR); conf = new JobConf(); updateConfiguration(conf, cli.getOptionValues("D")); return true; }
/** * Parses arguments and fills out the member variables. * * @param args * Command-line arguments. * @return true on successful parse; false to indicate that the program should * exit. */
Parses arguments and fills out the member variables
parseArguments
{ "repo_name": "legend-hua/hadoop", "path": "hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java", "license": "apache-2.0", "size": 10543 }
[ "org.apache.commons.cli.CommandLine", "org.apache.commons.cli.CommandLineParser", "org.apache.commons.cli.GnuParser", "org.apache.commons.cli.HelpFormatter", "org.apache.commons.cli.Options", "org.apache.commons.cli.ParseException", "org.apache.hadoop.hdfs.server.common.HdfsServerConstants", "org.apache.hadoop.mapred.JobConf" ]
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.mapred.JobConf;
import org.apache.commons.cli.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.mapred.*;
[ "org.apache.commons", "org.apache.hadoop" ]
org.apache.commons; org.apache.hadoop;
1,104,848
public void newPatient() { this.sectionFeatureTree = new HashMap<String, String>(); } /** * Sets global parameters that are used by various other functions. * * @param positive sets the global parameter {@link #positive} * @param negative same as the above positive parameter, but for {@link #negative}
void function() { this.sectionFeatureTree = new HashMap<String, String>(); } /** * Sets global parameters that are used by various other functions. * * @param positive sets the global parameter {@link #positive} * @param negative same as the above positive parameter, but for {@link #negative}
/** * Clears the {@link #sectionFeatureTree} and must be called before processing each patient, as each patient has a * different set of phenotypes. */
Clears the <code>#sectionFeatureTree</code> and must be called before processing each patient, as each patient has a different set of phenotypes
newPatient
{ "repo_name": "DeanWay/phenotips", "path": "components/export/api/src/main/java/org/phenotips/export/internal/ConversionHelpers.java", "license": "agpl-3.0", "size": 15209 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
2,689,625
public static Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>> getSideInputs( List<PCollectionView<?>> views, JavaSparkContext context, SparkPCollectionView pviews) { if (views == null) { return ImmutableMap.of(); } else { Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>> sideInputs = Maps.newHashMap(); for (PCollectionView<?> view : views) { SideInputBroadcast helper = pviews.getPCollectionView(view, context); WindowingStrategy<?, ?> windowingStrategy = view.getWindowingStrategyInternal(); sideInputs.put(view.getTagInternal(), KV.<WindowingStrategy<?, ?>, SideInputBroadcast<?>>of(windowingStrategy, helper)); } return sideInputs; } }
static Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>> function( List<PCollectionView<?>> views, JavaSparkContext context, SparkPCollectionView pviews) { if (views == null) { return ImmutableMap.of(); } else { Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, SideInputBroadcast<?>>> sideInputs = Maps.newHashMap(); for (PCollectionView<?> view : views) { SideInputBroadcast helper = pviews.getPCollectionView(view, context); WindowingStrategy<?, ?> windowingStrategy = view.getWindowingStrategyInternal(); sideInputs.put(view.getTagInternal(), KV.<WindowingStrategy<?, ?>, SideInputBroadcast<?>>of(windowingStrategy, helper)); } return sideInputs; } }
/** * Create SideInputs as Broadcast variables. * * @param views The {@link PCollectionView}s. * @param context The {@link JavaSparkContext}. * @param pviews The {@link SparkPCollectionView}. * @return a map of tagged {@link SideInputBroadcast}s and their {@link WindowingStrategy}. */
Create SideInputs as Broadcast variables
getSideInputs
{ "repo_name": "jasonkuster/incubator-beam", "path": "runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TranslationUtils.java", "license": "apache-2.0", "size": 9574 }
[ "com.google.common.collect.ImmutableMap", "com.google.common.collect.Maps", "java.util.List", "java.util.Map", "org.apache.beam.runners.spark.util.SideInputBroadcast", "org.apache.beam.sdk.util.WindowingStrategy", "org.apache.beam.sdk.values.PCollectionView", "org.apache.beam.sdk.values.TupleTag", "org.apache.spark.api.java.JavaSparkContext" ]
import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import java.util.List; import java.util.Map; import org.apache.beam.runners.spark.util.SideInputBroadcast; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.spark.api.java.JavaSparkContext;
import com.google.common.collect.*; import java.util.*; import org.apache.beam.runners.spark.util.*; import org.apache.beam.sdk.util.*; import org.apache.beam.sdk.values.*; import org.apache.spark.api.java.*;
[ "com.google.common", "java.util", "org.apache.beam", "org.apache.spark" ]
com.google.common; java.util; org.apache.beam; org.apache.spark;
125,379
void enterEnumConstantList(@NotNull Java8Parser.EnumConstantListContext ctx); void exitEnumConstantList(@NotNull Java8Parser.EnumConstantListContext ctx);
void enterEnumConstantList(@NotNull Java8Parser.EnumConstantListContext ctx); void exitEnumConstantList(@NotNull Java8Parser.EnumConstantListContext ctx);
/** * Exit a parse tree produced by {@link Java8Parser#enumConstantList}. * @param ctx the parse tree */
Exit a parse tree produced by <code>Java8Parser#enumConstantList</code>
exitEnumConstantList
{ "repo_name": "IsThisThePayneResidence/intellidots", "path": "src/main/java/ua/edu/hneu/ast/parsers/Java8Listener.java", "license": "gpl-3.0", "size": 95845 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
608,489
@Test public void afterTestMethod() throws IOException, AnalysisException, NoSuchMethodException { scanClasses(TestClass.class); assertThat(applyConcept("testng:AfterTestMethod").getStatus(), equalTo(SUCCESS)); store.beginTransaction(); List<Object> methods = query("match (m:AfterTestMethod:TestNG:Method) return m").getColumn("m"); assertThat(methods, hasItem(methodDescriptor(TestClass.class, "afterTest"))); store.commitTransaction(); }
void function() throws IOException, AnalysisException, NoSuchMethodException { scanClasses(TestClass.class); assertThat(applyConcept(STR).getStatus(), equalTo(SUCCESS)); store.beginTransaction(); List<Object> methods = query(STR).getColumn("m"); assertThat(methods, hasItem(methodDescriptor(TestClass.class, STR))); store.commitTransaction(); }
/** * Verifies the concept "testng:AfterTestMethod". * * @throws IOException * If the test fails. * @throws AnalysisException * If the test fails. * @throws NoSuchMethodException * If the test fails. * @throws AnalysisException * If the test fails. */
Verifies the concept "testng:AfterTestMethod"
afterTestMethod
{ "repo_name": "kontext-e/jqassistant", "path": "plugin/testng/src/test/java/com/buschmais/jqassistant/plugin/testng/test/TestNGIT.java", "license": "gpl-3.0", "size": 7464 }
[ "com.buschmais.jqassistant.core.analysis.api.AnalysisException", "com.buschmais.jqassistant.plugin.testng.test.set.test.TestClass", "java.io.IOException", "java.util.List", "org.hamcrest.CoreMatchers", "org.junit.Assert" ]
import com.buschmais.jqassistant.core.analysis.api.AnalysisException; import com.buschmais.jqassistant.plugin.testng.test.set.test.TestClass; import java.io.IOException; import java.util.List; import org.hamcrest.CoreMatchers; import org.junit.Assert;
import com.buschmais.jqassistant.core.analysis.api.*; import com.buschmais.jqassistant.plugin.testng.test.set.test.*; import java.io.*; import java.util.*; import org.hamcrest.*; import org.junit.*;
[ "com.buschmais.jqassistant", "java.io", "java.util", "org.hamcrest", "org.junit" ]
com.buschmais.jqassistant; java.io; java.util; org.hamcrest; org.junit;
379,628
public ValidatorResult validateUpload(HttpServletRequest request) { ValidatorResult msgs = new ValidatorResult(); FormFile file = (FormFile)get(REV_UPLOAD); //make sure there is a file if (file == null || file.getFileName() == null || file.getFileName().trim().length() == 0) { msgs.addError(new ValidatorError("error.config-not-specified")); } else if (file.getFileSize() == 0) { msgs.addError(new ValidatorError("error.config-empty", file.getFileName())); } //make sure they didn't send in something huge else if (file.getFileSize() > ConfigFile.getMaxFileSize()) { msgs.addError(new ValidatorError("error.configtoolarge", StringUtil.displayFileSize(ConfigFile.getMaxFileSize(), false))); } // It exists and isn't too big - is it text? else if (!isBinary()) { try { String content = new String(file.getFileData()); String startDelim = getString(REV_MACROSTART); String endDelim = getString(REV_MACROEND); msgs.append(ConfigurationValidation.validateContent( content, startDelim, endDelim)); } catch (Exception e) { msgs.addError(new ValidatorError("error.fatalupload", StringUtil.displayFileSize( ConfigFile.getMaxFileSize(), false))); } } return msgs; }
ValidatorResult function(HttpServletRequest request) { ValidatorResult msgs = new ValidatorResult(); FormFile file = (FormFile)get(REV_UPLOAD); if (file == null file.getFileName() == null file.getFileName().trim().length() == 0) { msgs.addError(new ValidatorError(STR)); } else if (file.getFileSize() == 0) { msgs.addError(new ValidatorError(STR, file.getFileName())); } else if (file.getFileSize() > ConfigFile.getMaxFileSize()) { msgs.addError(new ValidatorError(STR, StringUtil.displayFileSize(ConfigFile.getMaxFileSize(), false))); } else if (!isBinary()) { try { String content = new String(file.getFileData()); String startDelim = getString(REV_MACROSTART); String endDelim = getString(REV_MACROEND); msgs.append(ConfigurationValidation.validateContent( content, startDelim, endDelim)); } catch (Exception e) { msgs.addError(new ValidatorError(STR, StringUtil.displayFileSize( ConfigFile.getMaxFileSize(), false))); } } return msgs; }
/** * Validate a file-upload. This checks that: * <ul> * <li>The file exists * <li>The file isn't too large * <li>If the file is text, its contents are valid after macro-substitution * </ul> * @param request the incoming request * @return a ValidatorResult.. The list is empty if everything is OK */
Validate a file-upload. This checks that: The file exists The file isn't too large If the file is text, its contents are valid after macro-substitution
validateUpload
{ "repo_name": "lhellebr/spacewalk", "path": "java/code/src/com/redhat/rhn/frontend/action/configuration/ConfigFileForm.java", "license": "gpl-2.0", "size": 13450 }
[ "com.redhat.rhn.common.util.StringUtil", "com.redhat.rhn.common.validator.ValidatorError", "com.redhat.rhn.common.validator.ValidatorResult", "com.redhat.rhn.domain.config.ConfigFile", "com.redhat.rhn.manager.configuration.ConfigurationValidation", "javax.servlet.http.HttpServletRequest", "org.apache.struts.upload.FormFile" ]
import com.redhat.rhn.common.util.StringUtil; import com.redhat.rhn.common.validator.ValidatorError; import com.redhat.rhn.common.validator.ValidatorResult; import com.redhat.rhn.domain.config.ConfigFile; import com.redhat.rhn.manager.configuration.ConfigurationValidation; import javax.servlet.http.HttpServletRequest; import org.apache.struts.upload.FormFile;
import com.redhat.rhn.common.util.*; import com.redhat.rhn.common.validator.*; import com.redhat.rhn.domain.config.*; import com.redhat.rhn.manager.configuration.*; import javax.servlet.http.*; import org.apache.struts.upload.*;
[ "com.redhat.rhn", "javax.servlet", "org.apache.struts" ]
com.redhat.rhn; javax.servlet; org.apache.struts;
2,267,779
public final void open() throws CumulusStoreException { if (isOpen()) { _log.info(MessageCatalog._00049_STORE_ALREADY_OPEN); return; } try { ManagementRegistrar.registerStore( this, String.valueOf(storageLayout()), getDataAccessLayerFactory().getUnderlyingStorageInfo()); } catch (InstanceAlreadyExistsException exception) { _log.error(MessageCatalog._00111_MBEAN_ALREADY_REGISTERED, getId()); throw new CumulusStoreException(exception); } catch (final Exception exception) { _log.error(MessageCatalog._00109_UNABLE_TO_REGISTER_MBEAN, exception, getId()); throw new CumulusStoreException(exception); } _factory = getDataAccessLayerFactory(); _log.info(MessageCatalog._00114_UNDERLYING_STORAGE, _id, _factory.getUnderlyingStorageInfo()); _configurator.configure(_factory); _configurator.configure(this); try { _dictionary = Dictionaries.newDefaultDictionary(_configurator); _rdfIndexDAO = getRdfIndexDAO(); _rdfIndexDAO.initialiseRdfIndex(); } catch (final DataAccessLayerException exception) { _log.error(MessageCatalog._00093_DATA_ACCESS_LAYER_FAILURE, exception); throw new CumulusStoreException(exception); } try { _dictionary.initialise(_factory); _log.info(MessageCatalog._00090_DICTIONARY_INITIALISED, _dictionary.getClass().getName()); } catch (final InitialisationException exception) { _log.error(MessageCatalog._00092_DICTIONARY_INIT_FAILURE, exception); throw new CumulusStoreException(exception); } try { final CounterFactory counterFactory = getCounterFactory(); counterFactory.initialise(_factory); } catch (final InitialisationException exception) { _log.error(MessageCatalog._00095_COUNTER_FACTORY_INIT_FAILURE, exception); throw new CumulusStoreException(exception); } _selectEstimator = new HeuristicsBasedSelectivityEstimator(getCounterFactory()); _status = new StatusListener(); _changeListeners = new LinkedList<ITriplesChangesListener>(); _changeListeners.add(_selectEstimator); _changeListeners.add(_status); openInternal(); final int howManyWorkers = computeWorkersPoolSize(); final RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy(); _workers = new ThreadPoolExecutor( howManyWorkers, howManyWorkers, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<Runnable>(100), rejectedExecutionHandler); _log.info(MessageCatalog._00047_BATCH_BULK_LOAD_DATA_THREAD_POOL_SIZE, howManyWorkers); _isOpen = true; _log.info(MessageCatalog._00052_STORE_OPEN); }
final void function() throws CumulusStoreException { if (isOpen()) { _log.info(MessageCatalog._00049_STORE_ALREADY_OPEN); return; } try { ManagementRegistrar.registerStore( this, String.valueOf(storageLayout()), getDataAccessLayerFactory().getUnderlyingStorageInfo()); } catch (InstanceAlreadyExistsException exception) { _log.error(MessageCatalog._00111_MBEAN_ALREADY_REGISTERED, getId()); throw new CumulusStoreException(exception); } catch (final Exception exception) { _log.error(MessageCatalog._00109_UNABLE_TO_REGISTER_MBEAN, exception, getId()); throw new CumulusStoreException(exception); } _factory = getDataAccessLayerFactory(); _log.info(MessageCatalog._00114_UNDERLYING_STORAGE, _id, _factory.getUnderlyingStorageInfo()); _configurator.configure(_factory); _configurator.configure(this); try { _dictionary = Dictionaries.newDefaultDictionary(_configurator); _rdfIndexDAO = getRdfIndexDAO(); _rdfIndexDAO.initialiseRdfIndex(); } catch (final DataAccessLayerException exception) { _log.error(MessageCatalog._00093_DATA_ACCESS_LAYER_FAILURE, exception); throw new CumulusStoreException(exception); } try { _dictionary.initialise(_factory); _log.info(MessageCatalog._00090_DICTIONARY_INITIALISED, _dictionary.getClass().getName()); } catch (final InitialisationException exception) { _log.error(MessageCatalog._00092_DICTIONARY_INIT_FAILURE, exception); throw new CumulusStoreException(exception); } try { final CounterFactory counterFactory = getCounterFactory(); counterFactory.initialise(_factory); } catch (final InitialisationException exception) { _log.error(MessageCatalog._00095_COUNTER_FACTORY_INIT_FAILURE, exception); throw new CumulusStoreException(exception); } _selectEstimator = new HeuristicsBasedSelectivityEstimator(getCounterFactory()); _status = new StatusListener(); _changeListeners = new LinkedList<ITriplesChangesListener>(); _changeListeners.add(_selectEstimator); _changeListeners.add(_status); openInternal(); final int howManyWorkers = computeWorkersPoolSize(); final RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy(); _workers = new ThreadPoolExecutor( howManyWorkers, howManyWorkers, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<Runnable>(100), rejectedExecutionHandler); _log.info(MessageCatalog._00047_BATCH_BULK_LOAD_DATA_THREAD_POOL_SIZE, howManyWorkers); _isOpen = true; _log.info(MessageCatalog._00052_STORE_OPEN); }
/** * Open the store. * After completing this operation, the store instance is supposed to be available. * * @throws CumulusStoreException If the connection cannot be established. */
Open the store. After completing this operation, the store instance is supposed to be available
open
{ "repo_name": "agazzarini/cumulusrdf", "path": "cumulusrdf-core/src/main/java/edu/kit/aifb/cumulus/store/Store.java", "license": "apache-2.0", "size": 39510 }
[ "edu.kit.aifb.cumulus.framework.InitialisationException", "edu.kit.aifb.cumulus.framework.datasource.DataAccessLayerException", "edu.kit.aifb.cumulus.framework.events.ITriplesChangesListener", "edu.kit.aifb.cumulus.framework.mx.ManagementRegistrar", "edu.kit.aifb.cumulus.log.MessageCatalog", "edu.kit.aifb.cumulus.store.sel.HeuristicsBasedSelectivityEstimator", "java.util.LinkedList", "java.util.concurrent.ArrayBlockingQueue", "java.util.concurrent.RejectedExecutionHandler", "java.util.concurrent.ThreadPoolExecutor", "java.util.concurrent.TimeUnit", "javax.management.InstanceAlreadyExistsException" ]
import edu.kit.aifb.cumulus.framework.InitialisationException; import edu.kit.aifb.cumulus.framework.datasource.DataAccessLayerException; import edu.kit.aifb.cumulus.framework.events.ITriplesChangesListener; import edu.kit.aifb.cumulus.framework.mx.ManagementRegistrar; import edu.kit.aifb.cumulus.log.MessageCatalog; import edu.kit.aifb.cumulus.store.sel.HeuristicsBasedSelectivityEstimator; import java.util.LinkedList; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import javax.management.InstanceAlreadyExistsException;
import edu.kit.aifb.cumulus.framework.*; import edu.kit.aifb.cumulus.framework.datasource.*; import edu.kit.aifb.cumulus.framework.events.*; import edu.kit.aifb.cumulus.framework.mx.*; import edu.kit.aifb.cumulus.log.*; import edu.kit.aifb.cumulus.store.sel.*; import java.util.*; import java.util.concurrent.*; import javax.management.*;
[ "edu.kit.aifb", "java.util", "javax.management" ]
edu.kit.aifb; java.util; javax.management;
1,499,424
public static void setVersionCodename(String versionCodename) { ReflectionHelpers.setStaticField(Build.VERSION.class, "CODENAME", versionCodename); }
static void function(String versionCodename) { ReflectionHelpers.setStaticField(Build.VERSION.class, STR, versionCodename); }
/** * Sets the value of the {@link Build.VERSION#CODENAME} field. * * It will be reset for the next test. */
Sets the value of the <code>Build.VERSION#CODENAME</code> field. It will be reset for the next test
setVersionCodename
{ "repo_name": "jongerrish/robolectric", "path": "shadows/framework/src/main/java/org/robolectric/shadows/ShadowBuild.java", "license": "mit", "size": 4227 }
[ "android.os.Build", "org.robolectric.util.ReflectionHelpers" ]
import android.os.Build; import org.robolectric.util.ReflectionHelpers;
import android.os.*; import org.robolectric.util.*;
[ "android.os", "org.robolectric.util" ]
android.os; org.robolectric.util;
2,864,633
TableHandle beginDelete(Session session, TableHandle tableHandle);
TableHandle beginDelete(Session session, TableHandle tableHandle);
/** * Begin delete query */
Begin delete query
beginDelete
{ "repo_name": "RobinUS2/presto", "path": "presto-main/src/main/java/com/facebook/presto/metadata/Metadata.java", "license": "apache-2.0", "size": 10357 }
[ "com.facebook.presto.Session" ]
import com.facebook.presto.Session;
import com.facebook.presto.*;
[ "com.facebook.presto" ]
com.facebook.presto;
1,388,183
boolean hasNext() throws IOException;
boolean hasNext() throws IOException;
/** * Does another set of attributes exist in this reader? * * @return <code>true</code> if additional content exists for * AttributeReader */
Does another set of attributes exist in this reader
hasNext
{ "repo_name": "TerraMobile/TerraMobile", "path": "sldparser/src/main/geotools/data/AttributeReader.java", "license": "apache-2.0", "size": 2231 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,714,028
public int getLength(); public static final String ACCEPT = "Accept"; public static final String ACCEPT_CHARSET = "Accept-Charset"; public static final String ACCEPT_ENCODING = "Accept-Encoding"; public static final String ACCEPT_LANGUAGE = "Accept-Language"; public static final String ALLOW = "Allow"; public static final String AUTHORIZATION = "Authorization"; public static final String CACHE_CONTROL = "Cache-Control"; public static final String CONTENT_DISPOSITION = "Content-Disposition"; public static final String CONTENT_ENCODING = "Content-Encoding"; public static final String CONTENT_ID = "Content-ID"; public static final String CONTENT_LANGUAGE = "Content-Language"; public static final String CONTENT_LENGTH = "Content-Length"; public static final String CONTENT_LOCATION = "Content-Location"; public static final String CONTENT_TYPE = "Content-Type"; public static final String DATE = "Date"; public static final String ETAG = "ETag"; public static final String EXPIRES = "Expires"; public static final String HOST = "Host"; public static final String IF_MATCH = "If-Match"; public static final String IF_MODIFIED_SINCE = "If-Modified-Since"; public static final String IF_NONE_MATCH = "If-None-Match"; public static final String IF_UNMODIFIED_SINCE = "If-Unmodified-Since"; public static final String LAST_MODIFIED = "Last-Modified"; public static final String LOCATION = "Location"; public static final String LINK = "Link"; public static final String RETRY_AFTER = "Retry-After"; public static final String USER_AGENT = "User-Agent"; public static final String VARY = "Vary"; public static final String WWW_AUTHENTICATE = "WWW-Authenticate"; public static final String COOKIE = "Cookie"; public static final String SET_COOKIE = "Set-Cookie";
int function(); public static final String ACCEPT = STR; public static final String ACCEPT_CHARSET = STR; public static final String ACCEPT_ENCODING = STR; public static final String ACCEPT_LANGUAGE = STR; public static final String ALLOW = "Allow"; public static final String AUTHORIZATION = STR; public static final String CACHE_CONTROL = STR; public static final String CONTENT_DISPOSITION = STR; public static final String CONTENT_ENCODING = STR; public static final String CONTENT_ID = STR; public static final String CONTENT_LANGUAGE = STR; public static final String CONTENT_LENGTH = STR; public static final String CONTENT_LOCATION = STR; public static final String CONTENT_TYPE = STR; public static final String DATE = "Date"; public static final String ETAG = "ETag"; public static final String EXPIRES = STR; public static final String HOST = "Host"; public static final String IF_MATCH = STR; public static final String IF_MODIFIED_SINCE = STR; public static final String IF_NONE_MATCH = STR; public static final String IF_UNMODIFIED_SINCE = STR; public static final String LAST_MODIFIED = STR; public static final String LOCATION = STR; public static final String LINK = "Link"; public static final String RETRY_AFTER = STR; public static final String USER_AGENT = STR; public static final String VARY = "Vary"; public static final String WWW_AUTHENTICATE = STR; public static final String COOKIE = STR; public static final String SET_COOKIE = STR;
/** * Get Content-Length value. * * @return Content-Length as integer if present and valid number. In other * cases returns -1. * @since 2.0 */
Get Content-Length value
getLength
{ "repo_name": "raphaelning/resteasy-client-android", "path": "jaxrs/jaxrs-api/src/main/java/javax/ws/rs/core/HttpHeaders.java", "license": "apache-2.0", "size": 12633 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
803,418
// Add a listener for the Cancel button btnCancelButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent ae) { buttonSelected = CANCEL_OPTION; closeDialog(); } }); } }
btnCancelButton.addActionListener(new ActionListener() { void function(ActionEvent ae) { buttonSelected = CANCEL_OPTION; closeDialog(); } }); } }
/********************************************************** * Set the selected button to indicate Cancel and exit the * dialog *********************************************************/
Set the selected button to indicate Cancel and exit the dialog
actionPerformed
{ "repo_name": "CACTUS-Mission/TRAPSat", "path": "TRAPSat_cFS/cfs/cfe/tools/perfutils-java/src/CFSPerformanceMonitor/CPMDialogHandler.java", "license": "mit", "size": 57337 }
[ "java.awt.event.ActionEvent", "java.awt.event.ActionListener" ]
import java.awt.event.ActionEvent; import java.awt.event.ActionListener;
import java.awt.event.*;
[ "java.awt" ]
java.awt;
1,329,717
void setRuntimeEndpointRegistry(RuntimeEndpointRegistry runtimeEndpointRegistry);
void setRuntimeEndpointRegistry(RuntimeEndpointRegistry runtimeEndpointRegistry);
/** * Sets a custom {@link org.apache.camel.spi.RuntimeEndpointRegistry} to use. */
Sets a custom <code>org.apache.camel.spi.RuntimeEndpointRegistry</code> to use
setRuntimeEndpointRegistry
{ "repo_name": "neoramon/camel", "path": "camel-core/src/main/java/org/apache/camel/CamelContext.java", "license": "apache-2.0", "size": 70125 }
[ "org.apache.camel.spi.RuntimeEndpointRegistry" ]
import org.apache.camel.spi.RuntimeEndpointRegistry;
import org.apache.camel.spi.*;
[ "org.apache.camel" ]
org.apache.camel;
1,942,801
public void setConnection(TCPMasterConnection con) { m_Connection = con; m_IO = con.getModbusTransport(); }// setConnection
void function(TCPMasterConnection con) { m_Connection = con; m_IO = con.getModbusTransport(); }
/** * Sets the connection on which this <tt>ModbusTransaction</tt> * should be executed. * <p> * An implementation should be able to * handle open and closed connections.<br> * <p/> * * @param con a <tt>TCPMasterConnection</tt>. */
Sets the connection on which this ModbusTransaction should be executed. An implementation should be able to handle open and closed connections.
setConnection
{ "repo_name": "watou/openhab", "path": "bundles/binding/org.openhab.binding.modbus/src/main/java/net/wimpi/modbus/io/ModbusTCPTransaction.java", "license": "epl-1.0", "size": 9770 }
[ "net.wimpi.modbus.net.TCPMasterConnection" ]
import net.wimpi.modbus.net.TCPMasterConnection;
import net.wimpi.modbus.net.*;
[ "net.wimpi.modbus" ]
net.wimpi.modbus;
2,061,171
public RecommendationActionInner withExpirationTime(OffsetDateTime expirationTime) { this.expirationTime = expirationTime; return this; }
RecommendationActionInner function(OffsetDateTime expirationTime) { this.expirationTime = expirationTime; return this; }
/** * Set the expirationTime property: Recommendation action expiration time. * * @param expirationTime the expirationTime value to set. * @return the RecommendationActionInner object itself. */
Set the expirationTime property: Recommendation action expiration time
withExpirationTime
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/mariadb/azure-resourcemanager-mariadb/src/main/java/com/azure/resourcemanager/mariadb/fluent/models/RecommendationActionInner.java", "license": "mit", "size": 6547 }
[ "java.time.OffsetDateTime" ]
import java.time.OffsetDateTime;
import java.time.*;
[ "java.time" ]
java.time;
785,569
public void setCustomCalendar(Calendar customCalendar) { this.customCalendar = customCalendar; }
void function(Calendar customCalendar) { this.customCalendar = customCalendar; }
/** * Specifies a custom calendar to avoid specific range of date */
Specifies a custom calendar to avoid specific range of date
setCustomCalendar
{ "repo_name": "pax95/camel", "path": "components/camel-quartz/src/main/java/org/apache/camel/component/quartz/QuartzEndpoint.java", "license": "apache-2.0", "size": 22420 }
[ "org.quartz.Calendar" ]
import org.quartz.Calendar;
import org.quartz.*;
[ "org.quartz" ]
org.quartz;
727,602
protected void firePacketSendingListeners(Packet packet) { // Notify the listeners of the new sent packet for (ListenerWrapper listenerWrapper : sendListeners.values()) { listenerWrapper.notifyListener(packet); } }
void function(Packet packet) { for (ListenerWrapper listenerWrapper : sendListeners.values()) { listenerWrapper.notifyListener(packet); } }
/** * Process all packet listeners for sending packets. * * @param packet the packet to process. */
Process all packet listeners for sending packets
firePacketSendingListeners
{ "repo_name": "luchuangbin/test1", "path": "src/org/jivesoftware/smack/Connection.java", "license": "apache-2.0", "size": 35525 }
[ "org.jivesoftware.smack.packet.Packet" ]
import org.jivesoftware.smack.packet.Packet;
import org.jivesoftware.smack.packet.*;
[ "org.jivesoftware.smack" ]
org.jivesoftware.smack;
1,765,764
public Uuid mirrorPort() { return mirrorPort.orElse(null); }
Uuid function() { return mirrorPort.orElse(null); }
/** * Returns the mirror port. * * @return the uuid port if present, otherwise null */
Returns the mirror port
mirrorPort
{ "repo_name": "wuwenbin2/onos_bgp_evpn", "path": "protocols/ovsdb/api/src/main/java/org/onosproject/ovsdb/controller/OvsdbMirror.java", "license": "apache-2.0", "size": 11209 }
[ "org.onosproject.ovsdb.rfc.notation.Uuid" ]
import org.onosproject.ovsdb.rfc.notation.Uuid;
import org.onosproject.ovsdb.rfc.notation.*;
[ "org.onosproject.ovsdb" ]
org.onosproject.ovsdb;
513,348
public void reactivateTask(String jobId, String taskId, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException { TaskReactivateOptions options = new TaskReactivateOptions(); BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors); bhMgr.applyRequestBehaviors(options); this.parentBatchClient.protocolLayer().tasks().reactivate(jobId, taskId, options); }
void function(String jobId, String taskId, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException { TaskReactivateOptions options = new TaskReactivateOptions(); BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors); bhMgr.applyRequestBehaviors(options); this.parentBatchClient.protocolLayer().tasks().reactivate(jobId, taskId, options); }
/** * Reactivates a task, allowing it to run again even if its retry count has been * exhausted. * * @param jobId * The ID of the job containing the task. * @param taskId * The ID of the task. * @param additionalBehaviors * A collection of {@link BatchClientBehavior} instances that are * applied to the Batch service request. * @throws BatchErrorException * Exception thrown when an error response is received from the * Batch service. * @throws IOException * Exception thrown when there is an error in * serialization/deserialization of data sent to/received from the * Batch service. */
Reactivates a task, allowing it to run again even if its retry count has been exhausted
reactivateTask
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/batch/microsoft-azure-batch/src/main/java/com/microsoft/azure/batch/TaskOperations.java", "license": "mit", "size": 34364 }
[ "com.microsoft.azure.batch.protocol.models.BatchErrorException", "com.microsoft.azure.batch.protocol.models.TaskReactivateOptions", "java.io.IOException" ]
import com.microsoft.azure.batch.protocol.models.BatchErrorException; import com.microsoft.azure.batch.protocol.models.TaskReactivateOptions; import java.io.IOException;
import com.microsoft.azure.batch.protocol.models.*; import java.io.*;
[ "com.microsoft.azure", "java.io" ]
com.microsoft.azure; java.io;
341,962
void measureChildBeforeLayout(View child, int childIndex, int widthMeasureSpec, int totalWidth, int heightMeasureSpec, int totalHeight) { measureChildWithMargins(child, widthMeasureSpec, totalWidth, heightMeasureSpec, totalHeight); }
void measureChildBeforeLayout(View child, int childIndex, int widthMeasureSpec, int totalWidth, int heightMeasureSpec, int totalHeight) { measureChildWithMargins(child, widthMeasureSpec, totalWidth, heightMeasureSpec, totalHeight); }
/** * <p>Measure the child according to the parent's measure specs. This * method should be overriden by subclasses to force the sizing of * children. This method is called by {@link #measureVertical(int, int)} and * {@link #measureHorizontal(int, int)}.</p> * * @param child the child to measure * @param childIndex the index of the child in this view * @param widthMeasureSpec horizontal space requirements as imposed by the parent * @param totalWidth extra space that has been used up by the parent horizontally * @param heightMeasureSpec vertical space requirements as imposed by the parent * @param totalHeight extra space that has been used up by the parent vertically */
Measure the child according to the parent's measure specs. This method should be overriden by subclasses to force the sizing of children. This method is called by <code>#measureVertical(int, int)</code> and <code>#measureHorizontal(int, int)</code>
measureChildBeforeLayout
{ "repo_name": "lynnlyc/for-honeynet-reviewers", "path": "CallbackDroid/android-environment/src/base/core/java/android/widget/LinearLayout.java", "license": "gpl-3.0", "size": 59255 }
[ "android.view.View" ]
import android.view.View;
import android.view.*;
[ "android.view" ]
android.view;
204,088
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) public void reset() { mGestureInProgress = false; mCount = 0; for (int i = 0; i < MAX_POINTERS; i++) { mId[i] = Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH ? -1 : MotionEvent.INVALID_POINTER_ID; } }
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) void function() { mGestureInProgress = false; mCount = 0; for (int i = 0; i < MAX_POINTERS; i++) { mId[i] = Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH ? -1 : MotionEvent.INVALID_POINTER_ID; } }
/** * Resets the component to the initial state. */
Resets the component to the initial state
reset
{ "repo_name": "cjpx00008/ZoomableDraweeView-sample", "path": "app/src/main/java/com/facebook/samples/gestures/MultiPointerGestureDetector.java", "license": "apache-2.0", "size": 7496 }
[ "android.annotation.TargetApi", "android.os.Build", "android.view.MotionEvent" ]
import android.annotation.TargetApi; import android.os.Build; import android.view.MotionEvent;
import android.annotation.*; import android.os.*; import android.view.*;
[ "android.annotation", "android.os", "android.view" ]
android.annotation; android.os; android.view;
1,948,835
public static String join(String separator, long... array) { checkNotNull(separator); if (array.length == 0) { return ""; } // For pre-sizing a builder, just get the right order of magnitude StringBuilder builder = new StringBuilder(array.length * 10); builder.append(array[0]); for (int i = 1; i < array.length; i++) { builder.append(separator).append(array[i]); } return builder.toString(); }
static String function(String separator, long... array) { checkNotNull(separator); if (array.length == 0) { return ""; } StringBuilder builder = new StringBuilder(array.length * 10); builder.append(array[0]); for (int i = 1; i < array.length; i++) { builder.append(separator).append(array[i]); } return builder.toString(); }
/** * Returns a string containing the supplied {@code long} values separated by {@code separator}. * For example, {@code join("-", 1L, 2L, 3L)} returns the string {@code "1-2-3"}. * * @param separator the text that should appear between consecutive values in the resulting string * (but not at the start or end) * @param array an array of {@code long} values, possibly empty */
Returns a string containing the supplied long values separated by separator. For example, join("-", 1L, 2L, 3L) returns the string "1-2-3"
join
{ "repo_name": "google/guava", "path": "android/guava/src/com/google/common/primitives/Longs.java", "license": "apache-2.0", "size": 26976 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
2,142,043
List<Long> installRules(String switchId, InstallRulesAction installAction);
List<Long> installRules(String switchId, InstallRulesAction installAction);
/** * Install default rules on the switch. The flag (@code installAction) defines what to do about the default rules. * * @param switchId switch id * @param installAction defines what to do about the default rules * @return the list of cookies for installed rules */
Install default rules on the switch. The flag (@code installAction) defines what to do about the default rules
installRules
{ "repo_name": "carmine/open-kilda", "path": "services/src/northbound/src/main/java/org/openkilda/northbound/service/SwitchService.java", "license": "apache-2.0", "size": 3042 }
[ "java.util.List", "org.openkilda.messaging.command.switches.InstallRulesAction" ]
import java.util.List; import org.openkilda.messaging.command.switches.InstallRulesAction;
import java.util.*; import org.openkilda.messaging.command.switches.*;
[ "java.util", "org.openkilda.messaging" ]
java.util; org.openkilda.messaging;
1,040,188
SurfaceConfig transformSurfaceConfig(int imageFormat, Size size) { ConfigType configType = getConfigType(imageFormat); ConfigSize configSize = ConfigSize.NOT_SUPPORT; Size maxSize = fetchMaxSize(imageFormat); // Compare with surface size definition to determine the surface configuration size if (size.getWidth() * size.getHeight() <= mSurfaceSizeDefinition.getAnalysisSize().getWidth() * mSurfaceSizeDefinition.getAnalysisSize().getHeight()) { configSize = ConfigSize.ANALYSIS; } else if (size.getWidth() * size.getHeight() <= mSurfaceSizeDefinition.getPreviewSize().getWidth() * mSurfaceSizeDefinition.getPreviewSize().getHeight()) { configSize = ConfigSize.PREVIEW; } else if (size.getWidth() * size.getHeight() <= mSurfaceSizeDefinition.getRecordSize().getWidth() * mSurfaceSizeDefinition.getRecordSize().getHeight()) { configSize = ConfigSize.RECORD; } else if (size.getWidth() * size.getHeight() <= maxSize.getWidth() * maxSize.getHeight()) { configSize = ConfigSize.MAXIMUM; } return SurfaceConfig.create(configType, configSize); }
SurfaceConfig transformSurfaceConfig(int imageFormat, Size size) { ConfigType configType = getConfigType(imageFormat); ConfigSize configSize = ConfigSize.NOT_SUPPORT; Size maxSize = fetchMaxSize(imageFormat); if (size.getWidth() * size.getHeight() <= mSurfaceSizeDefinition.getAnalysisSize().getWidth() * mSurfaceSizeDefinition.getAnalysisSize().getHeight()) { configSize = ConfigSize.ANALYSIS; } else if (size.getWidth() * size.getHeight() <= mSurfaceSizeDefinition.getPreviewSize().getWidth() * mSurfaceSizeDefinition.getPreviewSize().getHeight()) { configSize = ConfigSize.PREVIEW; } else if (size.getWidth() * size.getHeight() <= mSurfaceSizeDefinition.getRecordSize().getWidth() * mSurfaceSizeDefinition.getRecordSize().getHeight()) { configSize = ConfigSize.RECORD; } else if (size.getWidth() * size.getHeight() <= maxSize.getWidth() * maxSize.getHeight()) { configSize = ConfigSize.MAXIMUM; } return SurfaceConfig.create(configType, configSize); }
/** * Transform to a SurfaceConfig object with image format and size info * * @param imageFormat the image format info for the surface configuration object * @param size the size info for the surface configuration object * @return new {@link SurfaceConfig} object */
Transform to a SurfaceConfig object with image format and size info
transformSurfaceConfig
{ "repo_name": "AndroidX/androidx", "path": "camera/camera-camera2/src/main/java/androidx/camera/camera2/internal/SupportedSurfaceCombination.java", "license": "apache-2.0", "size": 63668 }
[ "android.util.Size", "androidx.camera.core.impl.SurfaceConfig" ]
import android.util.Size; import androidx.camera.core.impl.SurfaceConfig;
import android.util.*; import androidx.camera.core.impl.*;
[ "android.util", "androidx.camera" ]
android.util; androidx.camera;
1,010,463
void enterImageAlt(@NotNull MarkdownParser.ImageAltContext ctx); void exitImageAlt(@NotNull MarkdownParser.ImageAltContext ctx);
void enterImageAlt(@NotNull MarkdownParser.ImageAltContext ctx); void exitImageAlt(@NotNull MarkdownParser.ImageAltContext ctx);
/** * Exit a parse tree produced by {@link MarkdownParser#imageAlt}. * @param ctx the parse tree */
Exit a parse tree produced by <code>MarkdownParser#imageAlt</code>
exitImageAlt
{ "repo_name": "mar9000/antmark", "path": "src/org/mar9000/antmark/grammar/MarkdownParserListener.java", "license": "gpl-3.0", "size": 28587 }
[ "org.antlr.v4.runtime.misc.NotNull" ]
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.misc.*;
[ "org.antlr.v4" ]
org.antlr.v4;
1,322,670
public static ItemUsage calculateUsage(final IndexNameExpressionResolver indexNameExpressionResolver, final ClusterState state, final String policyName) { final List<String> indices = StreamSupport.stream(state.metadata().indices().values().spliterator(), false) .map(cursor -> cursor.value) .filter(indexMetadata -> policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(indexMetadata.getSettings()))) .map(indexMetadata -> indexMetadata.getIndex().getName()) .collect(Collectors.toList()); final List<String> allDataStreams = indexNameExpressionResolver.dataStreamNames(state, IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); final List<String> dataStreams = allDataStreams.stream() .filter(dsName -> { String indexTemplate = MetadataIndexTemplateService.findV2Template(state.metadata(), dsName, false); if (indexTemplate != null) { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), indexTemplate); return policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(settings)); } else { return false; } }) .collect(Collectors.toList()); final List<String> composableTemplates = state.metadata().templatesV2().keySet().stream() .filter(templateName -> { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), templateName); return policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(settings)); }) .collect(Collectors.toList()); return new ItemUsage(indices, dataStreams, composableTemplates); }
static ItemUsage function(final IndexNameExpressionResolver indexNameExpressionResolver, final ClusterState state, final String policyName) { final List<String> indices = StreamSupport.stream(state.metadata().indices().values().spliterator(), false) .map(cursor -> cursor.value) .filter(indexMetadata -> policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(indexMetadata.getSettings()))) .map(indexMetadata -> indexMetadata.getIndex().getName()) .collect(Collectors.toList()); final List<String> allDataStreams = indexNameExpressionResolver.dataStreamNames(state, IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); final List<String> dataStreams = allDataStreams.stream() .filter(dsName -> { String indexTemplate = MetadataIndexTemplateService.findV2Template(state.metadata(), dsName, false); if (indexTemplate != null) { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), indexTemplate); return policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(settings)); } else { return false; } }) .collect(Collectors.toList()); final List<String> composableTemplates = state.metadata().templatesV2().keySet().stream() .filter(templateName -> { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), templateName); return policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(settings)); }) .collect(Collectors.toList()); return new ItemUsage(indices, dataStreams, composableTemplates); }
/** * Given a cluster state and ILM policy, calculate the {@link ItemUsage} of * the policy (what indices, data streams, and templates use the policy) */
Given a cluster state and ILM policy, calculate the <code>ItemUsage</code> of the policy (what indices, data streams, and templates use the policy)
calculateUsage
{ "repo_name": "ern/elasticsearch", "path": "x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java", "license": "apache-2.0", "size": 5467 }
[ "java.util.List", "java.util.stream.Collectors", "java.util.stream.StreamSupport", "org.elasticsearch.action.support.IndicesOptions", "org.elasticsearch.cluster.ClusterState", "org.elasticsearch.cluster.metadata.IndexNameExpressionResolver", "org.elasticsearch.cluster.metadata.ItemUsage", "org.elasticsearch.cluster.metadata.MetadataIndexTemplateService", "org.elasticsearch.common.settings.Settings" ]
import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.ItemUsage; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.common.settings.Settings;
import java.util.*; import java.util.stream.*; import org.elasticsearch.action.support.*; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.common.settings.*;
[ "java.util", "org.elasticsearch.action", "org.elasticsearch.cluster", "org.elasticsearch.common" ]
java.util; org.elasticsearch.action; org.elasticsearch.cluster; org.elasticsearch.common;
1,856,818
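A minimal usage sketch for the calculateUsage record above, assuming the caller already holds a ClusterState and an IndexNameExpressionResolver from the node context; the wrapper class name is illustrative and only the documented static method is invoked.

import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.ItemUsage;
import org.elasticsearch.xpack.core.ilm.LifecyclePolicyUtils;

public class PolicyUsageSketch {
    // Reports which indices, data streams, and composable templates reference the given ILM policy.
    public static ItemUsage usageOf(IndexNameExpressionResolver resolver, ClusterState state, String policyName) {
        return LifecyclePolicyUtils.calculateUsage(resolver, state, policyName);
    }
}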
public void noMarketDataBuilderAvailable() { TestIdB idB1 = new TestIdB("1"); TestIdB idB2 = new TestIdB("2"); TestIdC idC1 = new TestIdC("1"); TestIdC idC2 = new TestIdC("2"); TestMarketDataFunctionB builder = new TestMarketDataFunctionB(); // Market data B depends on market data C so these requirements should cause instances of C to be built. // There is no market data function for building instances of C so this should cause failures. CalculationRequirements requirements = CalculationRequirements.builder() .addValues(idB1, idB2) .build(); DefaultMarketDataFactory marketDataFactory = new DefaultMarketDataFactory( new TestTimeSeriesProvider(ImmutableMap.of()), new TestObservableMarketDataFunction(), FeedIdMapping.identity(), builder); CalculationEnvironment marketData = marketDataFactory.buildCalculationEnvironment( requirements, MarketEnvironment.empty(date(2011, 3, 8)), MARKET_DATA_CONFIG); Map<MarketDataId<?>, Failure> failures = marketData.getSingleValueFailures(); Failure failureB1 = failures.get(idB1); Failure failureB2 = failures.get(idB2); Failure failureC1 = failures.get(idC1); Failure failureC2 = failures.get(idC2); assertThat(failureB1).isNotNull(); assertThat(failureB2).isNotNull(); assertThat(failureC1).isNotNull(); assertThat(failureC2).isNotNull(); assertThat(failureB1.getMessage()).matches("No value for.*"); assertThat(failureB2.getMessage()).matches("No value for.*"); assertThat(failureC1.getMessage()).matches("No market data function available to handle.*"); assertThat(failureC2.getMessage()).matches("No market data function available to handle.*"); }
void function() { TestIdB idB1 = new TestIdB("1"); TestIdB idB2 = new TestIdB("2"); TestIdC idC1 = new TestIdC("1"); TestIdC idC2 = new TestIdC("2"); TestMarketDataFunctionB builder = new TestMarketDataFunctionB(); CalculationRequirements requirements = CalculationRequirements.builder() .addValues(idB1, idB2) .build(); DefaultMarketDataFactory marketDataFactory = new DefaultMarketDataFactory( new TestTimeSeriesProvider(ImmutableMap.of()), new TestObservableMarketDataFunction(), FeedIdMapping.identity(), builder); CalculationEnvironment marketData = marketDataFactory.buildCalculationEnvironment( requirements, MarketEnvironment.empty(date(2011, 3, 8)), MARKET_DATA_CONFIG); Map<MarketDataId<?>, Failure> failures = marketData.getSingleValueFailures(); Failure failureB1 = failures.get(idB1); Failure failureB2 = failures.get(idB2); Failure failureC1 = failures.get(idC1); Failure failureC2 = failures.get(idC2); assertThat(failureB1).isNotNull(); assertThat(failureB2).isNotNull(); assertThat(failureC1).isNotNull(); assertThat(failureC2).isNotNull(); assertThat(failureB1.getMessage()).matches(STR); assertThat(failureB2.getMessage()).matches(STR); assertThat(failureC1.getMessage()).matches(STR); assertThat(failureC2.getMessage()).matches(STR); }
/** * Tests failures when there is no builder for an ID type. */
Tests failures when there is no builder for an ID type
noMarketDataBuilderAvailable
{ "repo_name": "nssales/Strata", "path": "modules/engine/src/test/java/com/opengamma/strata/engine/marketdata/DefaultMarketDataFactoryTest.java", "license": "apache-2.0", "size": 65385 }
[ "com.google.common.collect.ImmutableMap", "com.opengamma.strata.basics.market.MarketDataId", "com.opengamma.strata.collect.CollectProjectAssertions", "com.opengamma.strata.collect.result.Failure", "com.opengamma.strata.engine.marketdata.mapping.FeedIdMapping", "java.util.Map" ]
import com.google.common.collect.ImmutableMap; import com.opengamma.strata.basics.market.MarketDataId; import com.opengamma.strata.collect.CollectProjectAssertions; import com.opengamma.strata.collect.result.Failure; import com.opengamma.strata.engine.marketdata.mapping.FeedIdMapping; import java.util.Map;
import com.google.common.collect.*; import com.opengamma.strata.basics.market.*; import com.opengamma.strata.collect.*; import com.opengamma.strata.collect.result.*; import com.opengamma.strata.engine.marketdata.mapping.*; import java.util.*;
[ "com.google.common", "com.opengamma.strata", "java.util" ]
com.google.common; com.opengamma.strata; java.util;
1,160,541
public List<String> eventSystemProperties() { return this.eventSystemProperties; }
List<String> function() { return this.eventSystemProperties; }
/** * Get system properties of the iot hub. * * @return the eventSystemProperties value */
Get system properties of the iot hub
eventSystemProperties
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/kusto/mgmt-v2019_11_09/src/main/java/com/microsoft/azure/management/kusto/v2019_11_09/IotHubDataConnection.java", "license": "mit", "size": 6995 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,350,702
public byte putAndMoveToFirst( final K k, final byte v ) { final K key[] = this.key; final boolean used[] = this.used; final int mask = this.mask; // The starting point. int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode( (K) (k)) ) ) & mask; // There's always an unused entry. while( used[ pos ] ) { if ( ( strategy.equals( (k), (K) (key[ pos ]) ) ) ) { final byte oldValue = value[ pos ]; value[ pos ] = v; moveIndexToFirst( pos ); return oldValue; } pos = ( pos + 1 ) & mask; } used[ pos ] = true; key[ pos ] = k; value[ pos ] = v; if ( size == 0 ) { first = last = pos; // Special case of SET_UPPER_LOWER( link[ pos ], -1, -1 ); link[ pos ] = -1L; } else { link[ first ] ^= ( ( link[ first ] ^ ( ( pos & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L ); link[ pos ] = ( ( -1 & 0xFFFFFFFFL ) << 32 ) | ( first & 0xFFFFFFFFL ); first = pos; } if ( ++size >= maxFill ) rehash( arraySize( size, f ) ); if ( ASSERTS ) checkTable(); return defRetValue; }
byte function( final K k, final byte v ) { final K key[] = this.key; final boolean used[] = this.used; final int mask = this.mask; int pos = ( it.unimi.dsi.fastutil.HashCommon.murmurHash3( strategy.hashCode( (K) (k)) ) ) & mask; while( used[ pos ] ) { if ( ( strategy.equals( (k), (K) (key[ pos ]) ) ) ) { final byte oldValue = value[ pos ]; value[ pos ] = v; moveIndexToFirst( pos ); return oldValue; } pos = ( pos + 1 ) & mask; } used[ pos ] = true; key[ pos ] = k; value[ pos ] = v; if ( size == 0 ) { first = last = pos; link[ pos ] = -1L; } else { link[ first ] ^= ( ( link[ first ] ^ ( ( pos & 0xFFFFFFFFL ) << 32 ) ) & 0xFFFFFFFF00000000L ); link[ pos ] = ( ( -1 & 0xFFFFFFFFL ) << 32 ) | ( first & 0xFFFFFFFFL ); first = pos; } if ( ++size >= maxFill ) rehash( arraySize( size, f ) ); if ( ASSERTS ) checkTable(); return defRetValue; }
/** Adds a pair to the map; if the key is already present, it is moved to the first position of the iteration order. * * @param k the key. * @param v the value. * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key. */
Adds a pair to the map; if the key is already present, it is moved to the first position of the iteration order
putAndMoveToFirst
{ "repo_name": "karussell/fastutil", "path": "src/it/unimi/dsi/fastutil/objects/Object2ByteLinkedOpenCustomHashMap.java", "license": "apache-2.0", "size": 49617 }
[ "it.unimi.dsi.fastutil.HashCommon" ]
import it.unimi.dsi.fastutil.HashCommon;
import it.unimi.dsi.fastutil.*;
[ "it.unimi.dsi" ]
it.unimi.dsi;
2,912,625
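A hedged sketch of how putAndMoveToFirst can maintain most-recently-used ordering; the case-insensitive strategy and the fastutil calls beyond the record itself (the Strategy-only constructor, defaultReturnValue, firstKey) are assumptions based on the library's usual API.

import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.objects.Object2ByteLinkedOpenCustomHashMap;

public class MoveToFirstSketch {
    // Case-insensitive key strategy, used here purely for illustration.
    static final Hash.Strategy<String> CASE_INSENSITIVE = new Hash.Strategy<String>() {
        public int hashCode(String s) { return s == null ? 0 : s.toLowerCase().hashCode(); }
        public boolean equals(String a, String b) { return a == null ? b == null : a.equalsIgnoreCase(b); }
    };

    public static void main(String[] args) {
        Object2ByteLinkedOpenCustomHashMap<String> map =
                new Object2ByteLinkedOpenCustomHashMap<String>(CASE_INSENSITIVE);
        map.defaultReturnValue((byte) -1);
        map.putAndMoveToFirst("alpha", (byte) 1);                  // new key: returns the default (-1)
        map.putAndMoveToFirst("beta", (byte) 2);
        byte previous = map.putAndMoveToFirst("ALPHA", (byte) 3);  // same key under the strategy: returns 1
        System.out.println(previous + " / first key: " + map.firstKey());
    }
}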
Map<String, List<String>> describeSchemaVersions() throws ConnectionException;
Map<String, List<String>> describeSchemaVersions() throws ConnectionException;
/** * For each schema version present in the cluster, returns a list of nodes at that * version. Hosts that do not respond will be under the key DatabaseDescriptor.INITIAL_VERSION. * The cluster is all on the same version if the size of the map is 1 * @throws ConnectionException */
For each schema version present in the cluster, returns a list of nodes at that version. Hosts that do not respond will be under the key DatabaseDescriptor.INITIAL_VERSION. The cluster is all on the same version if the size of the map is 1
describeSchemaVersions
{ "repo_name": "bazaarvoice/astyanax", "path": "astyanax-cassandra/src/main/java/com/netflix/astyanax/Cluster.java", "license": "apache-2.0", "size": 9281 }
[ "com.netflix.astyanax.connectionpool.exceptions.ConnectionException", "java.util.List", "java.util.Map" ]
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException; import java.util.List; import java.util.Map;
import com.netflix.astyanax.connectionpool.exceptions.*; import java.util.*;
[ "com.netflix.astyanax", "java.util" ]
com.netflix.astyanax; java.util;
2,510,360
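A small sketch of the schema-agreement check the javadoc describes, assuming an already-configured Astyanax Cluster instance; only the method from the record is called.

import java.util.List;
import java.util.Map;
import com.netflix.astyanax.Cluster;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;

public class SchemaAgreementSketch {
    // Returns true when every responding node reports the same schema version.
    public static boolean isSchemaAgreed(Cluster cluster) throws ConnectionException {
        Map<String, List<String>> versions = cluster.describeSchemaVersions();
        return versions.size() == 1;
    }
}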
protected void assertTermVectorDocumentMapping(final String term, final long docId) throws IOException { byte[] internalDocId = Bytes.toBytes(docId); Get get = new Get(Bytes.toBytes(term)); get.addFamily(HBaseneConstants.FAMILY_TERMVECTOR); HTable table = new HTable(conf, TEST_INDEX); try { Result result = table.get(get); NavigableMap<byte[], byte[]> map = result .getFamilyMap(HBaseneConstants.FAMILY_TERMVECTOR); Assert.assertTrue(map.size() > 0); Assert.assertNotNull(map.get(internalDocId)); } finally { table.close(); } }
void function(final String term, final long docId) throws IOException { byte[] internalDocId = Bytes.toBytes(docId); Get get = new Get(Bytes.toBytes(term)); get.addFamily(HBaseneConstants.FAMILY_TERMVECTOR); HTable table = new HTable(conf, TEST_INDEX); try { Result result = table.get(get); NavigableMap<byte[], byte[]> map = result .getFamilyMap(HBaseneConstants.FAMILY_TERMVECTOR); Assert.assertTrue(map.size() > 0); Assert.assertNotNull(map.get(internalDocId)); } finally { table.close(); } }
/** * Asserts if a mapping exists between the given term and the doc Id. * * @param term * @param docId * @throws IOException */
Asserts if a mapping exists between the given term and the doc Id
assertTermVectorDocumentMapping
{ "repo_name": "webfd/hbasene", "path": "src/test/java/org/hbasene/index/AbstractHBaseneTest.java", "license": "apache-2.0", "size": 10858 }
[ "java.io.IOException", "java.util.NavigableMap", "org.apache.hadoop.hbase.client.Get", "org.apache.hadoop.hbase.client.HTable", "org.apache.hadoop.hbase.client.Result", "org.apache.hadoop.hbase.util.Bytes", "org.junit.Assert" ]
import java.io.IOException; import java.util.NavigableMap; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert;
import java.io.*; import java.util.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.util.*; import org.junit.*;
[ "java.io", "java.util", "org.apache.hadoop", "org.junit" ]
java.io; java.util; org.apache.hadoop; org.junit;
1,156,494
private static void mergeRefConfidenceGenotypes(final GenotypesContext mergedGenotypes, final VariantContext vc, final List<Allele> remappedAlleles, final List<Allele> targetAlleles, final boolean samplesAreUniquified, final boolean shouldComputePLs) { final int maximumPloidy = vc.getMaxPloidy(GaeaGvcfVariantContextUtils.DEFAULT_PLOIDY); // the map is different depending on the ploidy, so in order to keep // this method flexible (mixed ploidies) // we need to get a map done (lazily inside the loop) for each ploidy, // up to the maximum possible. final int[][] genotypeIndexMapsByPloidy = new int[maximumPloidy + 1][]; final int maximumAlleleCount = Math.max(remappedAlleles.size(), targetAlleles.size()); for (final Genotype g : vc.getGenotypes()) { final String name; if (samplesAreUniquified) name = g.getSampleName() + "." + vc.getSource(); else name = g.getSampleName(); final int ploidy = g.getPloidy(); final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(g) .alleles(GaeaGvcfVariantContextUtils.noCallAlleles(g.getPloidy())).noPL(); genotypeBuilder.name(name); final boolean doPLs = shouldComputePLs && g.hasPL(); final boolean hasAD = g.hasAD(); final boolean hasSAC = g.hasExtendedAttribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY); if (doPLs || hasSAC || hasAD) { final int[] perSampleIndexesOfRelevantAlleles = getIndexesOfRelevantAlleles(remappedAlleles, targetAlleles, vc.getStart(), g); if (doPLs) { // lazy initialization of the genotype index map by ploidy. final int[] genotypeIndexMapByPloidy = genotypeIndexMapsByPloidy[ploidy] == null ? GenotypeLikelihoodCalculators.getInstance(ploidy, maximumAlleleCount).genotypeIndexMap( perSampleIndexesOfRelevantAlleles) : genotypeIndexMapsByPloidy[ploidy]; final int[] PLs = generatePL(g, genotypeIndexMapByPloidy); genotypeBuilder.PL(PLs); } if (hasAD) { genotypeBuilder.AD(generateAD(g.getAD(), perSampleIndexesOfRelevantAlleles)); } if (hasSAC) { final List<Integer> sacIndexesToUse = adaptToSACIndexes(perSampleIndexesOfRelevantAlleles); final int[] SACs = GaeaGvcfVariantContextUtils.makeNewSACs(g, sacIndexesToUse); genotypeBuilder.attribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, SACs); } } mergedGenotypes.add(genotypeBuilder.make()); } }
static void function(final GenotypesContext mergedGenotypes, final VariantContext vc, final List<Allele> remappedAlleles, final List<Allele> targetAlleles, final boolean samplesAreUniquified, final boolean shouldComputePLs) { final int maximumPloidy = vc.getMaxPloidy(GaeaGvcfVariantContextUtils.DEFAULT_PLOIDY); final int[][] genotypeIndexMapsByPloidy = new int[maximumPloidy + 1][]; final int maximumAlleleCount = Math.max(remappedAlleles.size(), targetAlleles.size()); for (final Genotype g : vc.getGenotypes()) { final String name; if (samplesAreUniquified) name = g.getSampleName() + "." + vc.getSource(); else name = g.getSampleName(); final int ploidy = g.getPloidy(); final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(g) .alleles(GaeaGvcfVariantContextUtils.noCallAlleles(g.getPloidy())).noPL(); genotypeBuilder.name(name); final boolean doPLs = shouldComputePLs && g.hasPL(); final boolean hasAD = g.hasAD(); final boolean hasSAC = g.hasExtendedAttribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY); if (doPLs || hasSAC || hasAD) { final int[] perSampleIndexesOfRelevantAlleles = getIndexesOfRelevantAlleles(remappedAlleles, targetAlleles, vc.getStart(), g); if (doPLs) { final int[] genotypeIndexMapByPloidy = genotypeIndexMapsByPloidy[ploidy] == null ? GenotypeLikelihoodCalculators.getInstance(ploidy, maximumAlleleCount).genotypeIndexMap( perSampleIndexesOfRelevantAlleles) : genotypeIndexMapsByPloidy[ploidy]; final int[] PLs = generatePL(g, genotypeIndexMapByPloidy); genotypeBuilder.PL(PLs); } if (hasAD) { genotypeBuilder.AD(generateAD(g.getAD(), perSampleIndexesOfRelevantAlleles)); } if (hasSAC) { final List<Integer> sacIndexesToUse = adaptToSACIndexes(perSampleIndexesOfRelevantAlleles); final int[] SACs = GaeaGvcfVariantContextUtils.makeNewSACs(g, sacIndexesToUse); genotypeBuilder.attribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, SACs); } } mergedGenotypes.add(genotypeBuilder.make()); } }
/** * Merge into the context a new genotype represented by the given * VariantContext for the provided list of target alleles. This method * assumes that none of the alleles in the VC overlaps with any of the * alleles in the set. */
Merge into the context a new genotype represented by the given VariantContext for the provided list of target alleles. This method assumes that none of the alleles in the VC overlaps with any of the alleles in the set
mergeRefConfidenceGenotypes
{ "repo_name": "BGI-flexlab/SOAPgaeaDevelopment4.0", "path": "src/main/java/org/bgi/flexlab/gaea/tools/jointcalling/util/ReferenceConfidenceVariantContextMerger.java", "license": "gpl-3.0", "size": 23322 }
[ "java.util.List", "org.bgi.flexlab.gaea.tools.jointcalling.genotypelikelihood.GenotypeLikelihoodCalculators", "org.bgi.flexlab.gaea.util.GaeaVCFConstants" ]
import java.util.List; import org.bgi.flexlab.gaea.tools.jointcalling.genotypelikelihood.GenotypeLikelihoodCalculators; import org.bgi.flexlab.gaea.util.GaeaVCFConstants;
import java.util.*; import org.bgi.flexlab.gaea.tools.jointcalling.genotypelikelihood.*; import org.bgi.flexlab.gaea.util.*;
[ "java.util", "org.bgi.flexlab" ]
java.util; org.bgi.flexlab;
1,191,260
public void setConfigurationChangeHandler(final ConfigurationChangeHandler handler) { this.configurationChangesHandler = handler; }
void function(final ConfigurationChangeHandler handler) { this.configurationChangesHandler = handler; }
/** * set here your configuration changes handler * * @param handler */
set here your configuration changes handler
setConfigurationChangeHandler
{ "repo_name": "jraduget/kaleido-repository", "path": "kaleido-core/src/main/java/org/kaleidofoundry/core/context/RuntimeContext.java", "license": "apache-2.0", "size": 29069 }
[ "org.kaleidofoundry.core.config.ConfigurationChangeHandler" ]
import org.kaleidofoundry.core.config.ConfigurationChangeHandler;
import org.kaleidofoundry.core.config.*;
[ "org.kaleidofoundry.core" ]
org.kaleidofoundry.core;
282,589
public static void availabilitySetsListMaximumSetGen(com.azure.resourcemanager.AzureResourceManager azure) { azure .virtualMachines() .manager() .serviceClient() .getAvailabilitySets() .listByResourceGroup("rgcompute", Context.NONE); }
static void function(com.azure.resourcemanager.AzureResourceManager azure) { azure .virtualMachines() .manager() .serviceClient() .getAvailabilitySets() .listByResourceGroup(STR, Context.NONE); }
/** * Sample code: AvailabilitySets_List_MaximumSet_Gen. * * @param azure The entry point for accessing resource management APIs in Azure. */
Sample code: AvailabilitySets_List_MaximumSet_Gen
availabilitySetsListMaximumSetGen
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager/src/samples/java/com/azure/resourcemanager/compute/generated/AvailabilitySetsListByResourceGroupSamples.java", "license": "mit", "size": 1680 }
[ "com.azure.core.util.Context" ]
import com.azure.core.util.Context;
import com.azure.core.util.*;
[ "com.azure.core" ]
com.azure.core;
1,411,310
public boolean defineIfSet(String key, String val) { Preconditions.checkArgument(key != null, "null key"); if (val != null) { define(key, val); return true; } else { return false; } }
boolean function(String key, String val) { Preconditions.checkArgument(key != null, STR); if (val != null) { define(key, val); return true; } else { return false; } }
/** * Add a <code>-D key=val</code> command to the CLI if <code>val</code> * is not null * @param key key * @param val value */
Add a <code>-D key=val</code> command to the CLI if <code>val</code> is not null
defineIfSet
{ "repo_name": "GeLiXin/hadoop", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/JavaCommandLineBuilder.java", "license": "apache-2.0", "size": 5293 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
1,178,183
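A usage sketch for defineIfSet; the builder instance is taken as a parameter to avoid assuming its constructor, and the property keys are hypothetical examples.

import org.apache.hadoop.yarn.service.containerlaunch.JavaCommandLineBuilder;

public class OptionalDefinesSketch {
    // Adds -D definitions only for values that are actually present; null values are skipped.
    public static void addOptionalDefines(JavaCommandLineBuilder cli, String heapDumpPath, String tmpDir) {
        boolean added = cli.defineIfSet("heap.dump.path", heapDumpPath); // false when heapDumpPath == null
        if (!added) {
            System.out.println("heap dump path not configured, skipping define");
        }
        cli.defineIfSet("java.io.tmpdir", tmpDir);
    }
}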
public static MozuClient<com.mozu.api.contracts.commerceruntime.wishlists.WishlistItem> getWishlistItemClient(String wishlistId, String wishlistItemId) throws Exception { return getWishlistItemClient( wishlistId, wishlistItemId, null); }
static MozuClient<com.mozu.api.contracts.commerceruntime.wishlists.WishlistItem> function(String wishlistId, String wishlistItemId) throws Exception { return getWishlistItemClient( wishlistId, wishlistItemId, null); }
/** * Retrieves the details of an item in a shopper wish list. * <p><pre><code> * MozuClient<com.mozu.api.contracts.commerceruntime.wishlists.WishlistItem> mozuClient=GetWishlistItemClient( wishlistId, wishlistItemId); * client.setBaseAddress(url); * client.executeRequest(); * WishlistItem wishlistItem = client.Result(); * </code></pre></p> * @param wishlistId Unique identifier of the wish list. * @param wishlistItemId Unique identifier of the item to remove from the shopper wish list. * @return Mozu.Api.MozuClient <com.mozu.api.contracts.commerceruntime.wishlists.WishlistItem> * @see com.mozu.api.contracts.commerceruntime.wishlists.WishlistItem */
Retrieves the details of an item in a shopper wish list. <code><code> MozuClient mozuClient=GetWishlistItemClient( wishlistId, wishlistItemId); client.setBaseAddress(url); client.executeRequest(); WishlistItem wishlistItem = client.Result(); </code></code>
getWishlistItemClient
{ "repo_name": "lakshmi-nair/mozu-java", "path": "mozu-javaasync-core/src/main/java/com/mozu/api/clients/commerce/wishlists/WishlistItemClient.java", "license": "mit", "size": 21569 }
[ "com.mozu.api.MozuClient" ]
import com.mozu.api.MozuClient;
import com.mozu.api.*;
[ "com.mozu.api" ]
com.mozu.api;
163,417
public SchemaBuilder schemaBuilder(Column columnDefinition);
SchemaBuilder function(Column columnDefinition);
/** * Returns a {@link SchemaBuilder} for a {@link Schema} describing literal values of the given JDBC type. * * @param columnDefinition the column definition; never null * @return the schema builder; null if the column's type information is unknown */
Returns a <code>SchemaBuilder</code> for a <code>Schema</code> describing literal values of the given JDBC type
schemaBuilder
{ "repo_name": "adsr/debezium", "path": "debezium-core/src/main/java/io/debezium/relational/ValueConverterProvider.java", "license": "apache-2.0", "size": 1609 }
[ "org.apache.kafka.connect.data.SchemaBuilder" ]
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.*;
[ "org.apache.kafka" ]
org.apache.kafka;
1,092,763
public void connectToNode(DiscoveryNode node) throws ConnectTransportException { connectToNode(node, null); }
void function(DiscoveryNode node) throws ConnectTransportException { connectToNode(node, null); }
/** * Connect to the specified node with the default connection profile * * @param node the node to connect to */
Connect to the specified node with the default connection profile
connectToNode
{ "repo_name": "strapdata/elassandra", "path": "server/src/main/java/org/elasticsearch/transport/TransportService.java", "license": "apache-2.0", "size": 56321 }
[ "org.elasticsearch.cluster.node.DiscoveryNode" ]
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.*;
[ "org.elasticsearch.cluster" ]
org.elasticsearch.cluster;
2,637,195
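A sketch of calling connectToNode defensively; the TransportService and DiscoveryNode are assumed to come from the surrounding node infrastructure, and only the documented overload is used.

import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportService;

public class NodeConnectorSketch {
    // Attempts a connection with the default profile and reports failures instead of propagating them.
    public static boolean tryConnect(TransportService transportService, DiscoveryNode node) {
        try {
            transportService.connectToNode(node);
            return true;
        } catch (ConnectTransportException e) {
            System.err.println("Could not connect to " + node + ": " + e.getMessage());
            return false;
        }
    }
}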
public void addURL(String path) throws MalformedURLException{ File f = new File(path); this.addURL(f.toURI().toURL()); }
void function(String path) throws MalformedURLException{ File f = new File(path); this.addURL(f.toURI().toURL()); }
/** * Add classes in path to defined classes. * Should point to jar file. * @param path Path to jar. * @return true, if loading successful. */
Add classes in path to defined classes. Should point to jar file
addURL
{ "repo_name": "m4dguy/CVForge", "path": "src/cvforge/CVForgeClassLoader.java", "license": "bsd-3-clause", "size": 1501 }
[ "java.io.File", "java.net.MalformedURLException" ]
import java.io.File; import java.net.MalformedURLException;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
1,082,807
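A usage sketch for addURL; the cvforge package name is inferred from the record's path and the jar location is a hypothetical example.

import java.net.MalformedURLException;
import cvforge.CVForgeClassLoader;

public class PluginJarSketch {
    // Registers an external jar with the class loader before any classes from it are requested.
    public static void registerJar(CVForgeClassLoader loader, String jarPath) {
        try {
            loader.addURL(jarPath); // e.g. "/opt/plugins/filters.jar" (placeholder path)
        } catch (MalformedURLException e) {
            System.err.println("Invalid jar path: " + jarPath);
        }
    }
}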
private void getData() { String format = m_currentMeta.getFileFormat(); if (format != null && format.length() > 0) { m_wFormat.setText(format); } String encoding = m_currentMeta.getEncoding(); if (encoding != null && encoding.length() > 0) { m_wEncoding.setText(encoding); } String fName = m_currentMeta.getFileName(); m_wFilename.setText(fName); String rName = m_currentMeta.getRelationName(); m_wRelationName.setText(rName); ArffMeta[] fields = m_currentMeta.getOutputFields(); if (fields == null || fields.length == 0) { fields = setupArffMetas(); } if (fields != null) { m_wFields.clearAll(false); Table table = m_wFields.table; int count = 0; for (int i = 0; i < fields.length; i++) { if (fields[i] != null) { // TableItem item = m_wFields.table.getItem(i); TableItem item = new TableItem(table, SWT.NONE); item.setText(1, Const.NVL(fields[i].getFieldName(), "")); item.setText(2, ValueMeta.getTypeDesc(fields[i].getKettleType()) ); item.setText(3, Const.NVL(getArffTypeString(fields[i].getArffType()), "")); if (fields[i].getArffType() == ArffMeta.NOMINAL && !Const.isEmpty(fields[i].getNominalVals())) { item.setText(4, fields[i].getNominalVals()); } else if (fields[i].getArffType() == ArffMeta.DATE && !Const.isEmpty(fields[i].getDateFormat())) { item.setText(4, fields[i].getDateFormat()); } } } m_wFields.removeEmptyRows(); m_wFields.setRowNums(); m_wFields.optWidth(true); // weight field specified? if (!Const.isEmpty(m_currentMeta.getWeightFieldName())) { m_weightFieldCheckBox.setSelection(true); setupWeightFieldComboBox(); m_weightFieldComboBox.setEnabled(true); m_weightFieldComboBox.setText(m_currentMeta.getWeightFieldName()); } m_sparseOutputCheckBox.setSelection(m_currentMeta .getOutputSparseInstance()); } }
void function() { String format = m_currentMeta.getFileFormat(); if (format != null && format.length() > 0) { m_wFormat.setText(format); } String encoding = m_currentMeta.getEncoding(); if (encoding != null && encoding.length() > 0) { m_wEncoding.setText(encoding); } String fName = m_currentMeta.getFileName(); m_wFilename.setText(fName); String rName = m_currentMeta.getRelationName(); m_wRelationName.setText(rName); ArffMeta[] fields = m_currentMeta.getOutputFields(); if (fields == null || fields.length == 0) { fields = setupArffMetas(); } if (fields != null) { m_wFields.clearAll(false); Table table = m_wFields.table; int count = 0; for (int i = 0; i < fields.length; i++) { if (fields[i] != null) { TableItem item = new TableItem(table, SWT.NONE); item.setText(1, Const.NVL(fields[i].getFieldName(), "")); item.setText(2, ValueMeta.getTypeDesc(fields[i].getKettleType()) ); item.setText(3, Const.NVL(getArffTypeString(fields[i].getArffType()), "")); if (fields[i].getArffType() == ArffMeta.NOMINAL && !Const.isEmpty(fields[i].getNominalVals())) { item.setText(4, fields[i].getNominalVals()); } else if (fields[i].getArffType() == ArffMeta.DATE && !Const.isEmpty(fields[i].getDateFormat())) { item.setText(4, fields[i].getDateFormat()); } } } m_wFields.removeEmptyRows(); m_wFields.setRowNums(); m_wFields.optWidth(true); if (!Const.isEmpty(m_currentMeta.getWeightFieldName())) { m_weightFieldCheckBox.setSelection(true); setupWeightFieldComboBox(); m_weightFieldComboBox.setEnabled(true); m_weightFieldComboBox.setText(m_currentMeta.getWeightFieldName()); } m_sparseOutputCheckBox.setSelection(m_currentMeta .getOutputSparseInstance()); } }
/** * Copy data out of the ArffOutputMeta object and into the GUI */
Copy data out of the ArffOutputMeta object and into the GUI
getData
{ "repo_name": "e-cuellar/pdi-weka-arff-output-plugin", "path": "src/org/pentaho/di/arff/ArffOutputDialog.java", "license": "gpl-3.0", "size": 34423 }
[ "org.eclipse.swt.widgets.Table", "org.eclipse.swt.widgets.TableItem", "org.pentaho.di.core.Const", "org.pentaho.dm.commons.ArffMeta" ]
import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableItem; import org.pentaho.di.core.Const; import org.pentaho.dm.commons.ArffMeta;
import org.eclipse.swt.widgets.*; import org.pentaho.di.core.*; import org.pentaho.dm.commons.*;
[ "org.eclipse.swt", "org.pentaho.di", "org.pentaho.dm" ]
org.eclipse.swt; org.pentaho.di; org.pentaho.dm;
300,876
public void dumpPAGForMethod(String fName, String cName, String mName) throws FileNotFoundException { PrintStream ps; FileOutputStream fos = new FileOutputStream(new File(fName)); ps = new PrintStream(fos); ps.println("digraph G {"); ps.println("\trankdir=LR;"); dumpLocalPAG(cName, mName, ps); ps.print("}"); }
void function(String fName, String cName, String mName) throws FileNotFoundException { PrintStream ps; FileOutputStream fos = new FileOutputStream(new File(fName)); ps = new PrintStream(fos); ps.println(STR); ps.println(STR); dumpLocalPAG(cName, mName, ps); ps.print("}"); }
/** * Dump the PAG for some method in the program in * dot format * @param fName The filename for the output * @param cName The name of the declaring class for the method * @param mName The name of the method * @throws FileNotFoundException if output file cannot be written */
Dump the PAG for some method in the program in dot format
dumpPAGForMethod
{ "repo_name": "BuddhaLabs/DeD-OSX", "path": "soot/soot-2.3.0/src/soot/jimple/spark/pag/PagToDotDumper.java", "license": "gpl-2.0", "size": 16073 }
[ "java.io.File", "java.io.FileNotFoundException", "java.io.FileOutputStream", "java.io.PrintStream" ]
import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.PrintStream;
import java.io.*;
[ "java.io" ]
java.io;
1,810,070
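A sketch of dumping one method's PAG to a dot file; the dumper is taken as a parameter (its construction is not shown in the record), and the file, class, and method names are placeholders.

import java.io.FileNotFoundException;
import soot.jimple.spark.pag.PagToDotDumper;

public class PagDumpSketch {
    // Writes the pointer-assignment graph of one method to a Graphviz dot file.
    public static void dump(PagToDotDumper dumper) {
        try {
            dumper.dumpPAGForMethod("foo-main.dot", "com.example.Foo", "main");
        } catch (FileNotFoundException e) {
            System.err.println("Cannot write dot file: " + e.getMessage());
        }
    }
}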
public List<FrontendIPConfigurationInner> loadBalancerFrontendIpConfigurations() { return this.loadBalancerFrontendIpConfigurations; }
List<FrontendIPConfigurationInner> function() { return this.loadBalancerFrontendIpConfigurations; }
/** * Get an array of references to the load balancer IP configurations. * * @return the loadBalancerFrontendIpConfigurations value */
Get an array of references to the load balancer IP configurations
loadBalancerFrontendIpConfigurations
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/network/v2019_11_01/implementation/PrivateLinkServiceInner.java", "license": "mit", "size": 8862 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,901,013
public void setRotateAngle(ModelRenderer modelRenderer, float x, float y, float z) { modelRenderer.rotateAngleX = x; modelRenderer.rotateAngleY = y; modelRenderer.rotateAngleZ = z; }
void function(ModelRenderer modelRenderer, float x, float y, float z) { modelRenderer.rotateAngleX = x; modelRenderer.rotateAngleY = y; modelRenderer.rotateAngleZ = z; }
/** * This is a helper function from Tabula to set the rotation of model parts */
This is a helper function from Tabula to set the rotation of model parts
setRotateAngle
{ "repo_name": "Mitchellbrine/SteelSheep", "path": "src/main/java/mc/Mitchellbrine/steelSheep/client/model/ModelUprightCow.java", "license": "lgpl-3.0", "size": 3056 }
[ "net.minecraft.client.model.ModelRenderer" ]
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.client.model.*;
[ "net.minecraft.client" ]
net.minecraft.client;
55,835
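A small sketch of posing a part with setRotateAngle; the model's package is inferred from the record's path and the angle value is an illustrative choice (about 15 degrees in radians).

import mc.Mitchellbrine.steelSheep.client.model.ModelUprightCow;
import net.minecraft.client.model.ModelRenderer;

public class PoseSketch {
    // Pitches one model box down slightly; yaw and roll stay at zero.
    public static void tiltPart(ModelUprightCow model, ModelRenderer part) {
        model.setRotateAngle(part, 0.2617994F, 0.0F, 0.0F);
    }
}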
private void readVdmjClass(SClassDefinition c, AnnotationTable annotationTable, Vector<String> classNames, ITypeCheckerAssistantFactory assistantFactory) { boolean hasConstructors = c.getHasContructors(); VdmClass vdmClass = new VdmClass(c.getName().getName(), hasConstructors); // adds annotation if any for (PDefinition def : c.getDefinitions()) { VdmMethod newDefinition = null; if (assistantFactory.createPDefinitionAssistant().isFunctionOrOperation(def)) { // now we check what sub class it is... if (def instanceof AExplicitOperationDefinition) { AExplicitOperationDefinition operation = (AExplicitOperationDefinition) def; // In terms of type only 'class types' are treated VdmType type = getType(((AOperationType) operation.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(operation.getName().getName(), operation.getIsConstructor(), type); // Temporary solution, just to check if there's a return // fetching the arguments for (List<PPattern> li : assistantFactory.createAExplicitOperationDefinitionAssistant().getParamPatternList(operation)) { for (int n = 0; n < li.size(); ++n) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li.get(n)); // the type String typeName = extractTypeName(operation.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); } } } else if (def instanceof AImplicitOperationDefinition) { AImplicitOperationDefinition operation = (AImplicitOperationDefinition) def; VdmType type = null; // In terms of type only 'class types' are treated type = getType(((AOperationType) operation.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(operation.getName().getName(), operation.getIsConstructor(), type); // fetching the arguments int n = 0; for (PPattern li : assistantFactory.createAImplicitOperationDefinitionAssistant().getParamPatternList(operation)) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li); // the type String typeName = extractTypeName(operation.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); extractTypeName(operation.getType(), n); ++n; } } else if (def instanceof AExplicitFunctionDefinition) { AExplicitFunctionDefinition function = (AExplicitFunctionDefinition) def; VdmType type = null; // In terms of type only 'class types' are treated type = getType(((AFunctionType) function.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(function.getName().getName(), false, type); // fetching the arguments for (List<PPattern> li : function.getParamPatternList()) { for (int n = 0; n < li.size(); ++n) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li.get(n)); // the type String typeName = extractTypeName(function.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); extractTypeName(function.getType(), n); } } } else if (def instanceof AImplicitFunctionDefinition) { AImplicitFunctionDefinition function = (AImplicitFunctionDefinition) def; VdmType type = null; // In terms of type only 'class types' are treated type = 
getType(((AFunctionType) function.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(function.getName().getName(), false, type); // fetching the arguments for (List<PPattern> li : assistantFactory.createAImplicitFunctionDefinitionAssistant().getParamPatternList(function)) { for (int n = 0; n < li.size(); ++n) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li.get(n)); // the type String typeName = extractTypeName(function.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); extractTypeName(function.getType(), n); } } } // adds annotations if any if (newDefinition != null) { if (annotationTable.getOpAnnotations(vdmClass.getName() + newDefinition.getName()) != null) { for (VdmAnnotation annotation : annotationTable.getOpAnnotations(vdmClass.getName() + newDefinition.getName())) { newDefinition.addAnnotation(annotation); } } } } // isFunctionOrOperation() // we add the new definition vdmClass.addDefinition(newDefinition); // adds annotations if any if (annotationTable.classHasAnnotations(vdmClass.getName())) { for (VdmAnnotation annotation : annotationTable.getClassAnnotations(vdmClass.getName())) { vdmClass.addAnnotation(annotation); } } } // we only have one annotation for classes, so we can get away with this, but // a sanity check is needed... if (!(ToolSettings.GENERATION_SETTINGS == ToolSettings.GENERATION_MODE.ANNOTATIONS && vdmClass.hasAnnotations())) { classList.add(vdmClass); } }
void function(SClassDefinition c, AnnotationTable annotationTable, Vector<String> classNames, ITypeCheckerAssistantFactory assistantFactory) { boolean hasConstructors = c.getHasContructors(); VdmClass vdmClass = new VdmClass(c.getName().getName(), hasConstructors); for (PDefinition def : c.getDefinitions()) { VdmMethod newDefinition = null; if (assistantFactory.createPDefinitionAssistant().isFunctionOrOperation(def)) { if (def instanceof AExplicitOperationDefinition) { AExplicitOperationDefinition operation = (AExplicitOperationDefinition) def; VdmType type = getType(((AOperationType) operation.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(operation.getName().getName(), operation.getIsConstructor(), type); for (List<PPattern> li : assistantFactory.createAExplicitOperationDefinitionAssistant().getParamPatternList(operation)) { for (int n = 0; n < li.size(); ++n) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li.get(n)); String typeName = extractTypeName(operation.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); } } } else if (def instanceof AImplicitOperationDefinition) { AImplicitOperationDefinition operation = (AImplicitOperationDefinition) def; VdmType type = null; type = getType(((AOperationType) operation.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(operation.getName().getName(), operation.getIsConstructor(), type); int n = 0; for (PPattern li : assistantFactory.createAImplicitOperationDefinitionAssistant().getParamPatternList(operation)) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li); String typeName = extractTypeName(operation.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); extractTypeName(operation.getType(), n); ++n; } } else if (def instanceof AExplicitFunctionDefinition) { AExplicitFunctionDefinition function = (AExplicitFunctionDefinition) def; VdmType type = null; type = getType(((AFunctionType) function.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(function.getName().getName(), false, type); for (List<PPattern> li : function.getParamPatternList()) { for (int n = 0; n < li.size(); ++n) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li.get(n)); String typeName = extractTypeName(function.getType(), n); boolean flag = false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); extractTypeName(function.getType(), n); } } } else if (def instanceof AImplicitFunctionDefinition) { AImplicitFunctionDefinition function = (AImplicitFunctionDefinition) def; VdmType type = null; type = getType(((AFunctionType) function.getType()).getResult(), assistantFactory); newDefinition = new VdmMethod(function.getName().getName(), false, type); for (List<PPattern> li : assistantFactory.createAImplicitFunctionDefinitionAssistant().getParamPatternList(function)) { for (int n = 0; n < li.size(); ++n) { LexNameList varName = assistantFactory.createPPatternAssistant().getVariableNames(li.get(n)); String typeName = extractTypeName(function.getType(), n); boolean flag = 
false; for (String cn : classNames) { if (typeName.equals(cn)) { flag = true; } } ((VdmMethod) newDefinition).addParam(new VdmParam(varName.toString(), new VdmType(typeName, flag))); extractTypeName(function.getType(), n); } } } if (newDefinition != null) { if (annotationTable.getOpAnnotations(vdmClass.getName() + newDefinition.getName()) != null) { for (VdmAnnotation annotation : annotationTable.getOpAnnotations(vdmClass.getName() + newDefinition.getName())) { newDefinition.addAnnotation(annotation); } } } } vdmClass.addDefinition(newDefinition); if (annotationTable.classHasAnnotations(vdmClass.getName())) { for (VdmAnnotation annotation : annotationTable.getClassAnnotations(vdmClass.getName())) { vdmClass.addAnnotation(annotation); } } } if (!(ToolSettings.GENERATION_SETTINGS == ToolSettings.GENERATION_MODE.ANNOTATIONS && vdmClass.hasAnnotations())) { classList.add(vdmClass); } }
/** * Reads an individual vdmj ClassDefinition object, cross checking with the annotation table, and translates it into * the intermediate representation * * @param c * class definition * @param annotationTable * annotation table associated with the specification * @param classNames * @throws InvocationAssistantException */
Reads an individual vdmj ClassDefinition object, cross checking with the annotation table, and translates it into the intermediate representation
readVdmjClass
{ "repo_name": "LasseBP/overture", "path": "core/guibuilder/src/main/java/org/overture/guibuilder/internal/ir/VdmjVdmClassReader.java", "license": "gpl-3.0", "size": 12937 }
[ "java.util.List", "java.util.Vector", "org.overture.ast.definitions.AExplicitFunctionDefinition", "org.overture.ast.definitions.AExplicitOperationDefinition", "org.overture.ast.definitions.AImplicitFunctionDefinition", "org.overture.ast.definitions.AImplicitOperationDefinition", "org.overture.ast.definitions.PDefinition", "org.overture.ast.definitions.SClassDefinition", "org.overture.ast.lex.LexNameList", "org.overture.ast.patterns.PPattern", "org.overture.ast.types.AFunctionType", "org.overture.ast.types.AOperationType", "org.overture.guibuilder.internal.ToolSettings", "org.overture.typechecker.assistant.ITypeCheckerAssistantFactory" ]
import java.util.List; import java.util.Vector; import org.overture.ast.definitions.AExplicitFunctionDefinition; import org.overture.ast.definitions.AExplicitOperationDefinition; import org.overture.ast.definitions.AImplicitFunctionDefinition; import org.overture.ast.definitions.AImplicitOperationDefinition; import org.overture.ast.definitions.PDefinition; import org.overture.ast.definitions.SClassDefinition; import org.overture.ast.lex.LexNameList; import org.overture.ast.patterns.PPattern; import org.overture.ast.types.AFunctionType; import org.overture.ast.types.AOperationType; import org.overture.guibuilder.internal.ToolSettings; import org.overture.typechecker.assistant.ITypeCheckerAssistantFactory;
import java.util.*; import org.overture.ast.definitions.*; import org.overture.ast.lex.*; import org.overture.ast.patterns.*; import org.overture.ast.types.*; import org.overture.guibuilder.internal.*; import org.overture.typechecker.assistant.*;
[ "java.util", "org.overture.ast", "org.overture.guibuilder", "org.overture.typechecker" ]
java.util; org.overture.ast; org.overture.guibuilder; org.overture.typechecker;
1,596,530
@Test public void checkExpectedNumberOfFeatures() throws InvalidProtocolBufferException, UnirestException, GAWrapperException { final long start = 62162; final long end = 62239; final String parentId = ""; final int expectedNumberOfFeatures = 69; final String id = Utils.getFeatureSetId(client); final SearchFeaturesRequest fReq = SearchFeaturesRequest.newBuilder() .setFeatureSetId(id) .setReferenceName(TestData.REFERENCE_NAME) .setStart(start).setEnd(end) .setParentId(parentId) .build(); final SearchFeaturesResponse fResp = client.sequenceAnnotations.searchFeatures(fReq); final List<Feature> searchFeatures = fResp.getFeaturesList(); assertThat(searchFeatures).hasSize(expectedNumberOfFeatures); }
void function() throws InvalidProtocolBufferException, UnirestException, GAWrapperException { final long start = 62162; final long end = 62239; final String parentId = ""; final int expectedNumberOfFeatures = 69; final String id = Utils.getFeatureSetId(client); final SearchFeaturesRequest fReq = SearchFeaturesRequest.newBuilder() .setFeatureSetId(id) .setReferenceName(TestData.REFERENCE_NAME) .setStart(start).setEnd(end) .setParentId(parentId) .build(); final SearchFeaturesResponse fResp = client.sequenceAnnotations.searchFeatures(fReq); final List<Feature> searchFeatures = fResp.getFeaturesList(); assertThat(searchFeatures).hasSize(expectedNumberOfFeatures); }
/** * Fetch features between two positions in the reference and count them. The number must * equal what we're expecting by visual examination of the features data. * * @throws GAWrapperException if the server finds the request invalid in some way * @throws UnirestException if there's a problem speaking HTTP to the server * @throws InvalidProtocolBufferException if there's a problem processing the JSON response from the server */
Fetch features between two positions in the reference and count them. The number must equal what we're expecting by visual examination of the features data
checkExpectedNumberOfFeatures
{ "repo_name": "macieksmuga/compliance", "path": "cts-java/src/test/java/org/ga4gh/cts/api/sequenceAnnotations/FeaturesSearchIT.java", "license": "apache-2.0", "size": 6763 }
[ "com.google.protobuf.InvalidProtocolBufferException", "com.mashape.unirest.http.exceptions.UnirestException", "java.util.List", "org.assertj.core.api.Assertions", "org.ga4gh.ctk.transport.GAWrapperException", "org.ga4gh.cts.api.TestData", "org.ga4gh.cts.api.Utils" ]
import com.google.protobuf.InvalidProtocolBufferException; import com.mashape.unirest.http.exceptions.UnirestException; import java.util.List; import org.assertj.core.api.Assertions; import org.ga4gh.ctk.transport.GAWrapperException; import org.ga4gh.cts.api.TestData; import org.ga4gh.cts.api.Utils;
import com.google.protobuf.*; import com.mashape.unirest.http.exceptions.*; import java.util.*; import org.assertj.core.api.*; import org.ga4gh.ctk.transport.*; import org.ga4gh.cts.api.*;
[ "com.google.protobuf", "com.mashape.unirest", "java.util", "org.assertj.core", "org.ga4gh.ctk", "org.ga4gh.cts" ]
com.google.protobuf; com.mashape.unirest; java.util; org.assertj.core; org.ga4gh.ctk; org.ga4gh.cts;
27,296
public KeyPropertyField getIdField(IdField field) { ArrayList<IdField> keys = getEntityTargetType().getId().getKeys(); if (_idFields.size() != keys.size()) { try { init(); } catch (Exception e) { throw new RuntimeException(e); } } for (int i = 0; i < keys.size(); i++) { if (keys.get(i) == field) return _idFields.get(i); } throw new IllegalStateException(field.toString()); }
KeyPropertyField function(IdField field) { ArrayList<IdField> keys = getEntityTargetType().getId().getKeys(); if (_idFields.size() != keys.size()) { try { init(); } catch (Exception e) { throw new RuntimeException(e); } } for (int i = 0; i < keys.size(); i++) { if (keys.get(i) == field) return _idFields.get(i); } throw new IllegalStateException(field.toString()); }
/** * Returns the identifying field matching the target's id. */
Returns the identifying field matching the target's id
getIdField
{ "repo_name": "christianchristensen/resin", "path": "modules/resin/src/com/caucho/amber/field/KeyManyToOneField.java", "license": "gpl-2.0", "size": 11705 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
2,472,769
public final <T> void bindTo(Class<T> clazz, T object) throws ObjectAlreadyAssociatedException { LifecycleManagerStore.getCurrentStore().bind(this, clazz, object); }
final <T> void function(Class<T> clazz, T object) throws ObjectAlreadyAssociatedException { LifecycleManagerStore.getCurrentStore().bind(this, clazz, object); }
/** * Binds the current {@link LifecycleManager} with given object of given class. * * @param clazz the class to be bound * @param object the object to be bound * @throws ObjectAlreadyAssociatedException when there is already object bound with {@link LifecycleManager} for given * class. */
Binds the current <code>LifecycleManager</code> with given object of given class
bindTo
{ "repo_name": "CSchulz/arquillian-extension-warp", "path": "spi/src/main/java/org/jboss/arquillian/warp/spi/LifecycleManager.java", "license": "apache-2.0", "size": 4973 }
[ "org.jboss.arquillian.warp.spi.exception.ObjectAlreadyAssociatedException" ]
import org.jboss.arquillian.warp.spi.exception.ObjectAlreadyAssociatedException;
import org.jboss.arquillian.warp.spi.exception.*;
[ "org.jboss.arquillian" ]
org.jboss.arquillian;
1,036,245
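A sketch of binding an object to the current LifecycleManager; RequestPayload is a hypothetical type introduced only for this example.

import org.jboss.arquillian.warp.spi.LifecycleManager;
import org.jboss.arquillian.warp.spi.exception.ObjectAlreadyAssociatedException;

public class BindingSketch {
    // Associates a request-scoped payload with the current lifecycle manager exactly once.
    public static void associate(LifecycleManager manager, RequestPayload payload) {
        try {
            manager.bindTo(RequestPayload.class, payload);
        } catch (ObjectAlreadyAssociatedException e) {
            System.err.println("A RequestPayload is already bound: " + e.getMessage());
        }
    }

    // Hypothetical payload type used only for this sketch.
    public static class RequestPayload { }
}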
private void remapComposite(FontFileReader in, Map glyphs, int glyphOffset, Integer glyphIdx) throws IOException { int offset = glyphOffset + (int) mtxTab[glyphIdx.intValue()].getOffset() + 10; Integer compositeIdx = null; int flags = 0; boolean moreComposites = true; while (moreComposites) { flags = in.readTTFUShort(offset); compositeIdx = Integer.valueOf(in.readTTFUShort(offset + 2)); Integer newIdx = (Integer) glyphs.get(compositeIdx); if (newIdx == null) { // This errormessage would look much better // if the fontname was printed to //log.error("An embedded font " // + "contains bad glyph data. " // + "Characters might not display " // + "correctly."); moreComposites = false; continue; } in.writeTTFUShort(offset + 2, newIdx.intValue()); offset += 4; if ((flags & 1) > 0) { // ARG_1_AND_ARG_2_ARE_WORDS offset += 4; } else { offset += 2; } if ((flags & 8) > 0) { offset += 2; // WE_HAVE_A_SCALE } else if ((flags & 64) > 0) { offset += 4; // WE_HAVE_AN_X_AND_Y_SCALE } else if ((flags & 128) > 0) { offset += 8; // WE_HAVE_A_TWO_BY_TWO } if ((flags & 32) > 0) { moreComposites = true; } else { moreComposites = false; } } }
void function(FontFileReader in, Map glyphs, int glyphOffset, Integer glyphIdx) throws IOException { int offset = glyphOffset + (int) mtxTab[glyphIdx.intValue()].getOffset() + 10; Integer compositeIdx = null; int flags = 0; boolean moreComposites = true; while (moreComposites) { flags = in.readTTFUShort(offset); compositeIdx = Integer.valueOf(in.readTTFUShort(offset + 2)); Integer newIdx = (Integer) glyphs.get(compositeIdx); if (newIdx == null) { moreComposites = false; continue; } in.writeTTFUShort(offset + 2, newIdx.intValue()); offset += 4; if ((flags & 1) > 0) { offset += 4; } else { offset += 2; } if ((flags & 8) > 0) { offset += 2; } else if ((flags & 64) > 0) { offset += 4; } else if ((flags & 128) > 0) { offset += 8; } if ((flags & 32) > 0) { moreComposites = true; } else { moreComposites = false; } } }
/** * Rewrite all compositepointers in glyphindex glyphIdx * <p/> * * @param in The input from which to remap * @param glyphs The glyphs to remap * @param glyphOffset The offset to start at * @param glyphIdx The index of the glyph * <p/> * * @throws IOException Indicates a failure to read from the font file. */
Rewrite all compositepointers in glyphindex glyphIdx
remapComposite
{ "repo_name": "emabrey/SleekSlick2D", "path": "slick-hiero/src/main/java/org/newdawn/slick/tools/hiero/truetype/TTFSubSetFile.java", "license": "bsd-3-clause", "size": 33159 }
[ "java.io.IOException", "java.util.Map" ]
import java.io.IOException; import java.util.Map;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
2,359,849
Expression getExpr();
Expression getExpr();
/** * Returns the value of the '<em><b>Expr</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the value of the '<em>Expr</em>' containment reference. * @see #setExpr(Expression) * @see de.uni_hildesheim.sse.vilBuildLanguage.VilBuildLanguagePackage#getFor_Expr() * @model containment="true" * @generated */
Returns the value of the 'Expr' containment reference.
getExpr
{ "repo_name": "SSEHUB/EASyProducer", "path": "Plugins/Instantiation/de.uni_hildesheim.sse.vil.buildlang/src-gen/de/uni_hildesheim/sse/vilBuildLanguage/For.java", "license": "apache-2.0", "size": 3753 }
[ "de.uni_hildesheim.sse.vil.expressions.expressionDsl.Expression" ]
import de.uni_hildesheim.sse.vil.expressions.expressionDsl.Expression;
import de.uni_hildesheim.sse.vil.expressions.*;
[ "de.uni_hildesheim.sse" ]
de.uni_hildesheim.sse;
1,682,977
public RiskModelHistogram getHistogram() { return new RiskModelHistogram(this.distribution); }
RiskModelHistogram function() { return new RiskModelHistogram(this.distribution); }
/** * Returns a set of classes as an input for the risk model */
Returns a set of classes as an input for the risk model
getHistogram
{ "repo_name": "fstahnke/arx", "path": "src/main/org/deidentifier/arx/framework/check/groupify/HashGroupifyDistribution.java", "license": "apache-2.0", "size": 12030 }
[ "org.deidentifier.arx.risk.RiskModelHistogram" ]
import org.deidentifier.arx.risk.RiskModelHistogram;
import org.deidentifier.arx.risk.*;
[ "org.deidentifier.arx" ]
org.deidentifier.arx;
133,746
private void createDirs(String toolbox, String[] packagenames) { String path = toolbox; for (int i = 0; i < packagenames.length; i++) { if (!path.endsWith(Env.separator())) { path += Env.separator(); } path += packagenames[i]; } File packDir = new File(path); if (!(packDir.exists() && packDir.isDirectory())) { packDir.mkdirs(); } }
void function(String toolbox, String[] packagenames) { String path = toolbox; for (int i = 0; i < packagenames.length; i++) { if (!path.endsWith(Env.separator())) { path += Env.separator(); } path += packagenames[i]; } File packDir = new File(path); if (!(packDir.exists() && packDir.isDirectory())) { packDir.mkdirs(); } }
/** * Create any missing directories in the toolbox that are needed for a real package structure. * * @param toolbox * @param packagenames */
Create any missing directories in the toolbox that are needed for a real package structure
createDirs
{ "repo_name": "CSCSI/Triana", "path": "triana-gui/src/main/java/org/trianacode/gui/hci/PasteHandler.java", "license": "apache-2.0", "size": 7552 }
[ "java.io.File", "org.trianacode.gui.util.Env" ]
import java.io.File; import org.trianacode.gui.util.Env;
import java.io.*; import org.trianacode.gui.util.*;
[ "java.io", "org.trianacode.gui" ]
java.io; org.trianacode.gui;
1,109,024
public void testSetDatapointsOfPercentValue() { System.out.println("setDatapoints"); List<GangliaMetric.TemporalMetric> listTemporalMetrics = new ArrayList<>(); GangliaMetric instance = new GangliaMetric(); instance.setDs_name("dsName"); instance.setCluster_name("c1"); instance.setHost_name("localhost"); instance.setMetric_name("cpu_wio"); listTemporalMetrics.add(new GangliaMetric.TemporalMetric("111.0", new Long(1362440880))); listTemporalMetrics.add(new GangliaMetric.TemporalMetric("11.0", new Long(1362440881))); listTemporalMetrics.add(new GangliaMetric.TemporalMetric("100.0", new Long(1362440882))); instance.setDatapointsFromList(listTemporalMetrics); assertTrue(instance.getDatapoints().length == 2); }
void function() { System.out.println(STR); List<GangliaMetric.TemporalMetric> listTemporalMetrics = new ArrayList<>(); GangliaMetric instance = new GangliaMetric(); instance.setDs_name(STR); instance.setCluster_name("c1"); instance.setHost_name(STR); instance.setMetric_name(STR); listTemporalMetrics.add(new GangliaMetric.TemporalMetric("111.0", new Long(1362440880))); listTemporalMetrics.add(new GangliaMetric.TemporalMetric("11.0", new Long(1362440881))); listTemporalMetrics.add(new GangliaMetric.TemporalMetric("100.0", new Long(1362440882))); instance.setDatapointsFromList(listTemporalMetrics); assertTrue(instance.getDatapoints().length == 2); }
/** * Test of setDatapoints method, of class GangliaMetric. */
Test of setDatapoints method, of class GangliaMetric
testSetDatapointsOfPercentValue
{ "repo_name": "sekikn/ambari", "path": "ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaMetricTest.java", "license": "apache-2.0", "size": 4857 }
[ "java.util.ArrayList", "java.util.List", "org.junit.Assert" ]
import java.util.ArrayList; import java.util.List; import org.junit.Assert;
import java.util.*; import org.junit.*;
[ "java.util", "org.junit" ]
java.util; org.junit;
1,925,676
EAttribute getBusinessTransactionType_Pattern();
EAttribute getBusinessTransactionType_Pattern();
/** * Returns the meta object for the attribute '{@link org.ebxml.business.process.BusinessTransactionType#getPattern <em>Pattern</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the attribute '<em>Pattern</em>'. * @see org.ebxml.business.process.BusinessTransactionType#getPattern() * @see #getBusinessTransactionType() * @generated */
Returns the meta object for the attribute '<code>org.ebxml.business.process.BusinessTransactionType#getPattern Pattern</code>'.
getBusinessTransactionType_Pattern
{ "repo_name": "GRA-UML/tool", "path": "plugins/org.ijis.gra.ebxml.ebBPSS/src/main/java/org/ebxml/business/process/ProcessPackage.java", "license": "epl-1.0", "size": 274455 }
[ "org.eclipse.emf.ecore.EAttribute" ]
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,355,730
@Override public void actionPerformed(ActionEvent e) { // Start playing the Farkle sound when the JDialog is first // diaplayed if (step == 4) { playFarkleSound(); step--; } if (step == 0) { fadeTimer.stop(); FarkleMessage.this.setVisible(false); FarkleMessage.this.getContentPane().add(farkleLeftMsg); step = 4; } // If the opacity != 0, keep fading if (opacity.compareTo(BigDecimal.valueOf(0)) > 0) { fade(); // If the opacity <= 0, change the // JLabel and decrement step } else if (opacity.compareTo(BigDecimal.valueOf(0)) <= 0) { switch (step) { case 3: remove(farkleLeftMsg); break; case 2: remove(farkleRightMsg); break; case 1: remove(farkleCenterMsg); break; } step--; } }
void function(ActionEvent e) { if (step == 4) { playFarkleSound(); step--; } if (step == 0) { fadeTimer.stop(); FarkleMessage.this.setVisible(false); FarkleMessage.this.getContentPane().add(farkleLeftMsg); step = 4; } if (opacity.compareTo(BigDecimal.valueOf(0)) > 0) { fade(); } else if (opacity.compareTo(BigDecimal.valueOf(0)) <= 0) { switch (step) { case 3: remove(farkleLeftMsg); break; case 2: remove(farkleRightMsg); break; case 1: remove(farkleCenterMsg); break; } step--; } }
/** * Animate the message in the actionPerformed method */
Animate the message in the actionPerformed method
actionPerformed
{ "repo_name": "JacobMDavidson/farkle-csc478", "path": "farkle-csc478/src/main/java/com/lotsofun/farkle/FarkleMessage.java", "license": "mit", "size": 6851 }
[ "java.awt.event.ActionEvent", "java.math.BigDecimal" ]
import java.awt.event.ActionEvent; import java.math.BigDecimal;
import java.awt.event.*; import java.math.*;
[ "java.awt", "java.math" ]
java.awt; java.math;
1,474,512
public void setReplaceStringsConfiguration( String placeholderToken, List<String> functionDescriptors) { this.replaceStringsPlaceholderToken = placeholderToken; this.replaceStringsFunctionDescriptions = Lists.newArrayList(functionDescriptors); }
void function( String placeholderToken, List<String> functionDescriptors) { this.replaceStringsPlaceholderToken = placeholderToken; this.replaceStringsFunctionDescriptions = Lists.newArrayList(functionDescriptors); }
/** * Sets the functions whose debug strings to replace. */
Sets the functions whose debug strings to replace
setReplaceStringsConfiguration
{ "repo_name": "johan/closure-compiler", "path": "src/com/google/javascript/jscomp/CompilerOptions.java", "license": "apache-2.0", "size": 29996 }
[ "com.google.common.collect.Lists", "java.util.List" ]
import com.google.common.collect.Lists; import java.util.List;
import com.google.common.collect.*; import java.util.*;
[ "com.google.common", "java.util" ]
com.google.common; java.util;
2,343,085
public static boolean isValidValue ( final CharSequence value ) { Preconditions.checkNotNull(value, "value required"); return VALUE_PATTERN.matcher(value).matches(); }
static boolean function ( final CharSequence value ) { Preconditions.checkNotNull(value, STR); return VALUE_PATTERN.matcher(value).matches(); }
/** * Returns whether or not {@code value} is a valid parameter value. * * @param value the value to test. Must be non {@code null} and must not be * quoted. * @return whether or not {@code value} is a valid parameter value */
Returns whether or not value is a valid parameter value
isValidValue
{ "repo_name": "calebrichardson/spiff", "path": "src/main/java/com/outerspacecat/icalendar/ParameterValue.java", "license": "apache-2.0", "size": 2821 }
[ "com.google.common.base.Preconditions" ]
import com.google.common.base.Preconditions;
import com.google.common.base.*;
[ "com.google.common" ]
com.google.common;
316,139
public static boolean is3D(PSFOrBuilder psf) { if (psf != null) { return psf.getPsfType() == PSFType.ASTIGMATIC_GAUSSIAN_2D; } return false; }
static boolean function(PSFOrBuilder psf) { if (psf != null) { return psf.getPsfType() == PSFType.ASTIGMATIC_GAUSSIAN_2D; } return false; }
/** * Checks if is a 3D. * * @param psf the psf * @return true, if is 3D */
Checks if is a 3D
is3D
{ "repo_name": "aherbert/GDSC-SMLM", "path": "src/main/java/uk/ac/sussex/gdsc/smlm/data/config/PsfHelper.java", "license": "gpl-3.0", "size": 11816 }
[ "uk.ac.sussex.gdsc.smlm.data.config.PSFProtos" ]
import uk.ac.sussex.gdsc.smlm.data.config.PSFProtos;
import uk.ac.sussex.gdsc.smlm.data.config.*;
[ "uk.ac.sussex" ]
uk.ac.sussex;
1,759,904
if (!ANALYTICS_ENABLED) { return sEmptyAnalyticsUtils; } if (sInstance == null) { if (context == null) { return sEmptyAnalyticsUtils; } sInstance = new AnalyticsUtils(context); } return sInstance; } private AnalyticsUtils(Context context) { if (context == null) { // This should only occur for the empty Analytics utils object. return; } mApplicationContext = context.getApplicationContext(); mTracker = GoogleAnalyticsTracker.getInstance(); // Unfortunately this needs to be synchronous. mTracker.start(UACODE, 300, mApplicationContext); Log.d(TAG, "Initializing Analytics"); // Since visitor CV's should only be declared the first time an app // runs, check if // it's run before. Add as necessary. SharedPreferences prefs = PreferenceManager .getDefaultSharedPreferences(mApplicationContext); final boolean firstRun = prefs.getBoolean(FIRST_RUN_KEY, true); if (firstRun) { Log.d(TAG, "Analytics firstRun"); String apiLevel = Integer.toString(Build.VERSION.SDK_INT); String model = Build.MODEL; mTracker.setCustomVar(1, "apiLevel", apiLevel, VISITOR_SCOPE); mTracker.setCustomVar(2, "model", model, VISITOR_SCOPE); // Close out so we never run this block again, unless app is removed // & = // reinstalled. prefs.edit().putBoolean(FIRST_RUN_KEY, false).commit(); } }
if (!ANALYTICS_ENABLED) { return sEmptyAnalyticsUtils; } if (sInstance == null) { if (context == null) { return sEmptyAnalyticsUtils; } sInstance = new AnalyticsUtils(context); } return sInstance; } private AnalyticsUtils(Context context) { if (context == null) { return; } mApplicationContext = context.getApplicationContext(); mTracker = GoogleAnalyticsTracker.getInstance(); mTracker.start(UACODE, 300, mApplicationContext); Log.d(TAG, STR); SharedPreferences prefs = PreferenceManager .getDefaultSharedPreferences(mApplicationContext); final boolean firstRun = prefs.getBoolean(FIRST_RUN_KEY, true); if (firstRun) { Log.d(TAG, STR); String apiLevel = Integer.toString(Build.VERSION.SDK_INT); String model = Build.MODEL; mTracker.setCustomVar(1, STR, apiLevel, VISITOR_SCOPE); mTracker.setCustomVar(2, "model", model, VISITOR_SCOPE); prefs.edit().putBoolean(FIRST_RUN_KEY, false).commit(); } }
/** * Returns the global {@link AnalyticsUtils} singleton object, creating one * if necessary. */
Returns the global <code>AnalyticsUtils</code> singleton object, creating one if necessary
getInstance
{ "repo_name": "devoxx/mobile-client", "path": "devoxx-android-client/src/net/peterkuterna/android/apps/devoxxsched/util/AnalyticsUtils.java", "license": "apache-2.0", "size": 4819 }
[ "android.content.Context", "android.content.SharedPreferences", "android.os.Build", "android.preference.PreferenceManager", "android.util.Log", "com.google.android.apps.analytics.GoogleAnalyticsTracker" ]
import android.content.Context; import android.content.SharedPreferences; import android.os.Build; import android.preference.PreferenceManager; import android.util.Log; import com.google.android.apps.analytics.GoogleAnalyticsTracker;
import android.content.*; import android.os.*; import android.preference.*; import android.util.*; import com.google.android.apps.analytics.*;
[ "android.content", "android.os", "android.preference", "android.util", "com.google.android" ]
android.content; android.os; android.preference; android.util; com.google.android;
2,377,341
@ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Flux<ByteBuffer>>> createDnsServiceWithResponseAsync( String resourceGroupName, String privateCloudName, String dnsServiceId, WorkloadNetworkDnsServiceInner workloadNetworkDnsService) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (privateCloudName == null) { return Mono .error(new IllegalArgumentException("Parameter privateCloudName is required and cannot be null.")); } if (dnsServiceId == null) { return Mono.error(new IllegalArgumentException("Parameter dnsServiceId is required and cannot be null.")); } if (workloadNetworkDnsService == null) { return Mono .error( new IllegalArgumentException( "Parameter workloadNetworkDnsService is required and cannot be null.")); } else { workloadNetworkDnsService.validate(); } final String accept = "application/json"; return FluxUtil .withContext( context -> service .createDnsService( this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, this.client.getApiVersion(), privateCloudName, dnsServiceId, workloadNetworkDnsService, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); }
@ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> function( String resourceGroupName, String privateCloudName, String dnsServiceId, WorkloadNetworkDnsServiceInner workloadNetworkDnsService) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( STR)); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (privateCloudName == null) { return Mono .error(new IllegalArgumentException(STR)); } if (dnsServiceId == null) { return Mono.error(new IllegalArgumentException(STR)); } if (workloadNetworkDnsService == null) { return Mono .error( new IllegalArgumentException( STR)); } else { workloadNetworkDnsService.validate(); } final String accept = STR; return FluxUtil .withContext( context -> service .createDnsService( this.client.getEndpoint(), this.client.getSubscriptionId(), resourceGroupName, this.client.getApiVersion(), privateCloudName, dnsServiceId, workloadNetworkDnsService, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); }
/** * Create a DNS service by id in a private cloud workload network. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param privateCloudName Name of the private cloud. * @param dnsServiceId NSX DNS Service identifier. Generally the same as the DNS Service's display name. * @param workloadNetworkDnsService NSX DNS Service. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return nSX DNS Service. */
Create a DNS service by id in a private cloud workload network
createDnsServiceWithResponseAsync
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/avs/azure-resourcemanager-avs/src/main/java/com/azure/resourcemanager/avs/implementation/WorkloadNetworksClientImpl.java", "license": "mit", "size": 538828 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.Response", "com.azure.core.util.FluxUtil", "com.azure.resourcemanager.avs.fluent.models.WorkloadNetworkDnsServiceInner", "java.nio.ByteBuffer" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.Response; import com.azure.core.util.FluxUtil; import com.azure.resourcemanager.avs.fluent.models.WorkloadNetworkDnsServiceInner; import java.nio.ByteBuffer;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.avs.fluent.models.*; import java.nio.*;
[ "com.azure.core", "com.azure.resourcemanager", "java.nio" ]
com.azure.core; com.azure.resourcemanager; java.nio;
1,416,513
public StringGenerator create(String name, String param, StringSpecification stringSpec, long seed) throws UnknownFuzzingHeuristicException { if (name == null) { throw new UnknownFuzzingHeuristicException(name); } String canonicalName = name.trim().toUpperCase(); if (canonicalName.equals("AllBadStrings".toUpperCase())) { return createAllBadStrings(stringSpec, seed); } else if (canonicalName.equals("AllXSS".toUpperCase())) { return createAllXSSGenerator(stringSpec, seed, param); } else if (canonicalName.equals("BadDate".toUpperCase())) { return createBadDateGenerator(stringSpec, seed); } else if (canonicalName.equals("BadFilenames".toUpperCase())) { return createBadFilenames(stringSpec, seed); } else if (canonicalName.equals("BadIpAddresses".toUpperCase())) { return createBadIpAddresses(stringSpec, seed); } else if (canonicalName.equals("BadHostnames".toUpperCase())) { return createBadHostnames(stringSpec, seed); } else if (canonicalName.equals("BadLongStrings".toUpperCase())) { return createBadLongStrings(stringSpec, seed); } else if (canonicalName.equals("BadLongUnicodeStrings".toUpperCase())) { return createBadLongUnicodeStrings(stringSpec, seed); } else if (canonicalName.equals("BadNumbersAsString".toUpperCase())) { return createBadNumbersAsStringGenerator(stringSpec, seed); } else if (canonicalName.equals("BadPaths".toUpperCase())) { return createBadPaths(stringSpec, seed); } else if (canonicalName.equals("BadStrings".toUpperCase())) { return createBadStringsGenerator(stringSpec, seed); } else if (canonicalName.equals("BadTime".toUpperCase())) { return createBadTimeGenerator(stringSpec, seed); } else if (canonicalName.equals("BadUnicodeUtf8Strings".toUpperCase())) { return createBadUnicodeUtf8Strings(stringSpec, seed); } else if (canonicalName.equals("CommandInjections".toUpperCase())) { return createCommandInjections(stringSpec, seed); } else if (canonicalName.equals("Delimiters".toUpperCase())) { return createDelimitersGenerator(stringSpec, seed); } if (canonicalName.equals("FormatStrings".toUpperCase())) { return createFormatStrings(stringSpec, seed); } else if (canonicalName.equals("ForeignDigits".toUpperCase())) { return createForeignDigits(stringSpec, seed); } else if (canonicalName.equals("HTMLFieldInput".toUpperCase())) { return createHTMLFieldInputGenerator(stringSpec, seed, param); } else if (canonicalName.equals("LongStrings".toUpperCase())) { return createLongStrings(stringSpec, seed); } else if (canonicalName.equals("Popular4DigitPins".toUpperCase())) { return createSmallGenerator(stringSpec, seed); } else if (canonicalName.equals("SmallGenerator".toUpperCase())) { return createSmallGenerator(stringSpec, seed); } else if (canonicalName.equals("SQLInjections".toUpperCase())) { return createSqlInjections(stringSpec, seed); } else if (canonicalName.equals("SQLTimeBasedInjections".toUpperCase())) { return createSqlTimeBasedInjections(stringSpec, seed); } else if (canonicalName.equals("UnicodeBomStrings".toUpperCase())) { return createUnicodeBomStringsGenerator(stringSpec, seed); } else if (canonicalName.equals("UnicodeNumerals".toUpperCase())) { return createUnicodeNumeralsGenerator(stringSpec, seed); } else if (canonicalName.equals("XMLInjections".toUpperCase())) { return createXmlInjections(stringSpec, seed); } else if (canonicalName.equals("XSSBasicInput".toUpperCase())) { return createXSSBasicInput(stringSpec, seed, param); } else if (canonicalName.equals("XSSMultipleLinesInput".toUpperCase())) { return createXSSMultipleLinesInput(stringSpec, seed); } else if (canonicalName.equals("XSSOpenHTMLTagVariance".toUpperCase())) { return createXSSOpenHTMLTagVariance(stringSpec, seed); } else { throw new UnknownFuzzingHeuristicException(name); } }
StringGenerator function(String name, String param, StringSpecification stringSpec, long seed) throws UnknownFuzzingHeuristicException { if (name == null) { throw new UnknownFuzzingHeuristicException(name); } String canonicalName = name.trim().toUpperCase(); if (canonicalName.equals(STR.toUpperCase())) { return createAllBadStrings(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createAllXSSGenerator(stringSpec, seed, param); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadDateGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadFilenames(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadIpAddresses(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadHostnames(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadLongStrings(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadLongUnicodeStrings(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadNumbersAsStringGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadPaths(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadStringsGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadTimeGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createBadUnicodeUtf8Strings(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createCommandInjections(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createDelimitersGenerator(stringSpec, seed); } if (canonicalName.equals(STR.toUpperCase())) { return createFormatStrings(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createForeignDigits(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createHTMLFieldInputGenerator(stringSpec, seed, param); } else if (canonicalName.equals(STR.toUpperCase())) { return createLongStrings(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createSmallGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createSmallGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createSqlInjections(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createSqlTimeBasedInjections(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createUnicodeBomStringsGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createUnicodeNumeralsGenerator(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createXmlInjections(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createXSSBasicInput(stringSpec, seed, param); } else if (canonicalName.equals(STR.toUpperCase())) { return createXSSMultipleLinesInput(stringSpec, seed); } else if (canonicalName.equals(STR.toUpperCase())) { return createXSSOpenHTMLTagVariance(stringSpec, seed); } else { throw new UnknownFuzzingHeuristicException(name); } }
/** * Creates a string generator identified by its name. * * @param name * The name of the string generator that is not the class name * but the name used in a request (see Javadoc in * {@link StringGeneratorFactory} or * {@link ComputableFuzzingHeuristic#getName()}) or documentation for * a list of string generators and its names). * @param param * A parameter for the requested string generator. May be * {@code null} if the requested generator does not have a parameter * or a default value shall be used. * @param stringSpec * The string specification that describes the type the generator * shall create values for. * @param seed * The seed to be used for random-based fuzzing heuristics. * @return the requested instance of string generator. * @throws UnknownFuzzingHeuristicException * if no generator with {@code name} is known. */
Creates a string generator identified by its name
create
{ "repo_name": "fraunhoferfokus/Fuzzino", "path": "src/main/java/de/fraunhofer/fokus/fuzzing/fuzzino/heuristics/generators/StringGeneratorFactory.java", "license": "apache-2.0", "size": 27321 }
[ "de.fraunhofer.fokus.fuzzing.fuzzino.exceptions.UnknownFuzzingHeuristicException", "de.fraunhofer.fokus.fuzzing.fuzzino.request.StringSpecification" ]
import de.fraunhofer.fokus.fuzzing.fuzzino.exceptions.UnknownFuzzingHeuristicException; import de.fraunhofer.fokus.fuzzing.fuzzino.request.StringSpecification;
import de.fraunhofer.fokus.fuzzing.fuzzino.exceptions.*; import de.fraunhofer.fokus.fuzzing.fuzzino.request.*;
[ "de.fraunhofer.fokus" ]
de.fraunhofer.fokus;
756,163
public static keplerCisType fromPerUnaligned(byte[] encodedBytes) { keplerCisType result = new keplerCisType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; }
static keplerCisType function(byte[] encodedBytes) { keplerCisType result = new keplerCisType(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; }
/** * Creates a new keplerCisType from encoded stream. */
Creates a new keplerCisType from encoded stream
fromPerUnaligned
{ "repo_name": "google/supl-client", "path": "src/main/java/com/google/location/suplclient/asn1/supl2/rrlp_components/ReferenceNavModel.java", "license": "apache-2.0", "size": 64027 }
[ "com.google.location.suplclient.asn1.base.BitStreamReader" ]
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.*;
[ "com.google.location" ]
com.google.location;
1,903,955
public final void printLatin1(String string) throws IOException { if (string == null) string = "null"; int length = string.length(); int offset = 0; char []chars = _chars; if (chars == null) { _chars = new char[_charsLength]; chars = _chars; } while (length > 0) { int sublen = length < _charsLength ? length : _charsLength; string.getChars(offset, offset + sublen, chars, 0); printLatin1(chars, 0, sublen); length -= sublen; offset += sublen; } }
final void function(String string) throws IOException { if (string == null) string = "null"; int length = string.length(); int offset = 0; char []chars = _chars; if (chars == null) { _chars = new char[_charsLength]; chars = _chars; } while (length > 0) { int sublen = length < _charsLength ? length : _charsLength; string.getChars(offset, offset + sublen, chars, 0); printLatin1(chars, 0, sublen); length -= sublen; offset += sublen; } }
/** * Prints a string. */
Prints a string
printLatin1
{ "repo_name": "dwango/quercus", "path": "src/main/java/com/caucho/vfs/WriteStream.java", "license": "gpl-2.0", "size": 30795 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,258,150
public BigDecimal getBigDecimal (int columnIndex) throws SQLException { validateResultSet(); return resultSet_.getBigDecimal(columnIndex); }
BigDecimal function (int columnIndex) throws SQLException { validateResultSet(); return resultSet_.getBigDecimal(columnIndex); }
/** * Returns the value of a column as a BigDecimal object. This * can be used to get values from columns with SQL types * SMALLINT, INTEGER, BIGINT, REAL, FLOAT, DOUBLE, DECIMAL, * NUMERIC, CHAR, and VARCHAR. * * @param columnIndex The column index (1-based). * @return The column value or null if the value is SQL NULL. * * @exception SQLException If the result set is not open, * the cursor is not positioned on a row, * the column index is not valid, * or the requested conversion is not valid. **/
Returns the value of a column as a BigDecimal object. This can be used to get values from columns with SQL types SMALLINT, INTEGER, BIGINT, REAL, FLOAT, DOUBLE, DECIMAL, NUMERIC, CHAR, and VARCHAR
getBigDecimal
{ "repo_name": "piguangming/jt400", "path": "cvsroot/src/com/ibm/as400/access/AS400JDBCRowSet.java", "license": "epl-1.0", "size": 311708 }
[ "java.math.BigDecimal", "java.sql.SQLException" ]
import java.math.BigDecimal; import java.sql.SQLException;
import java.math.*; import java.sql.*;
[ "java.math", "java.sql" ]
java.math; java.sql;
2,810,332
public void mecanumDrive_Cartesian(GenericHID stick) { mecanumDrive_Cartesian(stick.getX(), stick.getY()); }
void function(GenericHID stick) { mecanumDrive_Cartesian(stick.getX(), stick.getY()); }
/** * Drive based on the specified joystick using the x and y axes. * * @param stick The joystick to use */
Drive based on the specified joystick using the x and y axes
mecanumDrive_Cartesian
{ "repo_name": "RobotsByTheC/CMonster2014", "path": "src/org/usfirst/frc2084/CMonster2014/drive/MecanumDriveAlgorithm.java", "license": "bsd-3-clause", "size": 11671 }
[ "edu.wpi.first.wpilibj.GenericHID" ]
import edu.wpi.first.wpilibj.GenericHID;
import edu.wpi.first.wpilibj.*;
[ "edu.wpi.first" ]
edu.wpi.first;
67,764
public void testState() throws Exception { Ignite ignite = startGrids(1); assertFalse(ignite.cluster().active()); assertEquals(EXIT_CODE_OK, execute("--state")); ignite.cluster().active(true); assertEquals(EXIT_CODE_OK, execute("--state")); }
void function() throws Exception { Ignite ignite = startGrids(1); assertFalse(ignite.cluster().active()); assertEquals(EXIT_CODE_OK, execute(STR)); ignite.cluster().active(true); assertEquals(EXIT_CODE_OK, execute(STR)); }
/** * Test cluster active state works via control.sh * * @throws Exception If failed. */
Test cluster active state works via control.sh
testState
{ "repo_name": "amirakhmedov/ignite", "path": "modules/core/src/test/java/org/apache/ignite/util/GridCommandHandlerTest.java", "license": "apache-2.0", "size": 48646 }
[ "org.apache.ignite.Ignite" ]
import org.apache.ignite.Ignite;
import org.apache.ignite.*;
[ "org.apache.ignite" ]
org.apache.ignite;
850,376
@Override protected void removeChildVisual(EditPart childEditPart) { IFigure child = ((GraphicalEditPart)childEditPart).getFigure(); getContentPane(childEditPart).remove(child); }
void function(EditPart childEditPart) { IFigure child = ((GraphicalEditPart)childEditPart).getFigure(); getContentPane(childEditPart).remove(child); }
/** * Override removeChildVisual so that it removes the childEditPart from * the correct figure. FH/EH/CH live in a different figure than the * activity does. */
Override removeChildVisual so that it removes the childEditPart from the correct figure. FH/EH/CH live in a different figure than the activity does
removeChildVisual
{ "repo_name": "Drifftr/devstudio-tooling-bps", "path": "plugins/org.eclipse.bpel.ui/src/org/eclipse/bpel/ui/editparts/ScopeEditPart.java", "license": "apache-2.0", "size": 27381 }
[ "org.eclipse.draw2d.IFigure", "org.eclipse.gef.EditPart", "org.eclipse.gef.GraphicalEditPart" ]
import org.eclipse.draw2d.IFigure; import org.eclipse.gef.EditPart; import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.draw2d.*; import org.eclipse.gef.*;
[ "org.eclipse.draw2d", "org.eclipse.gef" ]
org.eclipse.draw2d; org.eclipse.gef;
1,225,067
public void testStateTransitions() throws Exception { try { PersistenceManager pm = pmf.getPersistenceManager(); Transaction tx = null; Object id = null; try { Person x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); tx = pm.currentTransaction(); tx.setNontransactionalRead(false); tx.begin(); // 1. transient to persistent-new assertTransient(x); pm.makePersistent(x); assertPersistentNew(x); // 15. persistent-new to transient tx.rollback(); assertTransient(x); tx.begin(); pm.makePersistent(x); assertPersistentNew(x); // 16. persistent-new to persistent-new-deleted pm.deletePersistent(x); assertPersistentNewDeleted(x); // 18. persistent-new-deleted to transient tx.commit(); assertTransient(x); // 2. persistent-new to hollow x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); tx.begin(); pm.makePersistent(x); assertPersistentNew(x); id = pm.getObjectId(x); tx.commit(); assertHollow(x); if (!tx.getOptimistic()) { // ?. hollow tx.begin(); x = (Person) pm.getObjectById(id, true); assertHollowOrPersistentClean(x); tx.setNontransactionalRead(false); tx.commit(); assertHollow(x); boolean success = false; try { x.getFirstName(); } catch (JDOUserException ex) { success = true; } if (!success) { fail("Expected exception while trying to read a field in a Hollow pc with 'NonTransactionalRead=false'."); } } // 6. persistent-clean to hollow tx.begin(); x = (Person) pm.getObjectById(id, false); assertHollow(x); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } tx.commit(); assertHollow(x); // 3. hollow to persistent-clean tx.begin(); assertHollow(x); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } tx.commit(); // 11. hollow to persistent-dirty tx.begin(); assertHollow(x); x.setLastName(LASTNAME[1]); assertPersistentDirty(x); // 5. persistent-dirty to hollow via rollback tx.rollback(); assertHollow(x); tx.begin(); // 19. hollow to persistent-deleted assertHollow(x); pm.deletePersistent(x); assertPersistentDeleted(x); // 21. persistent-deleted to hollow tx.rollback(); assertHollow(x); tx.begin(); x.getFirstName(); // 4. persistent-clean to persistent-dirty if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } x.setLastName(LASTNAME[1]); assertPersistentDirty(x); // 19. persistent-dirty to persistent-deleted pm.deletePersistent(x); assertPersistentDeleted(x); // 21. persistent-deleted to hollow tx.rollback(); assertHollow(x); tx.begin(); // 5. persistent-dirty to hollow via commit x.setLastName(LASTNAME[1]); assertPersistentDirty(x); tx.commit(); assertHollow(x); tx.begin(); x.setLastName(LASTNAME[2]); assertPersistentDirty(x); // 19. persistent-dirty to persistent-deleted pm.deletePersistent(x); assertPersistentDeleted(x); // 20. persistent-deleted to transient tx.commit(); assertTransient(x); x = new Person(1, FIRSTNAME[1], LASTNAME[1], EMAIL[1]); tx.begin(); pm.makePersistent(x); pm.deletePersistent(x); assertPersistentNewDeleted(x); // 18. persistent-new-deleted to transient via commit tx.commit(); assertTransient(x); x = new Person(2, FIRSTNAME[2], LASTNAME[2], EMAIL[2]); tx.begin(); pm.makePersistent(x); pm.deletePersistent(x); assertPersistentNewDeleted(x); // 17. persistent-new-deleted to transient via rollback tx.rollback(); assertTransient(x); x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); tx.begin(); pm.makePersistent(x); id = pm.getObjectId(x); tx.commit(); assertHollow(x); tx.begin(); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } // 12. persistent-clean to persistent-nontransactional pm.makeNontransactional(x); assertPersistentNontransactional(x); tx.commit(); tx.begin(); // 13. persistent-nontransactional to persistent-clean pm.makeTransactional(x); assertPersistentClean(x); tx.commit(); tx.begin(); pm.makeNontransactional(x); assertPersistentNontransactional(x); // 14. persistent-nontransactional to persistent-dirty x.setLastName(LASTNAME[1]); assertPersistentDirty(x); tx.rollback(); tx.setNontransactionalRead(true); assertHollow(x); // 22. hollow to persistent-nontransactional x.getFirstName(); assertPersistentNontransactional(x); // ?. hollow exception tx.begin(); x = (Person) pm.getObjectById(id, false); assertHollow(x); tx.setNontransactionalRead(false); tx.commit(); assertHollow(x); boolean success = false; try { x.getFirstName(); } catch (JDOUserException ex) { success = true; } if (!success) { fail("Expected exception while trying to read a field in a Hollow pc with 'NonTransactionalRead=false'."); } tx.setNontransactionalRead(false); // ?. hollow exception tx.begin(); x = new Person(3, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); pm.makePersistent(x); tx.commit(); success = false; try { x.getFirstName(); } catch (JDOUserException ex) { success = true; } if (!success) { fail("Expected exception while trying to read a field in a Hollow pc with 'NonTransactionalRead=false'."); } // ?. transient to transient-clean Person y = new Person(1, FIRSTNAME[1], LASTNAME[1], EMAIL[1]); tx.begin(); assertTransient(y); pm.makeTransactional(y); assertTransientClean(y); tx.commit(); assertTransientClean(y); // ?. transient-clean to transient pm.makeNontransactional(y); assertTransient(y); // ?. transient to transient-clean assertTransient(y); pm.makeTransactional(y); assertTransientClean(y); // ?. transient-clean to transient-dirty tx.begin(); assertTransientClean(y); y.setLastName(EMAIL[2]); assertTransientDirty(y); tx.commit(); assertTransientClean(y); // ?. transient-clean to transient-dirty to transient-clean tx.begin(); assertTransientClean(y); y.setLastName(EMAIL[2] + "a"); assertTransientDirty(y); tx.rollback(); assertTransientClean(y); tx.begin(); y.setLastName(EMAIL[2]); tx.rollback(); assertTransientClean(y); // ?. transient-clean to transient-dirty to transient-clean tx.begin(); assertTransientClean(y); y.setLastName(EMAIL[2] + "a"); assertTransientDirty(y); tx.commit(); assertTransientClean(y); // ?. transient to transient-dirty to transient-clean Person z = new Person(2, FIRSTNAME[2], LASTNAME[2], EMAIL[2]); tx.begin(); pm.makeTransactional(z); z.setLastName(EMAIL[0]); assertTransientDirty(z); tx.rollback(); assertTransientClean(z); // ?. persistent-new to detached x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); pm.setDetachAllOnCommit(true); tx.begin(); pm.makePersistent(x); tx.commit(); assertDetached(x); pm.setDetachAllOnCommit(false); // ?. pc non transactional to pc non transactional dirty tx.begin(); x = (Person) pm.getObjectById(id, false); assertPersistent(x); // Either HOLLOW, or P_CLEAN (if cached) x.getFirstName(); tx.setRetainValues(true); tx.commit(); tx.setNontransactionalWrite(true); x.setFirstName("xx"); assertPersistentNontransactional(x); // Would be Dirty without atomic nontx updates // ?. pc non transactional dirty to transient byte[] serialised = serialise(x); Object deserialised = deserialise(serialised); assertDetached(deserialised); } finally { if (tx.isActive()) { tx.rollback(); } pm.close(); } } finally { clean(Person.class); } } // ------------------------------------- Convenience methods -------------------------------------
void function() throws Exception { try { PersistenceManager pm = pmf.getPersistenceManager(); Transaction tx = null; Object id = null; try { Person x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); tx = pm.currentTransaction(); tx.setNontransactionalRead(false); tx.begin(); assertTransient(x); pm.makePersistent(x); assertPersistentNew(x); tx.rollback(); assertTransient(x); tx.begin(); pm.makePersistent(x); assertPersistentNew(x); pm.deletePersistent(x); assertPersistentNewDeleted(x); tx.commit(); assertTransient(x); x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); tx.begin(); pm.makePersistent(x); assertPersistentNew(x); id = pm.getObjectId(x); tx.commit(); assertHollow(x); if (!tx.getOptimistic()) { tx.begin(); x = (Person) pm.getObjectById(id, true); assertHollowOrPersistentClean(x); tx.setNontransactionalRead(false); tx.commit(); assertHollow(x); boolean success = false; try { x.getFirstName(); } catch (JDOUserException ex) { success = true; } if (!success) { fail(STR); } } tx.begin(); x = (Person) pm.getObjectById(id, false); assertHollow(x); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } tx.commit(); assertHollow(x); tx.begin(); assertHollow(x); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } tx.commit(); tx.begin(); assertHollow(x); x.setLastName(LASTNAME[1]); assertPersistentDirty(x); tx.rollback(); assertHollow(x); tx.begin(); assertHollow(x); pm.deletePersistent(x); assertPersistentDeleted(x); tx.rollback(); assertHollow(x); tx.begin(); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } x.setLastName(LASTNAME[1]); assertPersistentDirty(x); pm.deletePersistent(x); assertPersistentDeleted(x); tx.rollback(); assertHollow(x); tx.begin(); x.setLastName(LASTNAME[1]); assertPersistentDirty(x); tx.commit(); assertHollow(x); tx.begin(); x.setLastName(LASTNAME[2]); assertPersistentDirty(x); pm.deletePersistent(x); assertPersistentDeleted(x); tx.commit(); assertTransient(x); x = new Person(1, FIRSTNAME[1], LASTNAME[1], EMAIL[1]); tx.begin(); pm.makePersistent(x); pm.deletePersistent(x); assertPersistentNewDeleted(x); tx.commit(); assertTransient(x); x = new Person(2, FIRSTNAME[2], LASTNAME[2], EMAIL[2]); tx.begin(); pm.makePersistent(x); pm.deletePersistent(x); assertPersistentNewDeleted(x); tx.rollback(); assertTransient(x); x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); tx.begin(); pm.makePersistent(x); id = pm.getObjectId(x); tx.commit(); assertHollow(x); tx.begin(); x.getFirstName(); if (tx.getOptimistic()) { assertPersistentNontransactional(x); } else { assertPersistentClean(x); } pm.makeNontransactional(x); assertPersistentNontransactional(x); tx.commit(); tx.begin(); pm.makeTransactional(x); assertPersistentClean(x); tx.commit(); tx.begin(); pm.makeNontransactional(x); assertPersistentNontransactional(x); x.setLastName(LASTNAME[1]); assertPersistentDirty(x); tx.rollback(); tx.setNontransactionalRead(true); assertHollow(x); x.getFirstName(); assertPersistentNontransactional(x); tx.begin(); x = (Person) pm.getObjectById(id, false); assertHollow(x); tx.setNontransactionalRead(false); tx.commit(); assertHollow(x); boolean success = false; try { x.getFirstName(); } catch (JDOUserException ex) { success = true; } if (!success) { fail(STR); } tx.setNontransactionalRead(false); tx.begin(); x = new Person(3, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); pm.makePersistent(x); tx.commit(); success = false; try { x.getFirstName(); } catch (JDOUserException ex) { success = true; } if (!success) { fail(STR); } Person y = new Person(1, FIRSTNAME[1], LASTNAME[1], EMAIL[1]); tx.begin(); assertTransient(y); pm.makeTransactional(y); assertTransientClean(y); tx.commit(); assertTransientClean(y); pm.makeNontransactional(y); assertTransient(y); assertTransient(y); pm.makeTransactional(y); assertTransientClean(y); tx.begin(); assertTransientClean(y); y.setLastName(EMAIL[2]); assertTransientDirty(y); tx.commit(); assertTransientClean(y); tx.begin(); assertTransientClean(y); y.setLastName(EMAIL[2] + "a"); assertTransientDirty(y); tx.rollback(); assertTransientClean(y); tx.begin(); y.setLastName(EMAIL[2]); tx.rollback(); assertTransientClean(y); tx.begin(); assertTransientClean(y); y.setLastName(EMAIL[2] + "a"); assertTransientDirty(y); tx.commit(); assertTransientClean(y); Person z = new Person(2, FIRSTNAME[2], LASTNAME[2], EMAIL[2]); tx.begin(); pm.makeTransactional(z); z.setLastName(EMAIL[0]); assertTransientDirty(z); tx.rollback(); assertTransientClean(z); x = new Person(0, FIRSTNAME[0], LASTNAME[0], EMAIL[0]); pm.setDetachAllOnCommit(true); tx.begin(); pm.makePersistent(x); tx.commit(); assertDetached(x); pm.setDetachAllOnCommit(false); tx.begin(); x = (Person) pm.getObjectById(id, false); assertPersistent(x); x.getFirstName(); tx.setRetainValues(true); tx.commit(); tx.setNontransactionalWrite(true); x.setFirstName("xx"); assertPersistentNontransactional(x); byte[] serialised = serialise(x); Object deserialised = deserialise(serialised); assertDetached(deserialised); } finally { if (tx.isActive()) { tx.rollback(); } pm.close(); } } finally { clean(Person.class); } }
/** * Test for the majority of possible state transitions. * @throws Exception */
Test for the majority of possible state transitions
testStateTransitions
{ "repo_name": "datanucleus/tests", "path": "jdo/general/src/test/org/datanucleus/tests/StateTransitionsTest.java", "license": "apache-2.0", "size": 18399 }
[ "javax.jdo.JDOUserException", "javax.jdo.PersistenceManager", "javax.jdo.Transaction", "org.datanucleus.samples.models.company.Person" ]
import javax.jdo.JDOUserException; import javax.jdo.PersistenceManager; import javax.jdo.Transaction; import org.datanucleus.samples.models.company.Person;
import javax.jdo.*; import org.datanucleus.samples.models.company.*;
[ "javax.jdo", "org.datanucleus.samples" ]
javax.jdo; org.datanucleus.samples;
2,854,580
public OptionSpecBuilder availableIf( String dependent, String... otherDependents ) { List<String> dependents = validatedDependents( dependent, otherDependents ); for ( String each : dependents ) parser.availableIf( options(), each ); return this; }
OptionSpecBuilder function( String dependent, String... otherDependents ) { List<String> dependents = validatedDependents( dependent, otherDependents ); for ( String each : dependents ) parser.availableIf( options(), each ); return this; }
/** * <p>Informs an option parser that this builder's option is allowed if the given option is present on the command * line.</p> * * <p>For a given option, you <em>should not</em> mix this with {@link #availableUnless(String, String...) * availableUnless} to avoid conflicts.</p> * * @param dependent an option whose presence on a command line makes this builder's option allowed * @param otherDependents other options whose presence on a command line makes this builder's option allowed * @return self, so that the caller can add clauses to the fluent interface sentence * @throws OptionException if any of the dependent options haven't been configured in the parser yet */
Informs an option parser that this builder's option is allowed if the given option is present on the command line. For a given option, you should not mix this with <code>#availableUnless(String, String...) availableUnless</code> to avoid conflicts
availableIf
{ "repo_name": "pholser/jopt-simple", "path": "src/main/java/joptsimple/OptionSpecBuilder.java", "license": "mit", "size": 12301 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,272,700
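A minimal usage sketch for the availableIf clause shown in the record above. The option names ("verbose", "log-file") and the parsed arguments are illustrative assumptions, not taken from the source; the dependent option must be configured on the parser before availableIf references it.

    import joptsimple.OptionParser;
    import joptsimple.OptionSet;

    OptionParser parser = new OptionParser();
    parser.accepts("verbose");                                              // dependent option configured first
    parser.accepts("log-file").availableIf("verbose").withRequiredArg();    // only allowed when --verbose is present
    OptionSet options = parser.parse("--verbose", "--log-file", "out.log"); // parses without complaint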
public final void println() { try { getOut().println(); } catch (IOException e) { throw new QuercusModuleException(e); } }
final void function() { try { getOut().println(); } catch (IOException e) { throw new QuercusModuleException(e); } }
/** * Prints a string */
Prints a string
println
{ "repo_name": "dwango/quercus", "path": "src/main/java/com/caucho/quercus/env/Env.java", "license": "gpl-2.0", "size": 161703 }
[ "com.caucho.quercus.QuercusModuleException", "java.io.IOException" ]
import com.caucho.quercus.QuercusModuleException; import java.io.IOException;
import com.caucho.quercus.*; import java.io.*;
[ "com.caucho.quercus", "java.io" ]
com.caucho.quercus; java.io;
246,680
public ServiceFuture<EffectiveRouteListResultInner> getEffectiveRouteTableAsync(String resourceGroupName, String networkInterfaceName, final ServiceCallback<EffectiveRouteListResultInner> serviceCallback) { return ServiceFuture.fromResponse(getEffectiveRouteTableWithServiceResponseAsync(resourceGroupName, networkInterfaceName), serviceCallback); }
ServiceFuture<EffectiveRouteListResultInner> function(String resourceGroupName, String networkInterfaceName, final ServiceCallback<EffectiveRouteListResultInner> serviceCallback) { return ServiceFuture.fromResponse(getEffectiveRouteTableWithServiceResponseAsync(resourceGroupName, networkInterfaceName), serviceCallback); }
/** * Gets all route tables applied to a network interface. * * @param resourceGroupName The name of the resource group. * @param networkInterfaceName The name of the network interface. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Gets all route tables applied to a network interface
getEffectiveRouteTableAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2019_07_01/src/main/java/com/microsoft/azure/management/network/v2019_07_01/implementation/NetworkInterfacesInner.java", "license": "mit", "size": 192507 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
1,565,041
void setObject(int index, Object object, int targetSqlType, int scale) { init(); try { preparedStatement.setObject(index, object, targetSqlType, scale); } catch (SQLException sex) { throwSetParamError(index, sex); } }
void setObject(int index, Object object, int targetSqlType, int scale) { init(); try { preparedStatement.setObject(index, object, targetSqlType, scale); } catch (SQLException sex) { throwSetParamError(index, sex); } }
/** * Sets the value of the designated parameter with the given object. * This method is like the method <code>setObject</code> * above, except that it assumes a scale of zero. */
Sets the value of the designated parameter with the given object. This method is like the method <code>setObject</code> above, except that it assumes a scale of zero
setObject
{ "repo_name": "wjw465150/jodd", "path": "jodd-db/src/main/java/jodd/db/DbQuery.java", "license": "bsd-2-clause", "size": 29309 }
[ "java.sql.SQLException" ]
import java.sql.SQLException;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,600,426
public Iterable<Entry<byte[], Integer>> iterator();
Iterable<Entry<byte[], Integer>> function();
/** * Provides an iterator over every aptamer in the pool * together with its unique id. * Note that the order of iteration is implementation dependent */
Provides an iterator over every aptamer in the pool together with its unique id. Note that the order of iteration is implementation dependent
iterator
{ "repo_name": "drivenbyentropy/aptasuite", "path": "src/main/java/lib/aptamer/datastructures/AptamerPool.java", "license": "gpl-3.0", "size": 4552 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,395,463
private boolean updateUiForKey(String configKey, int color) { if (configKey.equals(DigitalWatchFaceUtil.KEY_BACKGROUND_COLOR)) { setInteractiveBackgroundColor(color); } else if (configKey.equals(DigitalWatchFaceUtil.KEY_HOURS_COLOR)) { setInteractiveHourDigitsColor(color); } else if (configKey.equals(DigitalWatchFaceUtil.KEY_MINUTES_COLOR)) { setInteractiveMinuteDigitsColor(color); } else if (configKey.equals(DigitalWatchFaceUtil.KEY_SECONDS_COLOR)) { setInteractiveSecondDigitsColor(color); } else { Log.w(TAG, "Ignoring unknown config key: " + configKey); return false; } return true; }
boolean function(String configKey, int color) { if (configKey.equals(DigitalWatchFaceUtil.KEY_BACKGROUND_COLOR)) { setInteractiveBackgroundColor(color); } else if (configKey.equals(DigitalWatchFaceUtil.KEY_HOURS_COLOR)) { setInteractiveHourDigitsColor(color); } else if (configKey.equals(DigitalWatchFaceUtil.KEY_MINUTES_COLOR)) { setInteractiveMinuteDigitsColor(color); } else if (configKey.equals(DigitalWatchFaceUtil.KEY_SECONDS_COLOR)) { setInteractiveSecondDigitsColor(color); } else { Log.w(TAG, STR + configKey); return false; } return true; }
/** * Updates the color of a UI item according to the given {@code configKey}. Does nothing if * {@code configKey} isn't recognized. * * @return whether UI has been updated */
Updates the color of a UI item according to the given configKey. Does nothing if configKey isn't recognized
updateUiForKey
{ "repo_name": "mattmcgiv/sunshine-android", "path": "wear/src/main/java/com/antym/sunshinewear/SunshineWatchFaceService.java", "license": "apache-2.0", "size": 28867 }
[ "android.util.Log" ]
import android.util.Log;
import android.util.*;
[ "android.util" ]
android.util;
1,976,716
List<Scope> findByResourceServer(Map<String, String[]> attributes, String resourceServerId, int firstResult, int maxResult);
List<Scope> findByResourceServer(Map<String, String[]> attributes, String resourceServerId, int firstResult, int maxResult);
/** * Returns a list of {@link Scope} associated with a {@link ResourceServer} with the given <code>resourceServerId</code>. * * @param attributes a map holding the attributes that will be used as a filter * @param resourceServerId the identifier of a resource server * * @return a list of scopes that belong to the given resource server */
Returns a list of <code>Scope</code> associated with a <code>ResourceServer</code> with the given <code>resourceServerId</code>
findByResourceServer
{ "repo_name": "iperdomo/keycloak", "path": "server-spi/src/main/java/org/keycloak/authorization/store/ScopeStore.java", "license": "apache-2.0", "size": 3051 }
[ "java.util.List", "java.util.Map", "org.keycloak.authorization.model.Scope" ]
import java.util.List; import java.util.Map; import org.keycloak.authorization.model.Scope;
import java.util.*; import org.keycloak.authorization.model.*;
[ "java.util", "org.keycloak.authorization" ]
java.util; org.keycloak.authorization;
1,144,988
public static Builder newBuilder() { return new Builder(); } public static final class Builder { private static final long FIRESTORE_RPC_BYTES_MAX = (long) (9.5 * 1024 * 1024); private static final int FIRESTORE_SINGLE_REQUEST_UPDATE_DOCUMENTS_MAX = 500; private int maxAttempts; private Duration initialBackoff; private Duration samplePeriod; private Duration samplePeriodBucketSize; private double overloadRatio; private Duration throttleDuration; private int batchInitialCount; private int batchMaxCount; private long batchMaxBytes; private Duration batchTargetLatency; private int hintMaxNumWorkers; private boolean shouldReportDiagnosticMetrics; private Builder() { maxAttempts = 5; initialBackoff = Duration.standardSeconds(5); samplePeriod = Duration.standardMinutes(2); samplePeriodBucketSize = Duration.standardSeconds(10); overloadRatio = 1.05; throttleDuration = Duration.standardSeconds(5); batchInitialCount = 20; batchMaxCount = FIRESTORE_SINGLE_REQUEST_UPDATE_DOCUMENTS_MAX; batchMaxBytes = FIRESTORE_RPC_BYTES_MAX; batchTargetLatency = Duration.standardSeconds(5); hintMaxNumWorkers = 500; shouldReportDiagnosticMetrics = false; }
static Builder function() { return new Builder(); } public static final class Builder { private static final long FIRESTORE_RPC_BYTES_MAX = (long) (9.5 * 1024 * 1024); private static final int FIRESTORE_SINGLE_REQUEST_UPDATE_DOCUMENTS_MAX = 500; private int maxAttempts; private Duration initialBackoff; private Duration samplePeriod; private Duration samplePeriodBucketSize; private double overloadRatio; private Duration throttleDuration; private int batchInitialCount; private int batchMaxCount; private long batchMaxBytes; private Duration batchTargetLatency; private int hintMaxNumWorkers; private boolean shouldReportDiagnosticMetrics; private Builder() { maxAttempts = 5; initialBackoff = Duration.standardSeconds(5); samplePeriod = Duration.standardMinutes(2); samplePeriodBucketSize = Duration.standardSeconds(10); overloadRatio = 1.05; throttleDuration = Duration.standardSeconds(5); batchInitialCount = 20; batchMaxCount = FIRESTORE_SINGLE_REQUEST_UPDATE_DOCUMENTS_MAX; batchMaxBytes = FIRESTORE_RPC_BYTES_MAX; batchTargetLatency = Duration.standardSeconds(5); hintMaxNumWorkers = 500; shouldReportDiagnosticMetrics = false; }
/** * Factory method to return a new instance of {@link Builder} with all values set to their initial * default values. * * @return New instance of {@link Builder} with all values set to their initial default values * @see #defaultOptions() */
Factory method to return a new instance of <code>Builder</code> with all values set to their initial default values
newBuilder
{ "repo_name": "lukecwik/incubator-beam", "path": "sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/firestore/RpcQosOptions.java", "license": "apache-2.0", "size": 28832 }
[ "org.joda.time.Duration" ]
import org.joda.time.Duration;
import org.joda.time.*;
[ "org.joda.time" ]
org.joda.time;
708,297
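A hedged sketch of configuring the options via the builder in the record above. The with* setter names below are assumptions inferred from the Builder fields and defaults listed in the record and may not match the actual Beam API exactly.

    import org.apache.beam.sdk.io.gcp.firestore.RpcQosOptions;

    RpcQosOptions options = RpcQosOptions.newBuilder()
        .withMaxAttempts(3)           // assumed setter for the maxAttempts field
        .withBatchMaxCount(250)       // assumed setter for the batchMaxCount field
        .withHintMaxNumWorkers(100)   // assumed setter for the hintMaxNumWorkers field
        .build();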
private static String getFileNumberString(long fileNum) { return HexFormatter.formatLong(fileNum).substring(10); }
static String function(long fileNum) { return HexFormatter.formatLong(fileNum).substring(10); }
/** * HexFormatter generates a 0 padded string starting with 0x. We want * the right most 8 digits, so start at 10. */
HexFormatter generates a 0 padded string starting with 0x. We want the right most 8 digits, so start at 10
getFileNumberString
{ "repo_name": "bjorndm/prebake", "path": "code/third_party/bdb/src/com/sleepycat/je/log/FileManager.java", "license": "apache-2.0", "size": 100137 }
[ "com.sleepycat.je.utilint.HexFormatter" ]
import com.sleepycat.je.utilint.HexFormatter;
import com.sleepycat.je.utilint.*;
[ "com.sleepycat.je" ]
com.sleepycat.je;
616,020
// // Conversion to FeatureCollection // public static SimpleFeatureCollection collection(List<SimpleFeature> list) { DefaultFeatureCollection collection = new DefaultFeatureCollection(null, null); for (SimpleFeature feature : list) { collection.add(feature); } return collection; }
static SimpleFeatureCollection function(List<SimpleFeature> list) { DefaultFeatureCollection collection = new DefaultFeatureCollection(null, null); for (SimpleFeature feature : list) { collection.add(feature); } return collection; }
/** * Copies the provided features into a FeatureCollection. * * <p>Often used when gathering a SimpleFeatureCollection into memory. * * @param list features to add to a new FeatureCollection * @return FeatureCollection */
Copies the provided features into a FeatureCollection. Often used when gathering a SimpleFeatureCollection into memory
collection
{ "repo_name": "geotools/geotools", "path": "modules/library/main/src/main/java/org/geotools/data/DataUtilities.java", "license": "lgpl-2.1", "size": 114940 }
[ "java.util.List", "org.geotools.data.simple.SimpleFeatureCollection", "org.geotools.feature.DefaultFeatureCollection", "org.opengis.feature.simple.SimpleFeature" ]
import java.util.List; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.feature.DefaultFeatureCollection; import org.opengis.feature.simple.SimpleFeature;
import java.util.*; import org.geotools.data.simple.*; import org.geotools.feature.*; import org.opengis.feature.simple.*;
[ "java.util", "org.geotools.data", "org.geotools.feature", "org.opengis.feature" ]
java.util; org.geotools.data; org.geotools.feature; org.opengis.feature;
133,389
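An illustrative round trip through the collection helper in the record above, assuming the feature list has been populated elsewhere; the iterator is closed with try-with-resources because GeoTools feature iterators hold resources.

    import java.util.ArrayList;
    import java.util.List;
    import org.geotools.data.DataUtilities;
    import org.geotools.data.simple.SimpleFeatureCollection;
    import org.geotools.data.simple.SimpleFeatureIterator;
    import org.opengis.feature.simple.SimpleFeature;

    List<SimpleFeature> features = new ArrayList<>();              // populated elsewhere
    SimpleFeatureCollection copy = DataUtilities.collection(features);
    try (SimpleFeatureIterator it = copy.features()) {
        while (it.hasNext()) {
            SimpleFeature feature = it.next();                     // each feature was copied into memory
        }
    }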
public ConnectableDTO createConnectableDto(final Connectable connectable) { if (connectable == null) { return null; } boolean isAuthorized = connectable.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser()); final ConnectableDTO dto = new ConnectableDTO(); dto.setId(connectable.getIdentifier()); dto.setName(isAuthorized ? connectable.getName() : connectable.getIdentifier()); dto.setType(connectable.getConnectableType().name()); dto.setVersionedComponentId(connectable.getVersionedComponentId().orElse(null)); if (connectable instanceof RemoteGroupPort) { final RemoteGroupPort remoteGroupPort = (RemoteGroupPort) connectable; final RemoteProcessGroup remoteGroup = remoteGroupPort.getRemoteProcessGroup(); dto.setGroupId(remoteGroup.getIdentifier()); dto.setRunning(remoteGroupPort.isTargetRunning()); dto.setTransmitting(remoteGroupPort.isRunning()); dto.setExists(remoteGroupPort.getTargetExists()); if (isAuthorized) { dto.setComments(remoteGroup.getComments()); } } else { dto.setGroupId(connectable.getProcessGroup().getIdentifier()); dto.setRunning(connectable.isRunning()); if (isAuthorized) { dto.setComments(connectable.getComments()); } } return dto; }
ConnectableDTO function(final Connectable connectable) { if (connectable == null) { return null; } boolean isAuthorized = connectable.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser()); final ConnectableDTO dto = new ConnectableDTO(); dto.setId(connectable.getIdentifier()); dto.setName(isAuthorized ? connectable.getName() : connectable.getIdentifier()); dto.setType(connectable.getConnectableType().name()); dto.setVersionedComponentId(connectable.getVersionedComponentId().orElse(null)); if (connectable instanceof RemoteGroupPort) { final RemoteGroupPort remoteGroupPort = (RemoteGroupPort) connectable; final RemoteProcessGroup remoteGroup = remoteGroupPort.getRemoteProcessGroup(); dto.setGroupId(remoteGroup.getIdentifier()); dto.setRunning(remoteGroupPort.isTargetRunning()); dto.setTransmitting(remoteGroupPort.isRunning()); dto.setExists(remoteGroupPort.getTargetExists()); if (isAuthorized) { dto.setComments(remoteGroup.getComments()); } } else { dto.setGroupId(connectable.getProcessGroup().getIdentifier()); dto.setRunning(connectable.isRunning()); if (isAuthorized) { dto.setComments(connectable.getComments()); } } return dto; }
/** * Creates a ConnectableDTO from the specified Connectable. * * @param connectable connectable * @return dto */
Creates a ConnectableDTO from the specified Connectable
createConnectableDto
{ "repo_name": "MikeThomsen/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java", "license": "apache-2.0", "size": 232154 }
[ "org.apache.nifi.authorization.RequestAction", "org.apache.nifi.authorization.user.NiFiUserUtils", "org.apache.nifi.connectable.Connectable", "org.apache.nifi.groups.RemoteProcessGroup", "org.apache.nifi.remote.RemoteGroupPort" ]
import org.apache.nifi.authorization.RequestAction; import org.apache.nifi.authorization.user.NiFiUserUtils; import org.apache.nifi.connectable.Connectable; import org.apache.nifi.groups.RemoteProcessGroup; import org.apache.nifi.remote.RemoteGroupPort;
import org.apache.nifi.authorization.*; import org.apache.nifi.authorization.user.*; import org.apache.nifi.connectable.*; import org.apache.nifi.groups.*; import org.apache.nifi.remote.*;
[ "org.apache.nifi" ]
org.apache.nifi;
2,552,775
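The design point worth noting in the record above is the authorization check: callers without READ permission see the component identifier in place of its name and receive no comments. A stripped-down, hypothetical sketch of that redaction pattern (not NiFi's actual classes):

    boolean isAuthorized = component.isAuthorized(authorizer, RequestAction.READ, currentUser);
    dto.setName(isAuthorized ? component.getName() : component.getIdentifier()); // fall back to the opaque id
    if (isAuthorized) {
        dto.setComments(component.getComments());                                // comments only for authorized readers
    }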
public final String getCertificateAlias(Certificate cert) throws KeyStoreException { if (!isInit) { throwNotInitialized(); } return implSpi.engineGetCertificateAlias(cert); }
final String function(Certificate cert) throws KeyStoreException { if (!isInit) { throwNotInitialized(); } return implSpi.engineGetCertificateAlias(cert); }
/** * Returns the alias associated with the first entry whose certificate * matches the specified certificate. * * @param cert * the certificate to find the associated entry's alias for. * @return the alias or {@code null} if no entry with the specified * certificate can be found. * @throws KeyStoreException * if this {@code KeyStore} is not initialized. */
Returns the alias associated with the first entry whose certificate matches the specified certificate
getCertificateAlias
{ "repo_name": "xdajog/samsung_sources_i927", "path": "libcore/luni/src/main/java/java/security/KeyStore.java", "license": "gpl-2.0", "size": 51725 }
[ "java.security.cert.Certificate" ]
import java.security.cert.Certificate;
import java.security.cert.*;
[ "java.security" ]
java.security;
1,504,201
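A hedged usage sketch against the standard java.security.KeyStore API; the keystore path, password, and alias are placeholders:

    import java.io.FileInputStream;
    import java.security.KeyStore;
    import java.security.cert.Certificate;

    KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
    try (FileInputStream in = new FileInputStream("/path/to/keystore.jks")) {
        ks.load(in, "changeit".toCharArray());            // must be loaded first, or KeyStoreException is thrown
    }
    Certificate cert = ks.getCertificate("known-alias");  // any certificate obtained elsewhere works the same way
    String alias = ks.getCertificateAlias(cert);          // null when no entry matches the certificate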
public void setPrioritizedLinker(GuardingDynamicLinker prioritizedLinker) { if(prioritizedLinker == null) { throw new IllegalArgumentException("prioritizedLinker == null"); } this.prioritizedLinkers = Collections.singletonList(prioritizedLinker); }
void function(GuardingDynamicLinker prioritizedLinker) { if(prioritizedLinker == null) { throw new IllegalArgumentException(STR); } this.prioritizedLinkers = Collections.singletonList(prioritizedLinker); }
/** * Sets a single prioritized linker. Identical to calling {@link #setPrioritizedLinkers(List)} with a single-element * list. * * @param prioritizedLinker the single prioritized linker. Must not be null. * @throws IllegalArgumentException if null is passed. */
Sets a single prioritized linker. Identical to calling <code>#setPrioritizedLinkers(List)</code> with a single-element list
setPrioritizedLinker
{ "repo_name": "hazzik/nashorn", "path": "src/jdk/internal/dynalink/DynamicLinkerFactory.java", "license": "gpl-2.0", "size": 16737 }
[ "java.util.Collections" ]
import java.util.Collections;
import java.util.*;
[ "java.util" ]
java.util;
1,698,532
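A minimal sketch of the usual Dynalink wiring around this setter; the custom linker class is hypothetical, and the package differs between this internal copy (jdk.internal.dynalink) and the later public jdk.dynalink API:

    DynamicLinkerFactory factory = new DynamicLinkerFactory();
    factory.setPrioritizedLinker(new MyLanguageLinker()); // hypothetical GuardingDynamicLinker implementation
    DynamicLinker linker = factory.createLinker();        // the prioritized linker is consulted before auto-discovered ones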
public int runScript(String scriptPath, String... args) throws IOException, InterruptedException { ProcessBuilder processBuilder = getProcessBuilder(scriptPath, args); Process p = processBuilder.inheritIO().start(); return waitForExitValue(p); }
int function(String scriptPath, String... args) throws IOException, InterruptedException { ProcessBuilder processBuilder = getProcessBuilder(scriptPath, args); Process p = processBuilder.inheritIO().start(); return waitForExitValue(p); }
/** * Runs a script with IO inherited from the current Java process. Typically this redirects to console. * * @param scriptPath the path to the script file. * @param args the command line args to pass to the script. * @return the exit code returned by the script. * @throws IOException if there was a problem running the process. * @throws InterruptedException if the thread is interrupted while waiting for the process to finish. */
Runs a script with IO inherited from the current Java process. Typically this redirects to console
runScript
{ "repo_name": "nickpan47/samza", "path": "samza-rest/src/main/java/org/apache/samza/rest/script/ScriptRunner.java", "license": "apache-2.0", "size": 4849 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
190,945
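A hedged usage sketch, assuming the ScriptRunner above can be constructed with defaults; the script path and arguments are placeholders:

    ScriptRunner runner = new ScriptRunner();
    int exitCode = runner.runScript("/opt/samza/bin/run-job.sh", "--config-path", "job.properties");
    if (exitCode != 0) {
        System.err.println("script exited with code " + exitCode); // non-zero signals failure to the caller
    }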
@Override public B must(@Nonnull final String key, @Nonnull final String... values) { mandatoryKeys.add(key); options.setStrings(key, values); return getThisBuilder(); }
B function(@Nonnull final String key, @Nonnull final String... values) { mandatoryKeys.add(key); options.setStrings(key, values); return getThisBuilder(); }
/** * Set a string array as mandatory option. * * @see #must(String, String) */
Set a string array as mandatory option
must
{ "repo_name": "steveloughran/hadoop", "path": "hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java", "license": "apache-2.0", "size": 10191 }
[ "javax.annotation.Nonnull" ]
import javax.annotation.Nonnull;
import javax.annotation.*;
[ "javax.annotation" ]
javax.annotation;
2,405
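In Hadoop's builder-style file APIs this typically appears as a chained call; a hedged sketch in which the option keys are purely illustrative:

    FSDataOutputStream out = fs.createFile(path)
        .must("fs.example.required.option", "value1", "value2") // mandatory: the target FS must recognise this key
        .opt("fs.example.optional.hint", "true")                 // optional: silently ignored if unsupported
        .build();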
private static void auxSort(ArrayCanvas ac, int place) { PaintableArray pa = ac.getArray(); ArrayList<Integer> list = pa.list; int[] counts = new int[10]; for (int val : list) { counts[getPlace(val, place)]++; } for (int i = 1; i < counts.length; i++) { counts[i] += counts[i - 1]; } PaintableArray ret = pa.subArray(0, pa.list.size()); for (int i = 0; i < pa.list.size(); i++) { ret.list.set(i, 0); } for (int i = list.size() - 1; i >= 0; i--) { int j = list.get(i); int k = counts[getPlace(j, place)]; ret.addHighlight(k - 1, Constants.COLOR_POINTER1); ret.list.set(k - 1, list.get(i)); counts[getPlace(j, place)]--; ret.removeHighlights(k - 1); } for (int i = 0; i < list.size(); i++) { CanvasUtils.sleep(1); list.set(i, ret.list.get(i)); } ret.remove(ac.getGraphicsContext2D()); }
static void function(ArrayCanvas ac, int place) { PaintableArray pa = ac.getArray(); ArrayList<Integer> list = pa.list; int[] counts = new int[10]; for (int val : list) { counts[getPlace(val, place)]++; } for (int i = 1; i < counts.length; i++) { counts[i] += counts[i - 1]; } PaintableArray ret = pa.subArray(0, pa.list.size()); for (int i = 0; i < pa.list.size(); i++) { ret.list.set(i, 0); } for (int i = list.size() - 1; i >= 0; i--) { int j = list.get(i); int k = counts[getPlace(j, place)]; ret.addHighlight(k - 1, Constants.COLOR_POINTER1); ret.list.set(k - 1, list.get(i)); counts[getPlace(j, place)]--; ret.removeHighlights(k - 1); } for (int i = 0; i < list.size(); i++) { CanvasUtils.sleep(1); list.set(i, ret.list.get(i)); } ret.remove(ac.getGraphicsContext2D()); }
/** * Sorts an array so that all elements are in ascending order according to the value of that decimal place, 1 being the 1's place, etc. */
Sorts an array so that all elements are in ascending order according to the value of that decimal place, 1 being the 1's place, etc
auxSort
{ "repo_name": "khumps/Sorting-Visualizer", "path": "src/sorting/RadixSort.java", "license": "mit", "size": 3077 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
1,497,487
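The same counting pass, detached from the visualization canvas, as a plain standalone sketch (a hypothetical helper, not the project's code; place = 1 means the ones digit, matching the doc above):

    static void countingSortByPlace(int[] a, int place) {
        int div = (int) Math.pow(10, place - 1);
        int[] counts = new int[10];
        for (int v : a) counts[(v / div) % 10]++;                 // count occurrences of each digit
        for (int i = 1; i < 10; i++) counts[i] += counts[i - 1]; // prefix sums give end positions
        int[] out = new int[a.length];
        for (int i = a.length - 1; i >= 0; i--) {                 // walk backwards so equal digits keep their order
            int digit = (a[i] / div) % 10;
            out[--counts[digit]] = a[i];
        }
        System.arraycopy(out, 0, a, 0, a.length);
    }

Repeating this pass for each decimal place, least significant first, yields the full LSD radix sort.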
private void readNextValue() throws IOException { DataInputBuffer nextValueBytes = in.getValue(); valueIn.reset(nextValueBytes.getData(), nextValueBytes.getPosition(), nextValueBytes.getLength()); value = valDeserializer.deserialize(value); }
void function() throws IOException { DataInputBuffer nextValueBytes = in.getValue(); valueIn.reset(nextValueBytes.getData(), nextValueBytes.getPosition(), nextValueBytes.getLength()); value = valDeserializer.deserialize(value); }
/** * Read the next value * @throws IOException */
Read the next value
readNextValue
{ "repo_name": "sungsoo/tez-0.3.0", "path": "tez-runtime-library/src/main/java/org/apache/tez/runtime/library/common/ValuesIterator.java", "license": "apache-2.0", "size": 6246 }
[ "java.io.IOException", "org.apache.hadoop.io.DataInputBuffer" ]
import java.io.IOException; import org.apache.hadoop.io.DataInputBuffer;
import java.io.*; import org.apache.hadoop.io.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
2,201,487
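The underlying idiom is Hadoop's reusable-buffer deserialization; a hedged sketch of the same steps outside the iterator (how the deserializer and byte range are obtained is illustrative):

    DataInputBuffer buf = new DataInputBuffer();
    buf.reset(rawBytes, offset, length);        // point the buffer at the serialized value bytes
    deserializer.open(buf);                     // org.apache.hadoop.io.serializer.Deserializer<V>
    V value = deserializer.deserialize(null);   // pass a previous instance to reuse it, or null to allocate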
public void rename(Name oldName, Name newName) throws NamingException { throw new OperationNotSupportedException(); }
void function(Name oldName, Name newName) throws NamingException { throw new OperationNotSupportedException(); }
/** * Not supported * * @param oldName a <code>Name</code> value * @param newName a <code>Name</code> value * @exception NamingException if an error occurs */
Not supported
rename
{ "repo_name": "mabrek/jetty", "path": "jetty-jndi/src/main/java/org/eclipse/jetty/jndi/NamingContext.java", "license": "apache-2.0", "size": 41077 }
[ "javax.naming.Name", "javax.naming.NamingException", "javax.naming.OperationNotSupportedException" ]
import javax.naming.Name; import javax.naming.NamingException; import javax.naming.OperationNotSupportedException;
import javax.naming.*;
[ "javax.naming" ]
javax.naming;
282,555
private boolean hasSameValues(final Rule r1, final Rule r2) { List<PropertyValue> values = r1.getPropertyValues(); if (r1.getPropertyValues().size() != r2.getPropertyValues().size()) { return false; } for (PropertyValue value : values) { if (!r2.getPropertyValues().contains(value)) { return false; } } return true; }
boolean function(final Rule r1, final Rule r2) { List<PropertyValue> values = r1.getPropertyValues(); if (r1.getPropertyValues().size() != r2.getPropertyValues().size()) { return false; } for (PropertyValue value : values) { if (!r2.getPropertyValues().contains(value)) { return false; } } return true; }
/** * Checks if two rules share the same property values. * * @param r1 First rule. * @param r2 Second rule. * @return True if the rules share the same property values. */
Checks if two rules share the same property values
hasSameValues
{ "repo_name": "corgrath/osbcp-css-squasher", "path": "src/com/osbcp/csssquasher/LogicSquashDuplicates.java", "license": "apache-2.0", "size": 3141 }
[ "com.osbcp.cssparser.PropertyValue", "com.osbcp.cssparser.Rule", "java.util.List" ]
import com.osbcp.cssparser.PropertyValue; import com.osbcp.cssparser.Rule; import java.util.List;
import com.osbcp.cssparser.*; import java.util.*;
[ "com.osbcp.cssparser", "java.util" ]
com.osbcp.cssparser; java.util;
908,550
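The same order-insensitive comparison as a standalone sketch on plain lists (a hypothetical helper, not the parser's types):

    static <T> boolean haveSameElements(List<T> first, List<T> second) {
        if (first.size() != second.size()) {
            return false;
        }
        for (T item : first) {
            if (!second.contains(item)) { // relies on a meaningful equals() on T
                return false;
            }
        }
        return true;
    }

As in the original, containment is only checked one way, so lists that differ only in how duplicates are distributed (for example [a, a, b] versus [a, b, b]) still pass.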
@Test(timeout = 2000) public void testNodeXML() throws Exception { List<NodeInfo> responses = performGetCalls( RM_WEB_SERVICE_PATH + format(NODES_NODEID, getNodeId()), NodeInfo.class, null, null); NodeInfo routerResponse = responses.get(0); NodeInfo rmResponse = responses.get(1); assertNotNull(routerResponse); assertNotNull(rmResponse); assertEquals( rmResponse.getVersion(), routerResponse.getVersion()); }
@Test(timeout = 2000) void function() throws Exception { List<NodeInfo> responses = performGetCalls( RM_WEB_SERVICE_PATH + format(NODES_NODEID, getNodeId()), NodeInfo.class, null, null); NodeInfo routerResponse = responses.get(0); NodeInfo rmResponse = responses.get(1); assertNotNull(routerResponse); assertNotNull(rmResponse); assertEquals( rmResponse.getVersion(), routerResponse.getVersion()); }
/** * This test validates the correctness of * {@link RMWebServiceProtocol#getNode()} inside Router. */
This test validates the correctness of <code>RMWebServiceProtocol#getNode()</code> inside Router
testNodeXML
{ "repo_name": "dennishuo/hadoop", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/test/java/org/apache/hadoop/yarn/server/router/webapp/TestRouterWebServicesREST.java", "license": "apache-2.0", "size": 48882 }
[ "java.util.List", "org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo", "org.junit.Assert", "org.junit.Test" ]
import java.util.List; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo; import org.junit.Assert; import org.junit.Test;
import java.util.*; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.*; import org.junit.*;
[ "java.util", "org.apache.hadoop", "org.junit" ]
java.util; org.apache.hadoop; org.junit;
999,708
public String getFileFrom(String serviceUrl) { resetResponse(); BinaryHttpResponse resp = createBinaryResponse(response); String url = createUrlWithParams(serviceUrl); getEnvironment().doGet(url, resp, headerValues); return processBinaryResponse(resp); }
String function(String serviceUrl) { resetResponse(); BinaryHttpResponse resp = createBinaryResponse(response); String url = createUrlWithParams(serviceUrl); getEnvironment().doGet(url, resp, headerValues); return processBinaryResponse(resp); }
/** * Downloads binary content from specified url. * * @param serviceUrl url to download from * @return link to downloaded file */
Downloads binary content from specified url
getFileFrom
{ "repo_name": "fhoeben/hsac-fitnesse-fixtures", "path": "src/main/java/nl/hsac/fitnesse/fixture/slim/HttpTest.java", "license": "apache-2.0", "size": 36750 }
[ "nl.hsac.fitnesse.fixture.util.BinaryHttpResponse" ]
import nl.hsac.fitnesse.fixture.util.BinaryHttpResponse;
import nl.hsac.fitnesse.fixture.util.*;
[ "nl.hsac.fitnesse" ]
nl.hsac.fitnesse;
1,796,887
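A hedged Java-level sketch; in practice this HSAC fixture is normally driven from a FitNesse wiki table, and the URL here is a placeholder:

    HttpTest http = new HttpTest();
    String downloadLink = http.getFileFrom("https://example.org/reports/latest.pdf");
    // the returned value is a link to where the downloaded file was stored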
public void setSub_chapters(ArrayList<Chapter> sub_chapters) { this.sub_chapters = sub_chapters; }
void function(ArrayList<Chapter> sub_chapters) { this.sub_chapters = sub_chapters; }
/** * Set the list of sub-chapters in the document. * @param sub_chapters */
Set the list of sub-chapters in the document
setSub_chapters
{ "repo_name": "AndreasMuellerAtStuttgart/TEANLIS", "path": "src/Document.java", "license": "gpl-2.0", "size": 15878 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
1,313,130
protected static List<String> getExcludedNames( Collection<String> sourcePaths, String[] subpackagesList, String[] excludedPackages ) { List<String> excludedNames = new ArrayList<>(); for ( String path : sourcePaths ) { for ( String aSubpackagesList : subpackagesList ) { List<String> excludes = getExcludedPackages( path, excludedPackages ); excludedNames.addAll( excludes ); } } return excludedNames; }
static List<String> function( Collection<String> sourcePaths, String[] subpackagesList, String[] excludedPackages ) { List<String> excludedNames = new ArrayList<>(); for ( String path : sourcePaths ) { for ( String aSubpackagesList : subpackagesList ) { List<String> excludes = getExcludedPackages( path, excludedPackages ); excludedNames.addAll( excludes ); } } return excludedNames; }
/** * Method that gets all the source files to be excluded from the javadoc on the given * source paths. * * @param sourcePaths the path to the source files * @param subpackagesList list of subpackages to be included in the javadoc * @param excludedPackages the package names to be excluded in the javadoc * @return a List of the source files to be excluded in the generated javadoc */
Method that gets all the source files to be excluded from the javadoc on the given source paths
getExcludedNames
{ "repo_name": "mcculls/maven-plugins", "path": "maven-javadoc-plugin/src/main/java/org/apache/maven/plugins/javadoc/JavadocUtil.java", "license": "apache-2.0", "size": 65609 }
[ "java.util.ArrayList", "java.util.Collection", "java.util.List" ]
import java.util.ArrayList; import java.util.Collection; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,655,750
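A hedged invocation sketch with illustrative values; note that, as written above, the inner loop never uses aSubpackagesList, so the same per-path exclusions are appended once for every subpackage entry:

    Collection<String> sourcePaths = Arrays.asList("/project/src/main/java");
    String[] subpackages = { "org.example" };
    String[] excludedPackages = { "org.example.internal.*" };
    List<String> excludedNames = getExcludedNames(sourcePaths, subpackages, excludedPackages);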